text stringlengths 1 1.05M |
|---|
envsubst <./script/.env.example >./script/.env
docker run --env-file=./script/.env gopaytech/helm-publisher
|
<reponame>jacopo-degattis/gget
#! /usr/bin/ruby
require "json"
require "net/http"
require_relative "git.rb"
VERSION = "0.1.0"
$AUTHENTICATED = false
# Print the CLI usage banner (version, description, and flags) to stdout.
def _print_help()
  puts "gget #{VERSION}\nUtility to clone repo subfolders and files\n\nUSAGE:\n\tgget <repo_uri>\n\n\t-a Make authenticated requests\n\n"
end
# Prompt for GitHub credentials on first run and cache them as JSON in
# ./.gget-cache. Subsequent runs are a no-op while the cache file exists.
# Reads from STDIN; writes the cache file in the current working directory.
def _check_and_save_creds()
  # Credentials already cached — nothing to do.
  return if File.exist?(".gget-cache")
  print "Github username: "
  username = STDIN.gets.strip
  # Fixed user-facing typo: was "Gitub token: ".
  print "Github token: "
  token = STDIN.gets.strip
  user_data = {
    "username" => username,
    "token" => token
  }
  # TODO: find a way to move .gget-cache file in user home folder
  File.open(".gget-cache", "w") { |f| f.write(user_data.to_json) }
end
# Dispatch a single CLI token: known flags are handled locally, any other
# token is treated as a GitHub repository URI and fetched.
#
# Raises Exception when the URI host is not github.com or the scheme is
# not http/https.
def process_argv(option)
  case option
  when "--help", "-h"
    # Merged duplicate branches that did the same thing.
    _print_help()
  when "-a"
    # Later downloads in the same invocation use authenticated requests.
    $AUTHENTICATED = true
    _check_and_save_creds()
  else
    uri = URI.parse(option)
    if uri.host.to_s != "github.com"
      raise Exception.new "ERROR: Invalid uri, domain must be github.com"
    end
    if !["http", "https"].include?(uri.scheme)
      raise Exception.new "ERROR: Invalid uri provided"
    end
    # Only construct Git when actually fetching (flags don't need it), and
    # reuse the already-parsed URI instead of parsing the option twice.
    Git.new.get_repo(uri, $AUTHENTICATED)
  end
end
# Entry point: process every CLI argument independently (flags and/or URIs).
ARGV.each { |option| process_argv(option) }
# t.get_repo("https://github.com/jacopo-degattis/trantor_library_bot/tree/main/src") |
<reponame>neal-siekierski/kwiver
/*ckwg +29
* Copyright 2011-2017 by Kitware, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither name of Kitware, Inc. nor the names of any contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef SPROKIT_SCHEDULERS_EXAMPLES_SCHEDULERS_THREAD_POOL_SCHEDULER_H
#define SPROKIT_SCHEDULERS_EXAMPLES_SCHEDULERS_THREAD_POOL_SCHEDULER_H
#include <schedulers/examples/schedulers_examples_export.h>
#include <sprokit/pipeline/scheduler.h>
#include <cstddef>
/**
* \file thread_pool_scheduler.h
*
* \brief Declaration of the thread pool scheduler.
*/
namespace sprokit
{
/**
* \class thread_pool_scheduler
*
* \brief A scheduler which process execution among a group of threads.
*
* \scheduler Manages execution using a set number of threads.
*
* \configs
*
* \config{num_threads} The number of threads to run. A setting of \c 0 means "auto".
*/
class SCHEDULERS_EXAMPLES_NO_EXPORT thread_pool_scheduler
  : public scheduler
{
  public:
    /**
     * \brief Constructor.
     *
     * \param config Contains config for the edge.
     * \param pipe The pipeline to scheduler.
     */
    thread_pool_scheduler(pipeline_t const& pipe, kwiver::vital::config_block_sptr const& config);
    /**
     * \brief Destructor.
     */
    ~thread_pool_scheduler();
  protected:
    // NOTE(review): these presumably override virtual hooks declared on the
    // base \c scheduler class — confirm against sprokit/pipeline/scheduler.h.
    /**
     * \brief Starts execution.
     */
    void _start();
    /**
     * \brief Waits until execution is finished.
     */
    void _wait();
    /**
     * \brief Pauses execution.
     */
    void _pause();
    /**
     * \brief Resumes execution.
     */
    void _resume();
    /**
     * \brief Stop execution of the pipeline.
     */
    void _stop();
  private:
    // Pimpl idiom: implementation state lives in the private \c priv class.
    class priv;
    std::unique_ptr<priv> d;
};
} // end namespace
#endif // SPROKIT_SCHEDULERS_EXAMPLES_SCHEDULERS_THREAD_POOL_SCHEDULER_H
|
import abc
class Medication(abc.ABC):
    """Abstract interface for a medication with discrete dose groups."""

    @abc.abstractmethod
    def drop_dose(self):
        """Remove one dose group, if any remain."""

    @property
    @abc.abstractmethod
    def num_dose_groups(self):
        """Number of dose groups currently available.

        Marked abstract: previously this was a concrete property whose body
        was ``...``, so a subclass that forgot to override it silently
        returned ``None`` instead of failing at instantiation time.
        """
class ExperimentalMedication(Medication):
    """Concrete medication whose dose groups can be dropped one at a time."""

    def __init__(self, initial_dose_groups):
        # Remaining dose groups; drop_dose never drives this below zero.
        self._dose_groups = initial_dose_groups

    def drop_dose(self):
        """Decrement the remaining dose groups, stopping at zero."""
        if self._dose_groups <= 0:
            return
        self._dose_groups = self._dose_groups - 1

    @property
    def num_dose_groups(self):
        """Current count of remaining dose groups."""
        return self._dose_groups
package generic
import (
"bytes"
"context"
"errors"
"fmt"
"sync"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/benthosdev/benthos/v4/internal/batch"
bmock "github.com/benthosdev/benthos/v4/internal/bundle/mock"
"github.com/benthosdev/benthos/v4/internal/manager/mock"
"github.com/benthosdev/benthos/v4/internal/message"
ooutput "github.com/benthosdev/benthos/v4/internal/old/output"
_ "github.com/benthosdev/benthos/v4/internal/interop/legacy"
)
// newSwitch builds a "switch" output from conf via the mock manager, then
// swaps each of its child outputs for the provided mocks so tests can
// intercept and ack transactions directly.
func newSwitch(t *testing.T, conf ooutput.Config, mockOutputs []*mock.OutputChanneled) *switchOutput {
	t.Helper()
	mgr := bmock.NewManager()
	conf.Type = "switch"
	genType, err := mgr.NewOutput(conf)
	require.NoError(t, err)
	rType, ok := genType.(*switchOutput)
	require.True(t, ok)
	for i := 0; i < len(mockOutputs); i++ {
		// Close the channel wired up by the constructor before replacing the
		// child output and its transaction channel with the mock's.
		close(rType.outputTSChans[i])
		rType.outputs[i] = mockOutputs[i]
		rType.outputTSChans[i] = make(chan message.Transaction)
		_ = mockOutputs[i].Consume(rType.outputTSChans[i])
	}
	return rType
}
// TestSwitchNoConditions configures every case with no check and
// Continue=true, so each message fans out to all outputs; it verifies all
// mocks receive the payload and the upstream ack fires only after every
// output has acked.
func TestSwitchNoConditions(t *testing.T) {
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	nOutputs, nMsgs := 10, 1000
	conf := ooutput.NewConfig()
	mockOutputs := []*mock.OutputChanneled{}
	for i := 0; i < nOutputs; i++ {
		conf.Switch.Cases = append(conf.Switch.Cases, ooutput.NewSwitchConfigCase())
		conf.Switch.Cases[i].Continue = true
		mockOutputs = append(mockOutputs, &mock.OutputChanneled{})
	}
	s := newSwitch(t, conf, mockOutputs)
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	require.NoError(t, s.Consume(readChan))
	for i := 0; i < nMsgs; i++ {
		content := [][]byte{[]byte(fmt.Sprintf("hello world %v", i))}
		select {
		case readChan <- message.NewTransaction(message.QuickBatch(content), resChan):
		case <-time.After(time.Second):
			t.Fatal("Timed out waiting for broker send")
		}
		// Collect all ack funcs before acking any, so the upstream response
		// cannot fire until every output has received the message.
		resFnSlice := []func(context.Context, error) error{}
		for j := 0; j < nOutputs; j++ {
			select {
			case ts := <-mockOutputs[j].TChan:
				if !bytes.Equal(ts.Payload.Get(0).Get(), content[0]) {
					t.Errorf("Wrong content returned %s != %s", ts.Payload.Get(0).Get(), content[0])
				}
				resFnSlice = append(resFnSlice, ts.Ack)
			case <-time.After(time.Second):
				t.Fatal("Timed out waiting for broker propagate")
			}
		}
		for j := 0; j < nOutputs; j++ {
			require.NoError(t, resFnSlice[j](ctx, nil))
		}
		select {
		case res := <-resChan:
			require.NoError(t, res)
		case <-time.After(time.Second):
			t.Fatal("Timed out responding to broker")
		}
	}
	s.CloseAsync()
	require.NoError(t, s.WaitForClose(time.Second*5))
}
// TestSwitchNoRetries disables RetryUntilSuccess and verifies that when a
// single output (index 1) nacks with an error, that error is propagated
// straight back upstream rather than being retried.
func TestSwitchNoRetries(t *testing.T) {
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	nOutputs, nMsgs := 10, 1000
	conf := ooutput.NewConfig()
	conf.Switch.RetryUntilSuccess = false
	mockOutputs := []*mock.OutputChanneled{}
	for i := 0; i < nOutputs; i++ {
		conf.Switch.Cases = append(conf.Switch.Cases, ooutput.NewSwitchConfigCase())
		conf.Switch.Cases[i].Continue = true
		mockOutputs = append(mockOutputs, &mock.OutputChanneled{})
	}
	s := newSwitch(t, conf, mockOutputs)
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	require.NoError(t, s.Consume(readChan))
	for i := 0; i < nMsgs; i++ {
		content := [][]byte{[]byte(fmt.Sprintf("hello world %v", i))}
		select {
		case readChan <- message.NewTransaction(message.QuickBatch(content), resChan):
		case <-time.After(time.Second):
			t.Fatal("Timed out waiting for broker send")
		}
		resFnSlice := []func(context.Context, error) error{}
		for j := 0; j < nOutputs; j++ {
			select {
			case ts := <-mockOutputs[j].TChan:
				if !bytes.Equal(ts.Payload.Get(0).Get(), content[0]) {
					t.Errorf("Wrong content returned %s != %s", ts.Payload.Get(0).Get(), content[0])
				}
				resFnSlice = append(resFnSlice, ts.Ack)
			case <-time.After(time.Second):
				t.Fatal("Timed out waiting for broker propagate")
			}
		}
		// Only output 1 fails; all others succeed.
		for j := 0; j < nOutputs; j++ {
			var res error
			if j == 1 {
				res = errors.New("test")
			} else {
				res = nil
			}
			require.NoError(t, resFnSlice[j](ctx, res))
		}
		select {
		case res := <-resChan:
			assert.EqualError(t, res, "test")
		case <-time.After(time.Second):
			t.Fatal("Timed out responding to broker")
		}
	}
	s.CloseAsync()
	require.NoError(t, s.WaitForClose(time.Second*5))
}
// TestSwitchBatchNoRetries sends a batch through a switch with a "drop"
// case for even ids and a "reject" case for everything else, then verifies
// the upstream error is a *batch.Error indexing exactly the rejected
// (odd-id) parts.
func TestSwitchBatchNoRetries(t *testing.T) {
	conf := ooutput.NewConfig()
	conf.Switch.RetryUntilSuccess = false
	okOut := ooutput.NewConfig()
	okOut.Type = "drop"
	conf.Switch.Cases = append(conf.Switch.Cases, ooutput.SwitchConfigCase{
		Check:  `root = this.id % 2 == 0`,
		Output: okOut,
	})
	errOut := ooutput.NewConfig()
	errOut.Type = "reject"
	errOut.Reject = "meow"
	conf.Switch.Cases = append(conf.Switch.Cases, ooutput.SwitchConfigCase{
		Check:  `root = true`,
		Output: errOut,
	})
	s, err := newSwitchOutput(conf.Switch, bmock.NewManager())
	// Fixed: a stray `1` was previously passed as the msgAndArgs argument.
	require.NoError(t, err)
	readChan := make(chan message.Transaction)
	resChan := make(chan error)
	require.NoError(t, s.Consume(readChan))
	msg := message.QuickBatch([][]byte{
		[]byte(`{"content":"hello world","id":0}`),
		[]byte(`{"content":"hello world","id":1}`),
		[]byte(`{"content":"hello world","id":2}`),
		[]byte(`{"content":"hello world","id":3}`),
		[]byte(`{"content":"hello world","id":4}`),
	})
	select {
	case readChan <- message.NewTransaction(msg, resChan):
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for broker send")
	}
	var res error
	select {
	case res = <-resChan:
	case <-time.After(time.Second):
		t.Fatal("Timed out responding to broker")
	}
	err = res
	require.Error(t, err)
	bOut, ok := err.(*batch.Error)
	require.True(t, ok, "should be batch error, got: %v", err)
	assert.Equal(t, 2, bOut.IndexedErrors())
	// Walk the batch error and collect the parts that carry an error; only
	// the odd-id (rejected) parts should appear, each with the "meow" reason.
	errContents := []string{}
	bOut.WalkParts(func(i int, p *message.Part, e error) bool {
		if e != nil {
			errContents = append(errContents, string(p.Get()))
			assert.EqualError(t, e, "meow")
		}
		return true
	})
	assert.Equal(t, []string{
		`{"content":"hello world","id":1}`,
		`{"content":"hello world","id":3}`,
	}, errContents)
	s.CloseAsync()
	require.NoError(t, s.WaitForClose(time.Second*5))
}
// TestSwitchBatchNoRetriesBatchErr acks output 0 with a *batch.Error that
// marks parts 1 and 3 as failed, and verifies the switch relays an indexed
// batch error upstream containing exactly those parts.
func TestSwitchBatchNoRetriesBatchErr(t *testing.T) {
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	conf := ooutput.NewConfig()
	conf.Switch.RetryUntilSuccess = false
	mockOutputs := []*mock.OutputChanneled{}
	nOutputs := 2
	for i := 0; i < nOutputs; i++ {
		conf.Switch.Cases = append(conf.Switch.Cases, ooutput.NewSwitchConfigCase())
		conf.Switch.Cases[i].Continue = true
		mockOutputs = append(mockOutputs, &mock.OutputChanneled{})
	}
	s := newSwitch(t, conf, mockOutputs)
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	require.NoError(t, s.Consume(readChan))
	msg := message.QuickBatch([][]byte{
		[]byte("hello world 0"),
		[]byte("hello world 1"),
		[]byte("hello world 2"),
		[]byte("hello world 3"),
		[]byte("hello world 4"),
	})
	select {
	case readChan <- message.NewTransaction(msg, resChan):
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for broker send")
	}
	transactions := []message.Transaction{}
	for j := 0; j < nOutputs; j++ {
		select {
		case ts := <-mockOutputs[j].TChan:
			transactions = append(transactions, ts)
		case <-time.After(time.Second):
			t.Fatal("Timed out waiting for broker propagate")
		}
	}
	for j := 0; j < nOutputs; j++ {
		var res error
		if j == 0 {
			// Output 0 fails parts 1 and 3 only; output 1 acks cleanly.
			batchErr := batch.NewError(transactions[j].Payload, errors.New("not this"))
			batchErr.Failed(1, errors.New("err 1"))
			batchErr.Failed(3, errors.New("err 3"))
			res = batchErr
		} else {
			res = nil
		}
		require.NoError(t, transactions[j].Ack(ctx, res))
	}
	select {
	case res := <-resChan:
		err := res
		require.Error(t, err)
		bOut, ok := err.(*batch.Error)
		require.True(t, ok, "should be batch error but got %T", err)
		assert.Equal(t, 2, bOut.IndexedErrors())
		errContents := []string{}
		bOut.WalkParts(func(i int, p *message.Part, e error) bool {
			if e != nil {
				errContents = append(errContents, string(p.Get()))
				assert.EqualError(t, e, fmt.Sprintf("err %v", i))
			}
			return true
		})
		assert.Equal(t, []string{
			"hello world 1",
			"hello world 3",
		}, errContents)
	case <-time.After(time.Second):
		t.Fatal("Timed out responding to broker")
	}
	s.CloseAsync()
	require.NoError(t, s.WaitForClose(time.Second*5))
}
// TestSwitchWithConditions routes messages by `this.foo` checks: "bar" to
// output 0, "baz" to output 1, and everything else falls through to output
// 2 (no check). A reader goroutine validates and acks each delivery.
func TestSwitchWithConditions(t *testing.T) {
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	nMsgs := 100
	mockOutputs := []*mock.OutputChanneled{{}, {}, {}}
	conf := ooutput.NewConfig()
	for i := 0; i < len(mockOutputs); i++ {
		conf.Switch.Cases = append(conf.Switch.Cases, ooutput.NewSwitchConfigCase())
	}
	conf.Switch.Cases[0].Check = `this.foo == "bar"`
	conf.Switch.Cases[1].Check = `this.foo == "baz"`
	s := newSwitch(t, conf, mockOutputs)
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	require.NoError(t, s.Consume(readChan))
	wg := sync.WaitGroup{}
	wg.Add(1)
	go func() {
		defer wg.Done()
		// Loop until every mock's channel has been closed by shutdown.
		closed := 0
		bar := `{"foo":"bar"}`
		baz := `{"foo":"baz"}`
	outputLoop:
		for closed < len(mockOutputs) {
			var ts message.Transaction
			var ok bool
			select {
			case ts, ok = <-mockOutputs[0].TChan:
				if !ok {
					closed++
					continue outputLoop
				}
				if act := string(ts.Payload.Get(0).Get()); act != bar {
					t.Errorf("Expected output 0 msgs to equal %s, got %s", bar, act)
				}
			case ts, ok = <-mockOutputs[1].TChan:
				if !ok {
					closed++
					continue outputLoop
				}
				if act := string(ts.Payload.Get(0).Get()); act != baz {
					t.Errorf("Expected output 1 msgs to equal %s, got %s", baz, act)
				}
			case ts, ok = <-mockOutputs[2].TChan:
				if !ok {
					closed++
					continue outputLoop
				}
				if act := string(ts.Payload.Get(0).Get()); act == bar || act == baz {
					t.Errorf("Expected output 2 msgs to not equal %s or %s, got %s", bar, baz, act)
				}
			case <-time.After(time.Second):
				t.Error("Timed out waiting for output to propagate")
				break outputLoop
			}
			if !assert.NoError(t, ts.Ack(ctx, nil)) {
				break outputLoop
			}
		}
	}()
	for i := 0; i < nMsgs; i++ {
		// Cycle foo through qux/baz/bar so all three routes are exercised.
		foo := "bar"
		if i%3 == 0 {
			foo = "qux"
		} else if i%2 == 0 {
			foo = "baz"
		}
		content := [][]byte{[]byte(fmt.Sprintf("{\"foo\":%q}", foo))}
		select {
		case readChan <- message.NewTransaction(message.QuickBatch(content), resChan):
		case <-time.After(time.Second):
			t.Errorf("Timed out waiting for output send")
			return
		}
		select {
		case res := <-resChan:
			require.NoError(t, res)
		case <-time.After(time.Second):
			t.Fatal("Timed out responding to output")
		}
	}
	s.CloseAsync()
	assert.NoError(t, s.WaitForClose(time.Second*5))
	wg.Wait()
}
// TestSwitchError sends a batch where one part lacks the `foo` field, so
// the `not_null()` check errors for it. The test verifies the remaining
// parts still route to their matching outputs and the overall response is
// nil. NOTE(review): the part with `not_foo` and the "nope" part are never
// asserted as delivered — presumably dropped; confirm against switch docs.
func TestSwitchError(t *testing.T) {
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	mockOutputs := []*mock.OutputChanneled{{}, {}, {}}
	conf := ooutput.NewConfig()
	for i := 0; i < len(mockOutputs); i++ {
		conf.Switch.Cases = append(conf.Switch.Cases, ooutput.NewSwitchConfigCase())
	}
	conf.Switch.Cases[0].Check = `this.foo == "bar"`
	conf.Switch.Cases[1].Check = `this.foo.not_null() == "baz"`
	conf.Switch.Cases[2].Check = `this.foo == "buz"`
	s := newSwitch(t, conf, mockOutputs)
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	require.NoError(t, s.Consume(readChan))
	msg := message.QuickBatch([][]byte{
		[]byte(`{"foo":"bar"}`),
		[]byte(`{"not_foo":"baz"}`),
		[]byte(`{"foo":"baz"}`),
		[]byte(`{"foo":"buz"}`),
		[]byte(`{"foo":"nope"}`),
	})
	select {
	case readChan <- message.NewTransaction(msg, resChan):
	case <-time.After(time.Second):
		t.Fatal("timed out waiting to send")
	}
	var ts message.Transaction
	for i := 0; i < len(mockOutputs); i++ {
		select {
		case ts = <-mockOutputs[0].TChan:
			assert.Equal(t, 1, ts.Payload.Len())
			assert.Equal(t, `{"foo":"bar"}`, string(ts.Payload.Get(0).Get()))
		case ts = <-mockOutputs[1].TChan:
			assert.Equal(t, 1, ts.Payload.Len())
			assert.Equal(t, `{"foo":"baz"}`, string(ts.Payload.Get(0).Get()))
		case ts = <-mockOutputs[2].TChan:
			assert.Equal(t, 1, ts.Payload.Len())
			assert.Equal(t, `{"foo":"buz"}`, string(ts.Payload.Get(0).Get()))
		case <-time.After(time.Second):
			t.Error("Timed out waiting for output to propagate")
		}
		require.NoError(t, ts.Ack(ctx, nil))
	}
	select {
	case res := <-resChan:
		if res != nil {
			t.Errorf("Received unexpected errors from output: %v", res)
		}
	case <-time.After(time.Second):
		t.Error("Timed out responding to output")
	}
	s.CloseAsync()
	assert.NoError(t, s.WaitForClose(time.Second*5))
}
// TestSwitchBatchSplit verifies a batch is split per-part across cases:
// each matching part arrives at its own output as a single-part batch.
// The "nope" part matches no case and is never asserted as delivered.
func TestSwitchBatchSplit(t *testing.T) {
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	mockOutputs := []*mock.OutputChanneled{{}, {}, {}}
	conf := ooutput.NewConfig()
	for i := 0; i < len(mockOutputs); i++ {
		conf.Switch.Cases = append(conf.Switch.Cases, ooutput.NewSwitchConfigCase())
	}
	conf.Switch.Cases[0].Check = `this.foo == "bar"`
	conf.Switch.Cases[1].Check = `this.foo == "baz"`
	conf.Switch.Cases[2].Check = `this.foo == "buz"`
	s := newSwitch(t, conf, mockOutputs)
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	require.NoError(t, s.Consume(readChan))
	msg := message.QuickBatch([][]byte{
		[]byte(`{"foo":"bar"}`),
		[]byte(`{"foo":"baz"}`),
		[]byte(`{"foo":"buz"}`),
		[]byte(`{"foo":"nope"}`),
	})
	select {
	case readChan <- message.NewTransaction(msg, resChan):
	case <-time.After(time.Second):
		t.Fatal("timed out waiting to send")
	}
	var ts message.Transaction
	for i := 0; i < len(mockOutputs); i++ {
		select {
		case ts = <-mockOutputs[0].TChan:
			assert.Equal(t, 1, ts.Payload.Len())
			assert.Equal(t, `{"foo":"bar"}`, string(ts.Payload.Get(0).Get()))
		case ts = <-mockOutputs[1].TChan:
			assert.Equal(t, 1, ts.Payload.Len())
			assert.Equal(t, `{"foo":"baz"}`, string(ts.Payload.Get(0).Get()))
		case ts = <-mockOutputs[2].TChan:
			assert.Equal(t, 1, ts.Payload.Len())
			assert.Equal(t, `{"foo":"buz"}`, string(ts.Payload.Get(0).Get()))
		case <-time.After(time.Second):
			t.Error("Timed out waiting for output to propagate")
		}
		require.NoError(t, ts.Ack(ctx, nil))
	}
	select {
	case res := <-resChan:
		require.NoError(t, res)
	case <-time.After(time.Second):
		t.Error("Timed out responding to output")
	}
	s.CloseAsync()
	assert.NoError(t, s.WaitForClose(time.Second*5))
}
// TestSwitchBatchGroup uses `from(0)` checks so the whole batch is routed
// as one group based on the FIRST part's foo value ("baz" => output 1),
// and asserts the other outputs receive nothing.
func TestSwitchBatchGroup(t *testing.T) {
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	mockOutputs := []*mock.OutputChanneled{{}, {}, {}}
	conf := ooutput.NewConfig()
	for i := 0; i < len(mockOutputs); i++ {
		conf.Switch.Cases = append(conf.Switch.Cases, ooutput.NewSwitchConfigCase())
	}
	conf.Switch.Cases[0].Check = `json().foo.from(0) == "bar"`
	conf.Switch.Cases[1].Check = `json().foo.from(0) == "baz"`
	conf.Switch.Cases[2].Check = `json().foo.from(0) == "buz"`
	s := newSwitch(t, conf, mockOutputs)
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	require.NoError(t, s.Consume(readChan))
	msg := message.QuickBatch([][]byte{
		[]byte(`{"foo":"baz"}`),
		[]byte(`{"foo":"bar"}`),
		[]byte(`{"foo":"buz"}`),
		[]byte(`{"foo":"nope"}`),
	})
	select {
	case readChan <- message.NewTransaction(msg, resChan):
	case <-time.After(time.Second):
		t.Fatal("timed out waiting to send")
	}
	var ts message.Transaction
	select {
	case ts = <-mockOutputs[0].TChan:
		t.Error("did not expect message route to 0")
	case ts = <-mockOutputs[1].TChan:
		// The entire 4-part batch arrives intact at output 1.
		assert.Equal(t, 4, ts.Payload.Len())
		assert.Equal(t, `{"foo":"baz"}`, string(ts.Payload.Get(0).Get()))
		assert.Equal(t, `{"foo":"bar"}`, string(ts.Payload.Get(1).Get()))
		assert.Equal(t, `{"foo":"buz"}`, string(ts.Payload.Get(2).Get()))
		assert.Equal(t, `{"foo":"nope"}`, string(ts.Payload.Get(3).Get()))
	case ts = <-mockOutputs[2].TChan:
		t.Error("did not expect message route to 2")
	case <-time.After(time.Second):
		t.Error("Timed out waiting for output to propagate")
	}
	require.NoError(t, ts.Ack(ctx, nil))
	select {
	case <-mockOutputs[0].TChan:
		t.Error("did not expect message route to 0")
	case <-mockOutputs[2].TChan:
		t.Error("did not expect message route to 2")
	case res := <-resChan:
		if res != nil {
			t.Errorf("Received unexpected errors from output: %v", res)
		}
	case <-time.After(time.Second):
		t.Error("Timed out responding to output")
	}
	s.CloseAsync()
	assert.NoError(t, s.WaitForClose(time.Second*5))
}
// TestSwitchNoMatch sends a message matching no case and verifies it is
// acked cleanly (dropped without error) when StrictMode is off.
func TestSwitchNoMatch(t *testing.T) {
	mockOutputs := []*mock.OutputChanneled{{}, {}, {}}
	conf := ooutput.NewConfig()
	for i := 0; i < len(mockOutputs); i++ {
		conf.Switch.Cases = append(conf.Switch.Cases, ooutput.NewSwitchConfigCase())
	}
	conf.Switch.Cases[0].Check = `this.foo == "bar"`
	conf.Switch.Cases[1].Check = `this.foo == "baz"`
	conf.Switch.Cases[2].Check = `false`
	s := newSwitch(t, conf, mockOutputs)
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	require.NoError(t, s.Consume(readChan))
	msg := message.QuickBatch([][]byte{[]byte(`{"foo":"qux"}`)})
	select {
	case readChan <- message.NewTransaction(msg, resChan):
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for output send")
	}
	select {
	case res := <-resChan:
		require.NoError(t, res)
	case <-time.After(time.Second):
		t.Fatal("Timed out responding to output")
	}
	s.CloseAsync()
	require.NoError(t, s.WaitForClose(time.Second*5))
}
// TestSwitchNoMatchStrict is the StrictMode counterpart of
// TestSwitchNoMatch: an unrouted message must produce an error response.
func TestSwitchNoMatchStrict(t *testing.T) {
	mockOutputs := []*mock.OutputChanneled{{}, {}, {}}
	conf := ooutput.NewConfig()
	conf.Switch.StrictMode = true
	for i := 0; i < len(mockOutputs); i++ {
		conf.Switch.Cases = append(conf.Switch.Cases, ooutput.NewSwitchConfigCase())
	}
	conf.Switch.Cases[0].Check = `this.foo == "bar"`
	conf.Switch.Cases[1].Check = `this.foo == "baz"`
	conf.Switch.Cases[2].Check = `false`
	s := newSwitch(t, conf, mockOutputs)
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	require.NoError(t, s.Consume(readChan))
	msg := message.QuickBatch([][]byte{[]byte(`{"foo":"qux"}`)})
	select {
	case readChan <- message.NewTransaction(msg, resChan):
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for output send")
	}
	select {
	case res := <-resChan:
		require.Error(t, res)
	case <-time.After(time.Second):
		t.Fatal("Timed out responding to output")
	}
	s.CloseAsync()
	require.NoError(t, s.WaitForClose(time.Second*5))
}
// TestSwitchWithConditionsNoFallthrough routes "bar" to output 0 and "baz"
// to output 1; output 2 has no matching route and must receive nothing
// (no fallthrough since Continue is not set).
func TestSwitchWithConditionsNoFallthrough(t *testing.T) {
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	nMsgs := 100
	mockOutputs := []*mock.OutputChanneled{{}, {}, {}}
	conf := ooutput.NewConfig()
	for i := 0; i < len(mockOutputs); i++ {
		conf.Switch.Cases = append(conf.Switch.Cases, ooutput.NewSwitchConfigCase())
	}
	conf.Switch.Cases[0].Check = `this.foo == "bar"`
	conf.Switch.Cases[1].Check = `this.foo == "baz"`
	s := newSwitch(t, conf, mockOutputs)
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	require.NoError(t, s.Consume(readChan))
	wg := sync.WaitGroup{}
	wg.Add(1)
	go func() {
		defer wg.Done()
		closed := 0
		bar := `{"foo":"bar"}`
		baz := `{"foo":"baz"}`
	outputLoop:
		for closed < len(mockOutputs) {
			// Gather at least one ack func per iteration before acking, so
			// exactly one delivery is validated per sent message.
			resFns := []func(context.Context, error) error{}
			for len(resFns) < 1 {
				select {
				case ts, ok := <-mockOutputs[0].TChan:
					if !ok {
						closed++
						continue outputLoop
					}
					if act := string(ts.Payload.Get(0).Get()); act != bar {
						t.Errorf("Expected output 0 msgs to equal %s, got %s", bar, act)
					}
					resFns = append(resFns, ts.Ack)
				case ts, ok := <-mockOutputs[1].TChan:
					if !ok {
						closed++
						continue outputLoop
					}
					if act := string(ts.Payload.Get(0).Get()); act != baz {
						t.Errorf("Expected output 1 msgs to equal %s, got %s", baz, act)
					}
					resFns = append(resFns, ts.Ack)
				case _, ok := <-mockOutputs[2].TChan:
					if !ok {
						closed++
						continue outputLoop
					}
					t.Error("Unexpected msg received by output 3")
				case <-time.After(time.Second):
					t.Error("Timed out waiting for output to propagate")
					break outputLoop
				}
			}
			for i := 0; i < len(resFns); i++ {
				require.NoError(t, resFns[i](ctx, nil))
			}
		}
	}()
	for i := 0; i < nMsgs; i++ {
		foo := "bar"
		if i%2 == 0 {
			foo = "baz"
		}
		content := [][]byte{[]byte(fmt.Sprintf("{\"foo\":%q}", foo))}
		select {
		case readChan <- message.NewTransaction(message.QuickBatch(content), resChan):
		case <-time.After(time.Second):
			t.Fatal("Timed out waiting for output send")
		}
		select {
		case res := <-resChan:
			require.NoError(t, res)
		case <-time.After(time.Second):
			t.Fatal("Timed out responding to output")
		}
	}
	s.CloseAsync()
	require.NoError(t, s.WaitForClose(time.Second*5))
	wg.Wait()
}
// TestSwitchAtLeastOnce verifies at-least-once delivery with
// RetryUntilSuccess: when one output nacks, only that output should be
// redelivered to, and the upstream response fires only after all outputs
// eventually succeed. Currently skipped because mocked outputs don't
// support the retry path.
func TestSwitchAtLeastOnce(t *testing.T) {
	t.Skip("this doesnt currently work with mocked outputs")
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	mockOne := mock.OutputChanneled{}
	mockTwo := mock.OutputChanneled{}
	mockOutputs := []*mock.OutputChanneled{
		&mockOne, &mockTwo,
	}
	conf := ooutput.NewConfig()
	conf.Switch.RetryUntilSuccess = true
	for i := 0; i < len(mockOutputs); i++ {
		outConf := ooutput.NewSwitchConfigCase()
		outConf.Continue = true
		conf.Switch.Cases = append(conf.Switch.Cases, outConf)
	}
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	s := newSwitch(t, conf, mockOutputs)
	require.NoError(t, s.Consume(readChan))
	// A second Consume on an already-consuming output must fail.
	require.Error(t, s.Consume(readChan))
	select {
	case readChan <- message.NewTransaction(message.QuickBatch([][]byte{[]byte("hello world")}), resChan):
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for output send")
	}
	var ts1, ts2 message.Transaction
	select {
	case ts1 = <-mockOne.TChan:
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for mockOne")
	}
	select {
	case ts2 = <-mockTwo.TChan:
	case <-time.After(time.Second):
		// Fixed copy-paste error: this message previously said "mockOne".
		t.Fatal("Timed out waiting for mockTwo")
	}
	// mockOne succeeds, mockTwo fails: only mockTwo should see a retry.
	require.NoError(t, ts1.Ack(ctx, nil))
	require.NoError(t, ts2.Ack(ctx, errors.New("this is a test")))
	select {
	case <-mockOne.TChan:
		t.Fatal("Received duplicate message to mockOne")
	case ts2 = <-mockTwo.TChan:
	case <-resChan:
		t.Fatal("Received premature response from output")
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for mockTwo")
	}
	require.NoError(t, ts2.Ack(ctx, nil))
	select {
	case res := <-resChan:
		assert.NoError(t, res)
	case <-time.After(time.Second):
		t.Fatal("Timed out responding to output")
	}
	close(readChan)
	require.NoError(t, s.WaitForClose(time.Second*5))
}
// TestSwitchShutDownFromErrorResponse verifies the switch shuts down
// cleanly (closing its child channels) after receiving an error ack while
// a transaction is in flight.
func TestSwitchShutDownFromErrorResponse(t *testing.T) {
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	mockOutputs := []*mock.OutputChanneled{{}, {}}
	conf := ooutput.NewConfig()
	for i := 0; i < len(mockOutputs); i++ {
		outConf := ooutput.NewSwitchConfigCase()
		outConf.Continue = true
		conf.Switch.Cases = append(conf.Switch.Cases, outConf)
	}
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	s := newSwitch(t, conf, mockOutputs)
	require.NoError(t, s.Consume(readChan))
	select {
	case readChan <- message.NewTransaction(message.QuickBatch([][]byte{[]byte("foo")}), resChan):
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for msg send")
	}
	var ts message.Transaction
	var open bool
	select {
	case ts, open = <-mockOutputs[0].TChan:
		require.True(t, open)
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for msg rcv")
	}
	select {
	case _, open = <-mockOutputs[1].TChan:
		require.True(t, open)
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for msg rcv")
	}
	require.NoError(t, ts.Ack(ctx, errors.New("test")))
	s.CloseAsync()
	require.NoError(t, s.WaitForClose(time.Second))
	// After close, the child transaction channel must be closed.
	select {
	case _, open := <-mockOutputs[0].TChan:
		assert.False(t, open)
	case <-time.After(time.Second):
		t.Error("Timed out waiting for msg rcv")
	}
}
// TestSwitchShutDownFromReceive closes the switch while a child output has
// received a transaction but not yet acked, and verifies the child channel
// still gets closed.
func TestSwitchShutDownFromReceive(t *testing.T) {
	mockOutputs := []*mock.OutputChanneled{{}, {}}
	conf := ooutput.NewConfig()
	for i := 0; i < len(mockOutputs); i++ {
		outConf := ooutput.NewSwitchConfigCase()
		outConf.Continue = true
		conf.Switch.Cases = append(conf.Switch.Cases, outConf)
	}
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	s := newSwitch(t, conf, mockOutputs)
	require.NoError(t, s.Consume(readChan))
	select {
	case readChan <- message.NewTransaction(message.QuickBatch([][]byte{[]byte("foo")}), resChan):
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for msg send")
	}
	select {
	case _, open := <-mockOutputs[0].TChan:
		require.True(t, open)
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for msg rcv")
	}
	s.CloseAsync()
	require.NoError(t, s.WaitForClose(time.Second))
	select {
	case _, open := <-mockOutputs[0].TChan:
		assert.False(t, open)
	case <-time.After(time.Second):
		t.Error("Timed out waiting for msg rcv")
	}
}
// TestSwitchShutDownFromSend closes the switch while it is mid-send (the
// message was accepted upstream but no child has consumed it) and verifies
// the child channels are closed during shutdown.
func TestSwitchShutDownFromSend(t *testing.T) {
	mockOutputs := []*mock.OutputChanneled{{}, {}}
	conf := ooutput.NewConfig()
	for i := 0; i < len(mockOutputs); i++ {
		outConf := ooutput.NewSwitchConfigCase()
		outConf.Continue = true
		conf.Switch.Cases = append(conf.Switch.Cases, outConf)
	}
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	s := newSwitch(t, conf, mockOutputs)
	require.NoError(t, s.Consume(readChan))
	select {
	case readChan <- message.NewTransaction(message.QuickBatch([][]byte{[]byte("foo")}), resChan):
	case <-time.After(time.Second):
		t.Fatal("Timed out waiting for msg send")
	}
	s.CloseAsync()
	require.NoError(t, s.WaitForClose(time.Second))
	select {
	case _, open := <-mockOutputs[0].TChan:
		assert.False(t, open)
	case <-time.After(time.Second):
		t.Error("Timed out waiting for msg rcv")
	}
}
// TestSwitchBackPressure blocks output 1 (never consumed) while draining
// output 0 as fast as possible, and asserts the broker stops accepting new
// messages quickly — i.e. back pressure from one slow output propagates
// upstream instead of buffering unboundedly.
func TestSwitchBackPressure(t *testing.T) {
	ctx, done := context.WithTimeout(context.Background(), time.Second*30)
	defer done()
	t.Parallel()
	mockOutputs := []*mock.OutputChanneled{{}, {}}
	conf := ooutput.NewConfig()
	for i := 0; i < len(mockOutputs); i++ {
		outConf := ooutput.NewSwitchConfigCase()
		outConf.Continue = true
		conf.Switch.Cases = append(conf.Switch.Cases, outConf)
	}
	readChan := make(chan message.Transaction)
	resChan := make(chan error, 1)
	s := newSwitch(t, conf, mockOutputs)
	require.NoError(t, s.Consume(readChan))
	wg := sync.WaitGroup{}
	wg.Add(1)
	doneChan := make(chan struct{})
	go func() {
		defer wg.Done()
		// Consume as fast as possible from mock one
		for {
			select {
			case ts := <-mockOutputs[0].TChan:
				require.NoError(t, ts.Ack(ctx, nil))
			case <-doneChan:
				return
			}
		}
	}()
	i := 0
bpLoop:
	for ; i < 1000; i++ {
		select {
		case readChan <- message.NewTransaction(message.QuickBatch([][]byte{[]byte("hello world")}), resChan):
		case <-time.After(time.Millisecond * 200):
			break bpLoop
		}
	}
	// With output 1 blocked, the send loop should stall early; a high count
	// would mean messages are being buffered somewhere they shouldn't be.
	if i > 500 {
		t.Error("We shouldn't be capable of dumping this many messages into a blocked broker")
	}
	close(readChan)
	close(doneChan)
	wg.Wait()
}
|
#!/bin/bash
# Wrapper that runs hadolint against the given arguments, failing with a
# clear message when hadolint is not installed.
#
# Fixed: the original condition `which hadolint &> /dev/null $? != 0` passed
# "$?", "!=" and "0" as extra arguments to `which`, so the not-installed
# branch effectively never triggered.
if ! which hadolint &> /dev/null; then
  echo "Hadolint must be installed"
  exit 1
fi
# Forward all arguments and propagate hadolint's exit status.
hadolint "$@"
exit $?
|
#!/bin/bash
# profiles = xccdf_org.ssgproject.content_profile_ospp
# Test setup script: load shared dconf test helpers, ensure dconf is
# installed, then reset dconf settings to a clean baseline.
. ../../dconf_test_functions.sh
yum -y install dconf
clean_dconf_settings
def char_frequency(string):
    """
    This function takes in a string and returns a dictionary with each character and its frequency.
    Argument:
    string (str): The string to be analyzed.
    Returns:
    dict: A dictionary mapping each character to its frequency.
    """
    # dict.get supplies 0 for characters not seen yet.
    counts = {}
    for ch in string:
        counts[ch] = counts.get(ch, 0) + 1
    return counts
<reponame>kenfinnigan/monster-combat
package dev.ebullient.dnd.combat;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import dev.ebullient.dnd.mechanics.Dice;
import dev.ebullient.dnd.mechanics.Type;
class EncounterRoundResult implements RoundResult {
static final Logger logger = LoggerFactory.getLogger(EncounterRoundResult.class);
List<EncounterCombatant> initiativeOrder;
List<EncounterCombatant> survivors;
List<EncounterAttackEvent> events;
final int numCombatants;
final int numTypes;
final int crDelta;
final int sizeDelta;
final EncounterTargetSelector selector;
final Dice.Method method;
final String encounterId;
    /**
     * Captures summary statistics for this matchup (CR spread, size spread,
     * number of distinct creature types) and initializes per-round state.
     *
     * @param initiativeOrder combatants in initiative order; must be non-empty
     *        (the first element seeds the min/max accumulators)
     * @param selector strategy used to choose attack targets each round
     * @param method dice-rolling method applied to attack events
     * @param encounterId identifier linking this round to its encounter
     */
    EncounterRoundResult(List<EncounterCombatant> initiativeOrder,
            EncounterTargetSelector selector, Dice.Method method, String encounterId) {
        // Remember details about this matchup
        EncounterCombatant first = initiativeOrder.iterator().next();
        int maxCR = first.beast.getCR();
        int minCR = maxCR;
        int maxSize = first.beast.getSize().ordinal();
        int minSize = maxSize;
        Set<Type> types = new HashSet<>();
        for (EncounterCombatant x : initiativeOrder) {
            types.add(x.beast.getType());
            maxCR = Math.max(x.beast.getCR(), maxCR);
            minCR = Math.min(x.beast.getCR(), minCR);
            maxSize = Math.max(x.beast.getSize().ordinal(), maxSize);
            minSize = Math.min(x.beast.getSize().ordinal(), minSize);
        }
        this.events = new ArrayList<>();
        this.initiativeOrder = initiativeOrder;
        // Survivors start as a copy so removals don't mutate initiativeOrder.
        this.survivors = new ArrayList<>(initiativeOrder);
        this.numCombatants = initiativeOrder.size();
        this.crDelta = maxCR - minCR;
        this.sizeDelta = maxSize - minSize;
        this.numTypes = types.size();
        this.selector = selector;
        this.encounterId = encounterId;
        this.method = method;
    }
public List<EncounterAttackEvent> getEvents() {
return events;
}
public List<EncounterCombatant> getSurvivors() {
return survivors;
}
public int getNumCombatants() {
return numCombatants;
}
public int getSizeDelta() {
return sizeDelta;
}
public int getCrDelta() {
return crDelta;
}
public int getNumTypes() {
return numTypes;
}
public String getSelector() {
return EncounterTargetSelector.targetSelectorToString(selector, numCombatants);
}
void go() {
for (EncounterCombatant actor : initiativeOrder) {
if (actor.isAlive()) {
EncounterCombatant target = selector.chooseTarget(actor, initiativeOrder);
// Single or many attacks
List<Attack> attacks = actor.getAttacks();
// A condition can impose a single attack constraint
if (attacks.size() == 1 || actor.attackLimit()) {
makeAttack(actor, attacks.get(0), target);
} else {
for (Attack a : attacks) {
makeAttack(actor, a, target);
}
}
// Highlander
if (target.hitPoints <= 0) {
survivors.remove(target);
}
}
}
}
void makeAttack(EncounterCombatant actor, Attack a, EncounterCombatant target) {
// only attack the target if it hasn't already been killed
// (e.g. by a previous attack.
if (target.isAlive()) {
EncounterAttackEvent evt = new EncounterAttackEvent(actor, target, a, method, encounterId);
// one event may create more than one result (additional effects)
List<EncounterAttackEvent> evts = evt.attack();
events.addAll(evts);
logger.debug("attack: {}", evts);
}
}
}
|
#!/usr/bin/env bash
# CI bootstrap for an OCaml project on Azure Pipelines: installs opam per
# platform, pins/installs the local packages, builds and runs the test suite.
# set up PYTHONPATH
#source env.sh
DISTRO=${DISTRO:-alpine}
OCAML_VERSIONS=${OCAML_VERSIONS:-4.06 4.07}
TEST_DISTRIB=${TEST_DISTRIB:-no}
export OPAMYES=1
export OPAMJOBS=3
set -ex
# Per-OS toolchain setup; each branch must leave an opam environment active.
case $AGENT_OS in
Linux)
    sudo add-apt-repository ppa:avsm/ppa
    sudo apt-get update
    sudo apt-get -y install opam
    opam init --auto-setup https://github.com/ocaml/opam-repository.git
    opam install depext
    git config --global user.name "Azure Pipelines CI"
    git config --global user.email "bactrian@ocaml.org"
    TEST_CURRENT_SWITCH_ONLY=yes
    eval $(opam env)
    ;;
LinuxDocker)
    # Inside an opam Docker image: reuse the preinstalled switch.
    sudo chown -R opam /home/opam/src
    cd /home/opam/src
    git -C /home/opam/opam-repository pull origin master
    opam update
    opam install depext
    eval $(opam env)
    ;;
Windows_NT)
    echo Preparing Cygwin environment
    SWITCH=${OPAM_SWITCH:-'4.07.1+mingw64c'}
    OPAM_DL_SUB_LINK=0.0.0.2
    case $AGENT_OSARCHITECTURE in
        "X64")
            OPAM_URL="https://github.com/fdopen/opam-repository-mingw/releases/download/${OPAM_DL_SUB_LINK}/opam64.tar.xz"
            OPAM_ARCH=opam64 ;;
        "X86")
            OPAM_URL="https://github.com/fdopen/opam-repository-mingw/releases/download/${OPAM_DL_SUB_LINK}/opam32.tar.xz"
            OPAM_ARCH=opam32 ;;
        *)
            echo "Unsupported architecture $AGENT_OSARCHITECTURE"
            exit 1 ;;
    esac
    # An explicit first argument overrides the switch to install.
    if [ $# -gt 0 ] && [ -n "$1" ]; then
        SWITCH=$1
    fi
    export OPAM_LINT="false"
    export CYGWIN='winsymlinks:native'
    export OPAMYES=1
    set -eu
    curl -fsSL -o "${OPAM_ARCH}.tar.xz" "${OPAM_URL}"
    tar -xf "${OPAM_ARCH}.tar.xz"
    "${OPAM_ARCH}/install.sh" --quiet
    # if a msvc compiler must be compiled from source, we have to modify the
    # environment first
    case "$SWITCH" in
        *msvc32)
            eval $(ocaml-env cygwin --ms=vs2015 --no-opam --32) ;;
        *msvc64)
            eval $(ocaml-env cygwin --ms=vs2015 --no-opam --64) ;;
    esac
    opam init -c "ocaml-variants.${SWITCH}" --disable-sandboxing --enable-completion --enable-shell-hook --auto-setup default "https://github.com/fdopen/opam-repository-mingw.git#opam2"
    opam config set jobs "$OPAMJOBS"
    opam update
    is_msvc=0
    case "$SWITCH" in
        *msvc*)
            is_msvc=1
            eval $(ocaml-env cygwin --ms=vs2015)
            ;;
        *mingw*)
            eval $(ocaml-env cygwin)
            ;;
        *)
            echo "ocamlc reports a dubious system: ${ocaml_system}. Good luck!" >&2
            eval $(opam env)
    esac
    if [ $is_msvc -eq 0 ]; then
        opam install depext-cygwinports depext
    else
        opam install depext
    fi
    cd ${BUILD_SOURCES_DIR}
    git config --global user.name "Azure Pipelines CI"
    git config --global user.email "bactrian@ocaml.org"
    TEST_CURRENT_SWITCH_ONLY=yes
    SKIP_ASYNC_ON_WIN32=yes
    ;;
Darwin)
    brew install opam ocaml
    opam init --auto-setup https://github.com/ocaml/opam-repository.git
    opam install depext
    git config --global user.name "Azure Pipelines CI"
    git config --global user.email "bactrian@ocaml.org"
    TEST_CURRENT_SWITCH_ONLY=yes
    TEST_DISTRIB=yes
    eval $(opam env)
    ;;
*)
    echo Unknown OS $AGENT_OS
    exit 1
esac
# All local *.opam files define the packages under test.
PACKAGES=`ls -1 *.opam|xargs -I% -n 1 basename % .opam`
# NOTE(review): SKIP_ASYNC_ON_WIN32 is only assigned in the Windows_NT branch;
# on other OSes this test relies on the variable expanding to empty (set -u is
# likewise only enabled on Windows, after the variable is set) — confirm.
if [ "$SKIP_ASYNC_ON_WIN32" != "" ]; then
    echo Skipping Async on Windows
    PACKAGES=`echo $PACKAGES | sed -e 's/cstruct-async//g'`
fi
echo Processing $PACKAGES
# git pins fails under cygwin, need to debug
opam pin add -n -k path .
opam --yes depext -y $PACKAGES
opam install --with-test --with-doc --deps-only $PACKAGES
case $TEST_CURRENT_SWITCH_ONLY in
yes)
    echo Testing current switch only
    opam switch
    # hack until async builds on windows in a few weeks
    rm -rf async cstruct-async.opam
    dune build
    dune runtest
    ;;
*)
    # Build/test against every configured compiler version via a workspace.
    echo "(lang dune 1.0)" > dune-workspace.dev
    for v in $OCAML_VERSIONS; do
        echo "(context (opam (switch $v)))" >> dune-workspace.dev
        opam install --deps-only -t --switch $v .
    done
    dune build --workspace dune-workspace.dev
    dune runtest --workspace dune-workspace.dev
    ;;
esac
# Optionally exercise the release tooling (enabled on macOS above).
if [ "$TEST_DISTRIB" = "yes" ]; then
    opam install -y dune-release odoc
    dune build @doc
    dune-release distrib
    dune runtest --force
fi
# Smoke-test the Kafka cluster: install the client library, produce test
# messages, then consume 10 of them back.
pip install kafka-python
python /scripts/python_producer.py my-cluster-kafka-plain-0 example-kafka-consumer
python /scripts/python_consumer.py my-cluster-kafka-plain-0 example-kafka-producer 10
|
# Start a local testrpc chain with one pre-funded account (10M ether), zero
# gas price, and a 1-second block time for deterministic contract tests.
testrpc --account="0x4aec3faf5e3ff8c867d8821d321688b084e78ca4662c250c2dc5695465a06a81,10000000000000000000000000000" --gasPrice 0 --blocktime 1
|
import requests
from bs4 import BeautifulSoup

# Fetch the page with an HTTP GET request.
response = requests.get('https://www.example.com')
# Parse the response body into a navigable DOM tree.
soup = BeautifulSoup(response.text, 'html.parser')
#!/bin/bash
##==============================================================================
usage()
{
cat << EOF
mrtrixConversionAndProcessing.sh
- Script to carry out:
	CONVERT COMBINED dwi, bvec and bval TO MRtrix mif FORMAT
	EXTRACT BZERO AFTER EDDY, RUN BET ON THE EXTRACTED BZERO AND CREATE MASK
	CREATE RESPONSE FILES
	CREATE ODFs
	GENERATE TRACTOGRAPHY FOR 10,000 STREAMLINES
Inputs: SUBJECT_ID (suffix)
	Folder for processing
Outputs: Build odf, test by running small (10K) streamlines
COMPULSORY:
	-o		working folder name
	-s		SUBJECT_ID (suffix)
OPTIONS:
	-h      Show this message
EOF
}
##==============================================================================
# Parse command-line arguments.
# Fix: use the arithmetic comparison -gt; the original `[[ $# > 0 ]]`
# performed a lexicographic string comparison.
OPTIND=1
while [[ $# -gt 0 ]]; do
key="$1"
shift
case $key in
    -o|--workingDIR)
    workingDIR="$1"
    shift # past argument
    ;;
    -s|--suffix)
    SUFFIX="$1"
    shift # past argument
    ;;
    -h|--help)
    usage
    exit 1
    ;;
    *)
    # Fix: removed the stray comma that was printed literally in the message.
    echo " ERROR - Unknown input: $key"
    usage
    exit 1
    ;;
esac
done
##==============================================================================
# Validate that both compulsory options were supplied.
if [ -z ${workingDIR+x} ]; then
	echo " ERROR : workingDIR is unset "
	usage
	exit 1
fi
if [ -z ${SUFFIX+x} ]; then
	echo " ERROR : suffix is unset "
	usage
	exit 1
fi
##==============================================================================
### CHECK ALL INPUT ....
if ! [ -d "$workingDIR" ] ; then
	echo "*** ERROR *** workingDIR not found: " "$workingDIR"
	exit 1
fi
# Begin processing (paths quoted to survive spaces in folder names)
##==============================================================================
echo " ======= CONVERT dwi, bvec and bval TO MRtrix mif FORMAT ======= "
echo "START: " $(date)
ConvertEddyDwiToMrtrix3Format_REVISED.sh -i "$workingDIR" -s "$SUFFIX"
echo "END: " $(date) " ===============////====================="
##==============================================================================
echo " ======= EXTRACT BZERO AFTER EDDY, RUN BET AND CREATE MASK ======= "
echo "START: " $(date)
ExtractBzeroAfterEddy_REVISED.sh -i "$workingDIR" -s "$SUFFIX"
echo "END: " $(date) " ===============////====================="
##==============================================================================
echo " ======= CREATE RESPONSE FILES ======= "
echo "START: " $(date)
CalculateResponseFiles_REVISED.sh -i "$workingDIR" -s "$SUFFIX"
echo "END: " $(date) " ===============////====================="
##==============================================================================
# CREATE ODFs
echo " ======= CREATE ODFs ======= "
echo "START: " $(date)
CalculateODFmultishell_REVISED.sh -i "$workingDIR" -s "$SUFFIX"
echo "END: " $(date) " ===============////====================="
##==============================================================================
# MTNORMALISE
echo " ======= NORMALISE FODs ======= "
echo "START: " $(date)
NormaliseFOD.sh -i "$workingDIR"
echo "END: " $(date) " ===============////====================="
##==============================================================================
# GENERATE TRACTOGRAPHY FOR 10,000 STREAMLINES
echo " ======= GENERATE TRACTOGRAPHY FOR 10,000 STREAMLINES ======= "
echo "START: " $(date)
GenerateTractography_REVISED.sh -i "$workingDIR" -s "$SUFFIX"
echo "END: " $(date) " ===============////====================="
|
// Book Div and Template const
// eslint-disable-next-line max-classes-per-file
const temp = document.querySelector('.book'); // <template> stamped out per book
const bookshelf = document.querySelector('#bookshelf'); // container the book cards land in
const inputTitle = document.querySelector('#title');
const inputAuthor = document.querySelector('#author');
// The three page sections toggled by the nav handlers below.
const indexBook = document.querySelector('.index-book');
const addBook = document.querySelector('.add-book');
const contact = document.querySelector('.contact');

// Render the current date/time in the header using Luxon (global script).
// eslint-disable-next-line no-undef
const dt = luxon.DateTime.now().toLocaleString(luxon.DateTime.DATETIME_MED);
document.querySelector('#datetime').innerHTML = dt;
// Plain value object describing a single library entry.
class Book {
  constructor(id, title, author) {
    Object.assign(this, { id, title, author });
  }
}
// Manages the book collection: persistence in localStorage plus DOM rendering.
class Library {
  constructor() {
    this.library = [];
    this.startLibrary();
  }

  // Load persisted books and seed the id counter.
  startLibrary() {
    this.reloadLibrary();
    // Bug fix: seed from the highest existing id, not the array length.
    // Length-based seeding reused a live id after a deletion (e.g. books
    // 1,2,3 minus 2 leaves length 2, so the next add collided with id 3).
    this.idBook = this.library.reduce((max, book) => Math.max(max, book.id), 0);
  }

  // Create a book from the form inputs, persist it, and re-render.
  addBook() {
    const title = inputTitle.value;
    const author = inputAuthor.value;
    this.idBook += 1;
    const book = new Book(this.idBook, title, author);
    this.library.push(book);
    localStorage.library = JSON.stringify(this.library);
    this.reloadLibrary();
    inputTitle.value = '';
    inputAuthor.value = '';
  }

  // Clone the card template, fill it in, and append it to the shelf.
  displayBook(book) {
    this.clon = temp.content.cloneNode(true);
    this.clon.querySelectorAll('p')[0].innerHTML = book.title;
    this.clon.querySelectorAll('p')[1].innerHTML = book.author;
    this.clon.querySelector('button').addEventListener('click', () => { this.deleteBook(book.id); });
    bookshelf.appendChild(this.clon);
  }

  // Remove a book by id, persist, and re-render.
  deleteBook(id) {
    this.library = this.library.filter((book) => book.id !== id);
    localStorage.library = JSON.stringify(this.library);
    this.reloadLibrary();
  }

  // Re-read persisted state (if any) and redraw every card.
  reloadLibrary() {
    if (localStorage.getItem('library') !== null) {
      this.library = (JSON.parse(localStorage.library));
    }
    bookshelf.innerHTML = '';
    bookshelf.appendChild(temp); // keep the template inside the shelf
    for (let i = 0; i < this.library.length; i += 1) {
      this.displayBook(this.library[i]);
    }
  }
}
const library = new Library();
document.querySelector('#add-book').addEventListener('click', () => { library.addBook(); });

// Show exactly one of the three page sections and hide the other two.
function showOnly(visible) {
  [indexBook, addBook, contact].forEach((section) => {
    section.style.display = section === visible ? 'block' : 'none';
  });
}

function GoToIndex() {
  showOnly(indexBook);
}

function GoToAdd() {
  showOnly(addBook);
}

function GoToContact() {
  showOnly(contact);
}

document.querySelector('#go-to-index').addEventListener('click', () => GoToIndex());
document.querySelector('#go-to-add').addEventListener('click', () => GoToAdd());
document.querySelector('#go-to-contact').addEventListener('click', () => GoToContact());
# Evaluate the 512+512+512-N-IP language model on WikiText-103 validation data,
# augmenting each input (shuffle, keep only nouns/verbs in the first 2/3) and
# scoring perplexity on the last sixth of each sequence.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-N-IP/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-N-IP/1024+0+512-shuffled-N-VB-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_and_verbs_first_two_thirds_sixth --eval_function last_sixth_eval
#!/bin/bash
#===============================================================================
#
#          FILE: funcCheckFileString.sh
#
#         USAGE: ./funcCheckFileString.sh
#
#   DESCRIPTION: Defines CheckFileString, which reports whether a string
#                occurs in a file.
#
#       OPTIONS: ---
#  REQUIREMENTS: ---
#          BUGS: ---
#         NOTES: ---
#        AUTHOR: Brett Salemink (), admin@roguedesigns.us
#  ORGANIZATION: Rogue Designs
#       CREATED: 07/07/2018 02:46
#      REVISION: ---
#===============================================================================
set -o nounset                              # Treat unset variables as an error

DELIMITER="#########################################################"
USAGE="Usage CheckFileString [STRINGTOCHECK] [PATH/TO/FILETOCHECK] ex. Must be the full path and the file."

# CheckFileString STRING FILE
# Returns 0 if STRING occurs in FILE, 1 otherwise.
#
# Fixes: the original `if [ {grep -q $1 $2} && echo $? = 0]` was invalid
# shell (braces are not command substitution and && cannot appear inside
# test), so the condition always failed; and the else branch used `exit 1`,
# which killed the calling shell instead of returning a status.
function CheckFileString ()
{
    ##To check for a particular string in a file
    if grep -q "$1" "$2"
    then
        return 0
    else
        return 1
    fi
}    # end function
<reponame>act-1/admin<gh_stars>0
import React from 'react';
import { Route, Switch } from 'react-router-dom';
import { AppRoute } from '../types/router';
import routes from './routes';
// Renders a single <Route> from a route-config entry; the configured
// component receives the router render props unchanged.
function RouteWithSubRoutes(route: AppRoute) {
  return <Route path={route.path} exact={route.exact} render={(props) => <route.component {...props} />} />;
}
export function RenderRoutes() {
return (
<Switch>
{routes.map((route: AppRoute) => {
return <RouteWithSubRoutes {...route} />;
})}
</Switch>
);
}
|
<reponame>mvalles36/saas
package handlers
import (
"context"
"net/http"
"strconv"
"geeks-accelerator/oss/saas-starter-kit/internal/account"
"geeks-accelerator/oss/saas-starter-kit/internal/platform/auth"
"geeks-accelerator/oss/saas-starter-kit/internal/platform/web"
"geeks-accelerator/oss/saas-starter-kit/internal/platform/web/webcontext"
"geeks-accelerator/oss/saas-starter-kit/internal/platform/web/weberror"
"github.com/pkg/errors"
"gopkg.in/go-playground/validator.v9"
)
// Accounts represents the Account API method handler set.
type Accounts struct {
	Repository *account.Repository

	// ADD OTHER STATE LIKE THE LOGGER AND CONFIG HERE.
}
// Read godoc
// @Summary Get account by ID
// @Description Read returns the specified account from the system.
// @Tags account
// @Accept  json
// @Produce  json
// @Security OAuth2Password
// @Param id path string true "Account ID"
// @Success 200 {object} account.AccountResponse
// @Failure 400 {object} weberror.ErrorResponse
// @Failure 404 {object} weberror.ErrorResponse
// @Failure 500 {object} weberror.ErrorResponse
// @Router /accounts/{id} [get]
func (h *Accounts) Read(ctx context.Context, w http.ResponseWriter, r *http.Request, params map[string]string) error {
	// Auth claims must have been placed on the context by middleware.
	claims, ok := ctx.Value(auth.Key).(auth.Claims)
	if !ok {
		return errors.New("claims missing from context")
	}

	// Handle include-archived query value if set.
	var includeArchived bool
	if v := r.URL.Query().Get("include-archived"); v != "" {
		b, err := strconv.ParseBool(v)
		if err != nil {
			err = errors.WithMessagef(err, "unable to parse %s as boolean for include-archived param", v)
			return web.RespondJsonError(ctx, w, weberror.NewError(ctx, err, http.StatusBadRequest))
		}
		includeArchived = b
	}

	res, err := h.Repository.Read(ctx, claims, account.AccountReadRequest{
		ID:              params["id"],
		IncludeArchived: includeArchived,
	})
	if err != nil {
		cause := errors.Cause(err)
		switch cause {
		case account.ErrNotFound:
			// Missing account maps to 404; anything else bubbles up as 500.
			return web.RespondJsonError(ctx, w, weberror.NewError(ctx, err, http.StatusNotFound))
		default:
			return errors.Wrapf(err, "ID: %s", params["id"])
		}
	}

	return web.RespondJson(ctx, w, res.Response(ctx), http.StatusOK)
}
// Update godoc
// @Summary Update account by ID
// @Description Update updates the specified account in the system.
// @Tags account
// @Accept  json
// @Produce  json
// @Security OAuth2Password
// @Param data body account.AccountUpdateRequest true "Update fields"
// @Success 204
// @Failure 400 {object} weberror.ErrorResponse
// @Failure 403 {object} weberror.ErrorResponse
// @Failure 500 {object} weberror.ErrorResponse
// @Router /accounts [patch]
func (h *Accounts) Update(ctx context.Context, w http.ResponseWriter, r *http.Request, params map[string]string) error {
	// Request-scoped values (carries the request timestamp used below).
	v, err := webcontext.ContextValues(ctx)
	if err != nil {
		return err
	}

	claims, ok := ctx.Value(auth.Key).(auth.Claims)
	if !ok {
		return errors.New("claims missing from context")
	}

	// Decode and validate the JSON body into the update request.
	var req account.AccountUpdateRequest
	if err := web.Decode(ctx, r, &req); err != nil {
		if _, ok := errors.Cause(err).(*weberror.Error); !ok {
			err = weberror.NewError(ctx, err, http.StatusBadRequest)
		}
		return web.RespondJsonError(ctx, w, err)
	}

	err = h.Repository.Update(ctx, claims, req, v.Now)
	if err != nil {
		cause := errors.Cause(err)
		switch cause {
		case account.ErrForbidden:
			return web.RespondJsonError(ctx, w, weberror.NewError(ctx, err, http.StatusForbidden))
		default:
			// Validation failures map to 400; anything else bubbles up as 500.
			_, ok := cause.(validator.ValidationErrors)
			if ok {
				return web.RespondJsonError(ctx, w, weberror.NewError(ctx, err, http.StatusBadRequest))
			}

			return errors.Wrapf(err, "Id: %s Account: %+v", req.ID, &req)
		}
	}

	return web.RespondJson(ctx, w, nil, http.StatusNoContent)
}
|
package models;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
import io.ebean.Model;
/**
 * Ebean entity mapped to the "test" table; exercises a plain string column
 * and a string-backed enum column.
 */
@Entity
@Table(name = "test")
public class ExampleEntity extends Model {

    @Id
    @GeneratedValue
    public Long id;

    @Column(name = "my_str")
    public String myStr;

    // Persisted by enum constant name (EnumType.STRING), not ordinal.
    @Column(name="my_enum")
    @Enumerated(EnumType.STRING)
    public ExampleEnum myEnum;

    // Required by Ebean for instantiation.
    public ExampleEntity() {
    }

    public ExampleEntity(String myStr, ExampleEnum myEnum) {
        this.myStr = myStr;
        this.myEnum = myEnum;
    }
}
|
-- Authors who have at least one book dated within the last 10 years.
-- NOTE(review): books.year is compared against full DATE values; this assumes
-- the column holds DATE/DATETIME data rather than a bare year integer —
-- confirm against the schema.
SELECT authors.*
FROM authors
INNER JOIN books on authors.id = books.author_id
WHERE books.year BETWEEN DATE_SUB(CURDATE(), INTERVAL 10 YEAR) AND CURDATE();
<filename>resources/js/components/HomeStartControls.tsx<gh_stars>0
import React, {useEffect, useState} from "react";
// Import Children
import HomeStartTimer from "./HomeStartTimer";
// Import Dependencies
import axios from "axios";
//Interface Props
interface Props {
    userData: any, //eslint-disable-line
    userLastLog?: any,
}

/**
 * Start/Stop control panel: toggles the operator's work-log state and
 * creates (POST) or closes (PATCH) log entries through /api/logger.
 *
 * Fix: the original `useState(Boolean)` stored the Boolean *constructor*
 * (a truthy function) as state instead of a boolean value. The state is now
 * a real boolean, initialized to `true` to preserve the original truthy
 * first render (button shows "Start") until defineStatus runs on mount.
 */
const HomeStartControls: React.FC<Props> = props => {
    const {userData, userLastLog} = props;

    //State
    const [statusOperator, setStatusOperator] = useState<boolean>(true);
    const [lastLog, setLastLog] = useState<any>(userLastLog);

    //Methods
    useEffect(() => {
        defineStatus();
    }, []);

    // The operator may start a new log unless the latest log is still open.
    const defineStatus = () => {
        setStatusOperator(!(userLastLog && lastLog.log_status));
    };

    // Flip the Start/Stop state.
    const statusOperatorHandle = () => {
        setStatusOperator(prev => !prev);
    };

    const onButtonClick = () => {
        if(window.confirm('really bro ?')) {
            statusOperatorHandle();
            // statusOperator still holds the pre-toggle value here (state
            // updates are async), matching the original behavior.
            if(statusOperator) {
                emitDataHandle();
            } else {
                updateDataHandle();
            }
        }
    };

    // Close the current log entry (PATCH /api/logger/:id).
    const updateDataHandle = () => {
        let dataId = lastLog.id;
        const dataPayload = {
            log_status: statusOperator,
        }
        axios.patch("/api/logger/"+dataId, dataPayload).then(response => {
            setLastLog(response.data.data_last_log);
        }).catch(error => {
            console.error(error);
        })
    }

    // Open a new log entry for this user (POST /api/logger).
    const emitDataHandle = () => {
        const dataPayload = {
            user_id: userData.id,
            log_status: statusOperator,
        };
        axios.post("/api/logger", dataPayload)
            .then(response => {
                setLastLog(response.data.data_last_log);
            }).catch(error => {
                console.error(error)
            })
    };

    //Template
    return (
        <article className="col-span-3 start-controls flex flex-col justify-start items-center p-4">
            <h1 className="text-4xl text-gray-100 mb-6 text-center">Click to {statusOperator? "Start" : "Stop"}</h1>
            <button onClick={onButtonClick} className={statusOperator ? "control-button-green" : "control-button-red"}>
                {statusOperator? "Start" : "Stop"}
            </button>
            <HomeStartTimer lastLog={lastLog} />
        </article>
    );
};

export default HomeStartControls;
const discord = require("discord.js");
const fs = require("fs");
function loadAllCommands(client) {
client.commands = new discord.Collection();
let dir = fs.readdirSync("./src/commands").filter(file => file.endsWith(".js"));
for (const file of fs.readdirSync("./src/commands")) {
const command = require(`./commands/${file}`);
client.commands.set(command.name, command);
console.debug(`${command.name} pronto.`);
}
// evitar ficar atrás dos aliases no evento message.
for (let command in client.commands) {
if (command.aliases !== null || command.aliases !== undefined) {
for (let alias in command.aliases) {
commands.set(alias, command);
}
}
}
}
module.exports = {
loadAllCommands
}
|
package com.bhm.sdk.rxlibrary.rxjava;
import android.content.Context;
import com.trello.rxlifecycle3.components.support.RxAppCompatActivity;
import com.trello.rxlifecycle3.components.support.RxFragment;
import androidx.annotation.NonNull;
/**
* Created by bhm on 2018/5/11.
*/
/**
 * Base fragment that caches the hosting RxAppCompatActivity and owns an
 * RxManager whose subscriptions are released when the fragment is destroyed.
 */
public class RxBaseFragment extends RxFragment{

    protected RxAppCompatActivity activity;
    // Collects subscriptions so they can be disposed in onDestroy.
    protected RxManager rxManager = new RxManager();

    @Override
    public void onAttach(@NonNull Context context) {
        super.onAttach(context);
        // The hosting activity is assumed to be an RxAppCompatActivity.
        activity = (RxAppCompatActivity) context;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Dispose all tracked subscriptions to avoid leaks.
        rxManager.unSubscribe();
    }
}
|
#!/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

# Launch multi_classification.py across WORLD_SIZE distributed workers via
# the repo's launcher, forwarding any extra CLI arguments to the script.
REPO_ROOT=`pwd`
CMD="$REPO_ROOT/multi_classification.py"
WORLD_SIZE=2

python $REPO_ROOT/distributed_launcher.py --world_size "$WORLD_SIZE" "$CMD" "$@"
|
#!/bin/bash
# Whitelist Cloudflare edge IPs in ufw, restricted to HTTP/HTTPS (80,443).
wget https://www.cloudflare.com/ips-v4 -O ips-v4
wget https://www.cloudflare.com/ips-v6 -O ips-v6

# Allow all traffic from Cloudflare IPs (Restrict to ports 80 & 443).
# Cleanup: modern $(...) instead of backticks, quoted expansion, and the two
# identical v4/v6 loops merged into one.
for cfip in $(cat ips-v4) $(cat ips-v6); do
	ufw allow proto tcp from "$cfip" to any port 80,443 comment 'Cloudflare IP'
done

ufw reload > /dev/null
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
import org.eastsideprep.spacecritters.alieninterfaces.*;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author flucco
*/
/**
 * Game alien: builds up energy/tech, spawns a child ("boy"), then maneuvers
 * relative to the nearest spawned aliens while broadcasting chatter.
 */
public class BigBoyAlien implements Alien {

    Context cntxt;
    int id;
    int numSpawned = 0;
    // True once this alien has seen another alien of its species (a "son").
    Boolean boy = false;

    @Override
    public void init(Context cntxt, int i, int i1, String string) {
        this.cntxt = cntxt;
        this.id = i;
    }

    @Override
    public void communicate() {
        try {
            cntxt.broadcastAndListen("TO ALL MY FELLOW ALIENS, from " + id, 0, true);
        } catch (NotEnoughTechException | NotEnoughEnergyException ex) {
            // Best-effort broadcast: silently skip when resources are short.
        }
    }

    @Override
    public void receive(String[] messages) {
        // Echo every received message to the debug log.
        for (String s : messages) {
            cntxt.debugOut(s);
        }
    }

    /**
     * Decides this turn's movement: stays put unless enough energy/tech is
     * banked and another alien has been spotted, in which case it backs away
     * from the closest alien or approaches the second-closest.
     */
    @Override
    public Direction getMove() {
        int x = 0;
        int y = 0;

        try {
            // Don't step onto an occupied square (x,y are still 0,0 here).
            if (cntxt.getView(2).getSpaceObjectAtPos(cntxt.getPosition().add(new Direction((int) x, (int) y))) != null) {
                x = 0;
                y = 0;
            }
        } catch (NotEnoughEnergyException | NotEnoughTechException | View.CantSeeSquareException ex) {
            cntxt.debugOut("i caught an exception, whee!");
        }

        // Defaults near the presumed spawn point when nothing is visible.
        IntegerPosition ClosestAlien = new IntegerPosition(5, 5);
        IntegerPosition NextClosestAlien = new IntegerPosition(5, 5);

        try {
            List<AlienSpecies> l = cntxt.getView((int) cntxt.getTech()).getClosestSpecificAliens(null); // changed to "null", I know I told you otherwise, but this is more efficient

            // Index 0 is presumably this alien itself; 1 and 2 are neighbors.
            if (l != null && l.size() > 1) {
                ClosestAlien = l.get(1).position;
                cntxt.debugOut("seeing a son at " + ClosestAlien);
                boy = true;
            }
            if (l != null && l.size() > 2) {
                NextClosestAlien = l.get(2).position;
                cntxt.debugOut("there are multiple boys!");
            }
        } catch (NotEnoughEnergyException | NotEnoughTechException e) {
            //ClosestAlien = new Position (5, 5);
            //change to wherever spawn point is
        }

        if (ClosestAlien == null) {
            ClosestAlien = new IntegerPosition (5, 5);
        }

        cntxt.debugOut("the nearest boy is " + ClosestAlien + " and the second nearest boy is" + NextClosestAlien);

        // Only maneuver when there is spare energy/tech beyond the trip cost.
        if (cntxt.getEnergy() > (ClosestAlien.y - cntxt.getPosition().y + ClosestAlien.x - cntxt.getPosition().x) + 5
                && cntxt.getTech() > (Math.abs(ClosestAlien.y - cntxt.getPosition().y) + Math.abs(ClosestAlien.x - cntxt.getPosition().x)) + 7
                && boy == true) {

            Double Energy = cntxt.getEnergy();
            int Tech = cntxt.getTech();

            cntxt.debugOut("necessary tech is:" + ((Math.abs(ClosestAlien.y - cntxt.getPosition().y) + Math.abs(ClosestAlien.x - cntxt.getPosition().x)) + 7));

            // NOTE(review): Direction appears to take (x, y) elsewhere in this
            // class, but these two calls pass the y-delta first — confirm
            // whether the axes are intentionally swapped here.
            if (cntxt.getDistance(cntxt.getIntegerPosition(), ClosestAlien) < 3) {
                cntxt.debugOut("moving away");
                return new Direction(ClosestAlien.y - cntxt.getPosition().y - 3, ClosestAlien.x - cntxt.getPosition().x - 3);
            }
            if (cntxt.getDistance(cntxt.getIntegerPosition(), NextClosestAlien) > 3) {
                cntxt.debugOut("moving closer!!");
                return new Direction(NextClosestAlien.y - cntxt.getPosition().y - 3, NextClosestAlien.x - cntxt.getPosition().x - 3);
            }
        }

        cntxt.debugOut("not going anywhere, frances, no matter how much you beg");
        return new Direction(x, y);
    }

    /**
     * Turn action priority: gain energy (< 100), research tech (< 30), spawn
     * one child while affordable, otherwise keep gaining energy.
     */
    @Override
    public Action getAction() {
        if (cntxt.getEnergy() < 100) {
            cntxt.debugOut("Gaining" + cntxt.getStateString());
            return new Action(Action.ActionCode.Gain);
        }
        if (cntxt.getTech() < 30) {
            cntxt.debugOut("Researching" + cntxt.getStateString());
            return new Action(Action.ActionCode.Research);
        }
        if (boy == false) {
            if (cntxt.getEnergy() > cntxt.getSpawningCost() + 20) {
                cntxt.debugOut("Spawning");
                numSpawned++;
                return new Action(Action.ActionCode.Spawn, 5);
            }
        }
        return new Action(Action.ActionCode.Gain);
    }

    @Override
    public void processResults() {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }
}
|
#!/bin/bash
# Provision an Ubuntu host: install baseline packages, strip out unwanted
# services, and bring the system fully up to date.
set -e -u -o pipefail -x

packages_to_install=(
  apt-transport-https
  ca-certificates
  curl
  ntp
  software-properties-common
)

# Services/tools removed to reduce noise and surface area on servers.
packages_to_remove=(
  apport
  aptitude
  firewalld
  man-db
  snapd
  ufw
  unattended-upgrades
)

# wait for apt lock
while fuser -s /var/{lib/{dpkg,apt/lists},cache/apt/archives}/lock; do
  sleep 2
done

apt-get update -qq
apt-get install -yq "${packages_to_install[@]}"
apt-get purge -yq "${packages_to_remove[@]}"
apt-get autoremove -yq
apt-get full-upgrade -yq
|
<filename>spec/models/retrospectives_user_spec.rb
require 'rails_helper'

# Placeholder spec for the RetrospectivesUser model; add examples here as
# behavior is introduced.
describe RetrospectivesUser do
end
|
#!/usr/bin/env bash
# set up PYTHONPATH
#source env.sh

MODEL_DIR="$HOME/research/trav_second_vanilla/pointpillars_model/"
#export CUDA_VISIBLE_DEVICES=2

# Generate labels for KITTI tracking sequences 0-20. Each sequence uses its
# own config file, xyres_16_NNNN.config, where NNNN is the sequence number
# zero-padded to four digits. The loop replaces 21 copy-pasted invocations
# with identical commands.
for seq in $(seq 0 20); do
    cfg=$(printf 'xyres_16_%04d.config' "$seq")
    python pytorch/train.py generate_labels --config_path=./configs/pointpillars/car/tracking/$cfg --model_dir=$MODEL_DIR --sequence=$seq
done
|
// Shape of a student's kinship (next-of-kin) record.
export interface IKinshipInfos {
  KinshipInfoID: number; // primary key of the kinship record
  StudentID: number;     // student this record belongs to
  Name: string;
  Class: number;
  Relation: string;      // relationship to the student (e.g. parent, guardian)
}
|
package io.smallrye.mutiny.streams.operators;
import org.eclipse.microprofile.reactive.streams.operators.spi.Stage;
import io.smallrye.mutiny.streams.Engine;
/**
 * Operator that maps a Reactive Streams {@link Stage} subtype to the factory
 * able to create its corresponding publisher stage.
 */
public class PublisherOperator<T extends Stage> extends Operator<T> {

    private PublisherStageFactory<T> factory;

    public PublisherOperator(Class<T> clazz, PublisherStageFactory<T> factory) {
        super(clazz);
        this.factory = factory;
    }

    // Delegates publisher-stage creation to the configured factory.
    public PublisherStage create(Engine engine, T stage) {
        return factory.create(engine, stage);
    }
}
|
def detect_question(sent):
    """Return True if the sentence ends with a question mark.

    Matches the original behavior (checks the last character of the last
    whitespace-separated token), but returns False for empty or
    all-whitespace input instead of raising IndexError.

    Args:
        sent (str): Sentence to classify.

    Returns:
        bool: True if the final token ends with '?', else False.
    """
    tokens = sent.split()
    return bool(tokens) and tokens[-1].endswith('?')
<reponame>eseiler/dream_yara_old
// ==========================================================================
// d_update_filter.cpp
// ==========================================================================
// Copyright (c) 2017-2022, <NAME>, FU Berlin
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of <NAME> or the FU Berlin nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL <NAME> OR THE FU BERLIN BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
//
// ==========================================================================
// Author: <NAME> <<EMAIL>>
// ==========================================================================
#define UPDATE_FILTER
// ----------------------------------------------------------------------------
// STL headers
// ----------------------------------------------------------------------------
#include <string>
#include <vector>
#include <mutex>
#include <condition_variable>
#include <future>
#include <thread>
// ----------------------------------------------------------------------------
// SeqAn headers
// ----------------------------------------------------------------------------
#include <seqan/index.h>
// ----------------------------------------------------------------------------
// App headers
// ----------------------------------------------------------------------------
#include "store_seqs.h"
#include "misc_timer.h"
#include "misc_types.h"
#include "bits_matches.h"
#include "misc_options.h"
#include "d_misc_options.h"
#include "d_kdx_filter.h"
#include "d_bloom_filter.h"
using namespace seqan;
// ----------------------------------------------------------------------------
// Class Options
// ----------------------------------------------------------------------------
// Command-line options for the filter-update tool.
struct Options
{
    // Path of the existing filter file that will be updated in place.
    CharString filterFile;
    // Maps bin number -> fasta file whose k-mers rebuild that bin.
    std::map<uint32_t, CharString> binContigs;
    // Worker threads used to clear and refill bins.
    unsigned threadsCount;
    bool verbose;
    // Kind of filter stored in filterFile (see filterTypeList).
    FilterType filterType;
    // Accepted names on the CLI, indexed by the FilterType enum values.
    std::vector<std::string> filterTypeList;

    Options() :
        threadsCount(1),
        verbose(false),
        filterType(BLOOM),
        filterTypeList({"bloom", "kmer_direct", "none"})
    {}
};
// ==========================================================================
// Functions
// ==========================================================================
// ----------------------------------------------------------------------------
// Function setupArgumentParser()
// ----------------------------------------------------------------------------
// Declares the CLI: one filter-file argument, a variadic list of fasta files
// (whose names encode the 0-indexed bin number, e.g. "7.fna"), plus options
// for filter type, thread count and verbosity. Option defaults come from the
// passed-in Options instance.
void setupArgumentParser(ArgumentParser & parser, Options const & options)
{
    setAppName(parser, "dream_yara_update_filter");
    setShortDescription(parser, "Update Bloom Filter for DREAM-Yara");
    setCategory(parser, "Read Mapping");
    setDateAndVersion(parser);
    setDescription(parser);

    addUsageLine(parser, "[\\fIOPTIONS\\fP] <\\fIBLOOM-FILTER FILE \\fP> <\\fI4.fna\\fP> <\\fI7.fna\\fP>");

    // Argument 0: the filter file to update.
    addArgument(parser, ArgParseArgument(ArgParseArgument::INPUT_FILE, "BLOOM FILTER"));
    setValidValues(parser, 0, "filter");
    setHelpText(parser, 0, "The path of the bloom filter to be updated.");

    // Argument 1 (variadic): the per-bin fasta files.
    addArgument(parser, ArgParseArgument(ArgParseArgument::INPUT_FILE, "FASTA FILES", true));
    setValidValues(parser, 1, SeqFileIn::getFileExtensions());
    setHelpText(parser, 1, "The fasta files of the bins to updated. File names should be exactly the same us bin number (0-indexing). e.g. 0.fna");

    addOption(parser, ArgParseOption("ft", "filter-type", "type of filter to build",
                                     ArgParseOption::STRING));
    setValidValues(parser, "filter-type", options.filterTypeList);
    setDefaultValue(parser, "filter-type",  options.filterTypeList[options.filterType]);

    addOption(parser, ArgParseOption("t", "threads", "Specify the number of threads to use (valid for bloom filter only).", ArgParseOption::INTEGER));
    setMinValue(parser, "threads", "1");
    setMaxValue(parser, "threads", "2048");
    setDefaultValue(parser, "threads", options.threadsCount);

    addOption(parser, ArgParseOption("v", "verbose", "Displays verbose output."));
}
// ----------------------------------------------------------------------------
// Function parseCommandLine()
// ----------------------------------------------------------------------------
// Parses argv into `options`. Each fasta file name must encode its bin
// number (checked via getBinNoFromFile); an invalid name terminates the
// process with exit(1) after printing the offending file.
ArgumentParser::ParseResult
parseCommandLine(Options & options, ArgumentParser & parser, int argc, char const ** argv)
{
    ArgumentParser::ParseResult res = parse(parser, argc, argv);

    if (res != ArgumentParser::PARSE_OK)
        return res;

    // Parse verbose output option.
    getOptionValue(options.verbose, parser, "verbose");

    // Parse bloom filter path.
    getArgumentValue(options.filterFile, parser, 0);

    getOptionValue(options.filterType, parser, "filter-type", options.filterTypeList);

    if (isSet(parser, "threads")) getOptionValue(options.threadsCount, parser, "threads");

    // std::map<uint32_t, CharString> binContigs;
    // Parse read input files: map each fasta file to the bin number
    // extracted from its file name.
    uint32_t updateCount = getArgumentValueCount(parser, 1);
    for (uint32_t i = 0; i < updateCount; ++i)
    {
        CharString currentFile;
        uint32_t currentBinNo;
        getArgumentValue(currentFile, parser, 1, i);
        if (getBinNoFromFile(currentBinNo, currentFile))
            options.binContigs[currentBinNo] = currentFile;
        else
        {
            std::cerr << "File: " << currentFile << "\ndoesn't have a valid name\n";
            exit(1);
        }
    }

    return ArgumentParser::PARSE_OK;
}
// ----------------------------------------------------------------------------
// Function verifyFnaFiles()
// ----------------------------------------------------------------------------
// Returns true iff every fasta file in the map passes verifyFnaFile().
// Stops at the first failing file.
inline bool verifyFnaFiles(std::map<uint32_t,CharString> const & fileList)
{
    // FIX: iterate by const reference — the previous `for (auto fastaFile : ...)`
    // copied every (bin, CharString) pair, allocating a string per iteration.
    for (auto const & fastaFile : fileList)
    {
        if (!verifyFnaFile(fastaFile.second))
            return false;
    }
    return true;
}
// ----------------------------------------------------------------------------
// Function update_filter()
// ----------------------------------------------------------------------------
// Updates the given filter in place: validates bin numbers, clears the bins
// to be replaced, re-adds the k-mers from each bin's fasta file using a pool
// of async tasks bounded by a semaphore, and finally saves the filter back
// to options.filterFile.
template <typename TFilter>
inline void update_filter(Options & options, TFilter & filter)
{
    uint32_t noOfBins = filter.getNumberOfBins();

    // clear the bins to updated;
    // First pass: validate every requested bin number and collect them.
    std::vector<uint32_t> bins2update = {};
    typedef std::map<uint32_t,CharString>::iterator mapIter;
    for(mapIter iter = options.binContigs.begin(); iter != options.binContigs.end(); ++iter)
    {
        if(iter->first >= noOfBins)
        {
            std::cerr <<"The provided bloom filter has only " << noOfBins <<" Bins.\nRetry after removing " << iter->second << " from arguments!" << std::endl;
            exit(1);
        }
        bins2update.push_back(iter->first);
    }
    filter.clearBins(bins2update, options.threadsCount);

    // Bounds the number of concurrently running addFastaFile tasks.
    Semaphore thread_limiter(options.threadsCount);
    std::vector<std::future<void>> tasks;

    Timer<double> timer;
    Timer<double> globalTimer;
    start (timer);
    start (globalTimer);

    // add the new kmers from the new files
    //iterate over the maps
    // Capturing `iter` by value copies the map iterator; std::map iterators
    // stay valid while the map is unmodified, so each task sees its own entry.
    for(mapIter iter = options.binContigs.begin(); iter != options.binContigs.end(); ++iter)
    {
        tasks.emplace_back(std::async([=, &thread_limiter, &filter] {
            Critical_section _(thread_limiter);
            Timer<double> binTimer;
            start (binTimer);
            filter.addFastaFile(iter->second, iter->first);
            stop(binTimer);
            if (options.verbose)
            {
                // mtx serializes writes to std::cerr across tasks.
                mtx.lock();
                std::cerr <<"[bin " << iter->first << "] updated using " << iter->second << "!\t\t" << binTimer << std::endl;
                mtx.unlock();
            }
        }));
    }
    // Wait for all tasks; future::get() also re-raises any task exception.
    for (auto &&task : tasks)
    {
        task.get();
    }
    stop(timer);
    if (options.verbose)
        std::cerr <<"All given bins are updated!\t\t" << timer << std::endl;

    start(timer);
    filter.save(toCString(options.filterFile));
    stop(timer);
    if (options.verbose)
        std::cerr <<"Done saving filter (" << filter.size_mb() <<" MB)\t\t" << timer << std::endl;
    stop(globalTimer);
    // NOTE(review): "Finshed" is a typo in the user-visible message.
    std::cerr <<"\nFinshed in \t\t\t" << globalTimer << std::endl;
}
// ----------------------------------------------------------------------------
// Function main()
// ----------------------------------------------------------------------------
// Entry point: parse the CLI, verify the fasta inputs, load the filter of
// the requested type and run the update. Returns non-zero on any failure.
int main(int argc, char const ** argv)
{
    ArgumentParser parser;
    Options options;
    setupArgumentParser(parser, options);

    ArgumentParser::ParseResult res = parseCommandLine(options, parser, argc, argv);

    // PARSE_ERROR maps to exit code 1; e.g. --help returns 0.
    if (res != ArgumentParser::PARSE_OK)
        return res == ArgumentParser::PARSE_ERROR;

    // verify all the new fasta files
    if (!verifyFnaFiles(options.binContigs))
        return 1;

    try
    {
        if (options.filterType == BLOOM)
        {
            SeqAnBloomFilter<> filter (toCString(options.filterFile));
            update_filter(options, filter);
        }
        else if (options.filterType == KMER_DIRECT)
        {
            SeqAnKDXFilter<> filter (toCString(options.filterFile));
            update_filter(options, filter);
        }
        // NOTE(review): filterType == NONE falls through silently by design.
    }
    catch (Exception const & e)
    {
        std::cerr << getAppName(parser) << ": " << e.what() << std::endl;
        return 1;
    }
    return 0;
}
|
// 15686. 치킨 배달
// 2020.03.09
// 브루트 포스
#include<iostream>
#include<queue>
#include<algorithm>
using namespace std;
int map[51][51];                 // city grid: 0 empty, 1 house, 2 chicken shop
int n, m;                        // grid size, number of shops to keep
vector<pair<int, int>> chickens; // positions of all chicken shops
vector<pair<int, int>> houses;   // positions of all houses
int arr[13];                     // indices of the currently chosen shops
int visit[13];                   // marks shops already chosen (avoids duplicates)
int ans = 987654321;             // best (minimum) total chicken distance so far
void go(int cnt, int start)
{
if (cnt == m)
{
int sum = 0;
// 각각의 집에서 치킨집과의 거리중 최솟값을 sum에 더해줌
for (int i = 0; i < houses.size(); i++)
{
int tmp = 100000;
for (int j = 0; j < m; j++)
{
// 거리 계산
tmp = min(tmp, abs(chickens[arr[j]].first - houses[i].first) + abs(chickens[arr[j]].second - houses[i].second));
}
sum += tmp;
}
ans = min(ans, sum);
return;
}
for (int i = start; i < chickens.size(); i++)
{
if (!visit[i])
{
arr[cnt] = i;
visit[i] = true;
go(cnt + 1, i);
visit[i] = false;
}
}
}
// Read the grid, collect house and chicken-shop coordinates, then
// brute-force all size-m shop selections and print the best score.
int main()
{
    cin >> n >> m;
    for (int i = 0; i < n; i++)
    {
        for (int j = 0; j < n; j++)
        {
            cin >> map[i][j];
            if (map[i][j] == 2) // chicken shop
            {
                chickens.push_back({ i,j });
            }
            if (map[i][j] == 1) // house
            {
                houses.push_back({ i,j });
            }
        }
    }
    go(0, 0);
    cout << ans << endl;
    return 0;
}
|
#!/bin/bash
# Copyright (c) The Diem Core Contributors
# SPDX-License-Identifier: Apache-2.0

# Runs the Move test suites with VM tracing enabled, then converts the trace
# into coverage summaries. MOVE_VM_TRACE tells the VM where to write traces.
TRACE_PATH=$HOME/trace
[ ! -e "$TRACE_PATH" ] || rm -f "$TRACE_PATH"
export MOVE_VM_TRACE=$TRACE_PATH

echo "Rebuilding stdlib..."
pushd ../../diem-framework || exit 1
cargo run
popd || exit 1

#cargo test -p ir-testsuite -p language-e2e-tests -p move-lang-functional-tests
echo "---------------------------------------------------------------------------"
echo "Running IR testsuite..."
echo "---------------------------------------------------------------------------"
pushd ../../ir-testsuite || exit 1
cargo test
popd || exit 1

echo "---------------------------------------------------------------------------"
echo "Running e2e testsuite..."
echo "---------------------------------------------------------------------------"
pushd ../../e2e-testsuite || exit 1
# Skipped tests are long-running/randomized and not useful for coverage.
cargo test -- --skip account_universe --skip fuzz_scripts
popd || exit 1

echo "---------------------------------------------------------------------------"
echo "Running Move testsuite..."
echo "---------------------------------------------------------------------------"
pushd ../../move-lang/functional-tests/tests || exit 1
cargo test
popd || exit 1

echo "---------------------------------------------------------------------------"
echo "Building Move modules and source maps.."
echo "---------------------------------------------------------------------------"
pushd ../../move-lang || exit 1
rm -rf build
cargo run --bin move-build -- ../diem-framework/core/sources -m
popd || exit 1

echo "---------------------------------------------------------------------------"
echo "Converting trace file..."
echo "---------------------------------------------------------------------------"
cargo run --bin move-trace-conversion -- -f "$TRACE_PATH" -o trace.mvcov

echo "---------------------------------------------------------------------------"
echo "Producing coverage summaries..."
echo "---------------------------------------------------------------------------"
cargo run --bin coverage-summaries -- -t trace.mvcov -s ../../diem-framework/DPN/releases/artifacts/current/modules

echo "==========================================================================="
echo "You can check source coverage for a module by running:"
echo "> cargo run --bin source-coverage -- -t trace.mvcov -b ../../move-lang/build/modules/<LOOK_FOR_MODULE_HERE>.mv -s ../../diem-framework/core/modules/<SOURCE_MODULE>.move"
echo "---------------------------------------------------------------------------"
echo "You can can also get a finer-grained coverage summary for each function by running:"
echo "> cargo run --bin coverage-summaries -- -t trace.mvcov -s ../../diem-framework/DPN/releases/artifacts/current/stdlib.mv"
echo "==========================================================================="

# Stop tracing for any subsequent commands in this shell.
unset MOVE_VM_TRACE
echo "DONE"
|
<reponame>rossng/automata
package eu.rossng.automata.primitive;
import com.sun.istack.internal.NotNull;
/**
* A transition between two AutomatonStates
*/
/**
 * An immutable transition of a deterministic automaton: moving from one
 * state to another when a particular symbol is read.
 */
public class DeterministicTransition {
    @NotNull
    private final State from, to;
    @NotNull
    private final Symbol on;

    public DeterministicTransition(@NotNull State from, @NotNull State to, @NotNull Symbol on) {
        this.from = from;
        this.to = to;
        this.on = on;
    }

    /** Source state of the transition. */
    public State from() {
        return from;
    }

    /** Target state of the transition. */
    public State to() {
        return to;
    }

    /** Symbol that triggers the transition. */
    public Symbol on() {
        return on;
    }

    @Override
    public boolean equals(Object object) {
        // Equal iff the other object is also a DeterministicTransition with
        // the same endpoints and trigger symbol.
        if (!(object instanceof DeterministicTransition)) {
            return false;
        }
        DeterministicTransition other = (DeterministicTransition) object;
        return from.equals(other.from())
                && on.equals(other.on())
                && to.equals(other.to());
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): combines the same three components.
        return from.hashCode() + to.hashCode() + on.hashCode();
    }
}
|
# Install the OS-dev toolchain: ISO builder, bootloader, assembler,
# and two options for running the resulting image.
sudo apt-get install xorriso     # builds the bootable ISO image
sudo apt-get install grub        # bootloader tooling (grub-mkrescue)
sudo apt-get install nasm        # assembler for boot/kernel sources
sudo apt-get install virtualbox  # full VM for testing the image
sudo apt-get install qemu        # lightweight emulator alternative
from .easy_logger import set_log_level, log_message
class CustomLogger:
    """Thin wrapper around easy_logger exposing stdlib-logging-style methods.

    Each level method sets the global log level to its own level and then
    emits the message tagged with that level (preserving the original,
    somewhat unusual, behavior of re-setting the level on every call).
    """

    def __init__(self, log_file):
        # NOTE(review): log_file is stored but never used by any method;
        # either wire it into easy_logger or drop the parameter. Kept for
        # interface compatibility.
        self.log_file = log_file

    def _log(self, level, message):
        """Shared implementation for all level methods (removes the
        five-way copy/paste of the original)."""
        set_log_level(level)
        log_message(f"[{level}] {message}", level)

    def debug(self, message):
        self._log("DEBUG", message)

    def info(self, message):
        self._log("INFO", message)

    def warning(self, message):
        self._log("WARNING", message)

    def error(self, message):
        self._log("ERROR", message)

    def critical(self, message):
        self._log("CRITICAL", message)
<reponame>xfyre/tapestry-5
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.internal.pageload;
import org.apache.tapestry5.SymbolConstants;
import org.apache.tapestry5.ioc.OperationTracker;
import org.apache.tapestry5.ioc.annotations.Symbol;
import org.apache.tapestry5.ioc.internal.util.CollectionFactory;
import org.apache.tapestry5.ioc.services.ThreadLocale;
import org.apache.tapestry5.ioc.util.ExceptionUtils;
import org.apache.tapestry5.services.ComponentSource;
import org.apache.tapestry5.services.LocalizationSetter;
import org.apache.tapestry5.services.pageload.PagePreloader;
import org.slf4j.Logger;
import java.util.Collection;
import java.util.List;
/**
 * Preloads a configured list of pages at startup so the first real request
 * does not pay the page-assembly cost. Pages are loaded for the first
 * supported locale only; any failure aborts the remaining preloads.
 */
public class PagePreloaderImpl implements PagePreloader
{
    private final Logger logger;

    // Page names to preload; populated once from the service configuration.
    private final List<String> pageNames = CollectionFactory.newList();

    private final OperationTracker tracker;

    private final ComponentSource componentSource;

    private final ThreadLocale threadLocale;

    private final LocalizationSetter localizationSetter;

    public PagePreloaderImpl(Logger logger,
                             OperationTracker tracker,
                             ComponentSource componentSource, Collection<String> configuration,
                             ThreadLocale threadLocale,
                             LocalizationSetter localizationSetter)
    {
        this.tracker = tracker;
        this.componentSource = componentSource;
        this.logger = logger;
        this.threadLocale = threadLocale;
        this.localizationSetter = localizationSetter;

        pageNames.addAll(configuration);
    }

    @Override
    public void preloadPages()
    {
        // Nothing configured: skip the locale setup and logging entirely.
        if (pageNames.isEmpty())
        {
            return;
        }

        logger.info(String.format("Preloading %,d pages.", pageNames.size()));

        // Preload under the first (primary) supported locale.
        threadLocale.setLocale(localizationSetter.getSupportedLocales().get(0));

        final long startNanos = System.nanoTime();

        try
        {
            for (final String pageName : pageNames)
            {
                // Wrap each load in the tracker so failures are attributed
                // to the specific page being preloaded.
                tracker.run(String.format("Preloading page '%s'.", pageName), new Runnable()
                {
                    @Override
                    public void run()
                    {
                        componentSource.getPage(pageName);
                    }
                }
                );
            }
        } catch (Exception ex)
        {
            // Report the exception, and just give up at this point.
            logger.error(ExceptionUtils.toMessage(ex), ex);

            return;
        }

        final double elapsedNanos = System.nanoTime() - startNanos;

        // 10E-10 == 1e-9: converts nanoseconds to seconds.
        logger.info(String.format("Preloaded %,d pages in %.2f seconds.",
                pageNames.size(),
                elapsedNanos * 10E-10d));
    }
}
|
# Build script for kilim: compile Java sources, assemble .j test fixtures,
# then run the bytecode weaver over the compiled classes.
export CLASSPATH=./classes:./testclasses:./libs/asm-all-5.0.3.jar:./libs/junit.jar:$CLASSPATH
echo making dir: ./classes
rm -rf ./classes
rm -rf ./testclasses
mkdir ./classes
mkdir ./testclasses
echo Compiling java source ===========================================
javac -Xlint:unchecked -g -d ./classes `find . -name "*.java" `
echo Compiling .j files for testing ==================================
java -ea kilim.tools.Asm -nf -d ./classes `find . -name "*.j"`
echo Weaving =========================================================
# Weave all files under ./classes, compiling the tests to
# ./testclasses while excluding any that match "ExInvalid". These are
# negative tests for the Weaver.
java -ea kilim.tools.Weaver -d ./classes -x "ExInvalid|test" ./classes
java -ea kilim.tools.Weaver -d ./testclasses -x "ExInvalid" ./classes
<filename>elden_pause/include/mem/slice.h
/*
Copyright 2018 Brick
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef MEM_SLICE_BRICK_H
#define MEM_SLICE_BRICK_H
#include "defines.h"
namespace mem
{
// copy_cv<T, U>: yields U with T's const/volatile qualifiers applied.
// Primary template — T is unqualified, so U passes through unchanged.
template <typename T, typename U>
struct copy_cv
{
    typedef U type;
};

// T is const -> const U.
template <typename T, typename U>
struct copy_cv<const T, U>
{
    typedef const U type;
};

// T is volatile -> volatile U.
template <typename T, typename U>
struct copy_cv<volatile T, U>
{
    typedef volatile U type;
};

// T is const volatile -> const volatile U.
template <typename T, typename U>
struct copy_cv<const volatile T, U>
{
    typedef const volatile U type;
};
// Non-owning view over a contiguous range of T, in the spirit of std::span:
// stores only a pointer and an element count; never allocates or frees.
template <typename T>
class slice
{
private:
    T* start_ {nullptr};
    std::size_t size_ {0};

public:
    constexpr slice() noexcept = default;

    // Construct from a [begin, end) pointer pair.
    constexpr slice(T* begin, T* end) noexcept;

    // Construct from a base pointer and an element count.
    // FIX: declared parameter names previously shadowed the data members
    // (start_/size_); renamed to match the out-of-class definition.
    constexpr slice(T* start, std::size_t size) noexcept;

    // Unchecked element access.
    constexpr T& operator[](std::size_t index) const noexcept;

    constexpr T* data() const noexcept;
    constexpr T* begin() const noexcept;
    constexpr T* end() const noexcept;

    constexpr std::size_t size() const noexcept;
    constexpr bool empty() const noexcept;

    // Reinterpret the view as raw bytes, preserving T's cv-qualification.
    slice<typename copy_cv<T, byte>::type> as_bytes() const noexcept;

    using value_type = T;
    using size_type = std::size_t;
    using difference_type = std::ptrdiff_t;
    using reference = value_type&;
    using const_reference = const value_type&;
    using pointer = value_type*;
    using const_pointer = const value_type*;
    using iterator = value_type*;
    using const_iterator = const value_type*;
};
// Out-of-class definitions for slice<T>. All are trivial accessors and
// therefore force-inlined.

// [begin, end) pair: size is the pointer difference.
template <typename T>
MEM_STRONG_INLINE constexpr slice<T>::slice(T* begin, T* end) noexcept
    : start_(begin)
    , size_(end - begin)
{}

template <typename T>
MEM_STRONG_INLINE constexpr slice<T>::slice(T* start, std::size_t size) noexcept
    : start_(start)
    , size_(size)
{}

// No bounds checking, by design (mirrors raw array access).
template <typename T>
MEM_STRONG_INLINE constexpr T& slice<T>::operator[](std::size_t index) const noexcept
{
    return start_[index];
}

template <typename T>
MEM_STRONG_INLINE constexpr T* slice<T>::data() const noexcept
{
    return start_;
}

template <typename T>
MEM_STRONG_INLINE constexpr T* slice<T>::begin() const noexcept
{
    return start_;
}

template <typename T>
MEM_STRONG_INLINE constexpr T* slice<T>::end() const noexcept
{
    return start_ + size_;
}

template <typename T>
MEM_STRONG_INLINE constexpr std::size_t slice<T>::size() const noexcept
{
    return size_;
}

template <typename T>
MEM_STRONG_INLINE constexpr bool slice<T>::empty() const noexcept
{
    return size_ == 0;
}

// Byte view: same storage, size scaled by sizeof(T); cv-qualifiers of T
// carry over to the byte type via copy_cv.
template <typename T>
MEM_STRONG_INLINE slice<typename copy_cv<T, byte>::type> slice<T>::as_bytes() const noexcept
{
    return {reinterpret_cast<typename copy_cv<T, byte>::type*>(start_), size_ * sizeof(T)};
}
} // namespace mem
#endif // MEM_SLICE_BRICK_H
|
const { getChannels, getRoutes, payment, getInfo, createInvoice } = require('../lnd');
const {
getQuery,
groupChannels,
recalculatePath,
getEdges,
addLastHopsToRoutes,
} = require('../routing');
const { finalCLTVDelta } = require('../conf');
// Rebalances a Lightning channel: finds circular routes from a (optional)
// source channel back through the destination channel, recalculates fees/CLTV
// for the extended path, and pays a self-generated invoice along it.
// Required: options.dst_channel_id. Optional: options.src_channel_id,
// options.amount (sats).
module.exports = async options => {
  if(typeof options.dst_channel_id === 'undefined') {
    console.log('Destination channel Id is required');
    process.exit(0);
  }
  try {
    const { current_block_height, public_key } = await getInfo();
    const { channels } = await getChannels();
    // Build the route query (destination pubkey, tokens, channel info).
    const query = getQuery({
      channels,
      sourceChannelId: options.src_channel_id,
      destChannelId: options.dst_channel_id,
    });
    if (options.amount) {
      const amt = parseInt(options.amount);
      query.tokens = amt;
      query.mtokens = amt * 1000;
    }
    if (!!query) {
      // Invoice to ourselves — paying it moves funds around the loop.
      const { id } = await createInvoice({ tokens: query.tokens });
      console.log('Finding path to destination...');
      let { routes } = await getRoutes({
        public_key: query.dest_public_key,
        tokens: query.tokens,
      });
      // if any, we remove those routes where destination channel is the first hop
      routes = routes.filter(route => { // TODO: check whether this filter is still necessary
        return route.hops[0].channel_id !== query.dest_channel_id
      });
      let aRoutes = [];
      // if the user indicates source channel
      if (typeof options.src_channel_id !== 'undefined') {
        // we get only the routes where the first hop is the source
        routes = routes.filter(route => {
          return route.hops[0].channel_id === options.src_channel_id
        });
      } else {
        // we use only routes from 'a' group
        // NOTE(review): aRoutes is populated here but never used below —
        // looks like routes was meant to be replaced by aRoutes; verify.
        const grouped = groupChannels(channels);
        for (let route of routes) {
          for (let channel of grouped.a) {
            if (route.hops[0].channel_id === channel.id) {
              aRoutes.push(route);
            }
          }
        }
      }
      // Append the destination channel as the final hop of every route.
      const lhRoutes = addLastHopsToRoutes({
        routes,
        channel_capacity: query.dest_channel_capacity,
        channel_id: query.dest_channel_id,
        tokens: query.tokens,
        mtokens: query.mtokens,
        public_key,
      });
      // Recompute fees and CLTV deltas now that the path is longer.
      let newRoutes = [];
      for (let route of lhRoutes) {
        const withEdge = await getEdges(route);
        const newPath = recalculatePath({
          route: withEdge,
          currentHeight: parseInt(current_block_height),
          amtToSend: query.mtokens,
          finalCLTVDelta,
        });
        newRoutes.push(newPath);
      }
      // finally we create the payment path
      const path = {
        id,
        routes: newRoutes,
      }
      console.log('Sending payment...')
      const pay = await payment({ path });
      if (pay.is_confirmed) {
        console.log('Channel rebalanced!');
        console.log(pay);
        process.exit(0);
      }
    }
  } catch (e) {
    console.log(e);
  }
};
|
/**
* table_heap.h
*
* doubly-linked list of heap pages
*/
#pragma once
#include "buffer/buffer_pool_manager.h"
#include "logging/log_manager.h"
#include "page/table_page.h"
#include "table/table_iterator.h"
#include "table/tuple.h"
namespace cmudb {
// A table stored as a doubly-linked list of pages. All tuple operations are
// transactional (take a Transaction*) and delegate locking/logging to the
// managers supplied at construction.
class TableHeap {
  friend class TableIterator;

public:
  ~TableHeap() {}

  // open a table heap (attach to an existing first page)
  TableHeap(BufferPoolManager *buffer_pool_manager, LockManager *lock_manager,
            LogManager *log_manager, page_id_t first_page_id);

  // create table heap (allocates the first page)
  TableHeap(BufferPoolManager *buffer_pool_manager, LockManager *lock_manager,
            LogManager *log_manager, Transaction *txn);

  // for insert, if tuple is too large (>~page_size), return false
  // On success, `rid` is set to the new tuple's location.
  bool InsertTuple(const Tuple &tuple, RID &rid, Transaction *txn);

  bool MarkDelete(const RID &rid, Transaction *txn); // for delete

  // if the new tuple is too large to fit in the old page, return false (will
  // delete and insert)
  bool UpdateTuple(const Tuple &tuple, const RID &rid, Transaction *txn);

  // commit/abort time
  void ApplyDelete(const RID &rid,
                   Transaction *txn); // when commit delete or rollback insert

  void RollbackDelete(const RID &rid, Transaction *txn); // when rollback delete

  bool GetTuple(const RID &rid, Tuple &tuple, Transaction *txn);

  bool DeleteTableHeap();

  TableIterator begin(Transaction *txn);

  TableIterator end();

  inline page_id_t GetFirstPageId() const { return first_page_id_; }

private:
  /**
   * Members
   */
  BufferPoolManager *buffer_pool_manager_;
  LockManager *lock_manager_;
  LogManager *log_manager_;
  page_id_t first_page_id_;  // head of the page linked list
};
|
<filename>osbot_browser/view_helpers/View_Examples.py
from pbx_gs_python_utils.utils.Files import Files
from pbx_gs_python_utils.utils.Unzip_File import Unzip_File
from pbx_gs_python_utils.utils.Zip_Folder import Zip_Folder
from osbot_browser.browser.Render_Page import Render_Page
class View_Examples:
    """Helpers for rendering/screenshotting the example pages under
    web_root/examples via a headless (or headed) browser.

    tmp_img  -- path where screenshots are written
    clip     -- optional clip region passed through to screenshot calls
    headless -- run the browser without a window when True
    """

    def __init__(self,tmp_img=None, clip=None, headless=False):
        self.headless    = headless
        # web_root shipped alongside the package; examples live below it.
        self.path_views  = Files.path_combine(Files.parent_folder(__file__),'../../osbot_browser/web_root')
        self.render_page = Render_Page(headless=self.headless, web_root=self.path_views)
        self.tmp_img     = tmp_img
        self.clip        = clip

    def _open_file_and_get_html(self, filename):
        # Resolve the example file inside web_root/examples and render it.
        file = '{0}{1}{2}'.format(self.path_views, '/examples/',filename)
        return self.render_page.render_file(file)

    def _open_file_and_take_screenshot(self, filename):
        file = '{0}{1}{2}'.format(self.path_views, '/examples/',filename)
        return self.render_page.screenshot_file(file, self.tmp_img, self.clip)

    def set_clip (self, clip):
        # Fluent setter: returns self for chaining.
        self.clip = clip
        return self

    def render_file_from_zip(self, target):
        # Zip the whole web_root, unzip to a temp dir, and screenshot the
        # target inside that copy (exercises the zip round-trip path).
        with Zip_Folder(self.path_views) as zip_file:
            with Unzip_File(zip_file,'/tmp/test_render_from_zip', True) as web_root:
                return self.render_page.screenshot_file_in_folder(web_root, target, self.tmp_img)

    def open_file_in_browser(self, path,js_code=None):
        # Serve web_root over the embedded web server and fetch the page,
        # optionally executing js_code in the page first.
        with self.render_page.web_server as web_server:
            url = web_server.url(path)
            return self.render_page.get_page_html_via_browser(url,js_code)

    # def open_file_in_browser_and_invoke_js(self, path, js_to_invoke):
    #     with self.render_page.web_server as web_server:
    #         url = web_server.url(path)
    #         return self.render_page.get_page_html_via_browser(url)

    # One-liner wrappers over the generic helpers above.
    def hello_world__html(self): return self._open_file_and_get_html       ('hello-world.html')
    def hello_world      (self): return self._open_file_and_take_screenshot('hello-world.html')
    def bootstrap_cdn    (self): return self._open_file_and_take_screenshot('bootstrap-cdn.html')
    def bootstrap_local  (self): return self.render_file_from_zip          ('/examples/bootstrap-local.html')
    def folder_root      (self): return self.render_page.screenshot_folder (self.path_views, self.tmp_img)

    #self.render_page.screenshot_file_in_folder(web_root, target, self.tmp_img)
    #def zipped_views(self):
    #    return Files.zip_folder(self.path_views)
<gh_stars>0
import memberDepositDetail from '../../models/transaction'
import { Database } from '../../../database'
import moment from 'moment'
// Fetch all deposit rows still 'pending' whose due datetime is non-empty
// and already in the past. Resolves with the matching rows.
// FIX: dropped the `new Promise(async (resolve, reject) => ...)` wrapper —
// an anti-pattern whose `reject` was never used; returning the findAll
// promise directly is equivalent for callers.
const getExpired = async () => {
  // Format matches the DATETIME strings stored by the model.
  let now = moment().format('YYYY-MM-DD HH:mm:ss')
  return memberDepositDetail.findAll({
    where: {
      member_status_payment: 'pending',
      member_due_datetime: {
        [Database.Op.ne]: '',
        [Database.Op.lt]: now
        // Op.lte would be used for a <= comparison
        // more info : http://docs.sequelizejs.com/manual/tutorial/models-usage.html
      }
    }
  })
}
// Mark every expired pending deposit as 'failed'. Resolves with the rows
// that were expired (their pre-update snapshot).
// FIX: the original fired the update() calls without awaiting them, so the
// promise could resolve before the writes finished; updates are now awaited.
const getExpiredAndUpdate = async () => {
  const expired = await getExpired()
  if (expired) {
    for (const row of expired) {
      await memberDepositDetail.update({
        member_status_payment: 'failed',
        member_invalid_note: 'expired transaction'
      }, {
        where: {
          member_id: row.member_id
        }
      })
    }
  }
  return expired
}
// Fetch confirmed ('ok') transfer payments updated within the last hour
// that have a positive outgoing amount.
// FIX: same explicit-promise-construction anti-pattern removed as in
// getExpired; behavior (resolving with the found rows) is unchanged.
const getOkMaxOneHour = async () => {
  let now = moment().format('YYYY-MM-DD HH:mm:ss')
  let minOneHour = moment().subtract(1, 'hour').format('YYYY-MM-DD HH:mm:ss')
  return memberDepositDetail.findAll({
    where: {
      member_type_payment: 'transfer',
      member_status_payment: 'ok',
      member_updated_at: {
        [Database.Op.between]: [minOneHour, now]
      },
      member_nominal_outcome: {
        [Database.Op.gt]: 0
      }
    }
  })
}
export default {
getExpired,
getExpiredAndUpdate,
getOkMaxOneHour
}
|
import mongoose, { Document, Schema, SchemaDefinition } from "mongoose"
import { AuthenticityTokenEntity } from "../../../domain/entity/AuthenticityToken"
export const schemaVersion = 1
// Mongoose document shape for a CSRF authenticity token bound to a session.
export interface AuthenticityTokenSchema extends Document {
    _id: mongoose.Types.ObjectId
    // Session this token belongs to (unique per session).
    session_id: string
    // The authenticity (CSRF) token value itself.
    token: string
    // Schema version stamped on each document for future migrations.
    schema_version: number
    // Converts the raw document into the domain entity.
    toEntity: () => AuthenticityTokenEntity
}
// Builds the mongoose field definition: one token per session, tokens
// globally unique, and every document stamped with the current schema version.
function defineSchema(): SchemaDefinition {
    return {
        session_id: {
            type: String,
            unique: true,
        },
        token: {
            type: String,
            unique: true,
        },
        schema_version: {
            type: Number,
            default: schemaVersion,
        },
    }
}
const schema: Schema<AuthenticityTokenSchema> = new Schema(defineSchema(), {
    collection: "authenticity_token",
})
// NOTE(review): this indexes `user_id`, but the schema defines no such field
// (only session_id/token/schema_version) — likely copy-pasted from another
// model; verify whether it should be session_id instead.
schema.index({ user_id: -1 })
// Map the persistence document onto the domain entity.
schema.methods.toEntity = function () {
    return new AuthenticityTokenEntity({
        sessionId: this.session_id,
        token: this.token,
    })
}
export const AuthenticityTokenModel = mongoose.model<AuthenticityTokenSchema>(
    "AuthenticityToken",
    schema
)
|
const EventHandler = require('../structures/EventHandler')
const DBL = require("dblapi.js")
const fetch = require("node-fetch")
module.exports = class ClientOnReady extends EventHandler {
constructor(client) {
super(client, 'ready')
}
run() {
const PRESENCE_INTERVAL = 60 * 1000
this.client.user.setPresence({
status: "dnd"
})
const presences = [
{
name: `Mayfi | Version ${process.env.MAYFI_VERSION}`,
type: 'WATCHING'
},
{
name: `Mayfi | ${this.client.guilds.size} Guilds`,
type: "LISTENING"
},
{
name: `Mayfi | ${this.client.users.size} Users`,
type: "LISTENING"
}
]
setInterval(() => {
const presence = presences[Math.floor(Math.random() * presences.length)]
this.client.user.setPresence({ game: presence })
console.log(`🤖 Changed presence to "${presence.name}", type "${presence.type}"`)
}, PRESENCE_INTERVAL)
const dbl = new DBL(process.env.DBL_TOKEN, this.client)
dbl.on("posted", () => {
console.log("[DBL] Posted statistics successfully")
})
dbl.on("error", e => {
console.log("[DBL] Failed to post statistics", e)
})
function postStats(client) {
dbl.postStats(client.guilds.size)
}
postStats(this.client)
setInterval(postStats, 1800000, this.client)
setInterval(() => {
fetch(`https://botsfordiscord.com/api/bots/${this.client.user.id}`, {
method: 'POST',
headers: { Authorization: process.env.BOTSFORDISCORD_TOKEN },
body: { server_count: this.Mclient.guilds.size }
})
.then(() => console.log('[BFD] Posted statistics successfully'))
.catch(() => console.log('[BFD] Failed to post statistics'))
}, 1800000)
}
}
|
#!/bin/bash
# Terminal color codes used for status output.
green='\e[1;32m'
red='\e[0;31m'
yellow='\e[1;33m'
NC='\033[0m'        # reset ("no color")
CLEAR="tput sgr0"
# First CLI argument is forwarded to ELSM/upgrade via $LOCK (the (*) pattern
# matches anything, so this is just a stylized assignment).
case "$1" in
(*)
SELECTED_DIR=$1
;;
esac
do_run_app() {
echo -e ${green}"You are up to date."
echo -e "Launching Program!"$NC
sleep 1
exit 0
/usr/bin/ELSM "${LOCK[@]}"
}
do_upgrade() {
clear
echo -e ${red}"An Updated Version was found, Grabbing files"
sleep 2
cd /opt/ELSM/Files/
echo "$SERVER_SHA" > updater_data.cfg
wget -q https://raw.githubusercontent.com/$GIT_REPO_USER/ECO_LINUX_SERVER_MANAGER/$DEFAULT_BRANCH/Files/upgrade -O upgrade
chmod +x upgrade
/opt/ELSM/Files/upgrade "${LOCK[@]}"
do_run_app
}
do_upgrade_gui() {
if (whiptail --fb --title "Update available !" --yesno "Found new update : \n Branch : ${DEFAULT_BRANCH} \n Local hash : ${LOCAL_SHA} \n Updated version : ${SERVER_SHA} \n Message: ${UPDATE_MESSAGE}\nUpdate ?" 25 80) then
do_upgrade
else
do_run_app
fi
}
do_check_updates() {
source $GLOBAL_CONFIG/conf.cfg
SERVER_SHA=$(curl -s https://api.github.com/repos/"${GIT_REPO_USER}"/"${REPO_NAME}"/commits/"${DEFAULT_BRANCH}" | jq '.sha' | sed 's/"//g');
UPDATE_MESSAGE=$(curl -s https://api.github.com/repos/kicker22004/ECO_LINUX_SERVER_MANAGER/commits/Beta | jq '.commit.message' | sed 's/"//g' | sed 's/\\n\\n/ -> /g');
echo ${GIT_REPO_USER}" / "${REPO_NAME}" / "${DEFAULT_BRANCH}
LOCAL_SHA=$(<$GLOBAL_CONFIG/updater_data.cfg)
echo -e ${yellow}"Version found online: "${green}"$SERVER_SHA"${NC}
echo -e ${yellow}"Currently Installed Version: "${green}"$LOCAL_SHA"${NC}
echo -e ${green}"Created and Maintained by: Kicker22004 and all the contributors <3"${NC}
##Getting Null responses if you run the update too often, this forces a crash on the updater.
if [ $SERVER_SHA = "null" ]; then
do_run_app
fi
if [ $SERVER_SHA = $LOCAL_SHA ]; then
do_run_app
else
do_upgrade_gui
fi
}
do_run() {
#Dir variables, do not touch
DIR="/opt/ELSM/Server"
GLOBAL_CONFIG="/opt/ELSM/Files"
LOCK=$SELECTED_DIR
do_check_updates
}
do_run
|
def reverse_array(arr):
    """Reverse ``arr`` in place and return the same list object."""
    arr[:] = arr[::-1]
    return arr
# Driver code: demonstrate in-place reversal of a sample list.
arr = [1, 2, 3, 4, 5]
print(reverse_array(arr))
<gh_stars>1-10
package dev.arkav.openoryx.game.models;
import dev.arkav.openoryx.net.data.WorldPosData;
/**
 * Mutable 2D vector of floats. Arithmetic methods mutate this instance and
 * return {@code this} so calls can be chained.
 */
public class Vector2 {
    public float x;
    public float y;

    public Vector2(float x, float y) {
        this.x = x;
        this.y = y;
    }

    /** Converts this vector to the wire-format world position type. */
    public WorldPosData toWorldPos() {
        return new WorldPosData(this.x, this.y);
    }

    /** Returns a new vector with the same components. */
    public Vector2 clone() {
        return new Vector2(this.x, this.y);
    }

    /**
     * Copies this vector's components into an existing vector.
     * @param v The vector to clone into
     */
    public void clone(Vector2 v) {
        v.x = this.x;
        v.y = this.y;
    }

    public String toString() {
        return "x: " + this.x + " y: " + this.y;
    }

    /** Euclidean distance from this vector to the point (x, y). */
    public double distanceTo(float x, float y) {
        return Math.sqrt(Math.pow(this.x - x, 2) + Math.pow(this.y - y, 2));
    }

    /** Euclidean distance from this vector to {@code target}. */
    public double distanceTo(Vector2 target) {
        return this.distanceTo(target.x, target.y);
    }

    /** Subtracts {@code v2} component-wise from this vector. */
    public Vector2 subtract(Vector2 v2) {
        this.x -= v2.x;
        this.y -= v2.y;
        return this;
    }

    /** Divides both components by {@code divisor}. */
    public Vector2 divide(float divisor) {
        this.x /= divisor;
        this.y /= divisor;
        return this;
    }

    /**
     * Scales this vector to unit length.
     *
     * BUG FIX: previously divided by {@code max(x, y)} rather than the
     * Euclidean magnitude, which never produced a unit vector and misbehaved
     * for negative components. A zero vector is returned unchanged to avoid
     * division by zero (the old code produced NaN/Infinity).
     */
    public Vector2 normalize() {
        double magnitude = Math.sqrt(this.x * this.x + this.y * this.y);
        if (magnitude != 0) {
            this.x /= magnitude;
            this.y /= magnitude;
        }
        return this;
    }
}
|
# Generate Makefiles from the Code::Blocks workspace definition.
cbp2make -in default.workspace
echo "installing pip..."
# -y: answer the apt confirmation prompt automatically so this installer can
# run unattended (without it the script hangs waiting for input).
apt install -y python3-pip
echo "installing libs..."
pip3 install -r requirements.txt
# Symlink the tool onto PATH; -sf replaces any stale link from a prior install.
ln -sf $(pwd)/4xxbypasser.py /usr/local/bin/4xxbypasser
echo "done !!!"
|
// Create a Router
// NOTE(review): `express` is assumed to be imported above this fragment —
// the handlers below are stubs awaiting database wiring.
const router = express.Router();
// Get all books
router.get('/books', (req, res) => {
    //Query the database for all books
});
// Get a single book
router.get('/books/:id', (req, res) => {
    //Query the database for a single book
});
// Add a book
router.post('/books', (req, res) => {
    //Create a new book with the data passed in the request body
});
// Update a book
router.put('/books/:id', (req, res) => {
    //Update the book with the data passed in the request body
});
// Delete a book
router.delete('/books/:id', (req, res) => {
    //Delete the book with the specified id
});
// Export the Router
module.exports = router;
/**
 * Renders the PHP source for a Laravel FormRequest class.
 *
 * @param {Object} args
 * @param {string} args.SERVICE_DIR services root (its `.fUC()` helper upper-cases the first letter)
 * @param {string} args.service     micro-service namespace segment
 * @param {string} args.name        request base name (e.g. "Store")
 * @returns {string} PHP file contents
 */
const useContext = ({ SERVICE_DIR, service, name }) => {
  // BUG FIX: the class was emitted as `${name}Requests` while setContext
  // writes the file as `${name}Request.php` — the mismatch breaks PSR-4
  // autoloading. The class name now matches the file name.
  return `<?php
namespace ${SERVICE_DIR.fUC()}\\${service}\\Requests;
use Illuminate\\Contracts\\Validation\\Validator;
use Illuminate\\Foundation\\Http\\FormRequest;
use Illuminate\\Http\\Exceptions\\HttpResponseException;
use Illuminate\\Validation\\Factory as ValidationFactory;
use Illuminate\\Validation\\Rule;
use Services\\Ticket\\Enum\\AttachmentType;
class ${name}Request extends FormRequest
{
    /**
     * Determine if the user is authorized to make this request.
     *
     * @return bool
     */
    public function authorize()
    {
        return true;
    }
    /**
     * Get the validation rules that apply to the request.
     *
     * @return array
     */
    public function rules()
    {
        // TODO ADD NEW RULE
        return [
            'name' => 'required|max:194',
            'phone' => 'required|max:50',
        ];
    }
    protected function failedValidation(Validator $validator)
    {
        throw new HttpResponseException(BadRequest400());
    }
} `;
};
// Ensures the service's Requests directory exists, then writes the rendered
// FormRequest class to `${name}Request.php` (overwriting — the trailing
// `true` presumably means "force"; confirm against storeg.write's signature).
const setContext = async ({ SERVICE_DIR, storeg, micro, name }) => {
  storeg.directoryUpdateOrNew(`${SERVICE_DIR}/${micro}/Requests`);
  await storeg.write(
    `${SERVICE_DIR}/${micro}/Requests/${name}Request.php`,
    useContext({ SERVICE_DIR, service: micro, name }),
    true
  );
};
module.exports = {
  useContext,
  setContext,
};
|
#!/bin/zsh
# Install R (base distribution) via apt; -y keeps it non-interactive.
apt update
apt install -y r-base
|
package jwt
import (
"context"
"errors"
"net/http"
"github.com/aukbit/pluto/v6/reply"
"github.com/aukbit/pluto/v6/server/router"
"google.golang.org/grpc"
)
// errInvalidBearer is returned to clients when the Authorization header is
// missing or is not a well-formed "Bearer <token>" value.
var (
	errInvalidBearer = errors.New("invalid bearer authorization header")
)
// WrapBearerToken is middleware that extracts the bearer token from the
// Authorization header and stores it on the request context under
// TokenContextKey. Requests without a valid bearer header are rejected with
// 401 Unauthorized and a JSON error body; the wrapped handler is not called.
func WrapBearerToken(h router.HandlerFunc) router.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// Get jwt token from Authorization header
		t, ok := BearerAuth(r)
		if !ok {
			reply.Json(w, r, http.StatusUnauthorized, errInvalidBearer)
			return
		}
		// Stash the raw token so downstream handlers can retrieve it.
		ctx := context.WithValue(r.Context(), TokenContextKey, t)
		// pass execution to the original handler
		h.ServeHTTP(w, r.WithContext(ctx))
	}
}
// WrapBearerTokenErr is the error-returning variant of WrapBearerToken: it
// extracts the bearer token from the Authorization header, stores it on the
// request context under TokenContextKey, and returns a *router.Err instead of
// writing the response directly.
func WrapBearerTokenErr(h router.WrapErr) router.WrapErr {
	return func(w http.ResponseWriter, r *http.Request) *router.Err {
		// Get jwt token from Authorization header
		t, ok := BearerAuth(r)
		if !ok {
			return &router.Err{
				Err: errInvalidBearer,
				// CONSISTENCY FIX: was StatusBadRequest; a failed credential
				// check is 401, matching WrapBearerToken above.
				Status:  http.StatusUnauthorized,
				Type:    "authentication_error",
				Message: grpc.ErrorDesc(errInvalidBearer), // NOTE: grpc.ErrorDesc is deprecated upstream
			}
		}
		ctx := context.WithValue(r.Context(), TokenContextKey, t)
		// pass execution to the original handler
		h.ServeHTTP(w, r.WithContext(ctx))
		return nil
	}
}
|
package aima.search.framework;

/**
 * Tree-search variant of {@link QueueSearch}: expanded nodes are added to the
 * fringe unconditionally, with no visited-state bookkeeping (contrast with a
 * graph search, which would filter already-explored states).
 */
public class TreeSearch extends QueueSearch {

	/** Adds every successor of {@code node} to the fringe without filtering. */
	protected void addExpandedNodesToFringe(NodeStore fringe, Node node,
			Problem problem) {
		fringe.add(expandNode(node, problem));
	}
}
// Jest configuration for a TypeScript project using ts-jest.
module.exports = {
  globals: {
    "ts-jest": {
      // NOTE(review): newer ts-jest releases renamed this key to "tsconfig"
      // (lowercase) — confirm the installed version still accepts "tsConfig".
      tsConfig: "tsconfig.json"
    }
  },
  moduleFileExtensions: ["ts", "js"],
  // Compile .ts/.tsx files through ts-jest.
  transform: {
    "^.+\\.(ts|tsx)$": "ts-jest"
  },
  // Coverage over src, excluding infrastructure that is not unit-testable here.
  collectCoverageFrom: [
    "src/*.ts",
    "src/**/*.ts",
    "!src/database/*/*.ts",
    "!src/middlewares/*.ts",
    "!src/libs/*.ts",
    "!src/server.ts"
  ],
  testMatch: ["<rootDir>/test/**/*.spec.(ts|js)"],
  testEnvironment: "node",
  clearMocks: true,
  collectCoverage: true,
  coverageDirectory: "coverage"
};
|
<reponame>COLEN-Zeng/easy-wechat
module.exports = class ImageResource{
constructor(mediaId){
this._mediaId = mediaId;
this._type = "image";
}
toWechatAttr(){
return [
{
MsgType: {_cdata:this._type}
},
{
MediaId: {_cdata: this._mediaId}
}
]
}
} |
<reponame>snappmarket/frontend-toolbox
import React from 'react';
import { render } from '@testing-library/react';
import '@testing-library/jest-dom/extend-expect';
import { Wrapper } from '../../../test/test.helpers';
import { StyledCarouselDemo } from '../src/core/styles.demo';
import { StyledSimpleCarousel } from '../src/core/styles';
describe('CarouselDemo ui component tests', () => {
it('Should get all style', () => {
const { getByTestId } = render(
<Wrapper>
<StyledCarouselDemo>
<div id="slider" className="slider loaded" data-testid="carousel">
<div className="wrapper" data-testid="carousel-wrapper">
<div className="slides" data-testid="carousel-slides">
<span className="slide active" data-testid="carousel-slide" data-page="1">Slide 1</span>
<span className="slide active" data-page="1">Slide 2</span>
<span className="slide" data-page="2">Slide 3</span>
<span className="slide" data-page="2">Slide 4</span>
<span className="slide" data-page="3">Slide 5</span>
</div>
</div>
<span className="control next" data-testid="carousel-next"></span>
<span className="control prev" data-testid="carousel-next"></span>
<ul className="dots" data-testid="carousel-dots">
<li className="dots-item active" data-testid="carousel-dot-item" data-dot-index="1">1</li>
<li className="dots-item" data-dot-index="2">2</li>
<li className="dots-item" data-dot-index="3">3</li>
</ul>
</div>
</StyledCarouselDemo>
</Wrapper>,
);
expect(getByTestId('carousel')).toHaveStyle({
width: '100%',
height: '300px',
boxShadow: '3px 3px 10px rgba(0,0,0,0.2)',
});
expect(getByTestId('carousel-wrapper')).toHaveStyle({
overflow: 'hidden',
position: 'relative',
width: '100%',
height: '300px',
zIndex: 1,
});
expect(getByTestId('carousel-slide')).toHaveStyle({
height: '300px',
background: '#FFCF47',
borderRadius: '2px',
padding: '15px',
alignItems: 'center',
justifyContent: 'center',
});
expect(getByTestId('carousel-dot-item')).toHaveStyle({
background: '#666',
});
});
});
describe('CarouselDemoMain ui component tests', () => {
it('Should get all style', () => {
const { getByTestId } = render(
<Wrapper>
<StyledSimpleCarousel>
<div id="slider" className="slider loaded" data-testid="carousel">
<div className="wrapper" data-testid="carousel-wrapper">
<div className="slides" data-testid="carousel-slides">
<span className="slide active" data-testid="carousel-slide" data-page="1">Slide 1</span>
<span className="slide active" data-page="1">Slide 2</span>
<span className="slide" data-page="2">Slide 3</span>
<span className="slide" data-page="2">Slide 4</span>
<span className="slide" data-page="3">Slide 5</span>
</div>
</div>
<span className="control next" data-testid="carousel-next"></span>
<span className="control prev" data-testid="carousel-next"></span>
<ul className="dots" data-testid="carousel-dots">
<li className="dots-item active" data-testid="carousel-dot-item" data-dot-index="1">1</li>
<li className="dots-item" data-dot-index="2">2</li>
<li className="dots-item" data-dot-index="3">3</li>
</ul>
</div>
</StyledSimpleCarousel>
</Wrapper>,
);
expect(getByTestId('carousel')).toHaveStyle({
position: 'relative',
});
expect(getByTestId('carousel-wrapper')).toHaveStyle({
overflow: 'hidden',
});
expect(getByTestId('carousel-slide')).toHaveStyle({
cursor: 'pointer',
display: 'flex',
flexDirection: 'row',
position: 'relative',
});
expect(getByTestId('carousel-dot-item')).toHaveStyle({
backgroundColor: '#efefef',
fontSize: 0,
width: '15px',
height: '15px',
borderRadius: '50%',
display: 'inline-block',
margin: '0 5px',
});
});
}); |
<filename>client/config.js
import Visualizer from './classes/visualizer'
import RotatingFractalMirror from './viz/rotating_fractal_mirror'
import Steven from './viz/steven'
import Matt from './viz/matt'
import Griffin from './viz/griffin_favorite'
import FractalMirror from './viz/fractal_mirror'
import Shapes from './viz/shapes'
import Dots from './viz/kaleidoscope_dots'
import RotatingDots from './viz/rotating_kaleidoscope_dots'
// Entry-point visualization; currently empty — behavior comes from the base
// Visualizer class and the viz instance constructed below.
export default class Visualization extends Visualizer {
  //
}
// Alternate visualizations kept for quick swapping during development —
// uncomment exactly one constructor line to switch.
// const rotatingFractalMirror = new RotatingFractalMirror()
// const steven = new Steven()
// const matt = new Matt()
// const griffin = new Griffin()
// const fractalMirror = new FractalMirror()
// Instantiated for its side effects; the instance is never referenced again —
// presumably the constructor registers itself with the visualizer (confirm).
const shapes = new Shapes()
// const kaleidoscope = new Dots()
// const rotatingKaleidoscope = new RotatingDots()
|
(function () {
    'use strict';

    // Value object holding a pen alpha, clamped into [MIN_ALPHA, MAX_ALPHA].
    var PaintVO = function (alpha) {
        this.setAlpha(alpha || 1);
    };

    PaintVO.MIN_ALPHA = 0;
    PaintVO.MAX_ALPHA = 1;

    var proto = PaintVO.prototype;

    // Returns the current alpha value.
    proto.getAlpha = function () {
        return this._penAlpha;
    };

    // Stores `value`, clamped to the [MIN_ALPHA, MAX_ALPHA] range.
    proto.setAlpha = function (value) {
        var clamped = value;
        if (clamped < PaintVO.MIN_ALPHA) {
            clamped = PaintVO.MIN_ALPHA;
        }
        if (clamped > PaintVO.MAX_ALPHA) {
            clamped = PaintVO.MAX_ALPHA;
        }
        this._penAlpha = clamped;
    };

    proto.toString = function () {
        return '[PaintVO] alpha: ' + this._penAlpha;
    };

    nts.Painter.PaintVO = PaintVO;
})();
#pragma once

namespace device
{
// Register map for the Cortex-M SysTick timer (memory-mapped at 0xE000E010).
struct SYSTICK
{
    // Register addresses: block base plus per-register offsets.
    enum : unsigned
    {
        BASE = 0xE000E010, /*!< (@ 0xE000E010) STK Structure */
        CTRL = BASE, /*!< (@ 0x00000000) SysTick control and status register */
        LOAD = BASE + 0x04, /*!< (@ 0x00000004) SysTick reload value register */
        VAL = BASE + 0x08, /*!< (@ 0x00000008) SysTick current value register */
        CALIB = BASE + 0x0C /*!< (@ 0x0000000C) SysTick calibration value register */
    };
    // Bit positions within CTRL (per the ARMv7-M SysTick register layout).
    enum : unsigned
    {
        ENABLE = 0,    // counter enable
        TICKINT = 1,   // raise SysTick exception on count-to-zero
        CLKSOURCE = 2  // clock source select
    };
};
} // namespace device
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")

# --- Script Init ---
mkdir -p log
rm -R -f log/*

# --- Setup run dirs ---
# Keep summary-info and .json outputs from earlier runs; clear everything else.
# NOTE: %FIFO_DIR% is a template placeholder substituted before execution.
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
rm -R -f /tmp/%FIFO_DIR%/fifo/*
rm -R -f work/*
mkdir work/kat/

mkfifo /tmp/%FIFO_DIR%/fifo/il_P14
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_summary_P14
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_pltcalc_P14

# --- Do insured loss computes ---
# Order matters: FIFO consumers are started first (they block on open), then
# the producer pipeline feeds il_P14; 'wait' blocks until consumers drain.
pltcalc -s < /tmp/%FIFO_DIR%/fifo/il_S1_pltcalc_P14 > work/kat/il_S1_pltcalc_P14 & pid1=$!
tee < /tmp/%FIFO_DIR%/fifo/il_S1_summary_P14 /tmp/%FIFO_DIR%/fifo/il_S1_pltcalc_P14 > /dev/null & pid2=$!
summarycalc -m -f -1 /tmp/%FIFO_DIR%/fifo/il_S1_summary_P14 < /tmp/%FIFO_DIR%/fifo/il_P14 &
eve 14 20 | getmodel | gulcalc -S100 -L100 -r -a1 -i - | fmcalc -a2 > /tmp/%FIFO_DIR%/fifo/il_P14 &
wait $pid1 $pid2

# --- Do insured loss kats ---
kat work/kat/il_S1_pltcalc_P14 > output/il_S1_pltcalc.csv & kpid1=$!
wait $kpid1
|
# Parse "--flag=value" style command-line options into the variables consumed
# by the training/eval invocations below. Unrecognised options are ignored.
for i in "$@"
do
case $i in
    -g=*|--gpudevice=*)
      GPUDEVICE="${i#*=}"
      shift
      ;;
    -n=*|--numgpus=*)
      NUMGPUS="${i#*=}"
      shift
      ;;
    -t=*|--taskname=*)
      TASKNAME="${i#*=}"
      shift
      ;;
    -r=*|--randomseed=*)
      RANDOMSEED="${i#*=}"
      shift
      ;;
    -p=*|--predicttag=*)
      PREDICTTAG="${i#*=}"
      shift
      ;;
    -m=*|--modeldir=*)
      MODELDIR="${i#*=}"
      shift
      ;;
    -d=*|--datadir=*)
      DATADIR="${i#*=}"
      shift
      ;;
    -o=*|--outputdir=*)
      OUTPUTDIR="${i#*=}"
      shift
      ;;
    --numturn=*)
      NUMTURN="${i#*=}"
      shift
      ;;
    --seqlen=*)
      SEQLEN="${i#*=}"
      shift
      ;;
    --querylen=*)
      QUERYLEN="${i#*=}"
      shift
      ;;
    --answerlen=*)
      ANSWERLEN="${i#*=}"
      shift
      ;;
    --batchsize=*)
      BATCHSIZE="${i#*=}"
      shift
      ;;
    --learningrate=*)
      LEARNINGRATE="${i#*=}"
      shift
      ;;
    --trainsteps=*)
      TRAINSTEPS="${i#*=}"
      shift
      ;;
    --warmupsteps=*)
      WARMUPSTEPS="${i#*=}"
      shift
      ;;
    --savesteps=*)
      SAVESTEPS="${i#*=}"
      shift
      ;;
    --answerthreshold=*)
      ANSWERTHRESHOLD="${i#*=}"
      shift
      ;;
esac
done

# Echo the effective configuration for the run log.
echo "gpu device = ${GPUDEVICE}"
echo "num gpus = ${NUMGPUS}"
echo "task name = ${TASKNAME}"
echo "random seed = ${RANDOMSEED}"
echo "predict tag = ${PREDICTTAG}"
echo "model dir = ${MODELDIR}"
echo "data dir = ${DATADIR}"
echo "output dir = ${OUTPUTDIR}"
echo "num turn = ${NUMTURN}"
echo "seq len = ${SEQLEN}"
echo "query len = ${QUERYLEN}"
echo "answer len = ${ANSWERLEN}"
echo "batch size = ${BATCHSIZE}"
echo "learning rate = ${LEARNINGRATE}"
echo "train steps = ${TRAINSTEPS}"
echo "warmup steps = ${WARMUPSTEPS}"
echo "save steps = ${SAVESTEPS}"
echo "answer threshold = ${ANSWERTHRESHOLD}"
# BUG FIX: 'alias' is not expanded in non-interactive shells, so the original
# 'alias python=python3' had no effect and the bare 'python' calls below used
# whatever 'python' resolves to (or failed). A function works in scripts.
python() { command python3 "$@"; }

# -p: do not fail if the output directory already exists (reruns).
mkdir -p ${OUTPUTDIR}
start_time=`date +%s`

# Phase 1: training on all configured GPUs.
CUDA_VISIBLE_DEVICES=${GPUDEVICE} python run_quac.py \
--spiece_model_file=${MODELDIR}/spiece.model \
--model_config_path=${MODELDIR}/xlnet_config.json \
--init_checkpoint=${MODELDIR}/xlnet_model.ckpt \
--task_name=${TASKNAME} \
--random_seed=${RANDOMSEED} \
--predict_tag=${PREDICTTAG} \
--lower_case=false \
--data_dir=${DATADIR}/ \
--output_dir=${OUTPUTDIR}/data \
--model_dir=${OUTPUTDIR}/checkpoint \
--export_dir=${OUTPUTDIR}/export \
--num_turn=${NUMTURN} \
--max_seq_length=${SEQLEN} \
--max_query_length=${QUERYLEN} \
--max_answer_length=${ANSWERLEN} \
--train_batch_size=${BATCHSIZE} \
--predict_batch_size=${BATCHSIZE} \
--num_hosts=1 \
--num_core_per_host=${NUMGPUS} \
--learning_rate=${LEARNINGRATE} \
--train_steps=${TRAINSTEPS} \
--warmup_steps=${WARMUPSTEPS} \
--save_steps=${SAVESTEPS} \
--do_train=true \
--do_predict=false \
--do_export=false \
--overwrite_data=false

# Phase 2: prediction on a single core.
CUDA_VISIBLE_DEVICES=${GPUDEVICE} python run_quac.py \
--spiece_model_file=${MODELDIR}/spiece.model \
--model_config_path=${MODELDIR}/xlnet_config.json \
--init_checkpoint=${MODELDIR}/xlnet_model.ckpt \
--task_name=${TASKNAME} \
--random_seed=${RANDOMSEED} \
--predict_tag=${PREDICTTAG} \
--lower_case=false \
--data_dir=${DATADIR}/ \
--output_dir=${OUTPUTDIR}/data \
--model_dir=${OUTPUTDIR}/checkpoint \
--export_dir=${OUTPUTDIR}/export \
--num_turn=${NUMTURN} \
--max_seq_length=${SEQLEN} \
--max_query_length=${QUERYLEN} \
--max_answer_length=${ANSWERLEN} \
--train_batch_size=${BATCHSIZE} \
--predict_batch_size=${BATCHSIZE} \
--num_hosts=1 \
--num_core_per_host=1 \
--learning_rate=${LEARNINGRATE} \
--train_steps=${TRAINSTEPS} \
--warmup_steps=${WARMUPSTEPS} \
--save_steps=${SAVESTEPS} \
--do_train=false \
--do_predict=true \
--do_export=false \
--overwrite_data=false

# Phase 3: convert raw predictions into span format, then evaluate.
python tool/convert_quac.py \
--input_file=${OUTPUTDIR}/data/predict.${PREDICTTAG}.summary.json \
--output_file=${OUTPUTDIR}/data/predict.${PREDICTTAG}.span.json \
--answer_threshold=${ANSWERTHRESHOLD}

# -f: do not fail on the first run, when the eval file does not exist yet.
rm -f ${OUTPUTDIR}/data/predict.${PREDICTTAG}.eval.json

python tool/eval_quac.py \
--val_file=${DATADIR}/dev-${TASKNAME}.json \
--model_output=${OUTPUTDIR}/data/predict.${PREDICTTAG}.span.json \
--o ${OUTPUTDIR}/data/predict.${PREDICTTAG}.eval.json

end_time=`date +%s`
echo execution time was `expr $end_time - $start_time` s.
read -n 1 -s -r -p "Press any key to continue..."
#!/bin/bash
# Rebuild the bundled sqlite3 native module against Electron 1.8.2 (linux-x64).
# Abort on the first failure — previously an unchecked 'cd' could leave the
# npm/node-gyp commands running in the wrong directory.
set -e
cd ./node_modules/sqlite3
npm i nan@2.3.3
npm i node-pre-gyp@0.6.28
npm i -g node-gyp@3.3.1
node-gyp configure --module_name=node_sqlite3 --module_path=../lib/binding/electron-v1.8-linux-x64
node-gyp rebuild --target=1.8.2 --arch=x64 --target_platform=linux --dist-url=https://atom.io/download/electron --module_name=node_sqlite3 --module_path=../lib/binding/electron-v1.8-linux-x64
|
#!/usr/bin/env bash
set -e
# Unofficial bash strict mode.
# See: http://redsymbol.net/articles/unofficial-bash-strict-mode/
set -u
set -o pipefail
# This script sets up a cluster that starts out in Byron, and can transition to Mary.
#
# The script generates all the files needed for the setup, and prints commands
# to be run manually (to start the nodes, post transactions, etc.).
#
# There are three ways of triggering the transition to Shelley:
# 1. Trigger transition at protocol version 2.0.0 (as on mainnet)
# The system starts at 0.0.0, and we can only increase it by 1 in the major
# version, so this does require to
# a) post an update proposal and votes to transition to 1.0.0
# b) wait for the protocol to change (end of the epoch, or end of the last
# epoch if it's posted near the end of the epoch)
# c) change configuration.yaml to have 'LastKnownBlockVersion-Major: 2',
# and restart the nodes
# d) post an update proposal and votes to transition to 2.0.0
# This is what will happen on the mainnet, so it's vital to test this, but
# it does contain some manual steps.
# 2. Trigger transition at protocol version 2.0.0
# For testing purposes, we can also modify the system to do the transition to
# Shelley at protocol version 1.0.0, by uncommenting the line containing
# 'TestShelleyHardForkAtVersion' below. Then, we just need to execute step a)
# above in order to trigger the transition.
# This is still close to the procedure on the mainnet, and requires less
# manual steps.
# 3. Schedule transition in the configuration
# To do this, uncomment the line containing 'TestShelleyHardForkAtEpoch'
# below. It's good for a quick test, and does not rely on posting update
# proposals to the chain.
# This is quite convenient, but it does not test that we can do the
# transition by posting update proposals to the network. For even more convenience
# if you want to start a node in Shelley, Allegra or Mary from epoch 0, supply the script
# with a shelley, allegra or mary string argument. E.g mkfiles.sh mary.
ROOT=example
BFT_NODES="node-bft1 node-bft2"
BFT_NODES_N="1 2"
NUM_BFT_NODES=2
POOL_NODES="node-pool1"
ALL_NODES="${BFT_NODES} ${POOL_NODES}"
INIT_SUPPLY=10020000000
FUNDS_PER_GENESIS_ADDRESS=$((${INIT_SUPPLY} / ${NUM_BFT_NODES}))
FUNDS_PER_BYRON_ADDRESS=$((${FUNDS_PER_GENESIS_ADDRESS} - 1000000))
# We need to allow for a fee to transfer the funds out of the genesis.
# We don't care too much, 1 ada is more than enough.
NETWORK_MAGIC=42
SECURITY_PARAM=10
UNAME=$(uname -s) DATE=
case $UNAME in
Darwin ) DATE="gdate";;
Linux ) DATE="date";;
MINGW64_NT* ) UNAME="Windows_NT"
DATE="date";;
esac
UNAME=$(uname -s) SED=
case $UNAME in
Darwin ) SED="gsed";;
Linux ) SED="sed";;
esac
# Maps a socket path to its platform form: on Windows, prefix with the named
# pipe namespace and flip slashes; elsewhere, pass the path through unchanged.
sprocket() {
  case "$UNAME" in
    Windows_NT)
      # Named pipes names on Windows must have the structure: "\\.\pipe\PipeName"
      # See https://docs.microsoft.com/en-us/windows/win32/ipc/pipe-names
      echo -n '\\.\pipe\'
      echo "$1" | sed 's|/|\\|g'
      ;;
    *)
      echo "$1"
      ;;
  esac
}
START_TIME="$(${DATE} -d "now + 30 seconds" +%s)"
if ! mkdir "${ROOT}"; then
  echo "The ${ROOT} directory already exists, please move or remove it"
  # BUG FIX: bare 'exit' returned status 0, so callers could not detect the
  # failure; signal the error explicitly.
  exit 1
fi
# copy and tweak the configuration
cp configuration/defaults/byron-mainnet/configuration.yaml ${ROOT}/
$SED -i ${ROOT}/configuration.yaml \
-e 's/Protocol: RealPBFT/Protocol: Cardano/' \
-e '/Protocol/ aPBftSignatureThreshold: 0.6' \
-e 's/minSeverity: Info/minSeverity: Debug/' \
-e 's|GenesisFile: genesis.json|ByronGenesisFile: byron/genesis.json|' \
-e '/ByronGenesisFile/ aShelleyGenesisFile: shelley/genesis.json' \
-e '/ByronGenesisFile/ aAlonzoGenesisFile: shelley/genesis.alonzo.json' \
-e 's/RequiresNoMagic/RequiresMagic/' \
-e 's/LastKnownBlockVersion-Major: 0/LastKnownBlockVersion-Major: 1/' \
-e 's/LastKnownBlockVersion-Minor: 2/LastKnownBlockVersion-Minor: 0/'
# Options for making it easier to trigger the transition to Shelley
# If neither of those are used, we have to
# - post an update proposal + votes to go to protocol version 1
# - after that's activated, change the configuration to have
# 'LastKnownBlockVersion-Major: 2', and restart the nodes
# - post another proposal + vote to go to protocol version 2
#uncomment this for an automatic transition after the first epoch
# echo "TestShelleyHardForkAtEpoch: 1" >> ${ROOT}/configuration.yaml
#uncomment this to trigger the hardfork with protocol version 1
#echo "TestShelleyHardForkAtVersion: 1" >> ${ROOT}/configuration.yaml
pushd ${ROOT}
# create the node directories
for NODE in ${ALL_NODES}; do
mkdir "${NODE}" "${NODE}/byron" "${NODE}/shelley"
done
# Make topology files
#TODO generalise this over the N BFT nodes and pool nodes
cat > node-bft1/topology.json <<EOF
{
"Producers": [
{
"addr": "127.0.0.1",
"port": 3002,
"valency": 1
}
, {
"addr": "127.0.0.1",
"port": 3003,
"valency": 1
}
]
}
EOF
echo 3001 > node-bft1/port
cat > node-bft2/topology.json <<EOF
{
"Producers": [
{
"addr": "127.0.0.1",
"port": 3001,
"valency": 1
}
, {
"addr": "127.0.0.1",
"port": 3003,
"valency": 1
}
]
}
EOF
echo 3002 > node-bft2/port
cat > node-pool1/topology.json <<EOF
{
"Producers": [
{
"addr": "127.0.0.1",
"port": 3001,
"valency": 1
}
, {
"addr": "127.0.0.1",
"port": 3002,
"valency": 1
}
]
}
EOF
echo 3003 > node-pool1/port
cat > byron.genesis.spec.json <<EOF
{
"heavyDelThd": "300000000000",
"maxBlockSize": "2000000",
"maxTxSize": "4096",
"maxHeaderSize": "2000000",
"maxProposalSize": "700",
"mpcThd": "20000000000000",
"scriptVersion": 0,
"slotDuration": "1000",
"softforkRule": {
"initThd": "900000000000000",
"minThd": "600000000000000",
"thdDecrement": "50000000000000"
},
"txFeePolicy": {
"multiplier": "43946000000",
"summand": "155381000000000"
},
"unlockStakeEpoch": "18446744073709551615",
"updateImplicit": "10000",
"updateProposalThd": "100000000000000",
"updateVoteThd": "1000000000000"
}
EOF
cardano-cli byron genesis genesis \
--protocol-magic ${NETWORK_MAGIC} \
--start-time "${START_TIME}" \
--k ${SECURITY_PARAM} \
--n-poor-addresses 0 \
--n-delegate-addresses ${NUM_BFT_NODES} \
--total-balance ${INIT_SUPPLY} \
--delegate-share 1 \
--avvm-entry-count 0 \
--avvm-entry-balance 0 \
--protocol-parameters-file byron.genesis.spec.json \
--genesis-output-dir byron
mv byron.genesis.spec.json byron/genesis.spec.json
# Symlink the BFT operator keys from the genesis delegates, for uniformity
for N in ${BFT_NODES_N}; do
ln -s ../../byron/delegate-keys.00$((${N} - 1)).key "node-bft${N}/byron/delegate.key"
ln -s ../../byron/delegation-cert.00$((${N} - 1)).json "node-bft${N}/byron/delegate.cert"
done
# Create keys, addresses and transactions to withdraw the initial UTxO into
# regular addresses.
for N in ${BFT_NODES_N}; do
  # BUG FIX: the keygen command previously ended with a trailing backslash,
  # which spliced the following 'cardano-cli ... signing-key-address' command
  # into the keygen invocation as extra arguments.
  cardano-cli byron key keygen \
    --secret byron/payment-keys.00$((${N} - 1)).key

  cardano-cli byron key signing-key-address \
    --testnet-magic ${NETWORK_MAGIC} \
    --secret byron/payment-keys.00$((${N} - 1)).key > byron/address-00$((${N} - 1))

  cardano-cli byron key signing-key-address \
    --testnet-magic ${NETWORK_MAGIC} \
    --secret byron/genesis-keys.00$((${N} - 1)).key > byron/genesis-address-00$((${N} - 1))

  # Drain each genesis address into the matching payment address (leaving
  # 1 ada headroom for the fee — see FUNDS_PER_BYRON_ADDRESS above).
  cardano-cli byron transaction issue-genesis-utxo-expenditure \
    --genesis-json byron/genesis.json \
    --testnet-magic ${NETWORK_MAGIC} \
    --tx tx$((${N} - 1)).tx \
    --wallet-key byron/delegate-keys.00$((${N} - 1)).key \
    --rich-addr-from "$(head -n 1 byron/genesis-address-00$((${N} - 1)))" \
    --txout "(\"$(head -n 1 byron/address-00$((${N} - 1)))\", $FUNDS_PER_BYRON_ADDRESS)"
done
# Update Proposal and votes
cardano-cli byron governance create-update-proposal \
--filepath update-proposal \
--testnet-magic "${NETWORK_MAGIC}" \
--signing-key byron/delegate-keys.000.key \
--protocol-version-major 1 \
--protocol-version-minor 0 \
--protocol-version-alt 0 \
--application-name "cardano-sl" \
--software-version-num 1 \
--system-tag "linux" \
--installer-hash 0
for N in ${BFT_NODES_N}; do
cardano-cli byron governance create-proposal-vote \
--proposal-filepath update-proposal \
--testnet-magic ${NETWORK_MAGIC} \
--signing-key byron/delegate-keys.00$((${N} - 1)).key \
--vote-yes \
--output-filepath update-vote.00$((${N} - 1))
done
cardano-cli byron governance create-update-proposal \
--filepath update-proposal-1 \
--testnet-magic ${NETWORK_MAGIC} \
--signing-key byron/delegate-keys.000.key \
--protocol-version-major 2 \
--protocol-version-minor 0 \
--protocol-version-alt 0 \
--application-name "cardano-sl" \
--software-version-num 1 \
--system-tag "linux" \
--installer-hash 0
for N in ${BFT_NODES_N}; do
cardano-cli byron governance create-proposal-vote \
--proposal-filepath update-proposal-1 \
--testnet-magic ${NETWORK_MAGIC} \
--signing-key byron/delegate-keys.00$((${N} - 1)).key \
--vote-yes \
--output-filepath update-vote-1.00$((${N} - 1))
done
echo "====================================================================="
echo "Generated genesis keys and genesis files:"
echo
ls -1 byron/*
echo "====================================================================="
# Set up our template
mkdir shelley
# Copy the QA testnet alonzo genesis which is equivalent to the mainnet
cp ../configuration/cardano/shelley_qa-alonzo-genesis.json shelley/genesis.alonzo.spec.json
cardano-cli genesis create --testnet-magic ${NETWORK_MAGIC} --genesis-dir shelley
# We're going to use really quick epochs (300 seconds), by using short slots 0.2s
# and K=10, but we'll keep long KES periods so we don't have to bother
# cycling KES keys
$SED -i shelley/genesis.spec.json \
-e 's/"slotLength": 1/"slotLength": 0.1/' \
-e 's/"activeSlotsCoeff": 5.0e-2/"activeSlotsCoeff": 0.1/' \
-e 's/"securityParam": 2160/"securityParam": 10/' \
-e 's/"epochLength": 432000/"epochLength": 500/' \
-e 's/"maxLovelaceSupply": 0/"maxLovelaceSupply": 1000000000000/' \
-e 's/"minFeeA": 1/"minFeeA": 44/' \
-e 's/"minFeeB": 0/"minFeeB": 155381/' \
-e 's/"minUTxOValue": 0/"minUTxOValue": 1000000/' \
-e 's/"decentralisationParam": 1.0/"decentralisationParam": 0.7/' \
-e 's/"major": 0/"major": 2/' \
-e 's/"rho": 0.0/"rho": 0.1/' \
-e 's/"tau": 0.0/"tau": 0.1/' \
-e 's/"updateQuorum": 5/"updateQuorum": 2/'
# Now generate for real:
cardano-cli genesis create \
--testnet-magic ${NETWORK_MAGIC} \
--genesis-dir shelley/ \
--gen-genesis-keys ${NUM_BFT_NODES} \
--gen-utxo-keys 1
cardano-cli stake-address key-gen \
--verification-key-file shelley/utxo-keys/utxo-stake.vkey \
--signing-key-file shelley/utxo-keys/utxo-stake.skey
cardano-cli address key-gen \
--verification-key-file shelley/utxo-keys/utxo2.vkey \
--signing-key-file shelley/utxo-keys/utxo2.skey
cardano-cli stake-address key-gen \
--verification-key-file shelley/utxo-keys/utxo2-stake.vkey \
--signing-key-file shelley/utxo-keys/utxo2-stake.skey
echo "====================================================================="
echo "Generated genesis keys and genesis files:"
echo
ls -1 shelley/*
echo "====================================================================="
echo "Generated shelley/genesis.json:"
echo
cat shelley/genesis.json
echo
echo "====================================================================="
# Make the pool operator cold keys
# This was done already for the BFT nodes as part of the genesis creation
for NODE in ${POOL_NODES}; do
cardano-cli node key-gen \
--cold-verification-key-file ${NODE}/shelley/operator.vkey \
--cold-signing-key-file ${NODE}/shelley/operator.skey \
--operational-certificate-issue-counter-file ${NODE}/shelley/operator.counter
cardano-cli node key-gen-VRF \
--verification-key-file ${NODE}/shelley/vrf.vkey \
--signing-key-file ${NODE}/shelley/vrf.skey
done
# Symlink the BFT operator keys from the genesis delegates, for uniformity
for N in ${BFT_NODES_N}; do
ln -s ../../shelley/delegate-keys/delegate${N}.skey node-bft${N}/shelley/operator.skey
ln -s ../../shelley/delegate-keys/delegate${N}.vkey node-bft${N}/shelley/operator.vkey
ln -s ../../shelley/delegate-keys/delegate${N}.counter node-bft${N}/shelley/operator.counter
ln -s ../../shelley/delegate-keys/delegate${N}.vrf.vkey node-bft${N}/shelley/vrf.vkey
ln -s ../../shelley/delegate-keys/delegate${N}.vrf.skey node-bft${N}/shelley/vrf.skey
done
# Make hot keys and for all nodes
for NODE in ${ALL_NODES}; do
cardano-cli node key-gen-KES \
--verification-key-file ${NODE}/shelley/kes.vkey \
--signing-key-file ${NODE}/shelley/kes.skey
cardano-cli node issue-op-cert \
--kes-period 0 \
--kes-verification-key-file ${NODE}/shelley/kes.vkey \
--cold-signing-key-file ${NODE}/shelley/operator.skey \
--operational-certificate-issue-counter-file ${NODE}/shelley/operator.counter \
--out-file ${NODE}/shelley/node.cert
done
echo "Generated node operator keys (cold, hot) and operational certs:"
echo
ls -1 ${ALL_NODES}
echo "====================================================================="
# Make some payment and stake addresses
# user1..n: will own all the funds in the system; we'll set this up from
#           the initial utxo
# pool-owner1..n: will be the owners of the pools and we'll use their reward
#                 accounts for pool rewards
USER_ADDRS="user1"
POOL_ADDRS="pool-owner1"
ADDRS="${USER_ADDRS} ${POOL_ADDRS}"

mkdir addresses

for ADDR in ${ADDRS}; do

  # Payment address keys
  cardano-cli address key-gen \
    --verification-key-file addresses/${ADDR}.vkey \
    --signing-key-file addresses/${ADDR}.skey

  # Stake address keys
  cardano-cli stake-address key-gen \
    --verification-key-file addresses/${ADDR}-stake.vkey \
    --signing-key-file addresses/${ADDR}-stake.skey

  # Payment addresses
  cardano-cli address build \
    --payment-verification-key-file addresses/${ADDR}.vkey \
    --stake-verification-key-file addresses/${ADDR}-stake.vkey \
    --testnet-magic ${NETWORK_MAGIC} \
    --out-file addresses/${ADDR}.addr

  # Stake addresses
  cardano-cli stake-address build \
    --stake-verification-key-file addresses/${ADDR}-stake.vkey \
    --testnet-magic ${NETWORK_MAGIC} \
    --out-file addresses/${ADDR}-stake.addr

  # Stake addresses registration certs
  cardano-cli stake-address registration-certificate \
    --stake-verification-key-file addresses/${ADDR}-stake.vkey \
    --out-file addresses/${ADDR}-stake.reg.cert

done

# user N will delegate to pool N
USER_POOL_N="1"

for N in ${USER_POOL_N}; do

  # Stake address delegation certs
  cardano-cli stake-address delegation-certificate \
    --stake-verification-key-file addresses/user${N}-stake.vkey \
    --cold-verification-key-file node-pool${N}/shelley/operator.vkey \
    --out-file addresses/user${N}-stake.deleg.cert

  # Symlink the pool owner's stake keys into the pool's directory so the
  # registration certificate below can reference them as owner keys.
  ln -s ../addresses/pool-owner${N}-stake.vkey node-pool${N}/owner.vkey
  ln -s ../addresses/pool-owner${N}-stake.skey node-pool${N}/owner.skey

done

echo "Generated payment address keys, stake address keys,"
echo "stake address registration certs, and stake address delegation certs"
echo
ls -1 addresses/
echo "====================================================================="
# Next is to make the stake pool registration cert
# (zero pledge/cost/margin keeps the example pools trivial to fund;
# the owner's stake key doubles as the reward account key)
for NODE in ${POOL_NODES}; do

  cardano-cli stake-pool registration-certificate \
    --testnet-magic ${NETWORK_MAGIC} \
    --pool-pledge 0 --pool-cost 0 --pool-margin 0 \
    --cold-verification-key-file ${NODE}/shelley/operator.vkey \
    --vrf-verification-key-file ${NODE}/shelley/vrf.vkey \
    --reward-account-verification-key-file ${NODE}/owner.vkey \
    --pool-owner-stake-verification-key-file ${NODE}/owner.vkey \
    --out-file ${NODE}/registration.cert

done

echo "Generated stake pool registration certs:"
ls -1 node-*/registration.cert
echo "====================================================================="
echo "So you can now do various things:"
echo " * Start the nodes"
echo " * Initiate successive protocol updates"
echo " * Query the node's ledger state"
echo
echo "To start the nodes, in separate terminals use the following scripts:"
echo

mkdir -p run

# Emit one start script per BFT node. BFT nodes additionally carry the
# Byron delegation certificate and signing key.
for NODE in ${BFT_NODES}; do
  (
    echo "#!/usr/bin/env bash"
    echo ""
    echo "cardano-node run \\"
    echo " --config ${ROOT}/configuration.yaml \\"
    echo " --topology ${ROOT}/${NODE}/topology.json \\"
    echo " --database-path ${ROOT}/${NODE}/db \\"
    echo " --socket-path '$(sprocket "${ROOT}/${NODE}/node.sock")' \\"
    echo " --shelley-kes-key ${ROOT}/${NODE}/shelley/kes.skey \\"
    echo " --shelley-vrf-key ${ROOT}/${NODE}/shelley/vrf.skey \\"
    echo " --shelley-operational-certificate ${ROOT}/${NODE}/shelley/node.cert \\"
    echo " --port $(cat ${NODE}/port) \\"
    echo " --delegation-certificate ${ROOT}/${NODE}/byron/delegate.cert \\"
    echo " --signing-key ${ROOT}/${NODE}/byron/delegate.key \\"
    echo " | tee -a ${ROOT}/${NODE}/node.log"
  ) > run/${NODE}.sh
  chmod a+x run/${NODE}.sh
  echo $ROOT/run/${NODE}.sh
done

# Pool nodes get the same start script minus the Byron credentials.
for NODE in ${POOL_NODES}; do
  (
    echo "#!/usr/bin/env bash"
    echo ""
    echo "cardano-node run \\"
    echo " --config ${ROOT}/configuration.yaml \\"
    echo " --topology ${ROOT}/${NODE}/topology.json \\"
    echo " --database-path ${ROOT}/${NODE}/db \\"
    echo " --socket-path '$(sprocket "${ROOT}/${NODE}/node.sock")' \\"
    echo " --shelley-kes-key ${ROOT}/${NODE}/shelley/kes.skey \\"
    echo " --shelley-vrf-key ${ROOT}/${NODE}/shelley/vrf.skey \\"
    echo " --shelley-operational-certificate ${ROOT}/${NODE}/shelley/node.cert \\"
    echo " --port $(cat ${NODE}/port) \\"
    echo " | tee -a ${ROOT}/${NODE}/node.log"
  ) > run/${NODE}.sh
  chmod a+x run/${NODE}.sh
  echo $ROOT/run/${NODE}.sh
done

# run/all.sh launches every node in the background and waits on them all.
echo "#!/usr/bin/env bash" > run/all.sh
echo "" >> run/all.sh
chmod a+x run/all.sh
for NODE in ${BFT_NODES}; do
  echo "$ROOT/run/${NODE}.sh &" >> run/all.sh
done
for NODE in ${POOL_NODES}; do
  echo "$ROOT/run/${NODE}.sh &" >> run/all.sh
done
echo "" >> run/all.sh
echo "wait" >> run/all.sh
chmod a+x run/all.sh

echo
echo "Alternatively, you can run all the nodes in one go:"
echo
echo "$ROOT/run/all.sh"
# Print the step-by-step protocol-update walkthrough for the operator.
echo
echo "In order to do the protocol updates, proceed as follows:"
echo
echo " 0. invoke ./scripts/byron-to-alonzo/mkfiles.sh"
echo " 1. wait for the nodes to start producing blocks"
echo " 2. invoke ./scripts/byron-to-alonzo/update-1.sh <N>"
echo " if you are early enough in the epoch N = current epoch"
echo " if not N = current epoch + 1. This applies for all update proposals"
echo " wait for the next epoch for the update to take effect"
echo
echo " 3. invoke ./scripts/byron-to-alonzo/update-2.sh"
echo " 4. restart the nodes"
echo " wait for the next epoch for the update to take effect"
echo " you should be in the Shelley era if the update was successful"
echo
echo " 5. invoke ./scripts/byron-to-alonzo/update-3.sh <N>"
echo " Here, <N> the current epoch (2 if you're quick)."
echo " If you provide the wrong epoch, you will see an error"
echo " that will tell you the current epoch, and can run"
echo " the script again."
echo " 6. restart the nodes"
echo " wait for the next epoch for the update to take effect"
echo " you should be in the Allegra era if the update was successful"
echo " 7. invoke ./scripts/byron-to-alonzo/update-4.sh <N>"
echo " 8. restart the nodes"
echo " wait for the next epoch for the update to take effect"
echo " you should be in the Mary era if the update was successful"
echo " 9. invoke ./scripts/byron-to-alonzo/update-5.sh <N>"
echo " wait for the next epoch for the update to take effect"
echo " you should be in the Alonzo era if the update was successful"
echo
echo "You can observe the status of the updates by grepping the logs, via"
echo
echo " grep LedgerUpdate ${ROOT}/node-pool1/node.log"
echo
echo "When in Shelley (after 3, and before 4), you should be able "
echo "to look at the protocol parameters, or the ledger state, "
echo "using commands like"
echo
echo "CARDANO_NODE_SOCKET_PATH=${ROOT}/node-bft1/node.sock \\"
echo " cardano-cli query protocol-parameters \\"
echo " --cardano-mode --testnet-magic 42"
echo
echo "This will fail outside of the Shelley era. In particular, "
echo "after step 3, you will get an error message that tells you "
echo "that you are in the Allegra era. You must then use the --allegra-era flag:"
echo
echo "CARDANO_NODE_SOCKET_PATH=${ROOT}/node-bft1/node.sock \\"
echo " cardano-cli query protocol-parameters \\"
echo " --cardano-mode --allegra-era --testnet-magic 42"
echo
echo "Similarly, use --mary-era in the Mary era."

# Leave the working directory entered earlier with pushd.
popd
# For an automatic transition at epoch 0, specifying mary, allegra or shelley
# will start the node in the appropriate era.
echo ""

# These are needed for cardano-submit-api
echo "EnableLogMetrics: False" >> ${ROOT}/configuration.yaml
echo "EnableLogging: True" >> ${ROOT}/configuration.yaml

# Depending on the requested target era ($1), append a
# Test<Era>HardForkAtEpoch: 0 line for each era up to the target and bump
# LastKnownBlockVersion-Major accordingly.
if [ "$1" = "alonzo" ]; then
  echo "TestShelleyHardForkAtEpoch: 0" >> ${ROOT}/configuration.yaml
  echo "TestAllegraHardForkAtEpoch: 0" >> ${ROOT}/configuration.yaml
  echo "TestMaryHardForkAtEpoch: 0" >> ${ROOT}/configuration.yaml
  echo "TestAlonzoHardForkAtEpoch: 0" >> ${ROOT}/configuration.yaml
  echo "TestEnableDevelopmentHardForkEras: True" >> ${ROOT}/configuration.yaml
  echo "TestEnableDevelopmentNetworkProtocols: True" >> ${ROOT}/configuration.yaml
  $SED -i ${ROOT}/configuration.yaml \
      -e 's/LastKnownBlockVersion-Major: 1/LastKnownBlockVersion-Major: 5/'

  # Copy the cost model
  echo "Nodes will start in Alonzo era from epoch 0"

elif [ "$1" = "mary" ]; then
  echo "TestShelleyHardForkAtEpoch: 0" >> ${ROOT}/configuration.yaml
  echo "TestAllegraHardForkAtEpoch: 0" >> ${ROOT}/configuration.yaml
  echo "TestMaryHardForkAtEpoch: 0" >> ${ROOT}/configuration.yaml
  $SED -i ${ROOT}/configuration.yaml \
      -e 's/LastKnownBlockVersion-Major: 1/LastKnownBlockVersion-Major: 4/'
  echo "Nodes will start in Mary era from epoch 0"

elif [ "$1" = "allegra" ]; then
  echo "TestShelleyHardForkAtEpoch: 0" >> ${ROOT}/configuration.yaml
  echo "TestAllegraHardForkAtEpoch: 0" >> ${ROOT}/configuration.yaml
  $SED -i ${ROOT}/configuration.yaml \
      -e 's/LastKnownBlockVersion-Major: 1/LastKnownBlockVersion-Major: 3/'
  echo "Nodes will start in Allegra era from epoch 0"

elif [ "$1" = "shelley" ]; then
  echo "TestShelleyHardForkAtEpoch: 0" >> ${ROOT}/configuration.yaml
  $SED -i ${ROOT}/configuration.yaml \
      -e 's/LastKnownBlockVersion-Major: 1/LastKnownBlockVersion-Major: 2/'
  echo "Nodes will start in Shelley era from epoch 0"

else
  # No era argument: keep the default (Byron start) configuration.
  echo "Default yaml configuration applied."
fi
|
import './main.css';
// Seed data for the list; each task carries a description, a completion
// flag and a stable numeric id.
const todoList = [{
  description: 'wash the dishes',
  completed: false,
  id: 1,
},
{
  description: 'complete the dishes',
  completed: false,
  id: 2,
},
{
  description: 'complete To Do list project',
  completed: false,
  id: 3,
},
];

const card = document.querySelector('.cardlist_main');

// Header row plus the "add task" text input.
card.innerHTML += `<li class="cardlist--sub main">Today's To Do</li>
<li ><input type="text" class="cardlist--sub input" placeholder="Add to your list..."></li>`;

// Render one checkbox row per task.
// fix: the original also built a detached <input> element per task
// (createElement + setAttribute) that was never appended anywhere; that
// dead code is removed.
// NOTE(review): every row repeats id="check" — duplicate ids are invalid
// HTML; confirm nothing targets #check before switching it to a class.
todoList.forEach(({ description }) => {
  card.innerHTML += `
  <li class="cardlist--sub"><input type="checkbox" name="" class="cardlist--check" id="check">${description}</li>`;
});

// Footer action link.
card.innerHTML += '<li class="cardlist--sub delete"><a href="#" class="delete_text">clear all completed</a></li>';
|
import { Component, OnDestroy } from '@angular/core';
import { LeagueService } from '../../../services';
import { League } from '../../../models/league.model';
import { Observable } from 'rxjs/Rx';
@Component(
  {
    template: `
<h1>Leagues</h1>
<table class="table">
<thead>
<tr>
<th>Name</th>
</tr>
</thead>
<tbody>
<tr *ngFor="let league of leagues$ | async">
<td><a routerLink="./{{league._id}}"
routerLinkActive="active" [routerLinkActiveOptions]= "{exact: true}">
{{ league.name }}
</a></td>
</tr>
</tbody>
</table>
`,
    // fix: the selector must be a plain CSS selector ('league-list'), not
    // markup; with '<league-list></league-list>' Angular can never match
    // the component to its element.
    selector: 'league-list'
  }
)
export class LeagueListComponent {
  // fix: was `private`, but the template reads leagues$; template-bound
  // members must be public for AOT compilation.
  leagues$: Observable<Array<League>>;

  constructor(private leagueService: LeagueService) {
    // Stream of all leagues, unwrapped in the template via the async pipe.
    this.leagues$ = this.leagueService.getAll();
  }
}
|
<filename>TextureTangram/Core/Build-in Layouts/Waterflow Layout/TangramWaterFlowLayoutComponent.h
// Copyright ZZinKin
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#import "TangramLayoutComponent.h"
/// Layout component that arranges items in a waterfall (masonry-style) grid.
@interface TangramWaterFlowLayoutComponent : TangramLayoutComponent

@property (nonatomic) NSUInteger maximumColumn; ///< Maximum number of columns; defaults to 1
@property (nonatomic) CGFloat verticalInterItemsSpace; ///< Vertical spacing between items (vGap)
@property (nonatomic) CGFloat horizontalInterItemsSpace; ///< Horizontal spacing between items (hGap) — NOTE(review): original comment said "vertical", likely a copy-paste slip; confirm intent

@end
|
<gh_stars>0
import React, { useEffect } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { Link, useParams } from 'react-router-dom';
import { articleActions } from '../slices/articleSlice';
// Lists the articles of the board selected via the /:boardId route param,
// or an error panel when the last fetch did not return HTTP 200.
function ArticleList() {
  const params = useParams();
  // Article data plus the HTTP status/text of the last fetch.
  const { articleList, status, statusText } = useSelector((state) => state.articleReducer);
  const boardList = useSelector((state) => state.boardReducer.boardList);
  const dispatch = useDispatch();

  // (Re)load the article list whenever the selected board changes;
  // board id 0 is the fallback when no route param is present.
  useEffect(() => {
    dispatch(articleActions.getArticleList(params?.boardId ?? 0));
  }, [dispatch, params?.boardId]);

  return (
    <>
    {
      status === 200 ?
      <>
      <div>
      <span>게시판: </span>
      <span>
      {
        boardList.length > 0 &&
        boardList.find((board) => board.id === parseInt(params?.boardId))?.name
      }
      </span>
      </div>
      {articleList.length > 0 ?
        <div>
        <div>
        {
          articleList.map((article, index) =>
            <div key={article?.id ?? index}>
            <Link to={{ pathname: `/article/${article?.id ?? 0}` }}>
            <span>{article?.title ?? ""}</span>
            </Link>
            </div>
          )
        }
        </div>
        </div>
        :
        <div> 게시글이 없습니다. </div>
      }
      </>
      :
      <div>
      <div>
      <span>{status}</span>
      </div>
      <div>
      <span>{statusText}</span>
      </div>
      </div>
    }
    </>
  );
}
|
def interpret_input(user_input):
    """Print a canned response for the given user input string.

    Recognises 'hello' and the weather question; anything else gets a
    fallback reply. Returns None; output goes to stdout.
    """
    if user_input == 'hello':
        print('Hi there!')
    # fix: the original 'elif' line was missing its trailing colon,
    # which made the whole module a SyntaxError.
    elif user_input == 'what is the weather like outside?':
        print('It is sunny and warm!')
    else:
        print('I am not sure how to answer that.')
// Doxygen-generated search index entry for the 'abs_difference' symbol in
// the 'benchmark' namespace. Machine-generated — do not edit by hand.
var searchData=
[
  ['abs_5fdifference',['abs_difference',['../namespacebenchmark.html#a7b82fd607dd2ed83b95f08f32312b3cd',1,'benchmark']]]
];
|
<gh_stars>0
package edu.unitn.pbam.androidproject.model.dao.db;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.DC_CAT_ID;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.DC_DOC_ID;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.DD_DLIST_ID;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.DD_DOC_ID;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.DOCS_CATS_TABLE;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.DOCS_DLISTS_TABLE;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.RATING_COL;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.SYNC_COL;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.TITLE_COL;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.generateContentValuesDoc;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.generateDocCatCV;
import static edu.unitn.pbam.androidproject.model.dao.db.DaoDbUtils.generateDocListCV;
import java.util.ArrayList;
import java.util.List;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.database.sqlite.SQLiteQueryBuilder;
import edu.unitn.pbam.androidproject.model.Book;
import edu.unitn.pbam.androidproject.model.Category;
import edu.unitn.pbam.androidproject.model.DList;
import edu.unitn.pbam.androidproject.model.Document;
import edu.unitn.pbam.androidproject.model.dao.BookDao;
import edu.unitn.pbam.androidproject.utilities.App;
import edu.unitn.pbam.androidproject.utilities.Constants;
/**
 * SQLite implementation of {@link BookDao}: CRUD and query helpers for
 * {@link Book} documents, including their category and reading-list links.
 * Uses the {@code db} handle inherited from {@link ModelDaoDb}.
 */
public class BookDaoDb extends ModelDaoDb implements BookDao {

    // Table names; DOC_BOOK_TABLE joins document + book rows.
    public static final String BOOKS_TABLE = "book";
    public static final String DOC_TABLE = "document";
    public static final String DOC_BOOK_TABLE = "doc_book";
    public static final String BOOKS_TO_READ_TABLE = "books_to_read";
    public static final String BOOKS_READ_TABLE = "books_read";

    // Column names of the book table.
    public static final String ID_COL = "_id";
    public static final String AUTH_COL = "author";
    public static final String PUBL_COL = "publishing";
    public static final String LANG_COL = "language";
    public static final String PAGES_COL = "pages";

    /**
     * Loads a single book by id, with its categories and lists populated.
     * NOTE(review): an unknown id leaves the cursor empty and generateBook
     * will throw a CursorIndexOutOfBoundsException — confirm callers only
     * pass existing ids. (A large commented-out seed/debug snippet was
     * removed from this method.)
     */
    @Override
    public Book getById(long id) {
        try {
            db.beginTransaction();
            String[] args = { id + "" };
            Cursor c = db.query(DOC_BOOK_TABLE, null, ID_COL + "=?", args,
                    null, null, null);
            c.moveToFirst();
            Book b = generateBook(c);
            updateCategories(b);
            updateDLists(b);
            c.close();
            db.setTransactionSuccessful();
            return b;
        } finally {
            db.endTransaction();
        }
    }

    /** Returns all books ordered by title; the caller owns the cursor. */
    @Override
    public Cursor getAll() {
        Cursor c = db.query(DOC_BOOK_TABLE, null, null, null, null, null,
                TITLE_COL);
        return c;
    }

    /**
     * Inserts (id == 0) or updates a book together with its
     * category/list associations, in a single transaction.
     *
     * @return the document id of the saved book
     */
    @Override
    public long save(Book element) {
        if (element.getTitle() != null)
            element.setTitle(element.getTitle().trim());
        if (element.getAuthor() != null && element.getAuthor().length() == 0)
            element.setAuthor(null);
        ContentValues valuesDoc = generateContentValuesDoc(element);
        ContentValues valuesBook = generateContentValuesBook(element);
        long book_id = 0;
        try {
            db.beginTransaction();
            // new book, not yet present in the db
            if (element.getId() == 0) {
                book_id = db.insert(DOC_TABLE, null, valuesDoc);
                element.setId(book_id);
                valuesBook.put(ID_COL, book_id);
                db.insert(BOOKS_TABLE, null, valuesBook);
            } else {
                String[] args = { element.getId() + "" };
                db.update(DOC_TABLE, valuesDoc, ID_COL + "=?", args);
                db.update(BOOKS_TABLE, valuesBook, ID_COL + "=?", args);
                book_id = element.getId();
                // drop all previous book/category and book/list associations
                db.delete(DOCS_CATS_TABLE, DC_DOC_ID + "=?", args);
                db.delete(DOCS_DLISTS_TABLE, DD_DOC_ID + "=?", args);
            }
            // create all book/category associations
            for (Category c : element.getCategories()) {
                // if the category is new, save it to the db first
                if (c.getId() == 0) {
                    App.cDao.save(c);
                }
                ContentValues v = generateDocCatCV(element, c);
                db.insert(DOCS_CATS_TABLE, null, v);
            }
            for (DList l : element.getLists()) {
                ContentValues v = generateDocListCV(element, l);
                db.insert(DOCS_DLISTS_TABLE, null, v);
            }
            db.setTransactionSuccessful();
            App.getAppContext().getContentResolver()
                    .notifyChange(Constants.URI_BOOKS, null);
            CoverDaoDb.saveCover(element);
            return book_id;
        } finally {
            db.endTransaction();
        }
    }

    /** Deletes the book's cover and document row, then notifies observers. */
    @Override
    public void delete(Book element) {
        CoverDaoDb.deleteCover(element.getCover());
        String[] args = { element.getId() + "" };
        db.delete(DOC_TABLE, ID_COL + "=?", args);
        App.getAppContext().getContentResolver()
                .notifyChange(Constants.URI_BOOKS, null);
    }

    /** Books belonging to the given category, ordered by title. */
    @Override
    public Cursor getByCategory(long catId) {
        SQLiteQueryBuilder sb = new SQLiteQueryBuilder();
        sb.setTables(DOCS_CATS_TABLE + " \"b_c\" JOIN " + DOC_BOOK_TABLE
                + " \"books\" ON b_c." + DC_DOC_ID + "=books." + ID_COL);
        String where = "b_c." + DC_CAT_ID + "=?";
        String[] whereArgs = { "" + catId };
        String query = sb.buildQuery(null, where, null, null, TITLE_COL, null);
        Cursor c = db.rawQuery(query, whereArgs);
        return c;
    }

    /** Books belonging to the given reading list, ordered by title. */
    @Override
    public Cursor getByDList(long listId) {
        SQLiteQueryBuilder sb = new SQLiteQueryBuilder();
        sb.setTables(DOCS_DLISTS_TABLE + " \"b_d\" JOIN " + DOC_BOOK_TABLE
                + " \"books\" ON b_d." + DD_DOC_ID + "=books." + ID_COL);
        String where = "b_d." + DD_DLIST_ID + "=?";
        String[] whereArgs = { "" + listId };
        String query = sb.buildQuery(null, where, null, null, TITLE_COL, null);
        Cursor c = db.rawQuery(query, whereArgs);
        return c;
    }

    /** Books with an exact author match, ordered by title. */
    @Override
    public Cursor getByAuthor(String author) {
        String where = AUTH_COL + "=?";
        String[] whereArgs = { author };
        Cursor c = db.query(DOC_BOOK_TABLE, null, where, whereArgs, null, null,
                TITLE_COL);
        return c;
    }

    /**
     * Books whose stored rating falls in the bucket for a 1..N star value
     * (each star spans 10 rating points: (rating-1)*10 < r <= rating*10).
     */
    @Override
    public Cursor getByRating(int rating) {
        double lowerBound = (rating - 1) * 10;
        double upperBound = rating * 10;
        String where = RATING_COL + " > ? and " + RATING_COL + " <= ?";
        String[] args = { "" + lowerBound, "" + upperBound };
        Cursor c = db.query(DOC_BOOK_TABLE, null, where, args, null, null,
                TITLE_COL);
        return c;
    }

    /** Books not yet synchronised (SyncType.NOSYNC), ordered by title. */
    @Override
    public Cursor getNotSync() {
        String where = SYNC_COL + "= ?";
        String[] args = { Document.SyncType.NOSYNC.ordinal() + "" };
        Cursor c = db.query(DOC_BOOK_TABLE, null, where, args, null, null,
                TITLE_COL);
        return c;
    }

    /** Distinct authors with a representative _id (min id per author). */
    @Override
    public Cursor getAllAuthors() {
        String[] cols = { AUTH_COL, "min(" + ID_COL + ") as _id" };
        Cursor c = db.query(BOOKS_TABLE, cols, null, null, AUTH_COL, null,
                AUTH_COL);
        return c;
    }

    /** Distinct authors whose name contains the given pattern. */
    @Override
    public Cursor getAuthorsMatching(String pattern) {
        String[] cols = { AUTH_COL, "min(" + ID_COL + ") as _id" };
        // since the condition is evaluated inside the HAVING clause we
        // cannot use placeholders, so the string is escaped manually
        pattern = DatabaseUtils.sqlEscapeString("%" + pattern + "%");
        String query = SQLiteQueryBuilder.buildQueryString(false, BOOKS_TABLE,
                cols, null, AUTH_COL, AUTH_COL + " like " + pattern, AUTH_COL,
                null);
        Cursor c = db.rawQuery(query, null);
        return c;
    }

    /** Books whose author or title contains the given pattern. */
    @Override
    public Cursor getFiltered(String pattern) {
        String where = AUTH_COL + " LIKE ? or " + TITLE_COL + " LIKE ?";
        pattern = "%" + pattern + "%";
        String[] whereArgs = { pattern, pattern };
        Cursor c = db.query(DOC_BOOK_TABLE, null, where, whereArgs, null, null,
                TITLE_COL);
        return c;
    }

    /** Total number of books. */
    @Override
    public int getNumberOfBooks() {
        int ris = 0;
        String[] cols = { "count(*)" };
        Cursor c = db.query(DOC_BOOK_TABLE, cols, null, null, null, null, null);
        c.moveToFirst();
        if (!c.isAfterLast()) {
            ris = c.getInt(0);
        }
        c.close();
        return ris;
    }

    /**
     * Number of books still to read.
     * NOTE(review): reads column 0 of the first row of BOOKS_TO_READ_TABLE —
     * assumes that view exposes the count there; confirm the schema.
     */
    @Override
    public int getToReadBooks() {
        int ris = 0;
        Cursor c = db.query(BOOKS_TO_READ_TABLE, null, null, null, null, null,
                null);
        c.moveToFirst();
        if (!c.isAfterLast()) {
            ris = c.getInt(0);
        }
        c.close();
        return ris;
    }

    /**
     * Number of books already read.
     * NOTE(review): same column-0 assumption as getToReadBooks().
     */
    @Override
    public int getReadBooks() {
        int ris = 0;
        Cursor c = db.query(BOOKS_READ_TABLE, null, null, null, null, null,
                null);
        c.moveToFirst();
        if (!c.isAfterLast()) {
            ris = c.getInt(0);
        }
        c.close();
        return ris;
    }

    /** Average rating over all books (0 when there are none). */
    @Override
    public double getAverageRating() {
        double ris = 0;
        String[] cols = { "avg(rating)" };
        Cursor c = db.query(DOC_BOOK_TABLE, cols, null, null, null, null, null);
        c.moveToFirst();
        if (!c.isAfterLast()) {
            ris = c.getDouble(0);
        }
        c.close();
        return ris;
    }

    /** Materialises a Book from the cursor's current row (cursor not moved). */
    public static Book generateBook(Cursor c) {
        Book book = new Book();
        DaoDbUtils.updateDocument(c, book);
        book.setAuthor(c.getString(c.getColumnIndex(AUTH_COL)));
        book.setPublishing(c.getString(c.getColumnIndex(PUBL_COL)));
        book.setLanguage(c.getString(c.getColumnIndex(LANG_COL)));
        book.setPages(c.getInt(c.getColumnIndex(PAGES_COL)));
        return book;
    }

    /** Categories linked to the given book id. */
    private List<Category> getCategoriesByBookId(long id) {
        SQLiteQueryBuilder sb = new SQLiteQueryBuilder();
        sb.setTables(DOCS_CATS_TABLE + " \"b_c\" JOIN "
                + CategoryDaoDb.CATEGORY_TABLE + " \"cat\" ON b_c." + DC_CAT_ID
                + "=cat." + CategoryDaoDb.ID_COL);
        String[] columns = { "cat.*" };
        String where = "b_c." + DC_DOC_ID + "=?";
        String[] whereArgs = { "" + id };
        String query = sb.buildQuery(columns, where, null, null, null, null);
        Cursor c = db.rawQuery(query, whereArgs);
        List<Category> cats = new ArrayList<Category>();
        Category cat;
        c.moveToFirst();
        while (!c.isAfterLast()) {
            cat = CategoryDaoDb.generateCategory(c);
            cats.add(cat);
            c.moveToNext();
        }
        // fix: the cursor was previously leaked (never closed)
        c.close();
        return cats;
    }

    /** Reading lists linked to the given book id. */
    private List<DList> getDListsByBookId(long id) {
        SQLiteQueryBuilder sb = new SQLiteQueryBuilder();
        sb.setTables(DOCS_DLISTS_TABLE + " \"b_d\" JOIN "
                + DListDaoDb.DLIST_TABLE + " \"list\" ON b_d." + DD_DLIST_ID
                + "=list." + DListDaoDb.ID_COL);
        String[] columns = { "list.*" };
        String where = "b_d." + DD_DOC_ID + "=?";
        String[] whereArgs = { "" + id };
        String query = sb.buildQuery(columns, where, null, null, null, null);
        Cursor c = db.rawQuery(query, whereArgs);
        List<DList> lists = new ArrayList<DList>();
        DList lst;
        c.moveToFirst();
        while (!c.isAfterLast()) {
            lst = DListDaoDb.generateDList(c);
            lists.add(lst);
            c.moveToNext();
        }
        // fix: the cursor was previously leaked (never closed)
        c.close();
        return lists;
    }

    /** Replaces the book's in-memory category list with the db state. */
    private void updateCategories(Book b) {
        List<Category> cats = getCategoriesByBookId(b.getId());
        b.setCategories(cats);
    }

    /** Replaces the book's in-memory reading-list list with the db state. */
    private void updateDLists(Book b) {
        List<DList> lists = getDListsByBookId(b.getId());
        b.setLists(lists);
    }

    /** Book-specific columns for insert/update (empty author stored as NULL). */
    private ContentValues generateContentValuesBook(Book element) {
        ContentValues values = new ContentValues();
        if (element.getAuthor() != null && element.getAuthor().equals(""))
            element.setAuthor(null);
        values.put(AUTH_COL, element.getAuthor());
        values.put(PUBL_COL, element.getPublishing());
        values.put(LANG_COL, element.getLanguage());
        values.put(PAGES_COL, element.getPages());
        return values;
    }
}
|
package com.netcracker.ncstore.exception;
/**
 * Thrown when something is not found in the price conversion service.
 */
public class PriceConversionServiceNotFoundException extends RuntimeException {

    /** No-detail variant. */
    public PriceConversionServiceNotFoundException() {
    }

    /** @param message human-readable detail of what was not found */
    public PriceConversionServiceNotFoundException(String message) {
        super(message);
    }

    /**
     * @param message human-readable detail of what was not found
     * @param cause   the underlying failure
     */
    public PriceConversionServiceNotFoundException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Added for consistency with the standard {@link RuntimeException}
     * constructor set: wraps a cause without a dedicated message.
     *
     * @param cause the underlying failure
     */
    public PriceConversionServiceNotFoundException(Throwable cause) {
        super(cause);
    }
}
|
<filename>app/src/main/java/com/github/ayltai/hknews/util/Irrelevant.java
package com.github.ayltai.hknews.util;
/**
 * Single-value enum used as a type-safe "the value does not matter" marker.
 * NOTE(review): presumably emitted where only the fact that something
 * happened is significant — confirm against call sites.
 */
public enum Irrelevant {
    INSTANCE
}
|
const path = require('path');
const webpack = require('webpack');
const RemovePlugin = require('remove-files-webpack-plugin');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const { BundleAnalyzerPlugin } = require('webpack-bundle-analyzer');
// Build-mode flags derived from the environment / npm config.
const devMode = process.env.NODE_ENV !== 'production';
const analyze = process.env.npm_config_analyze;
const inlineStyle = process.env.npm_config_inline_style;

// Plugins used in every build; conditional ones are pushed below.
const plugins = [
  // Expose jQuery/Popper as free variables without explicit imports.
  new webpack.ProvidePlugin({
    $: 'jquery',
    jQuery: 'jquery',
    Popper: 'popper.js'
  }),
  // Clean dist/ before each build.
  new RemovePlugin({
    before: {
      include: [path.resolve(__dirname, 'dist')]
    }
  }),
  // Copy the local runtime config and static assets into dist/.
  new CopyWebpackPlugin({
    patterns: [
      {
        from: path.resolve(__dirname, 'config.local.js'),
        to: path.resolve(__dirname, 'dist', 'config.js')
      },
      {
        from: path.resolve(__dirname, 'assets'),
        to: path.resolve(__dirname, 'dist', 'assets')
      }
    ]
  }),
  // Generate dist/index.html from the EJS template.
  new HtmlWebpackPlugin({
    filename: path.resolve(__dirname, 'dist', 'index.html'),
    template: path.resolve(__dirname, 'index.ejs'),
    meta: {
      viewport:
        'width=device-width, minimum-scale=1, initial-scale=1, shrink-to-fit=no'
    },
    title: 'React Router Redux Todo List',
    favicon: path.resolve(__dirname, 'assets', 'favicon.ico')
  })
];

// Unless styles are inlined via style-loader, emit hashed CSS files.
if (!inlineStyle) {
  plugins.push(
    new MiniCssExtractPlugin({
      filename: '[name].[contenthash:8].css',
      chunkFilename: '[id].[contenthash:8].css'
    })
  );
}

// Opt-in bundle size report (npm_config_analyze).
if (analyze) {
  plugins.push(new BundleAnalyzerPlugin());
}

module.exports = {
  entry: {
    app: './src/index.jsx'
  },
  output: {
    path: path.resolve(__dirname, 'dist', 'assets', 'bundle'),
    filename: '[name].[contenthash:8].js',
    chunkFilename: '[name].[contenthash:8].chunk.js',
    publicPath: '/assets/bundle/',
    libraryTarget: 'var',
    library: '[name]',
    sourceMapFilename: '[file].map',
    crossOriginLoading: 'anonymous'
  },
  module: {
    rules: [
      {
        test: /\.js(x)?$/,
        loader: 'babel-loader'
      },
      {
        // sass/scss/css pipeline; extract vs inline decided above.
        test: /\.(sa|sc|c)ss$/,
        use: [
          inlineStyle ? 'style-loader' : MiniCssExtractPlugin.loader,
          {
            loader: 'css-loader',
            options: { sourceMap: devMode }
          },
          'postcss-loader',
          { loader: 'resolve-url-loader', options: { sourceMap: devMode } },
          { loader: 'sass-loader', options: { sourceMap: true } }
        ]
      },
      {
        // Fonts (and svg) are emitted as files under fonts/.
        test: /\.(woff(2)?|ttf|eot|svg)(\?v=\d+\.\d+\.\d+)?$/,
        use: [
          {
            loader: 'file-loader',
            options: {
              name: '[name].[ext]',
              outputPath: 'fonts/'
            }
          }
        ]
      }
    ]
  },
  resolve: {
    extensions: ['.js', '.jsx', '.json', '.css', '.scss'],
    alias: {
      lib: path.resolve(__dirname, 'lib'),
      src: path.resolve(__dirname, 'src'),
      common: path.resolve(__dirname, 'src', 'common'),
      'error-404': path.resolve(__dirname, 'src', 'error-404'),
      list: path.resolve(__dirname, 'src', 'list'),
      new: path.resolve(__dirname, 'src', 'new'),
      mocks: path.resolve(__dirname, '__tests__', '__mocks__')
    }
  },
  externals: {
    // Provided at runtime by dist/config.js (copied above).
    config: 'window.appGlob.config'
  },
  watchOptions: {
    ignored: /node_modules/
  },
  devServer: {
    contentBase: path.join(__dirname, 'dist'),
    disableHostCheck: true,
    writeToDisk: true,
    compress: true,
    historyApiFallback: {}
  },
  context: __dirname,
  target: 'web',
  plugins
};
|
def sum_list(arr):
    """Return the sum of the numbers in *arr* (0 for an empty iterable)."""
    # idiom: the builtin handles accumulation (and the empty case) directly.
    return sum(arr)


result = sum_list([2, 8, 10, -3])
print(result)
def extract_product_language(response, num):
    """Return the stripped text of the 8th cell of row *num* in the
    vulnerable-products table.

    NOTE(review): assumes *response* supports the scrapy-style ``xpath``
    API and that the cell contains a text node — confirm against callers.
    """
    rows = response.xpath('//table[@id="vulnprodstable"]/tr')
    cells = rows[num].xpath('./td')
    raw_text = cells[7].xpath('./text()').get()
    return raw_text.strip()
#!/bin/bash -e
# Fetch DefinitelyTyped, pin it to a recorded revision, and convert each
# definition package's .d.ts files to JSON via from_ts.sh, finally building
# an index in defs/__list.json.

npm install

MY_DIR=$(cd $(dirname $BASH_SOURCE); pwd)
cd $MY_DIR

# Clone the DefinitelyTyped repo on first run, then keep it up to date.
[ -e defs_ts ] || git clone https://github.com/borisyankov/DefinitelyTyped.git defs_ts
cd defs_ts
git pull
# If a revision was pinned by a previous run, check it out for reproducibility.
[ -e defs/revision.txt ] && git checkout $(cat defs/revision.txt)
REVISION=$(git rev-parse HEAD)
cd $MY_DIR

mkdir -p defs
cd defs
echo $REVISION > revision.txt
cp ../defs_ts/LICENSE .

if ! which parallel &> /dev/null; then
  echo "Please install GNU parallel" >&2
  # fix: 'return' is only valid inside a function or a sourced script; at the
  # top level of an executed script it errors out instead of aborting cleanly.
  exit 1
fi

# Convert every definition directory in parallel; {1} is the directory name
# and the whole single-quoted body runs in a subshell spawned by parallel.
ls ../defs_ts | parallel '
DIR=../defs_ts/{1}

find_main_file() {
  local DIR=$1
  local NAME=$2
  try() {
    if [ -e $1 ]; then
      echo "$1"
    else
      return 1
    fi
  }
  try $DIR/$DIR.d.ts ||
  try $DIR/angular.d.ts ||
  try `ls $DIR/*.d.ts` ||
  echo "$NAME.json - could not find main signature file :("
}

if [ ! -d $DIR ]; then
  exit
fi
NAME=$(basename $DIR)
MAIN=$(find_main_file $DIR $NAME)
if [ "$MAIN" ]; then
  bash '$MY_DIR'/from_ts.sh $MAIN $NAME.json
fi
for OTHER in $DIR/*.d.ts; do
  if [ "$OTHER" == "$MAIN" ]; then
    continue
  fi
  bash -e '$MY_DIR'/from_ts.sh $OTHER _$(basename $DIR)_$(basename $OTHER .d.ts).json
done
'

# Drop the known-bad yosay defs before building the index.
# NOTE(review): cwd is already $MY_DIR/defs here, so this path resolves to
# defs/defs/yosay.json — confirm whether 'yosay.json' was intended.
rm -f defs/yosay.json

# Build defs/__list.json: name -> {main, optional url, extra defs}. The JSON
# is emitted with trailing commas and empty "extra" objects, then cleaned up
# by the sed at the end of the pipeline.
{
  echo '{"defs":{'
  for D in $(cd $MY_DIR/defs; ls); do
    if [[ "$D" =~ ^_ ]]; then
      continue
    fi
    echo -n '"'${D%.json}'": {'
    echo -n '"main": "'$D'",'
    PROJECT=$(grep -s 'Project: http' $MY_DIR/defs_ts/${D%.json}/${D%.json}.d.ts | head -1 | grep -o 'http:.*' | tr -d '\r' || :)
    if [ "$PROJECT" ]; then
      echo "PROJECT: $PROJECT" >&2
      echo -n '"url": "'$PROJECT'",'
    fi
    echo -n '"extra": {'
    for E in $(cd $MY_DIR/defs; ls _${D%.json}* 2>/dev/null); do
      echo -n '"'${E%.json}'": "'$E'",'
    done
    echo -n '}},'
  done
  echo '}}'
} | sed 's/"extra": {}//g; s/, *}/}/g' > $MY_DIR/defs/__list.tmp
mv $MY_DIR/defs/__list.tmp $MY_DIR/defs/__list.json
|
import { Injectable } from "@angular/core";
import { Http,Response } from "@angular/http";
import { Observable } from "rxjs/Observable";
import { MyNewInterface } from "../Interfaces/my-new-interface";
import { PjpNewInterface } from "../Interfaces/pjp-new-interface";
import { PjpWiseInterface } from "../Interfaces/pjp-wise-interface";
import 'rxjs/add/operator/map';
@Injectable()
export class ApiService {
  // Build API
  // Base endpoint URLs; intentionally empty here and expected to be filled
  // in with the real API roots before use.
  private postsURL = '';
  private postsURL_S = '';

  constructor(private http: Http) {}

  /** Fetch the rows for the main endpoint between the two dates. */
  getPosts(fromDate: String, lastDate: String): Observable<MyNewInterface[]> {
    return this.fetchTblData<MyNewInterface>(this.postsURL, fromDate, lastDate);
  }

  /** Fetch the PJP rows between the two dates. */
  getPJPData(fromDate: String, lastDate: String): Observable<PjpNewInterface[]> {
    return this.fetchTblData<PjpNewInterface>(this.postsURL_S, fromDate, lastDate);
  }

  // Shared GET + tblData unwrap logic; was duplicated in both public methods.
  private fetchTblData<T>(baseURL: string, fromDate: String, lastDate: String): Observable<T[]> {
    const url = baseURL + fromDate + '/' + lastDate;
    console.log(url);
    return this.http.get(url)
      .map((response: Response) => <T[]>response.json().tblData);
  }
}
|
#!/bin/bash
# Generate test coverage statistics for Go packages.
#
# Works around the fact that `go test -coverprofile` currently does not work
# with multiple packages, see https://code.google.com/p/go/issues/detail?id=6909
#
# Usage: hack/test
#
# --html Additionally create HTML report and open it in browser
#
set -e
set -o pipefail
# Coverage artifact directory; callers may override via $COVER.
[ -z "$COVER" ] && COVER=.cover
profile="$COVER/cover.out"
# atomic covermode is the safe choice when tests run under -race.
mode=atomic
OS=$(uname)
race_flag="-race"
if [ "$OS" = "Linux" ]; then
    # check Alpine - alpine does not support race test
    if [ -f "/etc/alpine-release" ]; then
        race_flag=""
    fi
fi
# Run `go test` once per package that has tests (vendor/ excluded), writing a
# per-package .cover file, then merge them all into a single $profile.
generate_cover_data() {
    [ -d "${COVER}" ] && rm -rf "${COVER:?}/*"
    [ -d "${COVER}" ] || mkdir -p "${COVER}"
    # Save current IFS
    SAVEIFS=$IFS
    # Change IFS to new line.
    IFS=$'\n'
    pkgs=($(go list -f '{{if .TestGoFiles}}{{ .ImportPath }}{{end}}' ./... | grep -v vendor))
    # Restore IFS
    IFS=$SAVEIFS
    for pkg in "${pkgs[@]}"; do
        # Flatten the import path into a filesystem-safe name (slashes -> dashes).
        f="${COVER}/$(echo $pkg | tr / -).cover"
        tout="${COVER}/$(echo $pkg | tr / -)_tests.out"
        go test -v $race_flag -covermode="$mode" -coverprofile="$f" "$pkg" | tee "$tout"
    done
    # Merged profile: one "mode:" header followed by all per-package counters.
    echo "mode: $mode" >"$profile"
    grep -h -v "^mode:" "${COVER}"/*.cover >>"$profile"
}
# Render the merged profile; $1 is the `go tool cover` output flag (e.g. html).
generate_cover_report() {
    go tool cover -${1}="$profile" -o "${COVER}/coverage.html"
}
generate_cover_data
generate_cover_report html
|
Ext.define('KitchenSink.view.Navigation', {
extend: 'Ext.tree.Panel',
xtype: 'navigation',
title: 'Examples',
rootVisible: false,
lines: false,
useArrows: true,
root: {
expanded: true,
children: [
{
text: 'Panels',
expanded: true,
children: [
{ id: 'basic-panels', text: 'Basic Panel', leaf: true },
{ id: 'framed-panels', text: 'Framed Panel', leaf: true }
]
},
{
text: 'Grids',
expanded: true,
children: [
{ id: 'array-grid', text: 'Array Grid', leaf: true },
{ id: 'grouped-grid', text: 'Grouped Grid', leaf: true },
{ id: 'locking-grid', text: 'Locking Grid', leaf: true },
{ id: 'grouped-header-grid', text: 'Grouped Header Grid', leaf: true },
{ id: 'multi-sort-grid', text: 'Multiple Sort Grid', leaf: true },
{ id: 'progress-bar-pager', text: 'Progress Bar Pager', leaf: true },
{ id: 'sliding-pager', text: 'Sliding Pager', leaf: true },
{ id: 'reconfigure-grid', text: 'Reconfigure Grid', leaf: true },
{ id: 'property-grid', text: 'Property Grid', leaf: true },
{ id: 'cell-editing', text: 'Cell Editing', leaf: true },
{ id: 'row-expander-grid', text: 'Row Expander', leaf: true },
{ id: 'big-data-grid', text: 'Big Data', leaf: true }
]
},
{
text: 'Trees',
expanded: true,
children: [
{ id: 'basic-trees', text: 'Basic Trees', leaf: true },
{ id: 'tree-reorder', text: 'Tree Reorder', leaf: true },
{ id: 'tree-grid', text: 'Tree Grid', leaf: true },
{ id: 'tree-two', text: 'Two Trees', leaf: true },
{ id: 'check-tree', text: 'Check Tree', leaf: true },
{ id: 'tree-xml', text: 'XML Tree', leaf: true }
]
},
{
text: 'Tabs',
expanded: true,
children: [
{ id: 'basic-tabs', text: 'Basic Tabs', leaf: true },
{ id: 'plain-tabs', text: 'Plain Tabs', leaf: true },
{ id: 'framed-tabs', text: 'Framed Tabs', leaf: true },
{ id: 'icon-tabs', text: 'Icon Tabs', leaf: true }
]
},
{
text: 'Windows',
expanded: true,
children: [
{ id: 'basic-window', text: 'Basic Window', leaf: true }
]
},
{
text: 'Buttons',
expanded: true,
children: [
{ id: 'basic-buttons', text: 'Basic Buttons', leaf: true },
{ id: 'toggle-buttons', text: 'Toggle Buttons', leaf: true },
{ id: 'menu-buttons', text: 'Menu Buttons', leaf: true },
{ id: 'menu-bottom-buttons', text: 'Menu Bottom Buttons', leaf: true },
{ id: 'split-buttons', text: 'Split Buttons', leaf: true },
{ id: 'split-bottom-buttons', text: 'Split Bottom Buttons', leaf: true },
{ id: 'left-text-buttons', text: 'Left Text Buttons', leaf: true },
{ id: 'right-text-buttons', text: 'Right Text Buttons', leaf: true },
{ id: 'link-buttons', text: 'Link Buttons', leaf: true }
]
},
{
text: 'DataView',
expanded: true,
children: [
{ id: 'dataview-multisort', text: 'Multisort DataView', leaf: true }
]
},
{
text: 'Forms',
expanded: true,
children: [
{ id: 'login-form', text: 'Login Form', leaf: true },
{ id: 'contact-form', text: 'Contact Form', leaf: true },
{ id: 'register-form', text: 'Register Form', leaf: true },
{ id: 'form-number', text: 'Number Field', leaf: true },
{ id: 'form-checkout', text: 'Checkout Form', leaf: true },
{ id: 'form-grid', text: 'Form with Grid', leaf: true }
]
},
{
text: 'Toolbars',
expanded: true,
children: [
{ id: 'basic-toolbar', text: 'Basic Toolbar', leaf: true },
{ id: 'docked-toolbars', text: 'Docked Toolbar', leaf: true }
]
},
{
text: 'Layout',
expanded: true,
children: [
{ id: 'layout-accordion', text: 'Accordion Layout', leaf: true }
]
},
{
text: 'Slider',
expanded: true,
children: [
{ id: 'slider-field', text: 'Slider Field', leaf: true }
]
},
{
text: 'Drag & Drop',
expanded: true,
children: [
{ id: 'dd-field-to-grid', text: 'Field to Grid', leaf: true },
{ id: 'dd-grid-to-form', text: 'Grid to Form', leaf: true },
{ id: 'dd-grid-to-grid', text: 'Grid to Grid', leaf: true }
]
}
]
}
}); |
import React, { useState } from "react";
import axios from "axios";
import Navbar from "../components/Navbar";
export default function ForgotPassword() {
const [ui, setUi] = useState(0);
const [otp, setOtp] = useState("");
const [email, setEmail] = useState("");
const [done1, setDone1] = useState(false);
const [done2, setDone2] = useState(false);
const [done3, setDone3] = useState(false);
const [done4, setDone4] = useState(false);
const [done5, setDone5] = useState(false);
// Ask the backend to issue a one-time password for `email`.
// Response contract (from the branches below): authCode "E3" = unknown
// email, "S1" = OTP issued.
const requestOTP = async (e, email) => {
  e.preventDefault();
  //Send Request to backend API
  const res = await axios.post("/api/fp", { email: email });
  // NOTE(review): the server returns the OTP itself in the response and it
  // is logged here, so the code is visible in the browser dev-tools; the
  // OTP should be validated server-side instead — TODO confirm API contract.
  console.log(res);
  if (res.data.authCode === "E3") {
    alert("Could not find any user with that email. Maybe a typo?");
  } else if (res.data.authCode === "S1") {
    // Cache the expected OTP and advance to the OTP-entry step (ui === 1).
    setOtp(res.data.otp);
    setEmail(email);
    setUi(1);
  }
};
// Compare the six single-digit inputs (form fields named "1".."6") against
// the OTP cached by requestOTP; on match advance to the password step.
const validateOTP = async (e) => {
  //Prevent Reload
  e.preventDefault();
  //Retrieve Data
  const form = new FormData(e.target);
  const formData = Object.fromEntries(form.entries());
  //Create final OTP — concatenate the six boxes in order.
  const finalOTP =
    formData["1"] +
    formData["2"] +
    formData["3"] +
    formData["4"] +
    formData["5"] +
    formData["6"];
  //CLG it
  // NOTE(review): logging the user's OTP guess to the console leaks it to
  // anyone with dev-tools open — consider removing in production.
  console.log(finalOTP);
  //Check if it matches (string comparison against the cached OTP)
  if (finalOTP === otp) {
    setUi(2);
  }
  else {
    alert("OTP doesn't match")
  }
};
// Handle the "new password" form: verify the confirmation field, persist the
// new password via the backend, cache the session client-side, and redirect
// to the dashboard.
const updatePass = async (e) => {
  //Prevent Reload
  e.preventDefault();
  //Retrieve Data
  const form = new FormData(e.target);
  const formData = Object.fromEntries(form.entries());
  //Confirm Passwords — both fields must match before touching the backend.
  if (formData.cPassword === formData.password) {
    // BUG FIX: the payload previously contained the literal placeholder
    // token `<PASSWORD>` (a syntax error); send what the user typed.
    const res = await axios.post("/api/cnp", {
      email: email,
      password: formData.password,
    });
    // Persist the logged-in session; guard against SSR where `window`
    // (and the storages) do not exist.
    const ISSERVER = typeof window === "undefined";
    if (!ISSERVER) {
      localStorage.setItem("user", JSON.stringify(res.data.data));
      sessionStorage.setItem("user", JSON.stringify(res.data.data));
      localStorage.setItem("logged", JSON.stringify(true));
      sessionStorage.setItem("logged", JSON.stringify(true));
    }
    //Redirect to dashboard
    window.location.replace("/dashboard");
  } else {
    alert("Both Passwords need to match");
  }
};
// After a digit is typed into OTP box N, move keyboard focus to box N+1.
// The doneN flags ensure each box only auto-advances once, so later edits
// (e.g. backspacing a digit) do not yank focus away from the user.
const autoFocus = (e) => {
  if (e.target.id === "1" && done1 === false) {
    document.getElementById("2").focus();
    setDone1(true);
  }
  if (e.target.id === "2" && done2 === false) {
    document.getElementById("3").focus();
    setDone2(true);
  }
  if (e.target.id === "3" && done3 === false) {
    document.getElementById("4").focus();
    setDone3(true);
  }
  if (e.target.id === "4" && done4 === false) {
    document.getElementById("5").focus();
    setDone4(true);
  }
  if (e.target.id === "5" && done5 === false) {
    document.getElementById("6").focus();
    setDone5(true);
  }
};
return (
<>
<Navbar />
<div className="main">
<div className="center">
<h2 className="signup" style={{ fontSize: "2.5em", fontWeight:"300" }}>
Password Reset
</h2>
<hr />
{ui === 0 && (
<>
<form
className="form"
onSubmit={(e) =>
requestOTP(
e,
Object.fromEntries(new FormData(e.target).entries()).email
)
}
>
<p style={{fontSize:"0.9em"}}>Enter the email associated to your account</p>
<div
className="field"
style={{ marginTop: "20px", marginLeft: "-5px" }}
>
<input
type="text"
required
placeholder=""
name="email"
className="input"
/>
<span className="span"></span>
<label htmlFor="email" className="label">
Your Email
</label>
</div>
<button
className="standardButton"
style={{ marginLeft: "8vw" }}
type="submit"
>
Submit
</button>
</form>
</>
)}
{ui === 1 && (
<form onSubmit={validateOTP}>
<p style={{ textAlign: "center" }}>
Enter the OTP we sent to your email
</p>
<br />
<div className="otp-inputs">
<input
className="otp-input"
id="1"
name="1"
required
onKeyUp={(e) => autoFocus(e)}
></input>
<input
className="otp-input"
id="2"
name="2"
required
onKeyUp={(e) => autoFocus(e)}
></input>
<input
className="otp-input"
id="3"
name="3"
required
onKeyUp={(e) => autoFocus(e)}
></input>
<input
className="otp-input"
id="4"
name="4"
required
onKeyUp={(e) => autoFocus(e)}
></input>
<input
className="otp-input"
id="5"
name="5"
required
onKeyUp={(e) => autoFocus(e)}
></input>
<input
className="otp-input"
id="6"
name="6"
required
onKeyUp={(e) => autoFocus(e)}
></input>
</div>
<br />
<button className="standardButton" style={{ marginLeft: "11vw" }}>
Validate
</button>
</form>
)}
{ui === 2 && (
<form onSubmit={updatePass} className="form">
<p style={{ textAlign: "center" }}>Create a New Password</p>
<br />
<div className="field" style={{ marginTop: "20px" }}>
<input
type="password"
required
placeholder=""
name="password"
className="input"
/>
<span className="span"></span>
<label htmlFor="password" className="label">
Password
</label>
</div>
<div className="field" style={{ marginTop: "20px" }}>
<input
type="password"
required
placeholder=""
name="cPassword"
className="input"
/>
<span className="span"></span>
<label htmlFor="password" className="label">
Confirm Password
</label>
</div>
<br />
<button className="standardButton" style={{ marginLeft: "8vw" }}>
Update
</button>
</form>
)}
</div>
</div>
<style jsx>
{`
.center {
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
width: 400px;
background: white;
border-radius: 10px;
box-shadow: rgba(0, 0, 0, 0.35) 0px 5px 15px;
font-family : var(--mainfont);
}
.signup {
text-align: center;
font-size: 55px;
padding: 10px 0;
}
.expert-signup {
font-size: 0.65em;
width: 100%;
text-align: center;
}
.form {
padding: 0 40px;
box-sizing: border-box;
}
.field {
position: relative;
border-bottom: 2px dashed #adadad;
margin: 30px 0;
zoom: 0.8;
}
.input {
width: 100%;
padding: 0 5px;
height: 40px;
font-size: 16px;
border: none;
background: none;
outline: none;
}
.label {
position: absolute;
top: 50%;
left: 5px;
color: #adadad;
transform: translateY(-50%);
font-size: 16px;
pointer-events: none;
transition: 0.5s;
}
.span::before {
content: "";
position: absolute;
top: 40px;
left: 0;
width: 0%;
height: 2px;
background: #2691d9;
transition: 0.5s;
}
.input:focus ~ .label,
.input:valid ~ .label {
top: -5px;
color: #2691d9;
}
.input:focus ~ .span::before,
.input:valid ~ .span::before {
width: 100%;
}
.signupButton {
margin-left: 60px;
width: 200px;
}
.or {
text-align: center;
}
.otp-inputs {
display: flex;
justify-content: space-evenly;
}
.otp-input {
width: 50px;
height: 50px;
box-shadow: rgba(0, 0, 0, 0.24) 0px 3px 8px;
border: none;
outline: none;
text-align: center;
font-family: var(--mainfont);
font-size: 1.2em;
}
`}
</style>
</>
);
}
|
package com.morethanheroic.warc.service.content.request.domain;
import com.morethanheroic.warc.service.content.domain.WarcContentBlock;
import java.io.InputStream;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import lombok.Builder;
import lombok.Getter;
/**
 * Immutable content block of a WARC "request" record: the HTTP request-line
 * components (method, location, protocol and version), the request headers
 * and the raw payload stream.
 */
@Builder
public class RequestContentBlock implements WarcContentBlock {

    /**
     * The HTTP method used in the request.
     *
     * @see <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods">
     * https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods</a>
     */
    @Getter
    private final String method;

    /**
     * The target location of the request.
     */
    @Getter
    private final String location;

    /**
     * The payload of the request.
     */
    @Getter
    private final InputStream payload;

    /**
     * The protocol used for the request.
     */
    @Getter
    private final String protocol;

    /**
     * The major protocol version of the request.
     */
    @Getter
    private final int majorProtocolVersion;

    /**
     * The minor protocol version of the request.
     */
    @Getter
    private final int minorProtocolVersion;

    // Backing map for the header accessors below; exposed only via an
    // unmodifiable view, never directly.
    private final Map<String, String> headers;

    /**
     * Return a value of a header from the request.
     *
     * @param headerName the name of the header to get the value for
     * @return the value of the header, or empty if the header is absent
     */
    public Optional<String> getHeader(final String headerName) {
        return Optional.ofNullable(headers.get(headerName));
    }

    /**
     * Return all of the headers of a WARC request.
     *
     * @return an unmodifiable view of the headers of the request
     */
    public Map<String, String> getHeaders() {
        return Collections.unmodifiableMap(headers);
    }
}
|
#!/bin/bash
# Bump the NuGet package references of ServiceMonitoringPlugin.
# For every package that actually changed version: open a GitHub issue and
# commit with a message that closes it. If anything was bumped, tag and push
# a new patch release and regenerate CHANGELOG.md.
# Requires: dotnet, curl, github_changelog_generator, and the env var
# CHANGELOG_GITHUB_TOKEN.

# Only proceed from a clean working tree.
GIT_STATUS=`git status | grep "nothing to commit, working tree clean" | wc -l`
if (( "$GIT_STATUS" > 0 )); then
    git pull
    cd ServiceMonitoringPlugin
    CURRENT_NUMBER_OF_COMMITS=`git log --oneline | wc -l`
    PACKAGES=('Microting.eForm' 'Microting.eFormApi.BasePn' 'Microting.EformMonitoringBase')
    PROJECT_NAME='ServiceMonitoringPlugin.csproj'
    REPOSITORY='eform-service-monitoring-plugin'
    # Quote the array expansion so package names can never be word-split.
    for PACKAGE_NAME in "${PACKAGES[@]}"; do
        OLD_VERSION=`dotnet list package | grep "$PACKAGE_NAME " | grep -oP ' \d\.\d+\.\d.*' | grep -oP ' \d.* \b' | xargs`
        dotnet add "$PROJECT_NAME" package "$PACKAGE_NAME"
        NEW_VERSION=`dotnet list package | grep "$PACKAGE_NAME " | grep -oP ' \d\.\d+\.\d.*$' | grep -oP '\d\.\d+\.\d.*$' | grep -oP ' \d\.\d+\.\d.*$' | xargs`
        # BUG FIX: both operands were unquoted, so an empty or multi-word
        # version string made `[` fail ("unary operator expected") instead of
        # comparing the versions.
        if [ "$NEW_VERSION" != "$OLD_VERSION" ]; then
            echo "We have a new version of $PACKAGE_NAME, so creating github issue and do a commit message to close that said issue"
            RESULT=`curl -X "POST" "https://api.github.com/repos/microting/$REPOSITORY/issues?state=all" \
                 -H "Cookie: logged_in=no" \
                 -H "Authorization: token $CHANGELOG_GITHUB_TOKEN" \
                 -H "Content-Type: text/plain; charset=utf-8" \
                 -d $'{
              "title": "Bump '$PACKAGE_NAME' from '$OLD_VERSION' to '$NEW_VERSION'",
              "body": "TBD",
              "assignees": [
                "renemadsen"
              ],
              "labels": [
                ".NET",
                "backend",
                "enhancement"
              ]
            }'`
            # Pull the new issue number out of the JSON response.
            ISSUE_NUMBER=`echo $RESULT | grep -oP 'number": \d+,' | grep -oP '\d+'`
            git add .
            git commit -a -m "closes #$ISSUE_NUMBER"
        fi
    done
    NEW_NUMBER_OF_COMMITS=`git log --oneline | wc -l`
    # Any new commits mean at least one package was bumped -> cut a release.
    if (( $NEW_NUMBER_OF_COMMITS > $CURRENT_NUMBER_OF_COMMITS )); then
        # Latest tag without its leading "v", e.g. v1.2.3 -> 1.2.3.
        CURRENT_GITVERSION=`git tag --sort=-creatordate | cut -d "v" -f 2 | sed -n 1p`
        MAJOR_VERSION=`echo $CURRENT_GITVERSION | cut -d "." -f 1`
        MINOR_VERSION=`echo $CURRENT_GITVERSION | cut -d "." -f 2`
        BUILD_VERSION=`echo $CURRENT_GITVERSION | cut -d "." -f 3`
        BUILD_VERSION=$(($BUILD_VERSION + 1))
        NEW_GIT_VERSION="v$MAJOR_VERSION.$MINOR_VERSION.$BUILD_VERSION"
        git tag "$NEW_GIT_VERSION"
        git push --tags
        git push
        # NOTE(review): EFORM_VERSION is never set in this script — presumably
        # exported by the caller; confirm, otherwise this prints empty.
        echo "Updated Microting eForm to ${EFORM_VERSION} and pushed new version ${NEW_GIT_VERSION}"
        cd ..
        github_changelog_generator -u microting -p $REPOSITORY -t $CHANGELOG_GITHUB_TOKEN
        git add CHANGELOG.md
        git commit -m "Updating changelog"
        git push
    else
        echo "nothing to do, everything is up to date."
    fi
else
    echo "Working tree is not clean, so we are not going to upgrade. Clean, before doing upgrade!"
fi
|
# Spec for Geometry::SizeOne — the multiplicative-identity element that can
# stand in for a Size of all ones in arithmetic, comparison and coercion.
#
# NOTE(review): this spec uses the old global-expectation style
# (`value.must_equal ...`), which is deprecated in recent Minitest — consider
# migrating to `_(value).must_equal ...` when the suite's Minitest is bumped.
require 'minitest/autorun'
require 'geometry/size_one'

describe Geometry::SizeOne do
  # NOTE(review): assignment inside `describe` defines a TOP-LEVEL constant;
  # a second spec file doing the same triggers a redefinition warning.
  Size = Geometry::Size

  let(:one) { Geometry::SizeOne.new }

  describe 'arithmetic' do
    let(:left) { Size[1, 2] }
    let(:right) { Size[3, 4] }

    it 'must have +@' do
      (+one).must_be :eql?, 1
      (+one).must_be_instance_of(Geometry::SizeOne)
    end

    it 'must have unary negation' do
      (-one).must_be :eql?, -1
      # (-one).must_be_instance_of(Geometry::SizeOne)
    end

    it 'must add a number' do
      (one + 3).must_equal 4
      (3 + one).must_equal 4
    end

    it 'return a Size when adding two Sizes' do
      (one + right).must_be_kind_of Size
      (left + one).must_be_kind_of Size
    end

    it 'must return a Size when adding an array' do
      (one + [5, 6]).must_equal [6, 7]
      # ([5,6] + one).must_equal [6,7]
    end

    describe 'when subtracting' do
      it 'must subtract a number' do
        (one - 3).must_equal(-2)
        (3 - one).must_equal 2
      end

      it 'return a Size when subtracting two Size' do
        (one - right).must_equal Size[-2, -3]
        (left - one).must_equal Size[0, 1]
      end

      it 'must return a Size when subtracting an array' do
        (one - [5, 6]).must_equal [-4, -5]
        # ([5,6] - one).must_equal [6,7]
      end
    end

    it 'must multiply by a scalar' do
      (one * 3).must_equal 3
      (one * 3.0).must_equal 3.0
    end

    it 'must refuse to multiply by a Size' do
      -> { one * Size[1, 2] }.must_raise Geometry::OperationNotDefined
    end

    it 'must refuse to multiply by a Vector' do
      -> { one * Vector[1, 2] }.must_raise Geometry::OperationNotDefined
    end

    describe 'division' do
      it 'must divide by a scalar' do
        # NOTE(review): `1 / 3` is Integer division (== 0) — this intentionally
        # asserts that the identity follows Ruby's integer-division semantics.
        (one / 3).must_equal 1 / 3
        (one / 4.0).must_equal 1 / 4.0
      end

      it 'must raise an exception when divided by 0' do
        -> { one / 0 }.must_raise ZeroDivisionError
      end

      it 'must raise an exception for Sizes' do
        -> { one / Size[1, 2] }.must_raise Geometry::OperationNotDefined
      end

      it 'must raise an exception for Vectors' do
        -> { one / Vector[1, 2] }.must_raise Geometry::OperationNotDefined
      end
    end
  end

  describe 'coercion' do
    it 'must coerce Arrays into Sizes' do
      one.coerce([3, 4]).must_equal [Size[3, 4], Size[1, 1]]
    end

    it 'must coerce Vectors into Vectors' do
      one.coerce(Vector[3, 4]).must_equal [Vector[3, 4], Vector[1, 1]]
    end

    it 'must coerce Size into Size' do
      one.coerce(Size[5, 6]).must_equal [Size[5, 6], Size[1, 1]]
    end
  end

  describe 'comparison' do
    let(:one) { Geometry::SizeOne.new }

    it 'must be equal to 1 and 1.0' do
      one.must_be :eql?, 1
      one.must_be :eql?, 1.0
    end

    it 'must not be equal to a non-one number' do
      0.wont_equal one
      3.14.wont_equal one
    end

    it 'must be equal to an Array of ones' do
      one.must_be :==, [1, 1]
      one.must_be :eql?, [1, 1]
      [1, 1].must_equal one
    end

    it 'must not be equal to a non-one Array' do
      one.wont_equal [3, 2]
      [3, 2].wont_equal one
    end

    it 'must be equal to a Size of ones' do
      one.must_be :==, Size[1, 1]
      one.must_be :eql?, Size[1, 1]
      Size[1, 1].must_equal one
    end

    it 'must not be equal to a non-one Size' do
      one.wont_equal Size[3, 2]
      Size[3, 2].wont_equal one
    end

    it 'must be equal to an Vector of onees' do
      one.must_be :eql?, Vector[1, 1]
      Vector[1, 1].must_equal one
    end

    it 'must not be equal to a non-one Vector' do
      one.wont_equal Vector[3, 2]
      Vector[3, 2].wont_equal one
    end
  end

  describe 'when enumerating' do
    it 'must have a first method' do
      one.first.must_equal 1
      one.first(1).must_equal [1]
      one.first(5).must_equal [1, 1, 1, 1, 1]
    end
  end
end
|
/**
 * Return the sum of two numbers.
 *
 * @param {number} n1 - first addend
 * @param {number} n2 - second addend
 * @returns {number} the sum n1 + n2
 */
function add_numbers(n1, n2) {
  const sum = n1 + n2;
  return sum;
}

// Export for CommonJS consumers; in a plain browser <script> the guard is
// false and the function simply stays global.
if (typeof module !== 'undefined' && typeof module.exports !== 'undefined') {
  module.exports = add_numbers;
}
# NOTE(review): the lines below are Python, not JavaScript — they look pasted
# from a companion script and cannot run in this JS file. TODO: move this
# driver into its own .py module with a Python implementation of add_numbers.
if __name__ == '__main__':
    n1 = 20
    n2 = 10
    print(add_numbers(n1, n2))
#!/bin/sh
# This file is a collection of functions used by all the other scripts
## Framework variables
# Convention: all variables from this framework are prefixed with SUIF_
# Variables defaults are specified in the init function
# client projects should source their variables with en .env file
newAuditSession(){
export SUIF_AUDIT_BASE_DIR=${SUIF_AUDIT_BASE_DIR:-"/tmp"}
export SUIF_SESSION_TIMESTAMP=`date +%y-%m-%dT%H.%M.%S_%3N`
export SUIF_AUDIT_SESSION_DIR="${SUIF_AUDIT_BASE_DIR}/${SUIF_SESSION_TIMESTAMP}"
mkdir -p "${SUIF_AUDIT_SESSION_DIR}"
return $?
}
init(){
newAuditSession || return $?
# For internal dependency checks,
export SUIF_DEBUG_ON="${SUIF_DEBUG_ON:-0}"
export SUIF_LOG_TOKEN=${SUIF_LOG_TOKEN:-"SUIF"}
# by default, we assume we are working connected to internet, put this on 0 for offline installations
export SUIF_ONLINE_MODE="${SUIF_ONLINE_MODE:-1}"
if [ ${SUIF_ONLINE_MODE} -eq 0 ]; then
# in offline mode the caller MUST provide the home folder for SUIF in the env var SUIF_HOME
if [ ! -f "${SUIF_HOME}/01.scripts/commonFunctions.sh" ]; then
return 104
else
export SUIF_CACHE_HOME="${SUIF_HOME}" # we already have everything
fi
else
# by default use master branch
export SUIF_HOME_URL=${SUIF_HOME_URL:-"https://raw.githubusercontent.com/Myhael76/sag-unattented-installations/main/"}
export SUIF_CACHE_HOME=${SUIF_CACHE_HOME:-"/tmp/suifCacheHome"}
mkdir -p "${SUIF_CACHE_HOME}"
fi
# SUPPRESS_STDOUT means we will not produce STD OUT LINES
# Normally we want the see the output when we prepare scripts, and suppress it when we finished
export SUIF_SUPPRESS_STDOUT=${SUIF_SUPPRESS_STDOUT:-0}
}
init || exit $?
# all log functions recieve 1 parameter
# $1 - Message to log
logI(){
if [ ${SUIF_SUPPRESS_STDOUT} -eq 0 ]; then echo `date +%y-%m-%dT%H.%M.%S_%3N`" ${SUIF_LOG_TOKEN} -INFO - ${1}"; fi
echo `date +%y-%m-%dT%H.%M.%S_%3N`" ${SUIF_LOG_TOKEN} -INFO - ${1}" >> "${SUIF_AUDIT_SESSION_DIR}/session.log"
}
logW(){
if [ ${SUIF_SUPPRESS_STDOUT} -eq 0 ]; then echo `date +%y-%m-%dT%H.%M.%S_%3N`" ${SUIF_LOG_TOKEN} -WARN - ${1}"; fi
echo `date +%y-%m-%dT%H.%M.%S_%3N`" ${SUIF_LOG_TOKEN} -WARN - ${1}" >> "${SUIF_AUDIT_SESSION_DIR}/session.log"
}
logE(){
if [ ${SUIF_SUPPRESS_STDOUT} -eq 0 ]; then echo `date +%y-%m-%dT%H.%M.%S_%3N`" ${SUIF_LOG_TOKEN} -ERROR- ${1}"; fi
echo `date +%y-%m-%dT%H.%M.%S_%3N`" ${SUIF_LOG_TOKEN} -ERROR- ${1}" >> "${SUIF_AUDIT_SESSION_DIR}/session.log"
}
logD(){
if [ ${SUIF_DEBUG_ON} -ne 0 ]; then
if [ ${SUIF_SUPPRESS_STDOUT} -eq 0 ]; then echo `date +%y-%m-%dT%H.%M.%S_%3N`" ${SUIF_LOG_TOKEN} -DEBUG- ${1}"; fi
echo `date +%y-%m-%dT%H.%M.%S_%3N`" ${SUIF_LOG_TOKEN} -DEBUG- ${1}" >> "${SUIF_AUDIT_SESSION_DIR}/session.log"
fi
}
logEnv(){
if [ ${SUIF_DEBUG_ON} -ne 0 ]; then
if [ ${SUIF_SUPPRESS_STDOUT} -eq 0 ]; then env | grep SUIF | sort; fi
env | grep SUIF | sort >> "${SUIF_AUDIT_SESSION_DIR}/session.log"
fi
}
# Dump the COMPLETE environment (sorted) to stdout and to the session log.
# Debug-only: no-op unless SUIF_DEBUG_ON is non-zero; stdout is suppressed
# when SUIF_SUPPRESS_STDOUT is set.
logFullEnv(){
    if [ ${SUIF_DEBUG_ON} -ne 0 ]; then
        if [ ${SUIF_SUPPRESS_STDOUT} -eq 0 ]; then env | sort; fi
        # BUG FIX: only SUIF_* variables reached the session log before,
        # contradicting this function's "full env" contract (the filtered
        # case is already covered by logEnv).
        env | sort >> "${SUIF_AUDIT_SESSION_DIR}/session.log"
    fi
}
# Convention:
# f() function creates a RESULT_f variable for the outcome
# if not otherwise specified, 0 means success
controlledExec(){
# Param $1 - command to execute in a controlled manner
# Param $2 - tag for trace files
local lCrtEpoch=`date +%s`
eval "${1}" >"${SUIF_AUDIT_SESSION_DIR}/controlledExec_${lCrtEpoch}_${2}.out" 2>"${SUIF_AUDIT_SESSION_DIR}/controlledExec_${lCrtEpoch}_${2}.err"
return $?
}
portIsReachable(){
# Params: $1 -> host $2 -> port
if [ -f /usr/bin/nc ]; then
nc -z ${1} ${2} # alpine image
else
temp=`(echo > /dev/tcp/${1}/${2}) >/dev/null 2>&1` # centos image
fi
if [ $? -eq 0 ] ; then echo 1; else echo 0; fi
}
# urlencode / decode taken from https://gist.github.com/cdown/1163649
urlencode() {
# urlencode <string>
# usage A_ENC=$(urlencode ${A})
local old_lc_collate=$LC_COLLATE
LC_COLLATE=C
local length="${#1}"
for (( i = 0; i < length; i++ )); do
local c="${1:$i:1}"
case $c in
[a-zA-Z0-9.~_-]) printf '%s' "$c" ;;
*) printf '%%%02X' "'$c" ;;
esac
done
LC_COLLATE=$old_lc_collate
}
urldecode() {
# urldecode <string>
# usage A=$(urldecode ${A_ENC})
local url_encoded="${1//+/ }"
printf '%b' "${url_encoded//%/\\x}"
}
# Parameters - huntForSuifFile
# $1 - relative Path to SUIF_HOME
# $2 - filename
huntForSuifFile(){
if [ ! -f "${SUIF_CACHE_HOME}/${1}/${2}" ]; then
if [ ${SUIF_ONLINE_MODE} -eq 0 ]; then
logE "File ${SUIF_CACHE_HOME}/${1}/${2} not found!"
return 1 # File should exist, but it does not
fi
logI "File ${SUIF_CACHE_HOME}/${1}/${2} not found in local cache, attempting download"
mkdir -p "${SUIF_CACHE_HOME}/${1}"
curl "${SUIF_HOME_URL}/${1}/${2}" --silent -o "${SUIF_CACHE_HOME}/${1}/${2}"
local RESULT_curl=$?
if [ ${RESULT_curl} -ne 0 ]; then
logE "curl failed, code ${RESULT_curl}"
return 2
fi
logI "File ${SUIF_CACHE_HOME}/${1}/${2} downloaded successfully"
fi
}
# Parameters - applyPostSetupTemplate
# $1 - Setup template directory, relative to <repo_home>/02.templates/02.post-setup
applyPostSetupTemplate(){
logI "Applying post-setup template ${1}"
huntForSuifFile "02.templates/02.post-setup/${1}" "apply.sh"
local RESULT_huntForSuifFile=$?
if [ ${RESULT_huntForSuifFile} -ne 0 ]; then
logE "File ${SUIF_CACHE_HOME}/02.templates/02.post-setup/${1}/apply.sh not found!"
return 1
fi
chmod u+x "${SUIF_CACHE_HOME}/02.templates/02.post-setup/${1}/apply.sh"
local RESULT_chmod=$?
if [ ${RESULT_chmod} -ne 0 ]; then
logW "chmod command for apply.sh failed. This is not always a problem, continuing"
fi
logI "Calling apply.sh for template ${1}"
#controlledExec "${SUIF_CACHE_HOME}/02.templates/02.post-setup/${1}/apply.sh" "PostSetupTemplateApply"
"${SUIF_CACHE_HOME}/02.templates/02.post-setup/${1}/apply.sh"
local RESULT_apply=$?
if [ ${RESULT_apply} -ne 0 ]; then
logE "Application of post-setup template ${1} failed, code ${RESULT_apply}"
return 3
fi
logI "Post setup template ${1} applied successfully"
}
# Write all SUIF_* environment variables (password-like entries excluded)
# to stdout and to the session log, only when debugging is enabled.
logEnv4Debug(){
    logD "Dumping environment variables for debugging purposes"
    if [ ${SUIF_DEBUG_ON} -ne 0 ]; then
        if [ ${SUIF_SUPPRESS_STDOUT} -eq 0 ]; then
            env | grep SUIF_ | grep -v PASS | sort;
        fi
        # BUG FIX: this line previously read `echo env | grep ...`, which
        # pipes the literal text "env" into grep — nothing was ever logged.
        env | grep SUIF_ | grep -v PASS | sort >> "${SUIF_AUDIT_SESSION_DIR}/session.log"
    fi
}
debugSuspend(){
if [ ${SUIF_DEBUG_ON} -ne 0 ]; then
logD "Suspending for debug"
tail -f /dev/null
fi
}
readSecretFromUser(){
# params
# $1 - message -> what to input
secret="0"
while [ "${secret}" == "0" ]; do
read -sp "Please input ${1}: " s1
echo ""
read -sp "Please input ${1} again: " s2
echo ""
if [ "${s1}" == "${s2}" ]; then
secret=${s1}
else
echo "Input do not match, retry"
fi
unset s1 s2
done
}
logI "SLS common framework functions initialized"
|
source $PATHRT/utests/std.sh
# Set up date and time of restart files for restart run
FHROT=12
RESTART_FILE_PREFIX="${SYEAR}${SMONTH}${SDAY}.$(printf "%02d" $(( SHOUR + FHROT )))0000"
if [[ $application == 'global' ]]; then
NSTF_NAME=2,0,1,0,5
elif [[ $application == 'regional' ]]; then
echo "Regional application not yet implemented for restart"
exit 1
elif [[ $application == 'cpld' ]]; then
RUNTYPE='continue'
USE_RESTART_TIME='.true.'
MOM6_RESTART_SETTING="r"
RESTART_FILE_SUFFIX_HRS="${SYEAR}-${SMONTH}-${SDAY}-$(printf "%02d" $(( ${FHROT} )))"
RESTART_FILE_SUFFIX_SECS="${SYEAR}-${SMONTH}-${SDAY}-$(printf "%02d" $(( ${FHROT}*3600 )))"
RESTART_N=$((FHMAX-$FHROT))
DEP_RUN=${TEST_NAME}
fi
WARM_START=.T.
NGGPS_IC=.F.
EXTERNAL_IC=.F.
MAKE_NH=.F.
MOUNTAIN=.T.
NA_INIT=0
LIST_FILES=$(echo -n $LIST_FILES | sed -E "s/phyf000\.(tile.\.nc|nemsio|nc) ?//g" \
| sed -E "s/dynf000\.(tile.\.nc|nemsio|nc) ?//g" \
| sed -E "s/atmos_4xdaily\.tile[1-6]\.nc ?//g" | sed -e "s/^ *//" -e "s/ *$//")
(test $CI_TEST == 'true') && source $PATHRT/utests/cmp_proc_bind.sh
#########source $PATHRT/utests/cmp_proc_bind.sh
source $PATHRT/utests/wrt_env.sh
cat <<EOF >>${RUNDIR_ROOT}/unit_test${RT_SUFFIX}.env
export FHROT=${FHROT}
export RESTART_FILE_PREFIX=${RESTART_FILE_PREFIX}
export NSTF_NAME=${NSTF_NAME}
export RUNTYPE=${RUNTYPE:-}
export USE_RESTART_TIME=${USE_RESTART_TIME:-}
export MOM6_RESTART_SETTING=${MOM6_RESTART_SETTING:-}
export RESTART_FILE_SUFFIX_HRS=${RESTART_FILE_SUFFIX_HRS:-}
export RESTART_FILE_SUFFIX_SECS=${RESTART_FILE_SUFFIX_SECS:-}
export RESTART_N=${RESTART_N:-}
export DEP_RUN=${DEP_RUN:-}
export WARM_START=${WARM_START}
export NGGPS_IC=${NGGPS_IC}
export EXTERNAL_IC=${EXTERNAL_IC}
export MAKE_NH=${MAKE_NH}
export MOUNTAIN=${MOUNTAIN}
export NA_INIT=${NA_INIT}
export LIST_FILES="${LIST_FILES}"
EOF
|
#!/bin/bash
set -e -o pipefail
#? Description:
#? Get the latest AMIs for all enabled regions and build JSON in the format
#? of Region-to-AMI mapping as the key `Mappings` in aws-cfn-vpn template.
#? It takes a few minutes to finish, please be patient.
#?
#? All the AMIs are AWS free tier eligible and have the types:
#? * ImageOwnerAlias: amazon
#? * Public: true
#? * State: available
#? * Architecture: x86_64
#? * Hypervisor: xen
#? * VirtualizationType: hvm
#? * Description: Amazon Linux 2 AMI*
#?
#? Some regions are not enabled for your account by default. Those regions
#? will be updated with en empty AMI object: {}.
#?
#? You can enable the disabled regions in your web console at:
#? https://console.aws.amazon.com/billing/home?#/account
#?
#? Usage:
#? ami.sh
#? [-t TEMPLATE]
#? [-h]
#?
#? Options:
#? [-t TEMPLATE]
#?
#? Update the TEMPLATE file with the new mapping on the key `Mappings`.
#? The file must be in JSON format and be at local.
#?
#? [-h]
#?
#? This help.
#?
#? Example:
#? # get the latest AMIs to stdout
#? ami.sh
#?
#? # update the latest AMIs to stack.json
#? ami.sh -t stack.json
#?
function usage () {
awk '/^#\?/ {sub("^[ ]*#\\?[ ]?", ""); print}' "$0" \
| awk '{gsub(/^[^ ]+.*/, "\033[1m&\033[0m"); print}'
}
function region-long-name () {
#? Usage:
#? region-long-name <REGION>
#?
declare region=${1:?}
aws ssm get-parameters \
--name /aws/service/global-infrastructure/regions/"$region"/longName \
--query 'Parameters[0].[Value]' \
--output text
}
function AMI-ID () {
#? Usage:
#? AMI-ID <REGION>
#?
declare region=${1:?}
aws ssm get-parameters \
--region "$region" \
--query 'Parameters[*].[Value]' \
--names /aws/service/ami-amazon-linux-latest/amzn2-ami-hvm-x86_64-gp2 \
--output text
}
function AMI () {
#? Usage:
#? AMI <REGION>
#?
declare region=${1:?} filters location id
# shellcheck disable=SC2191
# shellcheck disable=SC2054
filters=(
Name=owner-alias,Values=amazon
Name=is-public,Values=true
Name=state,Values=available
Name=architecture,Values=x86_64
Name=description,Values=Amazon\ Linux\ 2\ AMI\*
Name=hypervisor,Values=xen
Name=virtualization-type,Values=hvm
)
location=$(region-long-name "$region")
id=$(AMI-ID "$region")
if [[ -z $id ]]; then
return
fi
aws ec2 describe-images \
--region "$region" \
--image-ids "$id" \
--query 'Images[0].{name:Name,AMI:ImageId,created:CreationDate,location:'"'$location'"'}' \
--filter "${filters[@]}" \
--output json
}
function regions () {
#? Usage:
#? regions
#?
aws ec2 describe-regions \
--query 'Regions[*].[RegionName]' \
--output text
}
function AMIs () {
#? Usage:
#? AMIs
#?
declare regions index ami
# shellcheck disable=SC2207
regions=( $(regions) )
for index in "${!regions[@]}"; do
printf "." >&2
ami=$(AMI "${regions[index]}" | sed 's/ / /g') # indent length: 4 => 2
printf '"%s": %s' "${regions[index]}" "${ami:-{\}}" # ami: None ==> {}
if [[ $index -lt $((${#regions[@]} - 1)) ]]; then
printf ",\n"
else
printf "\n"
fi
done
printf "\n" >&2
}
function wrap () {
#? Usage:
#? wrap <AMIs>
#?
declare amis=${1:?} indent_amis wrapper
IFS='' read -r -d '' wrapper <<EOF
"Mappings": {
"RegionMap": {
%s
}
}
EOF
indent_amis=$(printf '%s' "$amis" | sed 's/^/ /') # indent level: +2
# shellcheck disable=SC2059
printf "$wrapper\n" "$indent_amis"
}
function update () {
#? Usage:
#? update <MAPPING> <FILE>
#?
declare mapping=${1:?} file=${2:?}
printf 'updating mapping in: %s ...' "$file"
xsh /file/inject \
-c "$mapping" \
-p before \
-e '^ "Outputs": \{$' \
-x '^ "Mappings": \{$' \
-y '^ \},$' \
"$file"
printf " [done]\n"
}
function main () {
# shellcheck disable=SC2034
declare template mapping \
OPTAND OPTARG opt
while getopts t:h opt; do
case $opt in
t)
template=$OPTARG
;;
*)
usage
exit 255
;;
esac
done
mapping=$(wrap "$(AMIs)")
printf '%s\n' "$mapping"
if [[ -n $template ]]; then
mapping=${mapping/#/ } # indent level: +1
mapping=${mapping}, # append comma
update "$mapping" "$template"
fi
}
declare BASE_DIR
# shellcheck disable=SC2034
BASE_DIR=$(cd "$(dirname "$0")"; pwd)
main "$@"
exit
|
from collections import Counter
def related_keywords(keyword, data):
    """Return the most frequent entry of ``data`` containing ``keyword``.

    Matching is case-insensitive; the returned word is lowercased.

    Args:
        keyword: Substring to search for.
        data: Iterable of candidate words.

    Returns:
        The lowercased matching word with the most occurrences, or
        ``None`` when nothing matches (fix: previously raised
        ``IndexError`` on an empty match set).
    """
    keyword = keyword.lower()
    # Keep every (lowercased) word that contains the keyword.
    matches = [word.lower() for word in data if keyword in word.lower()]
    if not matches:
        return None
    # most_common(1) yields the single highest-count entry; ties resolve
    # to the earliest-seen word (Counter preserves insertion order).
    return Counter(matches).most_common(1)[0][0]
# example input: demo call showing case-insensitive substring matching
data = ["Shoes", "Sneakers", "Sandals", "Boots"]
print(related_keywords("shoes", data))  # prints "shoes"
/**
 * WebSocket handler: computes the Euclidean magnitude of the (x, y)
 * vector carried by `req` and sends it back on `conn` as a JSON string.
 *
 * @param {{send: function(string): void}} conn - connection exposing send()
 * @param {{x: number, y: number}} req - request with the vector components
 */
function wsMagnitude(conn, req) {
  const { x, y } = req;
  const response = { magnitude: Math.sqrt(x * x + y * y) };
  conn.send(JSON.stringify(response));
}
#!/bin/sh
# Install the PostgreSQL JDBC driver as a WildFly module.
#
# Steps: enter the WildFly bin directory, download the driver jar, remove
# any previously installed postgresql modules, then register the jar via
# the JBoss CLI (-c requires a running server).

# fix: abort if the WildFly directory is missing — previously the
# destructive commands below would run relative to the wrong directory.
cd ../../wildfly-16.0.0.Final/bin || exit 1
wget https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.18/postgresql-42.2.18.jar
# -f: do not fail when the module directories do not exist yet.
rm -rf ../modules/org/postgresql
rm -rf ../modules/postgresql
./jboss-cli.sh -c --command="module add --name=postgresql --resources=postgresql-42.2.18.jar"
|
class RoboticArm:
    """A robotic arm built from named components, driven by command tuples."""

    def __init__(self):
        # The attribute name ("rightArm") doubles as the lookup key used
        # by simulateArmMovement via getattr().
        self.rightArm = ArmComponent()

    def simulateArmMovement(self, sequence):
        """Execute each (component_name, action, *args) command in order.

        Supported actions: 'attach', 'moveTo' (takes a position argument),
        'detach'. Unrecognized actions are silently ignored.
        """
        for component_name, action, *extra in sequence:
            target = getattr(self, component_name)
            if action == 'attach':
                target.attach()
            elif action == 'moveTo':
                target.moveTo(extra[0])
            elif action == 'detach':
                target.detach()
class ArmComponent:
    """A single arm part that reports its lifecycle events to stdout."""

    def attach(self):
        # Announce that the component is now attached.
        print("Component attached")

    def moveTo(self, position):
        # Announce movement toward the requested position.
        print(f"Moving to position {position}")

    def detach(self):
        # Announce that the component has been released.
        print("Component detached")
import React from "react"
import "./CreateComment.css"
import { Row, Col, Image, Button, Form } from "react-bootstrap"
import * as Yup from "yup"
import { useForm } from "react-hook-form"
import { yupResolver } from "@hookform/resolvers/yup"
import { getBackEndHostWithSlash, getAuthorImgOrDefault } from "../utils"
/**
 * Comment composer rendered under a post: the logged-in user's avatar, a
 * textarea and a "Comment" button. Validates that the comment text is
 * non-empty (Yup + react-hook-form), POSTs it to the backend comments
 * endpoint, then resets the form and bumps the comment counter.
 *
 * Props:
 * - loggedInUser: authenticated author; supplies avatar and profile link
 * - author: post author (currently unused inside this component)
 * - post: post being commented on; post.id is a full URL whose
 *   "/author..." suffix is reused to build the backend route
 * - triggerRerender: parent callback to refresh the comment list
 * - postCommentCount / setPostCommentCount: comment-count state pair
 */
export default function CreateComment({
  loggedInUser,
  author,
  post,
  triggerRerender,
  postCommentCount,
  setPostCommentCount,
}) {
  // schema to validate form inputs
  const validationSchema = Yup.object().shape({
    comment: Yup.string().required("Post comment is required"),
  })
  // get form functions and link validation schema to form
  const {
    register,
    handleSubmit,
    reset,
    setError,
    formState: { errors },
  } = useForm({
    resolver: yupResolver(validationSchema),
  })
  const submitHandler = (data) => {
    console.log(data)
    const newData = { ...data }
    // NOTE(review): this form only registers "comment", so newData.title
    // is always undefined (never "") and this branch is dead — looks like
    // a leftover from a copied post-creation form; confirm before removing.
    if (newData.title === "") {
      delete newData.title
    }
    const host = getBackEndHostWithSlash()
    // post the validated data to the backend registration service
    fetch(`${host}service/author${post.id.split("/author")[1]}/comments/`, {
      method: "POST",
      headers: {
        // NOTE(review): header name is conventionally "Content-Type";
        // servers treat it case-insensitively, so left unchanged here.
        "content-Type": "application/json",
        Authorization: `Bearer ${localStorage.getItem("token")}`,
      },
      body: JSON.stringify(newData),
    })
      .then((corsResponse) => {
        const apiPromise = corsResponse.json()
        apiPromise
          .then((apiResponse) => {
            // empty out the form
            reset()
            setPostCommentCount(postCommentCount + 1)
            triggerRerender()
            console.log(apiResponse)
          })
          .catch((e) => {
            // get the errors object
            // NOTE(review): fetch() rejections are TypeError/SyntaxError
            // with no .response property (that shape is axios-style), so
            // e.response.data will itself throw here; fetch also does NOT
            // reject on HTTP 4xx/5xx, so server validation errors flow
            // into the success branch above. Confirm and fix (check
            // corsResponse.ok before parsing).
            const errors = e.response.data
            // set comment errors
            if (errors.comment) {
              setError("comment", {
                type: "server",
                message: errors.comment[0],
              })
            }
          })
      })
      .catch((e) => {
        console.log(e)
      })
  }
  return (
    <div>
      <Form onSubmit={handleSubmit(submitHandler)}>
        <Row style={{ marginTop: "30px" }}>
          <Col xs={12} style={{ display: "flex", alignItems: "flex-start" }}>
            {/* Avatar linking to the logged-in user's profile */}
            <a href={loggedInUser.id} style={{ textDecoration: "none" }}>
              <Image
                className="fluid"
                src={getAuthorImgOrDefault(loggedInUser?.profileImage)}
                roundedCircle
                style={{
                  objectFit: "cover",
                  backgroundColor: "#EEE",
                  width: "40px",
                  height: "40px",
                  marginRight: "8px",
                }}
              />
            </a>
            {/* comment Form Field */}
            <Form.Group className="mb-3" style={{ flexGrow: 1 }}>
              <Form.Control
                defaultValue=""
                name="comment"
                placeholder="Add your comment"
                as="textarea"
                rows={5}
                style={{ padding: "0.75rem 0.85rem" }}
                {...register("comment")}
                className={`form-control ${errors.comment ? "is-invalid" : ""}`}
              />
              {/* Validation / server error message for the comment field */}
              <Form.Text className="invalid-feedback">
                {errors.comment?.message}
              </Form.Text>
            </Form.Group>
          </Col>
        </Row>
        <Row style={{ display: "flex" }}>
          {/* Submit Button */}
          <div style={{ display: "flex", justifyContent: "flex-end" }}>
            <Button
              className="pl-5"
              variant="primary"
              type="submit"
              style={{ padding: "0.6rem 1rem" }}
            >
              Comment
            </Button>
          </div>
        </Row>
      </Form>
    </div>
  )
}
|
//
//  UIScrollViewCategoryHeader.h
//  MeiYiQuan
//
//  Created by 任强宾 on 16/9/29.
//  Copyright © 2016年 任强宾. All rights reserved.
//
// Umbrella header collecting the UIScrollView category extensions so
// callers only need a single #import.
#ifndef UIScrollViewCategoryHeader_h
#define UIScrollViewCategoryHeader_h
#import "UIScrollView+Touch.h" // touch-event handling category
#import "UIScrollView+qbExtension.h" // inset-configuration category (original comment "设置inset" = "set inset")
#endif /* UIScrollViewCategoryHeader_h */
|
/*
 * Retrieve the expiration time stored in an opaque context.
 *
 * Context           - caller-supplied state; assumed to begin with a
 *                     long long expiration timestamp (layout is a
 *                     placeholder per the original code — TODO confirm).
 * GetExpirationTime - non-zero to request the expiration time.
 *
 * Returns the expiration time, or -1 when it was not requested or when
 * Context is NULL (fix: the original dereferenced a NULL Context).
 */
long long RetrieveExpirationTime(void* Context, int GetExpirationTime) {
    if (GetExpirationTime && Context != NULL) {
        /* Expiration time is assumed to be the first field of the
         * context block. */
        return *(long long*)Context;
    }
    return -1; /* not requested, or no context supplied */
}
# This file is part of Requirements-Builder
# Copyright (C) 2015, 2016 CERN.
#
# Requirements-Builder is free software; you can redistribute it and/or
# modify it under the terms of the Revised BSD License; see LICENSE
# file for more details.
# Run the full local QA suite; "&&" stops the chain at the first failure.
# NOTE: the commented-out isort line sits between "&& \" continuations —
# this works because the shell permits a newline (and comment) between
# "&&" and the next command, so "check-manifest" still chains correctly.
pydocstyle requirements_builder && \
#isort -rc -c -vb -df -sp .isort.cfg **/*.py && \
check-manifest && \
sphinx-build -qnNW docs docs/_build/html && \
python -m pytest && \
sphinx-build -qnNW -b doctest docs docs/_build/doctest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.