text
stringlengths 1
1.05M
|
|---|
<reponame>grounded042/fission-workflows
package aggregates
import (
"github.com/fission/fission-workflows/pkg/api/events"
"github.com/fission/fission-workflows/pkg/fes"
"github.com/fission/fission-workflows/pkg/types"
"github.com/golang/protobuf/proto"
"github.com/sirupsen/logrus"
)
const (
TypeTaskInvocation = "task"
)
// TaskInvocation is the event-sourced aggregate entity for a single task
// invocation. It combines entity bookkeeping (fes.BaseEntity) with the
// protobuf-backed invocation state (types.TaskInvocation).
type TaskInvocation struct {
	*fes.BaseEntity
	*types.TaskInvocation
}
// NewTaskInvocation wraps the given invocation state in an event-sourced
// entity keyed by the aggregate (id, TypeTaskInvocation).
func NewTaskInvocation(id string, fi *types.TaskInvocation) *TaskInvocation {
	entity := &TaskInvocation{TaskInvocation: fi}
	entity.BaseEntity = fes.NewBaseEntity(entity, *NewTaskInvocationAggregate(id))
	return entity
}
// NewTaskInvocationAggregate returns the aggregate key identifying the task
// invocation with the given id.
func NewTaskInvocationAggregate(id string) *fes.Aggregate {
	aggregate := fes.Aggregate{
		Id:   id,
		Type: TypeTaskInvocation,
	}
	return &aggregate
}
// ApplyEvent advances the invocation's state by applying a single
// event-sourcing event. TaskStarted (re)initializes the whole record;
// Succeeded/Failed/Skipped only mutate the status. Unknown event types
// yield ErrUnsupportedEntityEvent.
func (ti *TaskInvocation) ApplyEvent(event *fes.Event) error {
	if err := ti.ensureNextEvent(event); err != nil {
		return err
	}
	eventData, err := fes.ParseEventData(event)
	if err != nil {
		return err
	}
	switch m := eventData.(type) {
	case *events.TaskStarted:
		// A start event replaces the entire invocation record.
		ti.TaskInvocation = &types.TaskInvocation{
			Metadata: &types.ObjectMetadata{
				Id:         m.GetSpec().TaskId,
				CreatedAt:  event.Timestamp,
				Generation: 1,
			},
			Spec: m.GetSpec(),
			Status: &types.TaskInvocationStatus{
				Status: types.TaskInvocationStatus_IN_PROGRESS,
			},
		}
	case *events.TaskSucceeded:
		// NOTE(review): assumes Status is non-nil, i.e. a TaskStarted event
		// was applied first — confirm event ordering is guaranteed upstream.
		ti.Status.Output = m.GetResult().Output
		ti.Status.Status = types.TaskInvocationStatus_SUCCEEDED
	case *events.TaskFailed:
		ti.Status.Error = m.GetError()
		ti.Status.Status = types.TaskInvocationStatus_FAILED
	case *events.TaskSkipped:
		// TODO ensure that object (spec/status) is present
		ti.Status.Status = types.TaskInvocationStatus_SKIPPED
	default:
		key := ti.Aggregate()
		return fes.ErrUnsupportedEntityEvent.WithAggregate(&key).WithEvent(event)
	}
	// Every successfully applied event bumps the generation and refreshes
	// the status update timestamp.
	ti.Metadata.Generation++
	ti.Status.UpdatedAt = event.GetTimestamp()
	return nil
}
// CopyEntity returns a deep copy of this entity, satisfying fes.Entity.
func (ti *TaskInvocation) CopyEntity() fes.Entity {
	clone := &TaskInvocation{TaskInvocation: ti.Copy()}
	clone.BaseEntity = ti.CopyBaseEntity(clone)
	return clone
}
// Copy returns a deep copy of the underlying protobuf invocation message.
func (ti *TaskInvocation) Copy() *types.TaskInvocation {
	return proto.Clone(ti.TaskInvocation).(*types.TaskInvocation)
}
// ensureNextEvent validates that event can be applied to this entity: it must
// be structurally valid and must target the task invocation aggregate type.
func (ti *TaskInvocation) ensureNextEvent(event *fes.Event) error {
	if err := fes.ValidateEvent(event); err != nil {
		return err
	}
	if event.Aggregate.Type != TypeTaskInvocation {
		// Replaced a leftover debug statement (`logrus.Info("task check")`)
		// with a structured, actionable log line.
		logrus.WithField("aggregateType", event.Aggregate.Type).
			Warn("Event does not target a task invocation aggregate")
		return fes.ErrUnsupportedEntityEvent.WithEntity(ti).WithEvent(event)
	}
	// TODO check sequence of event
	return nil
}
|
<reponame>gridem/Serialization<filename>serialization.cpp
/*
* Copyright 2015 <NAME> (aka gridem)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cstdlib>
#include <cstring>

#include "serialization.h"
namespace synca {
namespace {
thread_local Buffer* t_buffer = nullptr;
thread_local MemoryAllocator* t_allocator = nullptr;
}
// Fallback allocator used whenever no MemoryAllocator is active on the
// current thread; thin wrapper around malloc/free with an allocation check.
struct DefaultAllocator
{
    static void* alloc(size_t sz)
    {
        void* mem = std::malloc(sz);
        VERIFY(mem != nullptr, "Allocation failed");
        return mem;
    }

    static void dealloc(void* mem) noexcept
    {
        std::free(mem);
    }
};
// Returns this thread's scratch buffer, lazily creating a 10 MB buffer on
// first use. The buffer is intentionally never freed: it lives for the
// lifetime of the thread.
Buffer& tlsBuffer()
{
    auto buf = t_buffer;
    if (buf == nullptr) {
        buf = new Buffer(1024 * 1024 * 10); // 10MB
        t_buffer = buf;
    }
    return *buf;
}
// Installs this allocator as the thread's active allocator; the global
// operator new/delete below route through it until destruction.
MemoryAllocator::MemoryAllocator(Buffer& buffer)
    : buffer_{buffer}
{
    t_allocator = this;
}

// Deactivates the thread-local allocator.
// NOTE(review): assumes allocators are not nested on one thread — a nested
// instance would clear rather than restore the outer one; confirm usage.
MemoryAllocator::~MemoryAllocator()
{
    t_allocator = nullptr;
}
// Bump-allocates sz bytes from the backing buffer. The request is rounded up
// to a multiple of the pointer size so every returned block is aligned for
// pointer-sized objects; the memory is zero-initialized. Fails hard (VERIFY)
// when the buffer is exhausted.
void* MemoryAllocator::alloc(size_t sz) // override
{
    Ptr p = buffer_.data() + offset_;
    // Round up to pointer alignment.
    size_t diff = sz % c_ptrSize;
    if (diff)
        sz += c_ptrSize - diff;
    VERIFY(
        offset_ + sz <= buffer_.size(), "MemoryAllocator: allocation failed: oversize");
    offset_ += sz;
    // std::memset requires <cstring>, which was missing from the includes.
    std::memset(p, 0, sz);
    return p;
}
// Frees p only when it lies outside the internal buffer; buffer-owned
// allocations are reclaimed wholesale via setSize()/buffer reuse.
// NOTE(review): relational comparison of pointers into different objects is
// formally unspecified in C++; this works on flat-address-space platforms —
// confirm the supported targets.
void MemoryAllocator::dealloc(void* p) // override
{
    if (p < buffer_.data() || p >= buffer_.data() + buffer_.size())
        DefaultAllocator::dealloc(p);
}
// Returns the current bump-pointer offset, i.e. the number of bytes handed out.
size_t MemoryAllocator::size() const
{
    return offset_;
}

// Rewinds (or advances) the bump pointer, e.g. to roll back to a checkpoint
// previously captured with size(). Does not touch the buffer contents.
void MemoryAllocator::setSize(size_t size)
{
    offset_ = size;
}
// Appends the bytes referenced by view v to buffer b.
void bufInsertView(Buffer& b, View v)
{
    b.insert(b.end(), v.data, v.data + v.size);
}

// Creates a non-owning view over the buffer's current contents; the view is
// invalidated by any subsequent mutation of b.
View bufToView(Buffer& b)
{
    return {b.data(), b.size()};
}
}
// Global operator new: routes allocations through the thread-local
// MemoryAllocator when one is active, otherwise falls back to malloc.
void* operator new(size_t sz)
{
    return synca::t_allocator ? synca::t_allocator->alloc(sz)
                              : synca::DefaultAllocator::alloc(sz);
}

// Global operator delete: mirrors operator new. MemoryAllocator::dealloc
// only frees pointers that lie outside its buffer, so buffer-owned blocks
// are no-ops here.
void operator delete(void* p) noexcept
{
    synca::t_allocator ? synca::t_allocator->dealloc(p)
                       : synca::DefaultAllocator::dealloc(p);
}
|
#!/usr/bin/env bash
# Validates gentx files against a pre-launch genesis by spinning up a
# throwaway local node. Requires the environment variables listed below.
DAEMON_HOME="/tmp/simd$(date +%s)"
RANDOM_KEY="randomvalidatorkey"
echo "#############################################"
echo "### Ensure to set the below ENV settings ###"
echo "#############################################"
echo "
DAEMON= # ex: simd
CHAIN_ID= # ex: testnet-1
DENOM= # ex: ustake
GH_URL= # ex: https://github.com/aliworkshop/terra-sdk
BINARY_VERSION= # ex :v0.44.0
GO_VERSION=1.17
PRELAUNCH_GENESIS_URL= # ex: https://raw.githubusercontent.com/cosmos/cosmos-sdk/master/\$CHAIN_ID/genesis-prelaunch.json
GENTXS_DIR= # ex: \$GOPATH/github.com/cosmos/mainnet/\$CHAIN_ID/gentxs"
echo
# Each required variable aborts with a NON-ZERO status when missing.
# (The original exited 0, which made failures look like success to CI.)
if [[ -z "${GH_URL}" ]]; then
    echo "GH_URL is not set, required. Ex: https://github.com/aliworkshop/terra-sdk"
    exit 1
fi
if [[ -z "${DAEMON}" ]]; then
    echo "DAEMON is not set, required. Ex: simd, gaiad etc"
    exit 1
fi
if [[ -z "${DENOM}" ]]; then
    echo "DENOM is not set, required. Ex: stake, uatom etc"
    exit 1
fi
if [[ -z "${GO_VERSION}" ]]; then
    echo "GO_VERSION is not set, required. Ex: 1.15.2, 1.16.6 etc."
    exit 1
fi
if [[ -z "${CHAIN_ID}" ]]; then
    echo "CHAIN_ID is not set, required."
    exit 1
fi
if [[ -z "${PRELAUNCH_GENESIS_URL}" ]]; then
    echo "PRELAUNCH_GENESIS_URL (genesis file url) is not set, required."
    exit 1
fi
if [[ -z "${GENTXS_DIR}" ]]; then
    echo "GENTXS_DIR is not set, required."
    exit 1
fi
# command_exists NAME -- succeeds iff NAME resolves to a command, builtin,
# alias, or function on this system.
command_exists () {
    type "$1" > /dev/null 2>&1
}
# Install Go via apt + the official tarball when it is not already present.
if command_exists go ; then
echo "Golang is already installed"
else
# NOTE(review): -s suppresses echo of the typed answer; for a y/n prompt
# consider dropping -s so the user sees their input.
read -s -p "Installing go using apt. Do you want to proceed (y/n)?: " useApt
if [ "$useApt" != "y" ]; then
echo
echo "Install go manually and execute this script"
exit 0;
fi
sudo apt update
sudo apt install build-essential -y
wget https://dl.google.com/go/go$GO_VERSION.linux-amd64.tar.gz
tar -xvf go$GO_VERSION.linux-amd64.tar.gz
sudo mv go /usr/local
# Persist the Go environment for future shells, then load it now.
echo "" >> ~/.profile
echo 'export GOPATH=$HOME/go' >> ~/.profile
echo 'export GOROOT=/usr/local/go' >> ~/.profile
echo 'export GOBIN=$GOPATH/bin' >> ~/.profile
echo 'export PATH=$PATH:/usr/local/go/bin:$GOBIN' >> ~/.profile
. ~/.profile
go version
fi
# For every gentx JSON file: init a throwaway chain, import the pre-launch
# genesis, verify the gentx's denom, collect gentxs, validate the genesis and
# boot the node briefly to prove the gentx is usable. Each iteration cleans up.
if [ "$(ls -A $GENTXS_DIR)" ]; then
echo "Install $DAEMON"
git clone $GH_URL $DAEMON
cd $DAEMON
git fetch && git checkout $BINARY_VERSION
make install
$DAEMON version
for GENTX_FILE in $GENTXS_DIR/*.json; do
if [ -f "$GENTX_FILE" ]; then
# set -e from here on: any failing step aborts the whole script.
set -e
echo "GentxFile::::"
echo $GENTX_FILE
echo "...........Init a testnet.............."
$DAEMON init --chain-id $CHAIN_ID validator --home $DAEMON_HOME
$DAEMON keys add $RANDOM_KEY --keyring-backend test --home $DAEMON_HOME
echo "..........Fetching genesis......."
curl -s $PRELAUNCH_GENESIS_URL > $DAEMON_HOME/config/genesis.json
# this genesis time is different from original genesis time, just for validating gentx.
sed -i '/genesis_time/c\ \"genesis_time\" : \"2021-01-01T00:00:00Z\",' $DAEMON_HOME/config/genesis.json
GENACC=$(cat $GENTX_FILE | sed -n 's|.*"delegator_address":"\([^"]*\)".*|\1|p')
denomquery=$(jq -r '.body.messages[0].value.denom' $GENTX_FILE)
amountquery=$(jq -r '.body.messages[0].value.amount' $GENTX_FILE)
# only allow $DENOM tokens to be bonded
if [ $denomquery != $DENOM ]; then
echo "invalid denomination"
exit 1
fi
# Fund a scratch validator key so collect-gentxs has a funded account.
$DAEMON add-genesis-account $RANDOM_KEY 1000000000000000$DENOM --home $DAEMON_HOME \
--keyring-backend test
$DAEMON gentx $RANDOM_KEY 900000000000000$DENOM --home $DAEMON_HOME \
--keyring-backend test --chain-id $CHAIN_ID
cp $GENTX_FILE $DAEMON_HOME/config/gentx/
echo "..........Collecting gentxs......."
$DAEMON collect-gentxs --home $DAEMON_HOME
$DAEMON validate-genesis --home $DAEMON_HOME
echo "..........Starting node......."
$DAEMON start --home $DAEMON_HOME &
sleep 10s
echo "...checking network status.."
echo "if this fails, most probably the gentx with address $GENACC is invalid"
$DAEMON status --node http://localhost:26657
echo "...Cleaning the stuff..."
killall $DAEMON >/dev/null 2>&1
sleep 2s
rm -rf $DAEMON_HOME
fi
done
else
echo "$GENTXS_DIR is empty, nothing to validate"
fi
import numpy as np
# Source quadrilateral for a perspective transform, read from the four
# corner points of a pre-existing configuration.
# NOTE(review): `perspective_params` is not defined in this snippet — it must
# be provided by the surrounding context before this runs; confirm.
src = np.array(
    [
        perspective_params['src']['ul'],  # upper left
        perspective_params['src']['ur'],  # upper right
        perspective_params['src']['lr'],  # lower right
        perspective_params['src']['ll'],  # lower left
    ],
    np.int32
)
# Destination quadrilateral: maps the source corners onto a 500x500 square,
# corners listed in the same (ul, ur, lr, ll) order as `src`.
dst = np.array(
    [
        [0, 0],      # upper left
        [500, 0],    # upper right
        [500, 500],  # lower right
        [0, 500],    # lower left
    ],
    np.int32
)
|
#include <stdio.h>
#include "stack.h"
/* Initialize the stack to the empty state (top index of -1). */
void init_stack(ArrayStack *stack) {
    stack->top = -1;
}
/* Return TRUE when the stack holds no elements, FALSE otherwise. */
int stack_empty(ArrayStack *stack) {
    if (stack->top == -1)
        return TRUE;
    return FALSE;
}
/* Push element a onto the stack.
 * Returns TRUE on success, FALSE when the stack is already full. */
int push(ArrayStack *stack, void *a) {
    if (stack->top == MAXSIZE-1)
        return FALSE; /* stack is full */
    ++stack->top;
    stack->data[stack->top] = a;
    return TRUE;
}
/* Pop the top element into *x.
 * Returns TRUE on success, FALSE when the stack is empty. */
int pop(ArrayStack *stack, void **x) {
    if (stack->top == -1)
        return FALSE;
    *x = stack->data[stack->top];
    stack->top--;
    return TRUE;
}
/* Copy the top element into *x without removing it.
 * Returns TRUE on success, FALSE when the stack is empty. */
int get_top(ArrayStack *stack, void **x) {
    if (stack->top == -1)
        return FALSE;
    *x = stack->data[stack->top];
    return TRUE;
}
|
#!/bin/sh
#
# ignore-tidy-linelength
# Builds wasi-libc at a pinned commit using a mirrored clang+llvm 9 toolchain,
# installs it to /wasm32-wasi, then removes the build inputs.
set -ex
# Originally from https://releases.llvm.org/9.0.0/clang+llvm-9.0.0-x86_64-linux-gnu-ubuntu-14.04.tar.xz
curl https://ci-mirrors.rust-lang.org/rustc/clang%2Bllvm-9.0.0-x86_64-linux-gnu-ubuntu-14.04.tar.xz | \
tar xJf -
export PATH=`pwd`/clang+llvm-9.0.0-x86_64-linux-gnu-ubuntu-14.04/bin:$PATH
git clone https://github.com/WebAssembly/wasi-libc
cd wasi-libc
# Pin to an exact commit for reproducible sysroot builds.
git reset --hard 215adc8ac9f91eb055311acc72683fd2eb1ae15a
make -j$(nproc) INSTALL_DIR=/wasm32-wasi install
cd ..
# Clean up sources and the toolchain tarball contents.
rm -rf wasi-libc
rm -rf clang+llvm*
|
curl -X POST -c "./cookiefile" \
-d '{"user":"username", "password":"secret"}' \
-H "accept: application/json" \
-H "Content-Type: application/json" \
"http://127.0.0.1:5000/login/ldap"
|
<filename>src/main/java/uk/ac/cam/ahk44/chess/Component.java
package uk.ac.cam.ahk44.chess;

/**
 * Marker annotation for classes managed as components.
 *
 * <p>NOTE(review): no {@code @Retention}/{@code @Target} meta-annotations are
 * declared, so retention defaults to CLASS (not visible via reflection at
 * runtime) — confirm this matches how the annotation is consumed.
 */
public @interface Component {
}
|
#!/bin/sh
# Remove the generated index.html (if any) from the current directory.
rm -rf ./index.html
|
#!/bin/sh
# CocoaPods-style embed-frameworks script: copies built frameworks into the
# app bundle, strips invalid architectures, and re-signs when required.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the active platform.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# install_framework PATH -- copies the framework at PATH into the app's
# Frameworks folder, resolving symlinks, stripping invalid architectures,
# re-signing, and (Xcode < 7) embedding linked Swift runtime dylibs.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
# Signs a framework with the provided identity, but only when a signing
# identity is configured and code signing is both required and allowed.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
# Strips architectures not listed in VALID_ARCHS from the binary in-place,
# so fat simulator/device frameworks can be submitted for device builds.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Debug and Release embed exactly the same frameworks, so both
# configurations are handled by a single combined condition.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/AFNetworking/AFNetworking.framework"
  install_framework "$BUILT_PRODUCTS_DIR/BZLib/BZLib.framework"
fi
|
import * as vscode from "vscode";
import * as path from "path";
import { resolveRoot } from "../utils";
import { NoteTreeItem } from "../models/notes";
import { newSearcher, Searcher } from "../search";
/**
 * Tree data provider that lists the most recent notes under the configured
 * note root, using the configured searcher backend (ripgrep).
 */
export class NotesTreeView implements vscode.TreeDataProvider<NoteTreeItem> {
  noteRoot: string;
  listRecentLimit: number;
  defaultExt: string;
  searcher: Searcher;

  constructor() {
    const config = vscode.workspace.getConfiguration("vsnowm");
    this.noteRoot = resolveRoot(config.get("defaultNotePath"));
    this.defaultExt = config.get("defaultExt") as string;
    this.searcher = newSearcher("ripgrep", this.defaultExt);
    this.listRecentLimit = config.get("listRecentLimit") as number;
  }

  /**
   * Returns the recent notes list.
   * NOTE(review): the `node` argument is ignored, so every level of the tree
   * returns the same flat list — confirm the tree is intended to be flat.
   */
  async getChildren(node?: NoteTreeItem) {
    // `await` already yields a resolved value; wrapping it in
    // Promise.resolve (as before) was redundant.
    return this.searcher.listNotes(this.noteRoot, this.listRecentLimit);
  }

  getTreeItem(node: NoteTreeItem): vscode.TreeItem {
    return node;
  }

  // Event plumbing used by VS Code to refresh the view.
  private _onDidChangeTreeData: vscode.EventEmitter<
    NoteTreeItem | undefined | null | void
  > = new vscode.EventEmitter<NoteTreeItem | undefined | null | void>();
  readonly onDidChangeTreeData: vscode.Event<
    NoteTreeItem | undefined | null | void
  > = this._onDidChangeTreeData.event;

  /** Signals VS Code to re-query the whole tree. */
  refresh(): void {
    this._onDidChangeTreeData.fire();
  }
}
|
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# Generated protobuf enum wrapper — regenerate rather than hand-edit
# (see the DO NOT EDIT banner above).
module Google
  module Ads
    module GoogleAds
      module V8
        module Errors
          # Container for enum describing possible custom interest errors.
          class CustomInterestErrorEnum
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods

            # Enum describing possible custom interest errors.
            module CustomInterestError
              # Enum unspecified.
              UNSPECIFIED = 0

              # The received error code is not known in this version.
              UNKNOWN = 1

              # Duplicate custom interest name ignoring case.
              NAME_ALREADY_USED = 2

              # In the remove custom interest member operation, both member ID and
              # pair [type, parameter] are not present.
              CUSTOM_INTEREST_MEMBER_ID_AND_TYPE_PARAMETER_NOT_PRESENT_IN_REMOVE = 3

              # The pair of [type, parameter] does not exist.
              TYPE_AND_PARAMETER_NOT_FOUND = 4

              # The pair of [type, parameter] already exists.
              TYPE_AND_PARAMETER_ALREADY_EXISTED = 5

              # Unsupported custom interest member type.
              INVALID_CUSTOM_INTEREST_MEMBER_TYPE = 6

              # Cannot remove a custom interest while it's still being targeted.
              CANNOT_REMOVE_WHILE_IN_USE = 7

              # Cannot mutate custom interest type.
              CANNOT_CHANGE_TYPE = 8
            end
          end
        end
      end
    end
  end
end
|
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { compose } from 'redux';
import uuid from 'uuid/v4';
import moment from 'moment';
import { startAddIdea } from '../../actions/ideas';
import requireAuth from '../../middleware/requireAuth';
class AddIdea extends Component {
state = {
category: 'Web App',
name: '',
description: '',
target: ''
};
handleCategoryChange = e => {
const category = e.target.value;
this.setState(() => ({ category }));
};
handleSubmit = e => {
e.preventDefault();
const id = uuid();
const createdAt = moment().format('YYYY-MM-DD HH:mm:ss.SSS');
this.props.dispatch(
startAddIdea({
id,
category: this.state.category,
name: this.state.name,
description: this.state.description,
target: this.state.target,
createdAt
})
);
this.props.history.push(`/create/${id}`);
};
render() {
return (
<div>
<div className="hero">
<div className="hero-body">
<div className="container">
<h2 className="title is-4">Let&aposs begin!!</h2>
<form onSubmit={this.handleSubmit}>
<div className="column">
<h3 className="title is-5">What do you want to create?</h3>
<div className="select is-medium">
<select
value={this.state.category}
onChange={this.handleCategoryChange}
>
<option value="Web App">Web App</option>
<option value="Mobile App">Mobile App</option>
<option value="Book">Book</option>
<option value="Business">Business</option>
<option value="Physical Product">Physical Product</option>
</select>
</div>
</div>
<div className="column">
<input
type="submit"
value="Continue"
className="button is-link"
/>
</div>
</form>
</div>
</div>
</div>
</div>
);
}
}
// Expose the ideas slice of the store to the component.
// NOTE(review): `ideas` is mapped but never read by AddIdea — confirm
// whether it can be dropped.
const mapStateToProps = state => ({
  ideas: state.ideas
});

// requireAuth wraps the connected component so only signed-in users reach it.
export default compose(
  requireAuth,
  connect(mapStateToProps)
)(AddIdea);
|
# Development-only Django settings: extends base settings with debug tooling.
# Never use this module in production (DEBUG on, wildcard hosts, static key).
from .base import * # noqa
DEBUG = True
TEMPLATES[0]["OPTIONS"]["debug"] = True
# Debug toolbar + django-extensions are dev-only additions.
INSTALLED_APPS += ["debug_toolbar", "django_extensions"]
MIDDLEWARE = ["debug_toolbar.middleware.DebugToolbarMiddleware"] + MIDDLEWARE
INTERNAL_IPS = [
    "127.0.0.1",
]
ALLOWED_HOSTS = ["*"]
# Show the toolbar for local requests, but never under the test client
# (SERVER_NAME == "testserver").
DEBUG_TOOLBAR_CONFIG = {
    "SHOW_TOOLBAR_CALLBACK": lambda r: r.environ.get("SERVER_NAME", None)
    != "testserver"
    and (r.META.get("REMOTE_ADDR", None) in INTERNAL_IPS)
}
# Static key is acceptable only because this file is dev-only.
SECRET_KEY = "development"
|
<!DOCTYPE html>
<!-- Minimal TinyMCE demo page: replaces the <textarea> with a rich editor
     configured with premium plugins (a11ychecker, powerpaste, comments, ...). -->
<html>
<head>
<title>Text Formatting</title>
<script type="text/javascript" src="lib/tinymce/tinymce.min.js"></script>
<script type="text/javascript">
tinymce.init({
selector: 'textarea',
plugins: 'a11ychecker advcode casechange formatpainter linkchecker autolink lists checklist media mediaembed pageembed powerpaste table advtable tinycomments tinymcespellchecker',
toolbar: 'a11ycheck addcomment showcomments casechange checklist code formatpainter pageembed table',
toolbar_mode: 'floating',
tinycomments_mode: 'embedded',
tinycomments_author: 'Author name',
});
</script>
</head>
<body>
<textarea>Type your text here...</textarea>
</body>
</html>
|
# Canonical party identifiers used as string constants.
REPUBLICAN = 'republican'
DEMOCRAT = 'democrat'
|
// Health-check style handler: echoes args.message, falling back to 'Pong'.
// Uses ||, so any falsy message ('', 0, null) also falls back to 'Pong'.
export default function ping(db, caller, args) {
  const message = (args && args.message) || 'Pong';
  return { message };
}
|
#!/bin/sh

test_description='prepare-commit-msg hook'

. ./test-lib.sh

test_expect_success 'with no hook' '
echo "foo" > file &&
git add file &&
git commit -m "first"
'

# set up fake editor for interactive editing
cat > fake-editor <<'EOF'
#!/bin/sh
exit 0
EOF
chmod +x fake-editor

## Not using test_set_editor here so we can easily ensure the editor variable
## is only set for the editor tests
FAKE_EDITOR="$(pwd)/fake-editor"
export FAKE_EDITOR

# now install hook that always succeeds and adds a message
HOOKDIR="$(git rev-parse --git-dir)/hooks"
HOOK="$HOOKDIR/prepare-commit-msg"
mkdir -p "$HOOKDIR"
echo "#!$SHELL_PATH" > "$HOOK"
# The hook rewrites the first line of the commit message with the "source"
# of the message (message/template/commit/default), appending "(no editor)"
# when no real editor was launched, so each test can assert which code path
# git took.
cat >> "$HOOK" <<'EOF'
if test "$2" = commit; then
source=$(git rev-parse "$3")
else
source=${2-default}
fi
if test "$GIT_EDITOR" = :; then
sed -e "1s/.*/$source (no editor)/" "$1" > msg.tmp
else
sed -e "1s/.*/$source/" "$1" > msg.tmp
fi
mv msg.tmp "$1"
exit 0
EOF
chmod +x "$HOOK"

echo dummy template > "$(git rev-parse --git-dir)/template"

test_expect_success 'with hook (-m)' '
echo "more" >> file &&
git add file &&
git commit -m "more" &&
test "`git log -1 --pretty=format:%s`" = "message (no editor)"
'

test_expect_success 'with hook (-m editor)' '
echo "more" >> file &&
git add file &&
GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit -e -m "more more" &&
test "`git log -1 --pretty=format:%s`" = message
'

test_expect_success 'with hook (-t)' '
echo "more" >> file &&
git add file &&
git commit -t "$(git rev-parse --git-dir)/template" &&
test "`git log -1 --pretty=format:%s`" = template
'

test_expect_success 'with hook (-F)' '
echo "more" >> file &&
git add file &&
(echo more | git commit -F -) &&
test "`git log -1 --pretty=format:%s`" = "message (no editor)"
'

test_expect_success 'with hook (-F editor)' '
echo "more" >> file &&
git add file &&
(echo more more | GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit -e -F -) &&
test "`git log -1 --pretty=format:%s`" = message
'

test_expect_success 'with hook (-C)' '
head=`git rev-parse HEAD` &&
echo "more" >> file &&
git add file &&
git commit -C $head &&
test "`git log -1 --pretty=format:%s`" = "$head (no editor)"
'

test_expect_success 'with hook (editor)' '
echo "more more" >> file &&
git add file &&
GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit &&
test "`git log -1 --pretty=format:%s`" = default
'

test_expect_success 'with hook (--amend)' '
head=`git rev-parse HEAD` &&
echo "more" >> file &&
git add file &&
GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit --amend &&
test "`git log -1 --pretty=format:%s`" = "$head"
'

test_expect_success 'with hook (-c)' '
head=`git rev-parse HEAD` &&
echo "more" >> file &&
git add file &&
GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit -c $head &&
test "`git log -1 --pretty=format:%s`" = "$head"
'

test_expect_success 'with hook (merge)' '
test_when_finished "git checkout -f master" &&
git checkout -B other HEAD@{1} &&
echo "more" >>file &&
git add file &&
git commit -m other &&
git checkout - &&
git merge --no-ff other &&
test "`git log -1 --pretty=format:%s`" = "merge (no editor)"
'

test_expect_success 'with hook and editor (merge)' '
test_when_finished "git checkout -f master" &&
git checkout -B other HEAD@{1} &&
echo "more" >>file &&
git add file &&
git commit -m other &&
git checkout - &&
env GIT_EDITOR="\"\$FAKE_EDITOR\"" git merge --no-ff -e other &&
test "`git log -1 --pretty=format:%s`" = "merge"
'

# Replace the hook with one that always fails, to verify commit aborts.
cat > "$HOOK" <<'EOF'
#!/bin/sh
exit 1
EOF

test_expect_success 'with failing hook' '
test_when_finished "git checkout -f master" &&
head=`git rev-parse HEAD` &&
echo "more" >> file &&
git add file &&
test_must_fail env GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit -c $head
'

test_expect_success 'with failing hook (--no-verify)' '
test_when_finished "git checkout -f master" &&
head=`git rev-parse HEAD` &&
echo "more" >> file &&
git add file &&
test_must_fail env GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit --no-verify -c $head
'

test_expect_success 'with failing hook (merge)' '
test_when_finished "git checkout -f master" &&
git checkout -B other HEAD@{1} &&
echo "more" >> file &&
git add file &&
rm -f "$HOOK" &&
git commit -m other &&
write_script "$HOOK" <<-EOF &&
exit 1
EOF
git checkout - &&
test_must_fail git merge --no-ff other
'

test_done
|
<filename>app/src/main/java/com/garfield/alfred/robbin/iflytek/util/UnderstandUtil.java<gh_stars>0
package com.garfield.alfred.robbin.iflytek.util;
import android.content.Context;
import android.os.Bundle;
import android.os.Environment;
import android.widget.Toast;
import android.util.Log;
import com.iflytek.cloud.ErrorCode;
import com.iflytek.cloud.InitListener;
import com.iflytek.cloud.SpeechConstant;
import com.iflytek.cloud.SpeechError;
import com.iflytek.cloud.SpeechUnderstander;
import com.iflytek.cloud.SpeechUnderstanderListener;
import com.iflytek.cloud.UnderstanderResult;
/**
* 语义理解组件
*
* Created by 汪长鸣 on 2016/12/15.
*/
/**
 * Helper around the iFlytek SpeechUnderstander: creates the understander,
 * configures language/VAD parameters and starts a one-shot understanding
 * session. (Comments translated from Chinese; runtime strings unchanged.)
 */
public class UnderstandUtil {
    public static void understand(final Context context) {
        // 1. Create the speech/semantic understanding object.
        SpeechUnderstander understander = SpeechUnderstander.createUnderstander(context, new InitListener() {
            // Initialization callback.
            @Override
            public void onInit(int code) {
                Log.d("DEBUG", "speechUnderstanderListener init() code = " + code);
                if (code != ErrorCode.SUCCESS) {
                    // Runtime log message kept byte-identical (user-facing text).
                    Log.d("ERROR", "初始化失败,错误码:" + code);
                }
            }
        });
        // 2. Set parameters; semantic scenarios are configured at http://osp.voicecloud.cn/
        // Set language and region.
        understander.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
        understander.setParameter(SpeechConstant.ACCENT, "zh_cn");
        /*String lang = mSharedPreferences.getString("understander_language_preference", "mandarin");
        if (lang.equals("en_us")) {
        mSpeechUnderstander.setParameter(SpeechConstant.LANGUAGE, "en_us");
        }else {
        mSpeechUnderstander.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
        mSpeechUnderstander.setParameter(SpeechConstant.ACCENT, lang);
        }*/
        // Leading silence timeout: how long the user may stay silent before
        // the session times out (ms).
        understander.setParameter(SpeechConstant.VAD_BOS, "4000");
        // Trailing silence timeout: how long after the user stops speaking
        // recording is automatically ended (ms).
        understander.setParameter(SpeechConstant.VAD_EOS, "1000");
        // Punctuation in results; default: 1 (punctuated).
        understander.setParameter(SpeechConstant.ASR_PTT, "1");
        // Audio save path (pcm/wav supported); writing to the SD card needs the
        // WRITE_EXTERNAL_STORAGE permission.
        // Note: the AUDIO_FORMAT parameter requires an updated client version.
        //understander.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
        //understander.setParameter(SpeechConstant.ASR_AUDIO_PATH, Environment.getExternalStorageDirectory()+"/msc/sud.wav");
        // 3. Start semantic understanding.
        understander.startUnderstanding(new SpeechUnderstanderListener(){
            // Recording started.
            public void onBeginOfSpeech() {}
            // Volume level 0~30.
            public void onVolumeChanged(int volume, byte[] bytes){}
            // Recording ended.
            public void onEndOfSpeech() {}
            // Extension callback.
            public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {}
            // Session error callback.
            public void onError(SpeechError error) {}
            // XmlParser is the result parsing class; see SpeechDemo.
            // NOTE(review): the result text is extracted but never used or
            // surfaced to the caller — confirm intended behavior.
            public void onResult(UnderstanderResult result) {
                String text = result.getResultString();
            }
        });
    }
}
|
<gh_stars>0
package weixin.liuliangbao.jsonbean.ViewBean;
import javax.servlet.http.HttpServletRequest;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
/*import net.sf.json.JSONArray;
import net.sf.json.JSONObject;*/
/**
* Created by aa on 2015/11/26.
*/
/**
 * Reads the raw body of an HTTP request as a UTF-8 string (typically a JSON
 * payload posted by a client).
 */
public class Post implements java.io.Serializable {
    /**
     * Consumes the request's input stream and returns the full body text.
     *
     * @param request the servlet request whose body is read
     * @return the body as a single string, or {@code null} if reading failed
     */
    public String getJson(HttpServletRequest request) {
        StringBuilder stringBuilder = new StringBuilder();
        String str = null;
        // try-with-resources closes the reader (and the underlying stream)
        // on every path; the original leaked the BufferedReader.
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(request.getInputStream(), "UTF-8"))) {
            String line;
            while ((line = br.readLine()) != null) {
                stringBuilder.append(line);
            }
            str = stringBuilder.toString();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return str;
    }
}
|
<filename>core/src/temp/java/awt/Composite.java
package temp.java.awt;

/**
 * Placeholder stand-in for {@code java.awt.Composite} used where the real
 * AWT classes are unavailable. Intentionally empty.
 */
public interface Composite {
}
|
# Map each color name to its hex code, then look up the code of the first color.
colors = ["red", "orange", "green", "blue"]
color_codes = ["#FF0000", "#FFA500", "#008000", "#0000FF"]
color_code_dict = {name: hex_code for name, hex_code in zip(colors, color_codes)}
code = color_code_dict[colors[0]]
|
def parse_tokens(tokens):
    """Convert raw token tuples into parsed objects via a dispatch table.

    Each token is a sequence whose first element names its kind. Tokens with
    an unrecognized kind are silently skipped, matching the original chain of
    if/elif branches.
    """
    handlers = {
        'daddress': daddress,
        'dcommand': dcommand,
        'doaddress': doaddress,
        'dosymbol': dosymbol,
    }
    parsed_tokens = []
    for token in tokens:
        handler = handlers.get(token[0])
        if handler is not None:
            parsed_tokens.append(handler(token))
    return parsed_tokens
|
import React, {Component} from 'react';
import {connect} from 'react-redux';
import {Switch, Route} from 'react-router-dom'
import Header from './components/Header';
import Body from './components/Body';
import Footer from "./components/Footer/Footer";
import HomePage from "./scenes/Homepage";
// Top-level layout: header/body/footer with routed content inside the body.
// NOTE(review): `path='/'` without `exact` matches every URL prefixed by /;
// with a single route this is harmless, but confirm before adding routes.
class App extends Component {
  render() {
    return (
      <div className="site">
        <Header/>
        <Body>
          <Switch>
            <Route path='/' component={HomePage}/>
          </Switch>
        </Body>
        <Footer/>
      </div>
    );
  }
}

export default connect()(App);
|
#!/bin/bash
# DigitalOcean droplet health check: lists droplet IPs via the API and
# verifies a python process is running on each over SSH.
# Reads the API token from ~/digitalocean.token.
TOKEN=`cat ${HOME}/digitalocean.token`

# api METHOD PATH -- authenticated request against the DigitalOcean v2 API.
api(){
HTTP_METHOD=$1
OBJECT=$2
curl -s -X $HTTP_METHOD \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $TOKEN" \
"https://api.digitalocean.com/v2/$OBJECT"
}
# check HOST -- SSH to HOST and look for a running python process;
# if none is found, open an interactive shell so the operator can intervene.
check(){
each=${1}
echo ""
echo "${each}"
# NOTE(review): `ps au | grep python` also matches the grep itself on some
# systems; result may be non-empty even without a real python process.
result=`ssh ${each} "ps au | grep python"`
echo "\"${result}\""
if [ -z "${result}" ]; then
echo "No Python process found running on ${each}, dropping into shell."
ssh ${each}
fi
echo ""
}
# Fetch all droplets, extract their IPv4 addresses, and check each host.
droplets=`api GET droplets`
addresses=`echo ${droplets} | python -m json.tool | \
grep -E ip_address | awk '{print $2}' | sed -E "s/[^0-9.]//g"`
echo ${addresses}
for each in ${addresses}; do
check ${each}
done
#check twoism.meow
|
<filename>app/controllers/drawings_controller.rb
# CRUD routes for drawings. All reads/mutations are scoped to the signed-in
# user; show/edit/delete verify ownership, and (fixed here) update does too.
class DrawingsController < ApplicationController
  # List the current user's drawings.
  get '/drawings' do
    @drawings = Drawing.where("user_id = ?", Helpers.current_user(session).id)
    erb :'/drawings/index'
  end

  get '/drawings/new' do
    erb :'drawings/new'
  end

  # Create a drawing owned by the current user; re-render the form on
  # validation errors.
  post '/drawings' do
    @drawing = Drawing.create(
      content: params[:content],
      title: params[:title],
      theme: Theme.find_by(name: params[:theme]),
      user_id: Helpers.current_user(session).id
    )
    if @drawing.errors.any?
      @errors = @drawing.errors.full_messages
      flash[:error] = @errors.join(". ")
      redirect '/drawings/new'
    else
      redirect "/drawings/#{@drawing.id}"
    end
  end

  # Show a drawing, but only to its owner.
  get '/drawings/:id' do
    @drawing = Drawing.find(params[:id])
    if Helpers.current_user(session).id == @drawing.user_id
      erb :'/drawings/show'
    else
      redirect '/drawings'
    end
  end

  get '/drawings/:id/edit' do
    if !Helpers.logged_in?(session)
      redirect '/login'
    else
      @drawing = Drawing.find(params[:id])
      if @drawing.user_id == Helpers.current_user(session).id
        erb :'/drawings/edit'
      else
        redirect '/drawings'
      end
    end
  end

  patch '/drawings/:id' do
    if params[:title].empty? || params[:content].empty? || params[:theme].empty?
      flash[:error] = "Title, content, and/or theme can't be empty."
      redirect "/drawings/#{params[:id]}/edit"
    else
      @drawing = Drawing.find(params[:id])
      # Security fix: the update route previously skipped the ownership check
      # that every other mutating route performs, letting any logged-in user
      # modify anyone's drawing.
      if Helpers.current_user(session).id == @drawing.user_id
        @drawing.update(title: params[:title], content: params[:content], theme: Theme.find_by(name: params[:theme]))
        @drawing.save
        erb :'drawings/show'
      else
        redirect '/drawings'
      end
    end
  end

  # Delete a drawing, but only for its owner.
  get '/drawings/:id/delete' do
    @drawing = Drawing.find(params[:id])
    if Helpers.current_user(session).id == @drawing.user_id
      @drawing.destroy
      redirect '/drawings'
    else
      redirect "/drawings/#{@drawing.id}"
    end
  end
end
|
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Component } from 'react';
import PropTypes from 'prop-types';
import Immutable from 'immutable';
import { connect } from 'react-redux';
import { withRouter } from 'react-router';
import { getDataset, getHistoryFromLocation, getExploreState } from '@app/selectors/explore';
import { isSqlChanged } from '@app/sagas/utils';
// Maps redux state to DatasetChangesView props.
// withRouter is required so that ownProps.location is populated.
const mapStateToProp = (state, ownProps) => {
  const { location } = ownProps;
  const isNewQuery = location.pathname === '/new_query';
  // Bug fix: the old code destructured `query` from `location || {}`, but
  // `location` itself is present — it is `location.query` that can be
  // undefined (no query string), which then crashed on `query.version`.
  const query = location.query || {};
  let datasetSql = '';
  if (!isNewQuery) {
    const dataset = getDataset(state, query.version);
    if (dataset) {
      datasetSql = dataset.get('sql');
    }
  }
  return {
    datasetSql,
    history: getHistoryFromLocation(state, location),
    currentSql: getExploreState(state).view.currentSql
  };
};
// adds getDatasetChangeDetails to childComp properties
// getDatasetChangeDetails: () => {
// sqlChanged: bool,
// historyChanged: bool
// }
// Renders `childComp`, forwarding all props plus a getDatasetChangeDetails
// callback that reports whether the dataset's SQL or history has been edited.
export class DatasetChangesView extends Component {
  static propTypes = {
    currentSql: PropTypes.string,
    datasetSql: PropTypes.string,
    history: PropTypes.instanceOf(Immutable.Map),
    childComp: PropTypes.any
  };

  // Returns { sqlChanged, historyChanged } for the current props.
  hasChanges = () => {
    const { datasetSql, currentSql, history } = this.props;
    return {
      // leaving modified sql?
      // currentSql === null means sql is unchanged.
      sqlChanged: isSqlChanged(datasetSql, currentSql),
      historyChanged: history ? history.get('isEdited') : false
    };
  }

  render() {
    // Strip childComp out of the props so only data props reach the child.
    const {
      childComp: ChildComponent,
      ...rest
    } = this.props;
    return <ChildComponent getDatasetChangeDetails={this.hasChanges} {...rest} />;
  }
}
// withRouter is required for mapStateToProp (it supplies props.location).
export const DatasetChanges = withRouter(connect(mapStateToProp)(DatasetChangesView));

// HOC form: withDatasetChanges(Comp) renders Comp with the
// getDatasetChangeDetails prop injected by DatasetChanges.
export const withDatasetChanges = childComp => (props) => {
  return <DatasetChanges childComp={childComp} {...props} />;
};
|
<reponame>sergeytkachenko/siesta-template<gh_stars>100-1000
/**
 * @private
 * Base class for platform-specific camera implementations. Subclasses
 * override the no-op `capture`/`getPicture`/`cleanup` methods; the numeric
 * maps below translate option names into the codes those backends expect.
 */
Ext.define('Ext.device.camera.Abstract', {
    // Where the image is picked from.
    source: {
        library: 0,
        camera: 1,
        album: 2
    },
    // The form in which the captured image is returned.
    destination: {
        data: 0,
        file: 1,
        'native': 2
    },
    // Returned image encoding; jpeg and jpg are aliases for the same code.
    encoding: {
        jpeg: 0,
        jpg: 0,
        png: 1
    },
    // Media types that may be captured.
    media: {
        picture: 0,
        video: 1,
        all: 2
    },
    // Which device camera to use.
    direction: {
        back: 0,
        front:1
    },
    /**
     * Allows you to capture a photo.
     *
     * @param {Object} options
     * The options to use when taking a photo.
     *
     * @param {Function} options.success
     * The success callback which is called when the photo has been taken.
     *
     * @param {String} options.success.image
     * The image which was just taken, either a base64 encoded string or a URI depending on which
     * option you chose (destination).
     *
     * @param {Function} options.failure
     * The function which is called when something goes wrong.
     *
     * @param {Object} options.scope
     * The scope in which to call the `success` and `failure` functions, if specified.
     *
     * @param {Number} options.quality
     * The quality of the image which is returned in the callback. This should be a percentage.
     *
     * @param {String} options.source
     * The source of where the image should be taken. Available options are:
     *
     * - **album** - prompts the user to choose an image from an album
     * - **camera** - prompts the user to take a new photo
     * - **library** - prompts the user to choose an image from the library
     *
     * @param {String} options.destination
     * The destination of the image which is returned. Available options are:
     *
     * - **data** - returns a base64 encoded string
     * - **file** - returns the file's URI
     *
     * @param {String} options.encoding
     * The encoding of the returned image. Available options are:
     *
     * - **jpg**
     * - **png**
     *
     * @param {Number} options.width
     * The width of the image to return
     *
     * @param {Number} options.height
     * The height of the image to return
     */
    capture: Ext.emptyFn,

    getPicture: Ext.emptyFn,

    cleanup: Ext.emptyFn
});
|
#!/usr/bin/env bash
koopa_kallisto_quant_single_end() {
    # """
    # Run kallisto on multiple single-end FASTQ files.
    # @note Updated 2022-03-25.
    #
    # @examples
    # > koopa_kallisto_quant_single_end \
    # >     --fastq-dir='fastq' \
    # >     --fastq-tail='_001.fastq.gz' \
    # >     --output-dir='kallisto'
    # """
    local dict fastq_file fastq_files
    koopa_assert_has_args "$#"
    declare -A dict=(
        # e.g. 'fastq'.
        [fastq_dir]=''
        # e.g. '_001.fastq.gz'.
        [fastq_tail]=''
        # e.g. 'kallisto-index'.
        [index_dir]=''
        [mode]='single-end'
        # e.g. 'kallisto'.
        [output_dir]=''
    )
    while (("$#"))
    do
        case "$1" in
            # Key-value pairs --------------------------------------------------
            '--fastq-dir='*)
                dict[fastq_dir]="${1#*=}"
                shift 1
                ;;
            '--fastq-dir')
                dict[fastq_dir]="${2:?}"
                shift 2
                ;;
            '--fastq-tail='*)
                dict[fastq_tail]="${1#*=}"
                shift 1
                ;;
            '--fastq-tail')
                # Bug fix: this branch previously assigned to
                # 'dict[fastq-tail]' (hyphen, a different key), so the
                # '--fastq-tail VALUE' form never populated
                # 'dict[fastq_tail]' and the required-arg check failed.
                dict[fastq_tail]="${2:?}"
                shift 2
                ;;
            '--index-dir='*)
                dict[index_dir]="${1#*=}"
                shift 1
                ;;
            '--index-dir')
                dict[index_dir]="${2:?}"
                shift 2
                ;;
            '--output-dir='*)
                dict[output_dir]="${1#*=}"
                shift 1
                ;;
            '--output-dir')
                dict[output_dir]="${2:?}"
                shift 2
                ;;
            # Other ------------------------------------------------------------
            *)
                koopa_invalid_arg "$1"
                ;;
        esac
    done
    koopa_assert_is_set \
        '--fastq-dir' "${dict[fastq_dir]}" \
        '--fastq-tail' "${dict[fastq_tail]}" \
        '--index-dir' "${dict[index_dir]}" \
        '--output-dir' "${dict[output_dir]}"
    koopa_assert_is_dir "${dict[fastq_dir]}" "${dict[index_dir]}"
    # Canonicalize paths before logging and processing.
    dict[fastq_dir]="$(koopa_realpath "${dict[fastq_dir]}")"
    dict[index_dir]="$(koopa_realpath "${dict[index_dir]}")"
    dict[output_dir]="$(koopa_init_dir "${dict[output_dir]}")"
    koopa_h1 'Running kallisto quant.'
    koopa_dl \
        'Mode' "${dict[mode]}" \
        'Index dir' "${dict[index_dir]}" \
        'FASTQ dir' "${dict[fastq_dir]}" \
        'FASTQ tail' "${dict[fastq_tail]}" \
        'Output dir' "${dict[output_dir]}"
    # Collect the FASTQs matching the configured tail, sorted for stable order.
    readarray -t fastq_files <<< "$( \
        koopa_find \
            --max-depth=1 \
            --min-depth=1 \
            --pattern="*${dict[fastq_tail]}" \
            --prefix="${dict[fastq_dir]}" \
            --sort \
            --type='f' \
    )"
    if koopa_is_array_empty "${fastq_files[@]:-}"
    then
        koopa_stop "No FASTQs ending with '${dict[fastq_tail]}'."
    fi
    koopa_alert_info "$(koopa_ngettext \
        --num="${#fastq_files[@]}" \
        --msg1='sample' \
        --msg2='samples' \
        --suffix=' detected.' \
    )"
    # Quantify each sample independently.
    for fastq_file in "${fastq_files[@]}"
    do
        koopa_kallisto_quant_single_end_per_sample \
            --fastq-file="$fastq_file" \
            --fastq-tail="${dict[fastq_tail]}" \
            --index-dir="${dict[index_dir]}" \
            --output-dir="${dict[output_dir]}"
    done
    koopa_alert_success 'kallisto quant was successful.'
    return 0
}
|
import flask

# Minimal Flask application exposing a single JSON endpoint.
app = flask.Flask(__name__)


@app.route('/data')
def serve_json():
    """Serve the hard-coded list of events as a JSON response."""
    events = [
        {'name': 'Easter Sunday', 'date': '4/12/2020', 'description': 'Annual celebration of the resurrection of Jesus.'},
        {'name': 'Mothers Day', 'date': '5/10/2020', 'description': 'Annual celebration of motherhood.'},
    ]
    return flask.jsonify(events)


if __name__ == '__main__':
    # Run the development server when executed directly.
    app.run()
|
<reponame>despo/apply-for-teacher-training<gh_stars>0
require 'rails_helper'

# Covers SupportInterface::OrganisationPermissionsExport, which flattens
# provider-relationship-permission audit entries into rows for export.
RSpec.describe SupportInterface::OrganisationPermissionsExport do
  let(:training_provider) { create(:provider) }
  let(:ratifying_provider) { create(:provider) }
  let(:provider_relationship_permissions) do
    create(:provider_relationship_permissions,
           ratifying_provider: ratifying_provider,
           training_provider: training_provider)
  end
  let(:audit_user) { create(:provider_user, providers: [audit_user_provider]) }
  let(:audit_user_provider) { create(:provider) }
  let(:audit_entry) do
    create(
      :provider_relationship_permissions_audit,
      provider_relationship_permissions: provider_relationship_permissions,
      changes: changes,
      user: audit_user,
    )
  end
  # Audited changes: a `[from, to]` pair records a flag flip, while a bare
  # boolean records the value set on creation.
  let(:changes) do
    {
      'training_provider_can_make_decisions' => [false, true],
      'ratifying_provider_can_make_decisions' => [false, true],
      'training_provider_can_view_diversity_information' => [false, true],
      'ratifying_provider_can_view_diversity_information' => [true, false],
      'training_provider_can_view_safeguarding_information' => true,
      'ratifying_provider_can_view_safeguarding_information' => false,
    }
  end

  describe '#data_for_export' do
    it 'exports permissions changes' do
      audit_entry # reference the lazy `let` so the record exists before export
      exported_data = described_class.new.data_for_export
      # Destructure one exported row in column order.
      created_at, user_id, username, provider_code, provider_name,
      training_provider_code, training_provider_name,
      training_provider_permissions_enabled, training_provider_permissions_disabled,
      ratifying_provider_code, ratifying_provider_name,
      ratifying_provider_permissions_enabled, ratifying_provider_permissions_disabled = exported_data.first.values

      expect(created_at.to_s).to eq(audit_entry.created_at.to_s)
      expect(user_id).to eq(audit_user.id)
      expect(username).to eq(audit_user.full_name)
      expect(provider_code).to eq(audit_user_provider.code)
      expect(provider_name).to eq(audit_user_provider.name)
      expect(training_provider_code).to eq(training_provider.code)
      expect(training_provider_name).to eq(training_provider.name)
      expect(training_provider_permissions_enabled).to eq('make_decisions, view_diversity_information, view_safeguarding_information')
      expect(training_provider_permissions_disabled).to eq('')
      expect(ratifying_provider_code).to eq(ratifying_provider.code)
      expect(ratifying_provider_name).to eq(ratifying_provider.name)
      expect(ratifying_provider_permissions_enabled).to eq('make_decisions')
      expect(ratifying_provider_permissions_disabled).to eq('view_diversity_information, view_safeguarding_information')
    end

    context 'for audit entries made by support users' do
      let(:audit_user) { create(:support_user) }

      it 'omits provider information' do
        audit_entry
        exported_data = described_class.new.data_for_export
        created_at, user_id, username, provider_code, provider_name,
        _training_provider_code, _training_provider_name,
        _training_provider_permissions_enabled, _training_provider_permissions_disabled,
        _ratifying_provider_code, _ratifying_provider_name,
        _ratifying_provider_permissions_enabled, _ratifying_provider_permissions_disabled = exported_data.first.values

        expect(created_at.to_s).to eq(audit_entry.created_at.to_s)
        expect(user_id).to eq(audit_user.id)
        expect(username).to eq("#{audit_user.first_name} #{audit_user.last_name}")
        expect(provider_code).to be_nil
        expect(provider_name).to be_nil
      end
    end
  end
end
|
<filename>src/main/java/org/vertx/java/core/cluster/spi/hazelcast/HazelcastAsyncMultiMap.java
package org.vertx.java.core.cluster.spi.hazelcast;
import org.vertx.java.core.BlockingAction;
import org.vertx.java.core.CompletionHandler;
import org.vertx.java.core.Deferred;
import org.vertx.java.core.cluster.spi.AsyncMultiMap;
import java.util.Collection;
/**
* @author <a href="http://tfox.org"><NAME></a>
*/
/**
 * {@code AsyncMultiMap} backed by a Hazelcast distributed multi-map. Each
 * operation wraps the blocking Hazelcast call in a {@code BlockingAction}
 * and reports the outcome through the supplied {@code CompletionHandler}.
 */
public class HazelcastAsyncMultiMap<K, V> implements AsyncMultiMap<K, V> {

  private final com.hazelcast.core.MultiMap<K, V> map;

  public HazelcastAsyncMultiMap(com.hazelcast.core.MultiMap<K, V> map) {
    this.map = map;
  }

  /** Asynchronously associates {@code v} with {@code k}. */
  @Override
  public void put(final K k, final V v, CompletionHandler<Void> completionHandler) {
    Deferred<Void> deferred = new BlockingAction<Void>() {
      public Void action() throws Exception {
        map.put(k, v);
        return null;
      }
    };
    deferred.handler(completionHandler);
    deferred.execute();
  }

  /** Asynchronously fetches every value associated with {@code k}. */
  @Override
  public void get(final K k, CompletionHandler<Collection<V>> completionHandler) {
    Deferred<Collection<V>> deferred = new BlockingAction<Collection<V>>() {
      public Collection<V> action() throws Exception {
        return map.get(k);
      }
    };
    deferred.handler(completionHandler);
    deferred.execute();
  }

  /** Asynchronously removes the pairing of {@code k} and {@code v}. */
  @Override
  public void remove(final K k, final V v, CompletionHandler<Boolean> completionHandler) {
    Deferred<Boolean> deferred = new BlockingAction<Boolean>() {
      public Boolean action() throws Exception {
        return map.remove(k, v);
      }
    };
    deferred.handler(completionHandler);
    deferred.execute();
  }
}
|
#!/firmadyne/sh
# Append a diagnostic snapshot (timestamp, environment, process stat when
# available, then a blank separator line) to the command-injection log.
LOG=/firmadyne/os_cmd_injection_log
/firmadyne/busybox date >> "$LOG"
/firmadyne/busybox env >> "$LOG"
if [ -e /proc/self/stat ]; then
    /firmadyne/busybox cat /proc/self/stat >> "$LOG"
fi
/firmadyne/busybox echo "" >> "$LOG"
|
import Data from './data-service';

const MOVIES_URL = 'http://starlord.hackerearth.com/simility/movieslisting';

// Thin wrapper around window.localStorage that lazily populates a key from
// the remote movies listing on first access.
let myLocalStorage = {
    // Read `key` and deserialize it; JSON.parse(null) yields null when absent.
    get(key) {
        return JSON.parse(localStorage.getItem(key));
    },
    // Resolve with the cached value for `key`, fetching and caching it first
    // when the key is not present yet.
    save(key) {
        return new Promise(function (resolve, reject) {
            if (!localStorage[key]) {
                Data.getData(MOVIES_URL)
                    .then((res) => {
                        // NOTE(review): `res` is stored verbatim while get()
                        // JSON.parses it — confirm Data.getData resolves with
                        // a JSON string rather than a parsed object.
                        localStorage.setItem(key, res);
                        resolve(localStorage.getItem(key));
                    })
                    // Bug fix: the rejection path was commented out (and used
                    // the non-standard `.error`), so the promise never
                    // settled when the fetch failed.
                    .catch((e) => reject(Error(e)));
            } else {
                // Bug fix: previously always read the hard-coded
                // 'mymovieslist' key instead of the requested `key`.
                resolve(myLocalStorage.get(key));
            }
        });
    }
};

export default myLocalStorage;
|
# Launch the application's entry point.
python3 apps/main.py
|
<filename>utils/cbvrp_eval.py
# __author__ = 'Hulu_Research'
import csv
import numpy as np
def read_csv(filepath):
    """
    Read a csv file into a list of row lists.

    :param filepath: the path of the csv file
    :return: list of list, one inner list of strings per csv row
    """
    # Use a context manager so the file handle is closed promptly; the old
    # code passed `open(...)` straight into csv.reader and leaked the handle.
    with open(filepath, 'r') as f:
        return [row for row in csv.reader(f)]
def read_csv_to_dict(filepath):
    """
    Read a csv file into a mapping from the first column to the rest.

    :param filepath: the path of the csv file
    :return: dict mapping row[0] -> row[1:] for every row
    """
    # Context manager closes the handle (previously leaked), and the
    # docstring no longer wrongly claims a "list of list" return.
    video2gtrank = {}
    with open(filepath, 'r') as f:
        for row in csv.reader(f):
            video2gtrank[row[0]] = row[1:]
    return video2gtrank
def eval_recall(ground, predict, top_k):
    """
    Compute the recall metric used in the CBVRP-ACMMM-2018 Challenge.

    :param ground: list of ids of the truly relevant shows for this show
    :param predict: list of predicted relevant show ids, best first
    :param top_k: number of leading predictions to score (max top_k = 500)
    :return: recall in [0, 1] as a float; 0.0 when `ground` is empty
    """
    if not ground:
        # Guard: the original divided by len(ground) and raised
        # ZeroDivisionError for shows with no ground-truth relevance.
        return 0.0
    predict = predict[:top_k]
    intersect = [x for x in predict if x in ground]
    return float(len(intersect)) / len(ground)
def mean_recall_hit_k(gdir, pdir, top_k):
    """
    Compute the mean recall@k and mean hit@k metrics over a val/test set.

    :param gdir: path of the ground-truth csv file
    :param pdir: path of the prediction csv file
    :param top_k: max top_k = 500
    :return: (mean_recall_k, mean_hit_k), both floats
    """
    predict_set = read_csv(pdir)
    ground_set = read_csv(gdir)
    total_recall = 0.0
    hit_count = 0
    for row in range(len(predict_set)):
        # Column 0 holds the show id; the remaining columns are ranked ids.
        predicted_ids = [int(x) for x in predict_set[row]]
        ground_ids = [int(x) for x in ground_set[row]]
        recall = eval_recall(ground_ids[1:], predicted_ids[1:], top_k)
        total_recall += recall
        if recall > 0:
            hit_count += 1
    return total_recall / len(predict_set), float(hit_count) / len(predict_set)
def recall_k(gdir, pdir, top_k=(50, 100, 200, 300)):
    """
    Mean recall@k, rounded to 3 decimals, for each cutoff in `top_k`.

    :param gdir: path of the ground-truth csv file
    :param pdir: path of the prediction csv file
    :param top_k: iterable of cutoff ranks
    :return: list of rounded mean recall values, one per cutoff
    """
    # Tuple default replaces the old mutable-list default, and the loop
    # variable no longer shadows the function's own name.
    return [round(mean_recall_hit_k(gdir, pdir, k)[0], 3) for k in top_k]
def hit_k(gdir, pdir, top_k=(5, 10, 20, 30)):
    """
    Mean hit@k, rounded to 3 decimals, for each cutoff in `top_k`.

    :param gdir: path of the ground-truth csv file
    :param pdir: path of the prediction csv file
    :param top_k: iterable of cutoff ranks
    :return: list of rounded mean hit values, one per cutoff
    """
    # Tuple default replaces the old mutable-list default, and the loop
    # variable no longer shadows the function's own name.
    return [round(mean_recall_hit_k(gdir, pdir, k)[1], 3) for k in top_k]
def mean_recall_hit_k_own(gdict, pdict, top_k):
    """
    Compute the mean recall@k and mean hit@k metrics over a val/test set.

    :param gdict: dict mapping video id -> ground-truth relevant ids
    :param pdict: dict mapping video id -> predicted relevant ids
    :param top_k: max top_k = 500
    :return: (mean_recall_k, mean_hit_k), both floats
    """
    assert len(gdict) == len(pdict), '%d != %d' % (len(gdict), len(pdict))
    total_recall = 0.0
    hit_count = 0
    # Iterate the keys directly; the old enumerate() index was unused.
    for video in gdict:
        predict = [int(x) for x in pdict[video]]
        ground = [int(x) for x in gdict[video]]
        recall = eval_recall(ground, predict, top_k)
        total_recall += recall
        if recall > 0:
            hit_count += 1
    mean_recall_k = total_recall / len(gdict)
    mean_hit_k = float(hit_count) / len(gdict)
    return mean_recall_k, mean_hit_k
def recall_k_own(gdict, pdict, top_k=(50, 100, 200, 300)):
    """
    Mean recall@k (unrounded) for each cutoff in `top_k`, dict-based inputs.

    :return: list of mean recall values, one per cutoff
    """
    # Tuple default replaces the old mutable-list default.
    return [mean_recall_hit_k_own(gdict, pdict, k)[0] for k in top_k]
def hit_k_own(gdict, pdict, top_k=(5, 10, 20, 30)):
    """
    Mean hit@k (unrounded) for each cutoff in `top_k`, dict-based inputs.

    :return: list of mean hit values, one per cutoff
    """
    # Tuple default replaces the old mutable-list default.
    return [mean_recall_hit_k_own(gdict, pdict, k)[1] for k in top_k]
# Evaluation script example.
if __name__ == "__main__":
    track = 'track_1_shows'
    fname = 'c3d-pool5'
    # Paths to the ground-truth relevance file and this model's predictions.
    gdir = './%s/relevance_val.csv'%(track)
    pdir = './%s/predict_val_%s.csv'%(track, fname)
    print('hit_k rate for %s'%(fname))
    mean_hit_k_list = []
    for hit_k in [5, 10, 20, 30, 40, 50]:
        _, mean_hit_k = mean_recall_hit_k(gdir, pdir, hit_k)
        mean_hit_k_list.append(round(mean_hit_k,3))
        #print('%.3f' % mean_hit_k)
    print(mean_hit_k_list)
    print('recall_k rate for %s'%(fname))
    mean_recall_k_list = []
    for recall_k in [50, 100, 200, 300, 400, 500]:
        mean_recall_k, _ = mean_recall_hit_k(gdir, pdir, recall_k)
        mean_recall_k_list.append(round(mean_recall_k,3))
        #print('%.3f' % mean_recall_k)
    print(mean_recall_k_list)
|
#!/bin/sh
# Build the classpath for the Config4JMS sample TestClient and run it,
# forwarding all command-line arguments.
# NOTE(review): assumes SONICMQ_HOME is already set in the environment — confirm.
CONFIG4J_HOME=../../..
CONFIG4JMS_HOME=$CONFIG4J_HOME/config4jms
CLASSPATH=$CONFIG4J_HOME/lib/config4j.jar:$CLASSPATH
CLASSPATH=$CONFIG4JMS_HOME/samples/:$CLASSPATH
CLASSPATH=$CONFIG4JMS_HOME/lib/config4jms.jar:$CLASSPATH
CLASSPATH=$SONICMQ_HOME/lib/sonic_Client.jar:$CLASSPATH
CLASSPATH=$SONICMQ_HOME/lib/mfcontext.jar:$CLASSPATH
CLASSPATH=$SONICMQ_HOME/lib/gnu-regexp-1.0.6.jar:$CLASSPATH
CLASSPATH=$SONICMQ_HOME/lib/rsa_ssl.jar:$CLASSPATH
export CLASSPATH
java testclient.TestClient $*
|
#!/usr/bin/bash

# Pin all numeric libraries to a single thread so they do not compete with
# the driving processes for CPU.
export OMP_NUM_THREADS=1
export MKL_NUM_THREADS=1
export NUMEXPR_NUM_THREADS=1
export OPENBLAS_NUM_THREADS=1
export VECLIB_MAXIMUM_THREADS=1

# Default to passive mode unless the caller overrides PASSIVE.
if [ -z "$PASSIVE" ]; then
  export PASSIVE="1"
fi

function launch {
  # apply update
  # If the local checkout is behind upstream, hard-reset to upstream and
  # re-exec this script so the updated copy takes over.
  if [ "$(git rev-parse HEAD)" != "$(git rev-parse @{u})" ]; then
    git reset --hard @{u} &&
    git clean -xdf &&

    # Touch all files on release2 after checkout to prevent rebuild
    BRANCH=$(git rev-parse --abbrev-ref HEAD)
    if [[ "$BRANCH" == "release2" ]]; then
      touch **
    fi

    exec "${BASH_SOURCE[0]}"
  fi

  # no cpu rationing for now
  echo 0-3 > /dev/cpuset/background/cpus
  echo 0-3 > /dev/cpuset/system-background/cpus
  echo 0-3 > /dev/cpuset/foreground/boost/cpus
  echo 0-3 > /dev/cpuset/foreground/cpus
  echo 0-3 > /dev/cpuset/android/cpus

  # handle pythonpath
  ln -s /data/openpilot /data/pythonpath
  export PYTHONPATH="$PWD"

  # start manager
  cd selfdrive
  ./manager.py

  # if broken, keep on screen error
  while true; do sleep 1; done
}

launch
|
package info.archinnov.achilles.internal.metadata.holder;
import static org.fest.assertions.api.Assertions.*;
import static org.mockito.Mockito.*;
import info.archinnov.achilles.schemabuilder.Create.Options.ClusteringOrder;
import info.archinnov.achilles.schemabuilder.Create.Options.ClusteringOrder.Sorting;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import java.util.Arrays;
import java.util.List;
/**
 * Unit tests for PropertyMetaSliceQuerySupport: verifies that key-name
 * lookups, component validation and clustering-order retrieval delegate to
 * the underlying EmbeddedIdProperties of the wrapped PropertyMeta.
 */
@RunWith(MockitoJUnitRunner.class)
public class PropertyMetaSliceQuerySupportTest {

    // Deep stubs allow mocking chained calls such as
    // meta.getEmbeddedIdProperties().getPartitionComponents().
    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private PropertyMeta meta;

    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private EmbeddedIdProperties embeddedIdProperties;

    private PropertyMetaSliceQuerySupport view;

    @Before
    public void setUp() {
        view = new PropertyMetaSliceQuerySupport(meta);
        when(meta.getEmbeddedIdProperties()).thenReturn(embeddedIdProperties);
        when(meta.getEntityClassName()).thenReturn("entity");
    }

    @Test
    public void should_get_partition_key_names() throws Exception {
        //Given
        when(embeddedIdProperties.getPartitionComponents().getCQL3ComponentNames()).thenReturn(Arrays.asList("id","date", "type"));

        //When
        final List<String> partitionKeysName = view.getPartitionKeysName(2);

        //Then
        assertThat(partitionKeysName).containsExactly("id", "date");
    }

    @Test
    public void should_get_last_partition_key_name() throws Exception {
        //Given
        when(embeddedIdProperties.getPartitionComponents().getCQL3ComponentNames()).thenReturn(Arrays.asList("id","date", "type"));

        //When
        final String lastPartitionKeyName = view.getLastPartitionKeyName();

        //Then
        assertThat(lastPartitionKeyName).isEqualTo("type");
    }

    @Test
    public void should_get_clustering_key_names() throws Exception {
        //Given
        when(embeddedIdProperties.getClusteringComponents().getCQL3ComponentNames()).thenReturn(Arrays.asList("id","date", "type"));

        //When
        final List<String> clusteringKeysName = view.getClusteringKeysName(2);

        //Then
        assertThat(clusteringKeysName).containsExactly("id", "date");
    }

    @Test
    public void should_get_last_clustering_key_name() throws Exception {
        //Given
        when(embeddedIdProperties.getClusteringComponents().getCQL3ComponentNames()).thenReturn(Arrays.asList("id","date", "type"));

        //When
        final String lastClusteringKeyName = view.getLastClusteringKeyName();

        //Then
        assertThat(lastClusteringKeyName).isEqualTo("type");
    }

    @Test
    public void should_validate_partition_components() throws Exception {
        //Given
        final Object[] partitionComponents = {10L, "DuyHai"};

        //When
        view.validatePartitionComponents(partitionComponents);

        //Then
        verify(meta.getEmbeddedIdProperties().getPartitionComponents()).validatePartitionComponents("entity", partitionComponents);
    }

    @Test
    public void should_validate_partition_components_IN() throws Exception {
        //Given
        final Object[] partitionComponentsIN = {"Paul", "DuyHai"};

        //When
        view.validatePartitionComponentsIn(partitionComponentsIN);

        //Then
        verify(meta.getEmbeddedIdProperties().getPartitionComponents()).validatePartitionComponentsIn("entity", partitionComponentsIN);
    }

    @Test
    public void should_validate_clustering_components() throws Exception {
        //Given
        final Object[] clusteringComponents = {10L, "DuyHai"};

        //When
        view.validateClusteringComponents(clusteringComponents);

        //Then
        verify(meta.getEmbeddedIdProperties().getClusteringComponents()).validateClusteringComponents("entity", clusteringComponents);
    }

    @Test
    public void should_validate_clustering_components_IN() throws Exception {
        //Given
        final Object[] clusteringComponentsIN = {"Paul", "DuyHai"};

        //When
        view.validateClusteringComponentsIn(clusteringComponentsIN);

        //Then
        verify(meta.getEmbeddedIdProperties().getClusteringComponents()).validateClusteringComponentsIn("entity", clusteringComponentsIN);
    }

    @Test
    public void should_get_clustering_order() throws Exception {
        //Given
        ClusteringOrder clusteringOrder = new ClusteringOrder("column", Sorting.DESC);
        when(meta.structure().isClustered()).thenReturn(true);
        when(meta.getEmbeddedIdProperties().getClusteringComponents().getClusteringOrders()).thenReturn(Arrays.asList(clusteringOrder));

        //When
        final ClusteringOrder actual = view.getClusteringOrder();

        //Then
        assertThat(actual).isSameAs(clusteringOrder);
    }
}
|
import { routeName } from '../routeName';
import { TestCaseImpl } from '../testCaseImpl';
// Test fixtures: each case pairs a (toState, fromState) route transition
// with the list of route segments expected to (re)activate.
export const cases = [
  new TestCaseImpl({ toState: routeName('app'), fromState: routeName(null) }, ['app']),
  new TestCaseImpl({ toState: routeName('app.home'), fromState: routeName(null) }, ['app', 'app.home']),
  new TestCaseImpl({ toState: routeName('app.home.users'), fromState: routeName('app.home.cars') }, ['app.home.users']),
  new TestCaseImpl({ toState: routeName('app.home.users'), fromState: routeName('app') }, [
    'app.home',
    'app.home.users',
  ]),
];
|
def matrix_product(matrix1, matrix2):
    """
    Return the matrix product of matrix1 (m x n) and matrix2 (n x p).

    :param matrix1: list of m rows, each a list of length n
    :param matrix2: list of n rows, each a list of length p
    :return: m x p product as a list of lists
    :raises ValueError: if the inner dimensions do not match
    """
    if matrix1 and len(matrix1[0]) != len(matrix2):
        # Previously a mismatch either raised IndexError or silently
        # dropped terms; fail fast with a clear message instead.
        raise ValueError('incompatible dimensions: %d != %d'
                         % (len(matrix1[0]), len(matrix2)))
    return [
        [
            sum(matrix1[i][k] * matrix2[k][j] for k in range(len(matrix2)))
            for j in range(len(matrix2[0]))
        ]
        for i in range(len(matrix1))
    ]
|
# Remove build cache files, then rebuild the one-file executable with PyInstaller.
rm -rf build && rm -rf dist && rm -rf HN_Rtsp2Hls.spec && pyinstaller -F HN_Rtsp2Hls.py
|
<gh_stars>0
var tape = require("tape"),
    storage = require("..");

// Smoke tests for the storage module's get/set/remove/clear API.
// Each test removes or clears what it set so cases stay independent.
tape("storage#get(key) should return value of key", function(assert) {
    storage.set("key", "value");
    assert.equal(storage.get("key"), "value");
    storage.remove("key");
    assert.end();
});

tape("storage#set(key, value) should set the value of key", function(assert) {
    storage.set("key", "value");
    assert.equal(storage.get("key"), "value");
    storage.remove("key");
    assert.end();
});

tape("storage#remove(key) should remove the value of key", function(assert) {
    storage.set("key", "value");
    storage.remove("key");
    assert.equal(storage.get("key"), undefined);
    assert.end();
});

tape("storage#clear() should clear all values", function(assert) {
    storage.set("key", "value");
    storage.clear();
    assert.equal(storage.get("key"), undefined);
    assert.end();
});
|
/*
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.doppel_helix.papertrail.papertrailprofileranalysis;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Aggregates parsed papertrail stack traces into a tree of
 * {@code StackTraceElementNode}s, where each node accumulates the sample
 * count of every trace passing through that location.
 */
public class StackTraceSummarizer {

    private StackTraceSummarizer() {
        // Static utility class; not instantiable.
    }

    /**
     * Builds the summary tree for the given parser's stack traces.
     *
     * @param pp parsed profile data; may be {@code null}, in which case an
     *           empty root node is returned
     * @return the root node (location {@code "<>"}) whose count and total
     *         both equal the total number of samples
     */
    public static StackTraceElementNode summarize(PapertrailParser pp) {
        StackTraceElementNode rootNode = new StackTraceElementNode();
        rootNode.setLocation("<>");
        rootNode.setCount(0);
        rootNode.setTotal(0);
        if (pp == null) {
            return rootNode;
        }
        int total = 0;
        for (StackTrace st : pp.getStackTraces()) {
            total += st.getCount();
        }
        rootNode.setCount(total);
        rootNode.setTotal(total);
        for (StackTrace st : pp.getStackTraces()) {
            // Reverse the recorded frame order before inserting — the tree
            // grows from the first element of the reversed list.
            // NOTE(review): presumably traces are stored innermost-first;
            // confirm against PapertrailParser.
            ArrayList<String> elements = new ArrayList<>(st.getTraceElements());
            Collections.reverse(elements);
            addTrace(rootNode, elements, st.getCount(), total);
        }
        // Sort children by descending count. Long.compare is overflow-safe,
        // unlike the previous Long.signum(s2.getCount() - s1.getCount()),
        // whose subtraction could overflow for extreme counts.
        rootNode.sortChildren((s1, s2) -> Long.compare(s2.getCount(), s1.getCount()), true);
        return rootNode;
    }

    /**
     * Recursively merges one trace into the tree, adding {@code count}
     * samples to every node along the path.
     */
    private static void addTrace(StackTraceElementNode parent, List<String> remainingTrace, long count, long total) {
        String currentLocation = remainingTrace.get(0);
        StackTraceElementNode currentNode = null;
        for (StackTraceElementNode child : parent.getChildren()) {
            if (child.getLocation().equals(currentLocation)) {
                currentNode = child;
                break;
            }
        }
        if (currentNode == null) {
            currentNode = new StackTraceElementNode();
            currentNode.setLocation(currentLocation);
            currentNode.setTotal(total);
            parent.add(currentNode);
        }
        currentNode.setCount(currentNode.getCount() + count);
        if (remainingTrace.size() > 1) {
            addTrace(currentNode, remainingTrace.subList(1, remainingTrace.size()), count, total);
        }
    }
}
|
<gh_stars>0
import { TestBed, inject } from '@angular/core/testing';
import { HttpClientModule } from '@angular/common/http';
import { HttpModule } from '@angular/http';
import { HttpClientInterceptor } from './http-client-interceptor.service';
import { ErrorModalService } from 'app/services/errorModal/error-modal.service';
import { RequestInProgressService } from 'app/services/requestInProgress/request-in-progress.service';
// Verifies that HttpClientInterceptor can be constructed by the Angular
// injector together with its ErrorModalService and RequestInProgressService
// dependencies.
describe('HttpExtensionService', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({
      imports: [HttpClientModule, HttpModule],
      providers: [HttpClientInterceptor, ErrorModalService, RequestInProgressService]
    });
  });

  it('should be created', inject([HttpClientInterceptor], (service: HttpClientInterceptor) => {
    expect(service).toBeTruthy();
  }));
});
|
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2018.2 (64-bit)
#
# Filename : ddr3_ctrl.sh
# Simulator : Mentor Graphics Questa Advanced Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Sun Feb 02 10:08:19 +0800 2020
# SW Build 2258646 on Thu Jun 14 20:03:12 MDT 2018
#
# Copyright 1986-2018 Xilinx, Inc. All Rights Reserved.
#
# usage: ddr3_ctrl.sh [-help]
# usage: ddr3_ctrl.sh [-lib_map_path]
# usage: ddr3_ctrl.sh [-noclean_files]
# usage: ddr3_ctrl.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'ddr3_ctrl.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info
echo -e "ddr3_ctrl.sh - Script generated by export_simulation (Vivado v2018.2 (64-bit)-id)\n"
# Main steps
# Main entry point: validate the switches, prepare the environment, then run
# the compile -> elaborate -> simulate flow.
run()
{
  check_args $# $1
  setup $1 $2
  compile
  elaborate
  simulate
}
# RUN_STEP: <compile>
compile()
{
  # Compile design files; tee keeps output visible while logging it.
  source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <elaborate>
elaborate()
{
  # Elaborate the design; tee keeps output visible while logging it.
  source elaborate.do 2>&1 | tee -a elaborate.log
}
# RUN_STEP: <simulate>
simulate()
{
  # Run Questa in batch (-c) mode, driving it with simulate.do.
  vsim -64 -c -do "do {simulate.do}" -l simulate.log
}
# STEP: setup
# Prepare the run directory based on the selected switch: copy the simulator
# setup file, reset previous run data, or leave existing files in place.
setup()
{
  case $1 in
    "-lib_map_path" )
      # -lib_map_path requires a directory argument.
      if [[ ($2 == "") ]]; then
        echo -e "ERROR: Simulation library directory path not specified (type \"./ddr3_ctrl.sh -help\" for more information)\n"
        exit 1
      fi
     copy_setup_file $2
    ;;
    "-reset_run" )
      reset_run
      echo -e "INFO: Simulation run files deleted.\n"
      exit 0
    ;;
    "-noclean_files" )
      # do not remove previous data
    ;;
    * )
      # Default: copy the setup file (from $2 if given, else the baked-in path).
      copy_setup_file $2
  esac

  create_lib_dir

  # Add any setup/initialization commands here:-
  # <user specific commands>
}
# Copy modelsim.ini file
# Copy the modelsim.ini simulator setup file into the run directory.
# Falls back to a project-generated compile_simlib path when no library
# mapping path was supplied on the command line.
copy_setup_file()
{
  file="modelsim.ini"
  if [[ ($1 != "") ]]; then
    lib_map_path="$1"
  else
    lib_map_path="E:/Exercise/FPGA/v3edu/test03_ddr_rd_ctrl/design2/ddr3_hdmi.cache/compile_simlib/questa"
  fi
  if [[ ($lib_map_path != "") ]]; then
    src_file="$lib_map_path/$file"
    cp $src_file .
  fi
}
# Create design library directory
# Create a fresh design library directory, discarding any stale copy first
# (rm -rf is a no-op when the directory does not exist).
create_lib_dir()
{
  lib_dir="questa_lib"
  rm -rf "$lib_dir"
  mkdir "$lib_dir"
}
# Delete generated data from the previous run
# Delete the logs, waveform and library data generated by a previous run,
# then recreate an empty library directory.
reset_run()
{
  for stale_file in compile.log elaborate.log simulate.log vsim.wlf questa_lib; do
    rm -rf "$stale_file"
  done
  create_lib_dir
}
# Check command line arguments
# Validate the command-line switches: reject unknown single-switch
# invocations and print usage when help is requested.
check_args()
{
  if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
    echo -e "ERROR: Unknown option specified '$2' (type \"./ddr3_ctrl.sh -help\" for more information)\n"
    exit 1
  fi

  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}
# Script usage
# Print the help text and exit.
# NOTE(review): exits with status 1 even for an explicit -help request —
# confirm whether exit 0 was intended for that case.
usage()
{
  msg="Usage: ddr3_ctrl.sh [-help]\n\
Usage: ddr3_ctrl.sh [-lib_map_path]\n\
Usage: ddr3_ctrl.sh [-reset_run]\n\
Usage: ddr3_ctrl.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
  echo -e $msg
  exit 1
}
# Launch script
run $1 $2
|
#!/bin/bash
# One-line summary shown by the hassbian-config tool.
function zigbee2mqtt-show-short-info {
    echo "Setup for Zigbee2mqtt bridge."
}
# Longer description shown by the hassbian-config tool.
function zigbee2mqtt-show-long-info {
    echo "This script installs the Zigbee2mqtt bridge"
}
# Attribution line shown by the hassbian-config tool.
function zigbee2mqtt-show-copyright-info {
    echo "Original concept by Landrash <https://github.com/landrash>."
}
function zigbee2mqtt-install-package {
echo -n "Installing dependencies : "
node=$(which npm)
if [ -z "${node}" ]; then #Installing NodeJS if not already installed.
printf "Downloading and installing NodeJS...\\n"
curl -sL https://deb.nodesource.com/setup_8.x | bash -
apt install -y nodejs
fi
echo "Cloning Zigbee2mqtt git repository"
git clone https://github.com/Koenkk/zigbee2mqtt.git /opt/zigbee2mqtt
chown -R pi:pi /opt/zigbee2mqtt
echo "Running install. This might take a while and can produce som expected errors"
cd /opt/zigbee2mqtt || exit
su pi -c "npm install"
echo "Creating service file zigbee2mqtt.service"
service_path="/etc/systemd/system/zigbee2mqtt.service"
echo "[Unit]
Description=zigbee2mqtt
After=network.target
[Service]
ExecStart=/usr/bin/npm start
WorkingDirectory=/opt/zigbee2mqtt
StandardOutput=inherit
StandardError=inherit
Restart=always
User=pi
[Install]
WantedBy=multi-user.target" > $service_path
echo "Checking the installation..."
if [ ! -f /opt/zigbee2mqtt/data/configuration.yaml ]; then
validation=""
else
validation="ok"
fi
if [ ! -z "${validation}" ]; then
echo
echo -e "\\e[32mInstallation done..\\e[0m"
echo -e "Update of configuration.yaml is required found at /opt/zigbee2mqtt/data/"
echo -e "Some further configuration is required and details can be found here https://github.com/Koenkk/zigbee2mqtt/wiki/Running-the-bridge "
echo
echo -e "Service can be started after configuration but running sudo systemctl start zigbee2mqtt"
echo
else
echo
echo -e "\\e[31mInstallation failed..."
echo
return 1
fi
return 0
}
[[ "$_" == "$0" ]] && echo "hassbian-config helper script; do not run directly, use hassbian-config instead"
|
"""
File : MSUnmergedRSE.py
Description: Provides a document Template for the MSUnmerged MicroServices
"""
class MSUnmergedRSE(dict):
    """
    A minimal RSE information representation to serve the needs
    of the MSUnmerged Micro Service.

    Behaves as a plain dict pre-populated with the RSE document template;
    any extra keyword arguments are merged in as additional keys.
    """

    def __init__(self, rseName, **kwargs):
        super(MSUnmergedRSE, self).__init__(**kwargs)
        # NOTE: totalNumFiles reflects the total number of files at the RSE as
        # fetched from the Rucio Consistency Monitor. Once the relevant
        # protected paths have been filtered out and the path been cut to the
        # proper depth (as provided by the WMStats Protected LFNs interface),
        # then the final number (but on a directory level rather than on
        # files granularity level) will be put in the counter 'toDelete'
        self.allUnmerged = []
        counters = {"totalNumFiles": 0,
                    "toDelete": 0,
                    "deletedSuccess": 0,
                    "deletedFail": 0}
        files = {"allUnmerged": set(),
                 "toDelete": set(),
                 "protected": set(),
                 "deletedSuccess": [],
                 "deletedFail": []}
        dirs = {"allUnmerged": [],
                "protected": [],
                "nonEmpty": [],
                "empty": []}
        self.update({"name": rseName,
                     "delInterface": "",
                     "isClean": False,
                     "counters": counters,
                     "files": files,
                     "dirs": dirs})
|
use std::error::Error;
// Define the RTDError struct.
//
// `Clone` is derived because `RTDErrorBuilder::build` returns a clone of
// its stored error; without this derive that call does not compile.
#[derive(Clone)]
pub struct RTDError {
    message: String,
}
impl RTDError {
    // Implement the message method for RTDError
    // Returns a borrowed reference to the error text.
    // NOTE(review): returning `&str` would be more idiomatic, but the
    // signature is kept as-is to avoid changing the public interface.
    pub fn message(&self) -> &String {
        &self.message
    }
}
// Define the RTDErrorBuilder struct
// Builder wrapper that owns the RTDError being constructed.
pub struct RTDErrorBuilder {
    inner: RTDError,
}
impl RTDErrorBuilder {
    // Implement the build method for RTDErrorBuilder
    // Produces an owned copy of the constructed error; the builder remains
    // usable afterwards. Requires `RTDError: Clone`.
    pub fn build(&self) -> RTDError {
        self.inner.clone()
    }
}
|
curl "http://localhost:8080/v1/views/myorg/bands/composite_view?rev=3"
|
<gh_stars>0
package com.stylefeng.guns.rest.modular.cinema.vo;
import com.stylefeng.guns.api.cinema.vo.AreaVO;
import com.stylefeng.guns.api.cinema.vo.BrandVO;
import com.stylefeng.guns.api.cinema.vo.HallTypeVO;
import java.util.List;
import lombok.Data;
/**
* Created by xianpeng.xia
* on 2020/7/1 12:11 上午
*/
@Data
public class CinemalConditionResponseVO {
    // Brand filter options returned for the cinema condition query.
    List<BrandVO> brands;
    // Area filter options.
    List<AreaVO> areas;
    // Hall-type filter options.
    List<HallTypeVO> hallTypes;
}
|
#!/bin/sh
# Start DOSBox, start gretty server
# DOSBox is launched in the background; the short sleep gives it time to
# come up before the Gradle gretty server starts in the foreground.
dosbox &
sleep 2
gradle appRun
|
<filename>src/common/server.ts
// Minimal ambient declaration of the AirConsole API surface used by this
// server; the real implementation is provided by the AirConsole runtime.
declare class AirConsole {
    // Send data to every connected device.
    broadcast(data: any): void;
    // Send data to a single device.
    message(device_id: number, data: any): void;
    onMessage(device_id: number, data: any): void;
    // Callback fields assigned by the consumer (see Server.subscribeToAirConsole).
    onConnect: (device_id: number) => void;
    onDisconnect: (device_id: number) => void;
    onDeviceStateChange: (device_id: number, user_data: any) => void;
    onCustomDeviceStateChange: (device_id: number, user_data: any) => void;
    setActivePlayers(max_players: number): void;
}
import {
TransactionTypeInterface,
ControllerData,
TransactionType,
ServerState,
PlayerData,
ObjectData,
ServerData,
PlayerUpdateData,
PlayerState
} from "./index";
import { ConnectedDevice, getDevice, getAllDevices } from "./connectedDevice";
import { EventListener } from "./eventListener";
import { GameState } from "./server/gameState";
import { GameStateJoin } from "./server/gameStateJoin";
const eventListener = EventListener.get();
export const OBJECTDATAMAXHEALTH: number = 10;
/**
 * Central game server: owns the AirConsole connection, the ~60 Hz game
 * loop, and the currently active GameState (a simple state machine).
 */
export class Server {
    airConsole: AirConsole;
    serverData: ServerData;
    playerData: PlayerData[] = [];
    objectData: ObjectData[] = [];
    sendPlayerToClient: any;
    gameState: GameState;

    constructor() {
        this.airConsole = new AirConsole();
        this.serverData = new ServerData(30, ServerState.initial);
        this.subscribeToAirConsole();
        // Start in the join phase; 3000 is forwarded to GameStateJoin
        // (presumably a duration in ms - confirm against GameStateJoin).
        this.gameState = new GameStateJoin(this, 3000);
        // States hand control to their successor by emitting 'newGameState'.
        eventListener.on('newGameState', (state) => {
            this.gameState = state;
        })
        this.initTick();
        this.initMessageHandler();
    }

    // Run the game loop at ~60 Hz, passing the elapsed milliseconds since
    // the previous tick to the active state.
    initTick () {
        let time = performance.now();
        setInterval(() => {
            const newTime = performance.now();
            const delta = newTime - time;
            time = newTime;
            this.gameState.tick(delta);
        }, 1000 / 60);
    }

    // Re-emit controller messages as 'CLIENT_<action>' events, tagging the
    // payload with the sender's device id. Messages from device 0 are
    // currently dropped.
    private initMessageHandler() {
        this.airConsole.onMessage = (from: number, data: any) => {
            if (data) {
                if(from !== 0){
                    const event = 'CLIENT_'+data.action;
                    // The payload is mutated in place to carry the sender id.
                    data.data.from = from;
                    eventListener.trigger(event as any, data.data);
                } else {
                    // IDK
                }
            }
        };
    }

    // Wire AirConsole lifecycle callbacks to the ConnectedDevice registry.
    subscribeToAirConsole() {
        this.airConsole.onConnect = (id) => {
        };
        this.airConsole.onDisconnect = (id) => {
            getDevice(id).disconnect();
        };
        // getDevice appears to throw for unknown ids; in that case a new
        // ConnectedDevice is created before applying the state update.
        this.airConsole.onDeviceStateChange = (id, data) => {
            try {
                getDevice(id).updateState(data)
            } catch(e) {
                const newDevice = new ConnectedDevice(id);
                newDevice.updateState(data);
            }
        };
        this.airConsole.onCustomDeviceStateChange = (id, data) => {
            try {
                getDevice(id).updateCustomState(data)
            } catch(e) {
                const newDevice = new ConnectedDevice(id);
                newDevice.updateCustomState(data);
            }
        };
    }
}
|
// 10974. 모든 순열
// 2019.05.22
// 수학
#include<iostream>
using namespace std;
int arr[9];
int visit[9];
int n;
// Build all permutations of 1..n by brute-force depth-first search,
// printing each completed permutation (one per line, values separated
// by spaces) in ascending lexicographic order of construction.
void Permutation(int cnt)
{
    if (cnt == n)
    {
        // A full permutation has been placed in arr; emit it.
        for (int i = 0; i < n; i++)
        {
            printf("%d ", arr[i]);
        }
        printf("\n");
        return;
    }
    for (int i = 0; i < n; i++)
    {
        if (visit[i])
            continue;          // value i+1 already used in this branch
        visit[i] = 1;
        arr[cnt] = i + 1;      // place value at the current position
        Permutation(cnt + 1);  // fill the remaining positions
        visit[i] = 0;          // backtrack
    }
}
int main()
{
    // n: permutation length; prints every permutation of 1..n.
    cin >> n;
    Permutation(0);
    return 0;
}
|
package main
// balancedStringSplit returns the maximum number of balanced substrings
// (substrings with equal counts of 'L' and 'R') that s can be split into.
// It cuts greedily whenever the running balance returns to zero, which is
// optimal for this problem.
func balancedStringSplit(s string) int {
	count, balance := 0, 0
	for i := 0; i < len(s); i++ {
		// Compare bytes directly; the original string(s[i]) == "L"
		// allocated a one-byte string per character.
		if s[i] == 'L' {
			balance++
		} else {
			balance--
		}
		if balance == 0 {
			count++
		}
	}
	return count
}
|
const mysql = require("mysql");
module.exports = {
connect: function (host, user, password, database) {
const conn = mysql.createConnection({
host,
user,
password,
database,
});
conn.connect();
return conn;
},
tables: function (conn, done) {
conn.query(`SHOW TABLES;`, (err, results) => {
if (err) return done(err);
done(
null,
results.map((r) => Object.values(r)).reduce((p, c) => [...p, ...c], [])
);
});
},
records: function (conn, table, done) {
conn.query(`SELECT * FROM ${table};`, (err, results, fields) => {
if (err) return done(err);
done(null, [results.map((r) => ({ ...r })), fields.map((f) => f.name)]);
});
},
end: function (conn) {
conn.end();
},
};
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
# Build and install the MXNet Perl bindings into ${MXNET_HOME}/perl5:
# AI-MXNetCAPI and AI-NNVMCAPI are installed first, then AI-MXNet is built
# and its test suite run. Any failing step aborts the script.
MXNET_HOME=${PWD}
export PERL5LIB=${MXNET_HOME}/perl5/lib/perl5
cd ${MXNET_HOME}/perl-package/AI-MXNetCAPI/
perl Makefile.PL INSTALL_BASE=${MXNET_HOME}/perl5
make install || exit -1
cd ${MXNET_HOME}/perl-package/AI-NNVMCAPI/
perl Makefile.PL INSTALL_BASE=${MXNET_HOME}/perl5
make install || exit -1
cd ${MXNET_HOME}/perl-package/AI-MXNet/
perl Makefile.PL INSTALL_BASE=${MXNET_HOME}/perl5
make test || exit -1
|
<gh_stars>1-10
/**
* @file
*
* @brief Utilities for querying and manipulating object events.
*
* @since 1.0.0
*
* @copyright 2021 the libaermre authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef AER_EVENT_H
#define AER_EVENT_H
#include <stdbool.h>
#include "aer/instance.h"
/* ----- PUBLIC TYPES ----- */
/**
* @brief Semi-opaque type for an object event.
*
* For more information about how to use this object see @ref ObjListeners.
*
* @since 1.0.0
*/
typedef struct AEREvent {
    /**
     * @var handle
     *
     * @brief Handle the current event.
     *
     * This function represents the next event listener attached to the current
     * object event.
     *
     * @param[in] event `next` event context.
     * @param[in] target Target instance passed to the currently executing
     * listener.
     * @param[in] other Other instance passed to the currently executing
     * listener.
     *
     * @return Whether or not the event was handled.
     *
     * @since 1.0.0
     *
     * @memberof AEREvent
     */
    bool (*handle)(struct AEREvent* event,
                   AERInstance* target,
                   AERInstance* other);
    /**
     * @var next
     *
     * @brief Event context for the `handle` event listener.
     *
     * Together with `handle`, this forms a singly-linked chain of event
     * listeners.
     *
     * @since 1.4.0
     *
     * @memberof AEREvent
     */
    struct AEREvent* next;
} AEREvent;
#endif /* AER_EVENT_H */
|
<reponame>tdm1223/Algorithm<filename>acmicpc.net/source/5565.cpp
// 5565. 영수증
// 2019.05.21
// 구현
#include<iostream>
using namespace std;
int main()
{
    // BOJ 5565: the receipt total is read first; subtracting the nine
    // listed item prices leaves the price of the unreadable tenth item.
    int total, price;
    cin >> total;
    for (int i = 0; i < 9; i++)
    {
        cin >> price;
        total -= price;
    }
    cout << total << endl;
    return 0;
}
|
# When running under Travis CI, open a log fold named after this script so
# the subsequent output is collapsible in the build log.
if [[ -n "${TRAVIS:-}" ]]; then
    echo -e "travis_fold:start:$0\033[33;1m$0\033[0m"
fi
|
def find_sum(arr):
    """Return the sum of all prime numbers in ``arr``.

    Args:
        arr: Iterable of integers.

    Returns:
        int: Sum of the prime elements (0 if there are none).
    """
    # The original called a module-level is_prime() that is not defined in
    # this file; a local helper makes the function self-contained while
    # keeping the same behavior for standard primality semantics.
    def _is_prime(num):
        # Primes are integers > 1 with no divisor other than 1 and
        # themselves; trial division up to sqrt(num) suffices.
        if num < 2:
            return False
        for d in range(2, int(num ** 0.5) + 1):
            if num % d == 0:
                return False
        return True

    return sum(num for num in arr if _is_prime(num))
# NOTE(review): `arr` is not defined anywhere in this file - it must be
# supplied by the surrounding context before these lines run; verify.
prime_sum = find_sum(arr)
print('The sum of all prime numbers in the array is: ', prime_sum)
|
<gh_stars>0
/*
* Copyright 2014-2020 The Ideal Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
package ideal.development.extensions;
import ideal.library.elements.*;
import ideal.library.texts.*;
import ideal.runtime.elements.*;
import ideal.runtime.texts.*;
import ideal.development.elements.*;
import ideal.development.components.*;
import ideal.development.names.*;
import ideal.development.actions.*;
import ideal.development.constructs.*;
import ideal.development.values.*;
import ideal.development.analyzers.*;
/**
 * Construct representing a C-style {@code for (init; condition; update) body}
 * loop. It is not analyzed directly; {@link #to_analyzable} desugars it into
 * core constructs.
 */
public class for_construct extends extension_construct {
    public construct init;
    public construct condition;
    public construct update;
    public construct body;

    public for_construct(construct init, construct condition, construct update, construct body,
            position pos) {
        super(pos);
        this.init = init;
        this.condition = condition;
        this.update = update;
        this.body = body;
    }

    /** Children in source order: init, condition, update, body. */
    @Override
    public readonly_list<construct> children() {
        list<construct> result = new base_list<construct>();
        result.append(init);
        result.append(condition);
        result.append(update);
        result.append(body);
        return result;
    }

    /**
     * Desugar the for loop into core analyzable constructs, equivalent to:
     * {@code { init; loop { if (condition) { body; update } else break; } }}
     */
    @Override
    public analyzable to_analyzable() {
        position pos = this;
        analyzable body_and_update = new statement_list_analyzer(
                new base_list<analyzable>(base_analyzer.make(body), base_analyzer.make(update)), pos);
        analyzable break_statement = new jump_analyzer(jump_type.BREAK_JUMP, pos);
        analyzable if_statement = new conditional_analyzer(base_analyzer.make(condition),
                body_and_update, break_statement, pos);
        analyzable loop_statement = new loop_analyzer(if_statement, pos);
        return new block_analyzer(new statement_list_analyzer(
                new base_list<analyzable>(base_analyzer.make(init), loop_statement), pos), pos);
    }

    /** Pretty-print as {@code for (init; condition; update) body}. */
    @Override
    public text_fragment print(printer p) {
        list<text_fragment> fragments = new base_list<text_fragment>();
        fragments.append(p.print_word(keyword.FOR));
        fragments.append(p.print_space());
        list<text_fragment> expressions = new base_list<text_fragment>();
        expressions.append(p.print(init));
        expressions.append(p.print_punctuation(punctuation.SEMICOLON));
        expressions.append(p.print_space());
        expressions.append(p.print(condition));
        expressions.append(p.print_punctuation(punctuation.SEMICOLON));
        expressions.append(p.print_space());
        // TODO: this is a hack; fix
        // An empty update clause is skipped so nothing is printed after the
        // second semicolon/space.
        if (!(update instanceof empty_construct)) {
            expressions.append(p.print(update));
        }
        fragments.append(p.print_grouping_in_statement(text_util.join(expressions)));
        fragments.append(p.print_indented_statement(body));
        return text_util.join(fragments);
    }

    @Override
    public boolean is_terminated() {
        return true;
    }

    /** Rebuild this construct with every child transformed. */
    @Override
    public construct transform(transformer the_transformer) {
        return new for_construct(the_transformer.transform(init),
                the_transformer.transform(condition),
                the_transformer.transform(update),
                the_transformer.transform(body),
                this);
    }
}
|
#! /bin/bash
# SLURM batch job: REXI finite-difference scalability benchmark
# (m=4096 REXI terms, 448 MPI ranks, 4 OpenMP threads per task).
#SBATCH -o /home/hpc/pr63so/di69fol/workspace/SWEET_2016_01_16/benchmarks_performance/rexi_tests/2016_01_06_scalability_rexi_fd/run_rexi_fd_par_m4096_t004_n0128_r0448_a1.txt
###SBATCH -e /home/hpc/pr63so/di69fol/workspace/SWEET_2016_01_16/benchmarks_performance/rexi_tests/2016_01_06_scalability_rexi_fd/run_rexi_fd_par_m4096_t004_n0128_r0448_a1.err
#SBATCH -J rexi_fd_par_m4096_t004_n0128_r0448_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=448
#SBATCH --cpus-per-task=4
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=00:05:00
#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1
# Pin OpenMP threads and set the per-task thread count.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=4
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo
# Load the toolchain (Intel compiler + MPI, Python, GCC runtime).
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5
cd /home/hpc/pr63so/di69fol/workspace/SWEET_2016_01_16/benchmarks_performance/rexi_tests/2016_01_06_scalability_rexi_fd
cd ../../../
. local_software/env_vars.sh
# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T0"
time -p mpiexec.hydra -genv OMP_NUM_THREADS 4 -envall -ppn 7 -n 448 ./build/rexi_fd_par_m_tno_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 128 -U 0 -S 0 --use-specdiff-for-complex-array 0 --rexi-h 0.8 --timestepping-mode 1 --staggering 0 --rexi-m=4096 -C -5.0
|
#!/bin/bash
# SLURM batch job: DDPG training run (RoboschoolWalker2d-v1, seed 1, run 9)
# using the CPU TensorFlow virtualenv.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M        # memory per node
#SBATCH --time=23:00:00        # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolWalker2d-v1_ddpg_softcopy_action_noise_seed1_run9_%N-%j.out  # %N for node name, %j for jobID
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09  gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
python ./ddpg_discrete_action.py --env RoboschoolWalker2d-v1 --random-seed 1  --exploration-strategy action_noise  --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolWalker2d-v1/ddpg_softcopy_action_noise_seed1_run9 --continuous-act-space-flag  --double-ddpg-flag
|
<reponame>srproj-will-rename/buddy
package carrot
import (
"crypto/rand"
"errors"
"fmt"
log "github.com/sirupsen/logrus"
"io"
"strings"
)
// InSlice reports whether str is present in the items collection.
func InSlice(str string, items []string) bool {
	for i := range items {
		if items[i] == str {
			return true
		}
	}
	return false
}
// generateUUID generates a random (version 4) UUID fulfilling RFC 4122,
// used for assigning tokens to devices. The returned string is uppercase
// in the canonical 8-4-4-4-12 form.
func generateUUID() (string, error) {
	uuid := make([]byte, 16)
	// io.ReadFull returns a nil error only when the buffer is completely
	// filled, so a single error check suffices (the original also compared
	// n against len(uuid), which could return an empty string with a nil
	// error in theory).
	if _, err := io.ReadFull(rand.Reader, uuid); err != nil {
		return "", err
	}
	// variant bits (RFC 4122 section 4.1.1)
	uuid[8] = uuid[8]&^0xc0 | 0x80
	// version 4 (randomly generated)
	uuid[6] = uuid[6]&^0xf0 | 0x40
	return strings.ToUpper(fmt.Sprintf("%x-%x-%x-%x-%x", uuid[0:4], uuid[4:6], uuid[6:8], uuid[8:10], uuid[10:])), nil
}
// offsetSub performs component-wise offset subtraction in the form a - b,
// returning a newly allocated result.
func offsetSub(a *offset, b *offset) *offset {
	diff := offset{}
	diff.X = a.X - b.X
	diff.Y = a.Y - b.Y
	diff.Z = a.Z - b.Z
	return &diff
}
// getE_P performs calculations to determine the event placement in a recipient's response.
//
// Both T_L and T_P must be populated on the session (i.e. the Picnic
// Protocol handshake must have completed); otherwise an error is returned.
// The offset parameter is the event's local placement (e_l).
func getE_P(currentSession *Session, offset *offset) (*offset, error) {
	var err error
	if currentSession.T_L == nil || currentSession.T_P == nil {
		err = errors.New("The session did not complete the Picnic Protocol handshake")
		return nil, err
	}
	primaryT_P := currentSession.T_P
	log.Infof("t_p: x: %v y: %v z: %v", primaryT_P.X, primaryT_P.Y, primaryT_P.Z)
	currentT_L := currentSession.T_L
	log.Infof("t_l: x: %v y: %v z: %v", currentT_L.X, currentT_L.Y, currentT_L.Z)
	// parameter offset is the e_l
	// o_p = t_l - t_p`
	// e_p = e_l - o_p
	o_p := offsetSub(currentT_L, primaryT_P)
	log.Infof("o_p: x: %v y: %v z: %v", o_p.X, o_p.Y, o_p.Z)
	e_p := offsetSub(offset, o_p)
	log.Infof("e_p: x: %v y: %v z: %v", e_p.X, e_p.Y, e_p.Z)
	log.Infof("e_l: x: %v y: %v z: %v", offset.X, offset.Y, offset.Z)
	return e_p, err
}
|
#!/bin/bash
# Set bash to 'debug' mode, it will exit on :
# -e 'error', -u 'undefined variable', -o ... 'error in pipeline', -x 'print commands',
set -e
set -u
set -o pipefail

# LibriSpeech-style dataset split names used by asr.sh.
train_set="train_960"
valid_set="dev"
test_sets="test_clean test_other dev_clean dev_other"

# Configuration files for the ASR model, the language model, and decoding.
asr_config=conf/tuning/train_asr_conformer6_n_fft512_hop_length256.yaml
lm_config=conf/tuning/train_lm_transformer2.yaml
inference_config=conf/decode_asr.yaml

# Delegate the full recipe (data prep, training, decoding) to asr.sh;
# any extra command-line arguments are forwarded via "$@".
./asr.sh \
    --audio_format flac.ark \
    --lang en \
    --ngpu 16 \
    --nbpe 5000 \
    --max_wav_duration 30 \
    --speed_perturb_factors "0.9 1.0 1.1" \
    --asr_config "${asr_config}" \
    --lm_config "${lm_config}" \
    --inference_config "${inference_config}" \
    --train_set "${train_set}" \
    --valid_set "${valid_set}" \
    --test_sets "${test_sets}" \
    --lm_train_text "data/${train_set}/text data/local/other_text/text" \
    --bpe_train_text "data/${train_set}/text" "$@"
|
# Print a green "build managed plugins" banner. printf is used because
# echo's backslash-escape handling differs between shells (bash would
# print the escape sequence literally without -e).
printf '\033[32mbuild managed plugins\033[m\n'
# Run from the directory containing this script; quoting keeps paths with
# spaces working, and a failed cd aborts instead of building elsewhere.
cd "$(dirname "$0")" || exit 1
dotnet.exe build -c release -o ../lib/
|
import React, { forwardRef, useMemo } from 'react'
import Picklist from 'core/components/Picklist'
import { projectAs } from 'utils/fp'
import useDataLoader from 'core/hooks/useDataLoader'
import PropTypes from 'prop-types'
import { serviceAccountActions } from 'k8s/components/prometheus/actions'
// We need to use `forwardRef` as a workaround of an issue with material-ui Tooltip https://github.com/gregnb/mui-datatables/issues/595
// Dropdown of Prometheus service accounts for the given cluster/namespace;
// the option list is loaded via useDataLoader and projected to
// {label, value} pairs (both taken from the account's `name`).
const ServiceAccountPicklist = forwardRef(({ loading, clusterId, namespace, ...rest }, ref) => {
  const [services, servicesLoading] = useDataLoader(serviceAccountActions.list, {
    clusterId,
    namespace,
  })
  // Memoize so the options array identity is stable across re-renders.
  const options = useMemo(() =>
    projectAs({ label: 'name', value: 'name' }, services),
    [services])
  return <Picklist
    {...rest}
    ref={ref}
    loading={loading || servicesLoading}
    options={options}
  />
})

ServiceAccountPicklist.propTypes = {
  ...Picklist.propTypes,
  name: PropTypes.string,
  label: PropTypes.string,
  clusterId: PropTypes.oneOfType([PropTypes.number, PropTypes.string]),
  namespace: PropTypes.oneOfType([PropTypes.number, PropTypes.string]),
}

ServiceAccountPicklist.defaultProps = {
  ...Picklist.defaultProps,
  name: 'serviceId',
  label: 'Current Service',
}

export default ServiceAccountPicklist
|
package com.leidos.dataparser.data.map;
import com.leidos.dataparser.io.formatting.Output;
import com.leidos.dataparser.io.formatting.OutputData;
import java.util.Vector;
/**
* This class represents an FHWA-formatted MAP (GID) message that has been received by the ASD OBU. It can parse the message
* content and deliver its various data elements.
*
* @author starkj
*
*/
public class MapMessage implements OutputData {
// Relationship of a lane group to the intersection, as signaled by the
// APPROACH_EGRESS_BARRIER object while parsing.
public enum LaneDirection {
    APPROACH,
    EGRESS,
    BARRIER
};

// Creates an empty MAP message; parse() must be called (and succeed)
// before any accessor is used.
public MapMessage() {
    lane_ = new Vector<Lane>();
}
/**
 * buf length <= 4 : false (4 bytes is just the msg header). Otherwise...
 * any data element doesn't fit the FHWA spec* : false
 * all data elements have plausible values : true (and elements stored internally for future retrieval)
 *
 * Caution:  this must be the first method called on any instance of this class.  Until it is parsed, which validates
 * the content, none of the other methods will be meaningful.  And to maximize speed of execution, none of them
 * attempt to verify that the parsing has first been performed.  It is up to the user to control this interaction.
 * If parse() returns false, then do not call any of the remaining methods on that object.
 *
 * *FHWA spec used is "Inteface Control Document for the Signal Phase and Timing and Related Messages for V-I Applications",
 * contract no. DTFH61-06-D-00007 by Battelle & Texas A&M University, doc no. 60606-18A Final, dated June 2013, section 3.2.2.
 *
 * @param buf - buffer of raw input stream coming from the ASD hardware, which may contain a useful message
 */
public boolean parse(byte[] buf) {
    boolean failure = false;
    int pos = 1; //read position in the buffer (first byte is pos 0, which indicates MAP message type)
    Lane curLane = null; //the lane object currently being populated
    LaneDirection laneType = LaneDirection.BARRIER;

    //store message content version
    contentVersion_ = (int)buf[pos++];

    //store overall payload length, and initialize the content length accumulator, which will ensure we don't read past the end of message
    int payloadLength = get2Bytes(buf, pos);
    pos += 2;
    int accumulator = 0;

    //begin loop on message objects, while no content failure and we haven't read the full length of the payload
    while (!failure  &&  accumulator < payloadLength) {
        //get the object identifier and the length of the object (next two bytes); add object length to the length accumulator
        int objectId = buf[pos++];
        int objLength = 0;
        if ((objectId & 0x000000ff) != 0xff){ //the 0xff flag doesn't come with a length byte
            objLength = buf[pos++];
            ++accumulator; //account for the object length byte
        }
        accumulator += objLength + 1; //adds object payload plus 1 byte for the object ID itself
        //if accumulator is larger than the advertised message length then indicate failure and break out of loop
        if (accumulator > payloadLength) {
            failure = true;
            break;
        }
        //
        //switch on the object identifier
        switch(objectId & 0x000000ff) {

        case MESSAGE_ATTRIBUTES:
            //if size word isn't 1 indicate failure
            if (objLength != 1) {
                failure = true;
            }else {
                //if the geometry bit isn't set then indicate failure
                int attrib = buf[pos++];
                if ((attrib & 0x04) == 0) {
                    failure = true;
                }else {
                    //store the offset resolution
                    offsetsInDm_ = (attrib & 0x02) > 0;
                    //store whether elevation data will be included
                    elevationsPresent_ = (attrib & 0x01) > 0;
                }
            }
            break;

        case INTERSECTION_ID:
            //if size word isn't 4 indicate failure
            if (objLength != 4) {
                failure = true;
            }else {
                //store the ID
                intersectionId_ = get4Bytes(buf, pos);
                pos += 4;
            }
            break;

        case REFERENCE_POINT:
            //if size is 8 and no elevation present or size is 10 with elevation present then
            if ((objLength == 8  &&  !elevationsPresent_)  ||  (objLength == 10  &&  elevationsPresent_)) {
                //read and convert lat & lon
                int rawLat = get4Bytes(buf, pos);
                pos += 4;
                int rawLon = get4Bytes(buf, pos);
                pos += 4;
                //raw values are in 1e-7 degree units per the FHWA spec
                refPoint_ = new Location((double)rawLat/1.0e7, (double)rawLon/1.0e7);
                //if elevation is present then
                if (elevationsPresent_) {
                    //skip past elevation data (we won't use that here)
                    pos += 2;
                }
            //else
            }else {
                //indicate failure
                failure = true;
            }
            break;

        case APPROACH_EGRESS_BARRIER:
            //if size is not 1 then indicate failure
            if (objLength != 1) {
                failure = true;
            }else {
                //read the indicator and set the control variable (which will be used for other cases in this loop)
                int ind = buf[pos++];
                switch (ind) {
                case 1:        laneType = LaneDirection.APPROACH;        break;
                case 2:        laneType = LaneDirection.EGRESS;        break;
                case 3:        laneType = LaneDirection.BARRIER;        break;
                default:
                    failure = true;
                }
            }
            break;

        case LANE:
            //if size is not 2 then indicate failure
            if (objLength != 2) {
                failure = true;
            }else {
                //if indicator is either approach or egress then
                if (laneType == LaneDirection.APPROACH  ||  laneType == LaneDirection.EGRESS) {
                    //create a new lane object and store this indicator for it
                    curLane = new Lane();
                    curLane.setApproach(laneType == LaneDirection.APPROACH);
                    lane_.add(curLane);
                }
                //read the lane ID and the lane type
                int laneNum = buf[pos++];
                int type = buf[pos++];
                curLane.setId(laneNum);
                //Note: for now, Glidepath project assumes no non-motorized lanes are defined for the test intersection.
                // In future if other intersections are used, this will have to be made more intelligent to ignore other
                // types of lanes during the reading, or to read them in and ensure that they are ignored in the geometry calcs.
                if (type != 1) { //for motorized vehicle
                    failure = true;
                }
            }
            break;

        case LANE_ATTRIBUTES:
            //if size is not 2 then indicate failure
            if (objLength != 2) {
                failure = true;
            }else {
                int attrib = get2Bytes(buf, pos);
                pos += 2;
                //if lane type indicator is not barrier then
                if (laneType != LaneDirection.BARRIER) {
                    //if bits 0, or 8-12 are set then issue a warning (unsafe for Glidepath experiment);
                    // this is not a message parsing failure however
                    if ((attrib & 0x1f01) != 0){
                    }
                    //store the attribute word in the lane object
                    curLane.setAttributes(attrib);
                }
            }
            break;

        case BARRIER_ATTRIBUTES:
            if (objLength != 2) {
                failure = true;
            }else {
                //skip past the indicated number of bytes - not used in Glidepath
                pos += 2;
            }
            break;

        case WIDTH:
            //if size is not 2 then indicate failure
            if (objLength != 2) {
                failure = true;
            }else {
                int width = get2Bytes(buf, pos);
                pos += 2;
                //if lane type indicator is not a barrier then
                if (laneType != LaneDirection.BARRIER) {
                    //store the width in the lane object
                    curLane.setWidth(width);
                }
            }
            break;

        case NODE_LIST:
            //if size < 10 then indicate failure (allows room for two nodes)
            if (objLength < 10) {
                failure = true;
            }else {
                //if lane type indicator is barrier then
                if (laneType == LaneDirection.BARRIER) {
                    //skip past the bytes in this object
                    pos += objLength;
                //else
                }else {
                    //parse all of the nodes describing this lane and populate the lane object
                    boolean success = parseNodeList(buf, pos, objLength, curLane);
                    pos += objLength;
                    if (success) {
                        //
                    }else {
                        failure = true; //don't clear this flag on success, because it might be set elsewhere
                    }
                }
            }
            break;

        case CONNECTION:
            //if size is not 2 then indicate failure
            if (objLength != 2) {
                failure = true;
            }else {
                //skip past the next two bytes
                pos += 2;
            }
            break;

        case REFERENCE_LANE:
            //if size is not 3 then indicate failure
            if (objLength != 3) {
                failure = true;
            }else {
                //skip past the next 3 bytes
                pos += 3;
            }
            break;

        case END_OF_MESSAGE:
            //indicate completion of loop
            break;

        default:
            //indicate message failure
            failure = true;
        } //end switch
    } //end loop on message objects

    //if parsing succeeded then run sanity checks on the assembled geometry
    if (!failure) {
        //if reference point is defined then
        if (refPoint_.lat() != 0.0  &&  refPoint_.lon() != 0.0) {
            //loop through each lane
            int numApproach = 0;
            int numEgress = 0;
            for (Lane lane : lane_) {
                //count the number of approach and egress lanes
                if (lane.isApproach()) {
                    ++numApproach;
                }else {
                    ++numEgress;
                }
                //if it has < 2 nodes then
                if (lane.getNodes().length < 2) {
                    //indicate failure
                    failure = true;
                //else if it is missing width or attributes then
                }else if (lane.width() == 0  ||  lane.attributes() == 0) {
                    //indicate failure
                    failure = true;
                }
            }
            //if there is not at least one approach lane then
            if (numApproach == 0) {
                //indicate failure
                failure = true;
            }
        }else {
            failure = true;
        }
    }

    if (!failure) {
        logSummary();
    }

    return !failure;
}
/**
 * always : content version of this message (per FHWA spec)
 *
 * Note: for performance reasons, this method ASSUMES that parse() has already been called and was successful.
 * Therefore, no data validation is being done here.
 */
public int getContentVersion() {
    return contentVersion_;
}

/**
 * always : ID number of the intersection, as broadcast in the MAP message
 *
 * Note: for performance reasons, this method ASSUMES that parse() has already been called and was successful.
 * Therefore, no data validation is being done here.
 */
public int intersectionId() {
    return intersectionId_;
}

/**
 * always : number of approach & egress lanes defined in this message
 *
 * Note: for performance reasons, this method ASSUMES that parse() has already been called and was successful.
 * Therefore, no data validation is being done here.
 */
public int numLanes() {
    return lane_.size();
}

/**
 * index is a valid lane index for this intersection : object that describes the lane
 * index is invalid : exception
 *
 * Note: index refers to internal storage position (0 <= index < numLanes()), and is NOT necessarily the same as the MAP message's ID
 * of the lane.
 *
 * Note: for performance reasons, this method ASSUMES that parse() has already been called and was successful.
 * Therefore, no data validation is being done here.
 */
public Lane getLane(int index) {
    return lane_.get(index);
}

/**
 * always : intersection reference point
 *
 * Note: for performance reasons, this method ASSUMES that parse() has already been called and was successful.
 * Therefore, no data validation is being done here.
 */
public Location getRefPoint() {
    return refPoint_;
}
/**
 * prints a synopsis of the message content to the log file for human reading
 *
 * NOTE(review): as written this method only tallies lane counts and the
 * cumulative node distance into local variables and never emits them
 * anywhere - confirm whether the logging statements were removed
 * intentionally.
 */
public void logSummary() {
    int approach = 0;
    int egress = 0;
    for (Lane lane : lane_){
        if (lane.isApproach()){
            ++approach;
        }else{
            ++egress;
        }
        Location[] nodes = lane.getNodes();
        int sum = 0;
        //accumulate the polyline length of this lane's node chain
        for (int i = 1;  i < nodes.length;  ++i){
            sum += nodes[i].distanceFrom(nodes[i-1]);
        }
    }
}
//////////////////
// private members
//////////////////
// Reads a big-endian 16-bit unsigned value from buffer b at position p.
// The masks prevent sign extension of the promoted bytes.
private int get2Bytes(byte[] b, int p) {
    int res = ((b[p] << 8) & 0x0000ff00) | (b[p+1] & 0x000000ff);
    return res;
}

// Reads a big-endian 32-bit value from buffer b at position p.
private int get4Bytes(byte[] b, int p) {
    int res = ((b[p] << 24) & 0xff000000) | ((b[p+1] << 16) & 0x00ff0000) | ((b[p+2] << 8) & 0x0000ff00) | (b[p+3] & 0x000000ff);
    return res;
}
/**
 * Parses a NODE_LIST object payload starting at buffer position p (length
 * len bytes) and appends the decoded node locations to the given lane.
 *
 * Nodes are east/north offsets from the intersection reference point,
 * either packed (two offsets in 3 bytes) or unpacked (2 bytes per offset),
 * optionally followed by elevation and width data, which are skipped.
 *
 * @return true if the list was decoded, false on a format inconsistency
 */
private boolean parseNodeList(byte[]b, int p, int len, Lane lane) {
    int bufLoc = 0;
    int eastOffset = 0;
    int northOffset = 0;

    //get the attribute byte and decode it
    int attrib = b[p + bufLoc++];
    boolean hasWidth = (attrib & 0x01) != 0;
    boolean packed = (attrib & 0x02) != 0;

    //if data are packed, then both the elevation & width data need to be present or absent, since they
    //share a byte of the data, so check if these presences are not in sync
    if (packed  &&  (elevationsPresent_ ^ hasWidth)) {
        return false;
    }

    //loop on nodes until we've run out of buffer
    while (bufLoc < len){
        //if nodes are packed then
        if (packed) {
            byte byte1 = b[p + bufLoc++];
            byte byte2 = b[p + bufLoc++];
            byte byte3 = b[p + bufLoc++];
            //build the east offset from the first 3 nibbles
            eastOffset = ((byte1 << 4) & 0x000007f0) | ((byte2 >>> 4) & 0x0000000f);
            //build the north offset from the next 3 nibbles
            northOffset = ((byte2 << 8) & 0x00000700) | (byte3 & 0x000000ff);
            //if these are negative numbers, need to manually take the 2s complement for correct representation
            //(each offset is an 11-bit signed quantity; the sign bit is the top bit of its first nibble)
            if ((byte1 & 0x00000080) > 0) {
                eastOffset = -((~eastOffset & 0x000007ff) + 1);
            }
            if ((byte2 & 0x00000008) > 0) {
                northOffset = -((~northOffset & 0x000007ff) + 1);
            }
            //discard next two bytes (if elevations are present, which we don't use)
            if (elevationsPresent_) {
                bufLoc += 2;
            }
            //if width is included then discard the next byte
            if (hasWidth) {
                ++bufLoc;
            }
        //else
        }else {
            //store east offset from first two bytes
            eastOffset = get2Bytes(b, p+bufLoc);
            bufLoc += 2;
            //store north offset from next two bytes
            northOffset = get2Bytes(b, p+bufLoc);
            bufLoc += 2;
            //discard next two bytes (if elevations are present, which we don't use)
            if (elevationsPresent_) {
                bufLoc += 2;
            }
            //if width is included then discard the next two bytes
            if (hasWidth) {
                bufLoc += 2;
            }
        }

        //store the offsets with the correct units of measure
        if (offsetsInDm_) {
            lane.addNodeDm(refPoint_, eastOffset, northOffset);
        }else{
            lane.addNodeCm(refPoint_, eastOffset, northOffset);
        }
    }

    return true;
}
// --- parsed message state ---
@Output("elevationsPresent")
private boolean elevationsPresent_; //will elevation data be present in reference point and node definitions?
@Output("offsetsInDm")
private boolean offsetsInDm_; //are lane node offsets stored in decimeters? (false indicates centimeters)
@Output("contentVersion")
private int contentVersion_; //the version sequence # of the MAP content published by the intersection RSU
@Output("intersectionId")
private int intersectionId_; //the regional ID number of this intersection
@Output("referencePoint")
private Location refPoint_; //the reference point
@Output("lanes")
private Vector<Lane> lane_; //all lanes parsed from the message (approaches and egresses)
// Record-type tag bytes of the serialized MAP message
// (presumably compared against the type byte of each TLV record; confirm
// against the parsing loop, which is outside this view).
private final int MESSAGE_ATTRIBUTES = 0x01;
private final int INTERSECTION_ID = 0x02;
private final int REFERENCE_POINT = 0x03;
private final int APPROACH_EGRESS_BARRIER = 0x04;
private final int LANE = 0x05;
private final int LANE_ATTRIBUTES = 0x06;
private final int BARRIER_ATTRIBUTES = 0x07;
private final int WIDTH = 0x08;
private final int NODE_LIST = 0x09;
private final int CONNECTION = 0x0a;
private final int REFERENCE_LANE = 0x0b;
private final int END_OF_MESSAGE = 0xff;
}
|
from selenium import webdriver
# find_element_by_name()/find_element_by_id() were removed in Selenium 4;
# the By locator API below is the supported replacement.
from selenium.webdriver.common.by import By

# create the webdriver instance (requires a matching chromedriver on PATH)
driver = webdriver.Chrome()
# navigate to the page and maximize the window
driver.get('https://www.example-website.com/login')
driver.maximize_window()
# find and fill the form with username and password
username = driver.find_element(By.NAME, 'username')
username.send_keys('YOUR_USERNAME')
password = driver.find_element(By.NAME, 'password')
password.send_keys('YOUR_PASSWORD')
# submit the form
driver.find_element(By.NAME, 'submit').click()
# log out the website
driver.find_element(By.ID, 'logout_button').click()
# quit() (not close()) ends the whole session and shuts down chromedriver;
# close() only closes the current window and would leave the driver
# process running.
driver.quit()
|
<reponame>buythewhale/eslint-plugin-no-inline-styles
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
const requireIndex = require('requireindex');
const path = require('path');
//------------------------------------------------------------------------------
// Plugin Definition
//------------------------------------------------------------------------------
// import all rules in lib/rules
// requireindex builds an object mapping each file in lib/rules to its
// exported rule, keyed by the file's basename (the eslint rule name).
module.exports.rules = requireIndex(path.join(__dirname, '/rules'));
|
#!/bin/bash
# Converts PDF files (e.g. created from PowerPoint) into SVG files.
# (every page gets converted to one SVG file)
#
# Works only if you have Inkscape and PDFtk installed and in the environment variables!
# > Inkscape: https://inkscape.org/en/release/0.92.2/
# > PDFtk: https://www.pdflabs.com/tools/pdftk-the-pdf-toolkit/
#
# PDF to SVG part originally made by Alain Pannetier
# > https://stackoverflow.com/a/34119481/7827128
# change directory into the directory where this script is
# > TheMarko, James Ko | https://stackoverflow.com/a/242550/7827128
cd "$(dirname "$0")"
# find all .pdf files and do this for every found file
# > devnull | https://stackoverflow.com/a/19852970/7827128
find . -type f -name "*.pdf" | while read -r pdf; do
    # get the current pdf file name
    inputPdf=$pdf
    # get the pdf file name without directory or extension
    # > Petesh | https://stackoverflow.com/a/965072/7827128
    filename=$(basename "$pdf")
    filename="${filename%.*}"
    # count the pages of the pdf with pdftk
    pageCnt=$(pdftk "$inputPdf" dump_data | grep NumberOfPages | cut -d " " -f 2)
    echo "$inputPdf opened >> counted $pageCnt pages"
    # convert every page to a separate .svg file
    # BUG FIX: the output names previously used "$(unknown)" — a corrupted
    # placeholder that the shell would try to run as a command — instead of
    # the ${filename} base name computed above.
    for i in $(seq 1 "$pageCnt"); do
        echo "converting page $i to ${filename}_${i}.svg..."
        # create a single-page pdf for this page
        pdftk "$inputPdf" cat "$i" output "${filename}_${i}.pdf"
        # convert the file with inkscape
        inkscape --without-gui "--file=${filename}_${i}.pdf" "--export-plain-svg=${filename}_${i}.svg"
        # remove the temporary single-page pdf
        rm "${filename}_${i}.pdf"
    done
done
# Do not close the window -> In case of bugs/failures show the history of everything => Debugging
read -n1 -r -p "Press any key to continue..." key
|
#!/usr/bin/env bash
# Clone, build and benchmark the pagerank-adjust-rank-datatype experiment
# on a fixed list of input graphs, appending all output to one log file.
src="pagerank-adjust-rank-datatype"
out="/home/resources/Documents/subhajit/$src.log"
ulimit -s unlimited
printf "" > "$out"
# Download program
rm -rf $src
git clone https://github.com/puzzlef/$src
cd $src
# Build
g++ -std=c++17 -O3 main.cxx
# Run the binary once per dataset, in the same order as before; stdbuf
# line-buffers output so the log interleaves cleanly with tee.
graphs="
min-1DeadEnd.mtx min-2SCC.mtx min-4SCC.mtx min-NvgraphEx.mtx
web-Stanford.mtx web-BerkStan.mtx web-Google.mtx web-NotreDame.mtx
soc-Slashdot0811.mtx soc-Slashdot0902.mtx soc-Epinions1.mtx
coAuthorsDBLP.mtx coAuthorsCiteseer.mtx soc-LiveJournal1.mtx
coPapersCiteseer.mtx coPapersDBLP.mtx indochina-2004.mtx
italy_osm.mtx great-britain_osm.mtx germany_osm.mtx asia_osm.mtx
"
for g in $graphs; do
  stdbuf --output=L ./a.out ~/data/$g 2>&1 | tee -a "$out"
done
|
#!/bin/bash
# Entry point for wiseguy: detects the host OS, resolves its own real
# location through any chain of symlinks, then sources the libraries and
# dispatches to wiseguy_exec.
set -e
case "$(uname)" in
  Darwin* )
    wiseguy_host_os=OSX
    ;;
  Linux* )
    # BUG FIX: uname prints "Linux" (capital L); the previous pattern
    # "linux*" could never match, so Linux hosts always hit the abend branch.
    wiseguy_host_os=Linux
    ;;
  * )
    abend "Homeport will only run on OS X or Linux."
    ;;
esac
if [ "$1" == "module" ]; then
  echo $0
  # BUG FIX: the backticks were unescaped inside double quotes, so the shell
  # tried to execute `wiseguy` as a command substitution when printing this.
  echo "Please do not execute these programs directly. Use \`wiseguy\`."
  exit 1
fi
# Portable readlink: macOS readlink has no -f, so resolve one level there.
function wiseguy_readlink() {
  file=$1
  if [ "$wiseguy_host_os" = "OSX" ]; then
    if [ -L "$file" ]; then
      readlink $1
    else
      echo "$file"
    fi
  else
    readlink -f $1
  fi
}
# Follow symlinks one hop at a time until we reach the real script file,
# tracking the directory it actually lives in.
wiseguy_file=$0
while [ -L "$wiseguy_file" ]; do
  expanded=$(wiseguy_readlink "$wiseguy_file")
  pushd "${wiseguy_file%/*}" > /dev/null
  pushd "${expanded%/*}" > /dev/null
  wiseguy_path=$(pwd)
  popd > /dev/null
  popd > /dev/null
  wiseguy_file="$wiseguy_path/${wiseguy_file##*/}"
done
pushd "${wiseguy_file%/*}" > /dev/null
wiseguy_path=$(pwd)
popd > /dev/null
# Load the shared libraries relative to the resolved install path.
source "$wiseguy_path/lib/common.bash"
source "$wiseguy_path/lib/externalized.bash"
source "$wiseguy_path/lib/getopt.bash"
# Optional per-user configuration.
if [ -e ~/.wiseguy.conf ]; then
  source ~/.wiseguy.conf
fi
wiseguy_exec "$@"
|
import React from 'react';
import { createStackNavigator } from '@react-navigation/stack';
import Home from '../Screens/Home';
import Details from '../Screens/Details';
const HomeStack = createStackNavigator();
export default ({ navigation }) => {
return(
<HomeStack.Navigator
initialRouteName="Home"
>
<HomeStack.Screen
name="Home"
component={Home}
/>
<HomeStack.Screen
name="Details"
component={Details}
/>
</HomeStack.Navigator>
);
};
|
import * as React from 'react';
import { Dropdown, MenuItem } from 'react-bootstrap';
import '../actions.less';

// Contract for the build actions dropdown.
export interface Props {
  onDelete: () => any;  // invoked when "Delete" is chosen
  onStop: () => any;    // invoked when "Stop" is chosen (running builds only)
  isRunning: boolean;   // whether the build is currently running
  pullRight: boolean;   // right-align the actions container when true
}

// Ellipsis dropdown offering Stop (while running) and Delete for a build.
function BuildActions(props: Props) {
  const wrapperClass = props.pullRight ? 'actions pull-right' : 'actions';
  const showStop = props.onStop && props.isRunning;
  return (
    <span className={wrapperClass}>
      <Dropdown pullRight={true} key={1} id={`dropdown-actions-1`}>
        <Dropdown.Toggle bsStyle="default" bsSize="small" noCaret={true}>
          <i className="fa fa-ellipsis-h icon" aria-hidden="true"/>
        </Dropdown.Toggle>
        <Dropdown.Menu>
          {showStop &&
            <MenuItem eventKey="1" onClick={props.onStop}>
              <i className="fa fa-stop icon" aria-hidden="true"/> Stop
            </MenuItem>
          }
          <MenuItem eventKey="2" onClick={props.onDelete}>
            <i className="fa fa-trash icon" aria-hidden="true"/> Delete
          </MenuItem>
        </Dropdown.Menu>
      </Dropdown>
    </span>
  );
}

export default BuildActions;
|
import time
from typing import Optional
import uvicorn
from devtools import debug  # prints variables to the prompt (debug helper)
from fastapi import FastAPI, Form, Request
# from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
# import requests
import modules.netfunctions as netf
from config import HOST, PORT
# Report this host's name and IP at startup.
moje_meno = netf.get_hostname()
moje_ip = netf.get_ip()
print('It is', moje_meno, 'My IP address:', moje_ip)
# FastAPI app serving Jinja2 templates with static assets under /static.
app = FastAPI()
app.mount("/static", StaticFiles(directory="./static"), name="static")
templates = Jinja2Templates(directory="./templates")
# Routes:
@app.get("/")
async def root(request: Request):
    """
    Shows the list of registered cryptocurrencies.
    """
    localtime = time.asctime(time.localtime(time.time()))
    print("/; Čas:", localtime)
    return templates.TemplateResponse("home.html", {"request": request, "time": localtime})
# Code for running app
# NOTE(review): the `debug=True` kwarg was removed from uvicorn.run in newer
# uvicorn releases — confirm the pinned uvicorn version still accepts it.
if __name__ == "__main__":
    uvicorn.run("main:app", host=HOST,
                port=int(PORT), reload=True, debug=True)
|
var _Promise = typeof Promise === 'undefined' ? require('es6-promise').Promise : Promise;
module.exports = function getArrayBuffer(chunk) {
return new _Promise(function (resolve, reject) {
var reader = new FileReader();
reader.addEventListener('load', function (e) {
// e.target.result is an ArrayBuffer
resolve(e.target.result);
});
reader.addEventListener('error', function (err) {
console.error('FileReader error' + err);
reject(err);
});
// file-type only needs the first 4100 bytes
reader.readAsArrayBuffer(chunk);
});
};
//# sourceMappingURL=getArrayBuffer.js.map
|
#!/bin/bash
# Installs and enables an nginx reverse-proxy config for a Jenkins master.
# Usage: setup-proxy.sh full.domain.name Private.IP.Address (as root)
CUR_USER=$(whoami)
if [ "$CUR_USER" != "root" ]; then
  echo "Not root yet"
  exit 1
fi
if [ "" == "$1" ]; then
  echo "Please provide domain name:"
  echo " setup-proxy.sh full.domain.name Private.IP.Address"
  exit 1
fi
DOMAIN=$1
if [ "" == "$2" ]; then
  echo "Please provide Private IP address to Jenkins master"
  echo " setup-proxy.sh full.domain.name Private.IP.Address"
  exit 1
fi
IPADDR=$2
# Fetch a fresh copy of the proxy template and substitute the placeholders.
cd /etc/nginx/sites-available
if [ -f webproxy.conf ]; then
  rm webproxy.conf
fi
wget https://raw.githubusercontent.com/svangeepuram/jenkins-bootcamp-course/master/aws/lightsail/scale/webproxy.conf
sed -i "s/DOMAIN_NAME/$DOMAIN/g" webproxy.conf
sed -i "s/JENKINS_IP/$IPADDR/g" webproxy.conf
# Drop any configs that could shadow the proxy, then enable it via symlink.
cd ../sites-enabled
for conf in default web.conf web-secured.conf webproxy.conf; do
  if [ -e "$conf" ]; then
    rm "$conf"
  fi
done
ln -s ../sites-available/webproxy.conf webproxy.conf
nginx -t
systemctl restart nginx
|
<gh_stars>10-100
# Name of the front-end (npm) module providing the widget views.
module_name = "jupyter-datawidgets"
# Semver range of the widget spec this package targets — presumably matched
# against the front-end extension's declared version; confirm against usage.
EXTENSION_SPEC_VERSION = "^5.1.0"
|
// Create an if statement inside the function to return "Yes, that was true" if the parameter wasThatTrue is true and return "No, that was false" otherwise.
// Example
function ourTrueOrFalse(isItTrue) {
  // Guard style: return the affirmative string early, fall through otherwise.
  if (isItTrue) {
    return "Yes, it's true";
  }
  return "No, it's false";
}
// Setup
function trueOrFalse(wasThatTrue) {
  // Only change code below this line.
  return wasThatTrue ? 'Yes, that was true' : 'No, that was false';
  // Only change code above this line.
}
// Change this value to test
trueOrFalse(true);
|
<reponame>fairix/kerkow2
/* eslint-disable import/no-unresolved */
import Plugin from 'src/plugin-system/plugin.class';
import PseudoModalUtil from 'src/utility/modal-extension/pseudo-modal.util';
import PageLoadingIndicatorUtil from 'src/utility/loading-indicator/page-loading-indicator.util';
import ButtonLoadingIndicator from 'src/utility/loading-indicator/button-loading-indicator.util';
import HttpClient from "src/service/http-client.service";
/**
 * Storefront plugin that intercepts checkout order submission to collect a
 * SEPA debit mandate via a modal before allowing the order form to submit.
 */
export default class PayonePaymentDebitCard extends Plugin {
    static options = {
        editorModalClass: 'payone-debit-modal'
    };
    init() {
        // While true, order submission is intercepted and routed through the
        // mandate flow; _submitForm() flips it off before resubmitting.
        this.orderFormDisabled = true;
        this._client = new HttpClient();
        document
            .getElementById('confirmOrderForm')
            .addEventListener('submit', this._handleOrderSubmit.bind(this));
    }
    // Submit handler: hides previous errors, then either lets the (second)
    // submit pass through or opens the mandate modal.
    _handleOrderSubmit(event) {
        const errorOutput = document.getElementById('errorOutput');
        errorOutput.style.display = 'none';
        if (!this.orderFormDisabled) {
            return;
        }
        this._handleOrderFormError(event);
        this._getModal(event);
    }
    // Re-enables the submit button and clears its loading indicator so the
    // user can retry after the intercepted submit.
    _handleOrderFormError(event) {
        const confirmFormSubmit = document.getElementById('confirmFormSubmit');
        event.preventDefault();
        if(confirmFormSubmit) {
            const loader = new ButtonLoadingIndicator(confirmFormSubmit);
            confirmFormSubmit.disabled = false;
            loader.remove();
        }
    }
    // Requests mandate state/content from the server, then opens the modal.
    _getModal(event) {
        event.preventDefault();
        PageLoadingIndicatorUtil.create();
        const data = this._getRequestData();
        // Abort any in-flight request before issuing a new one.
        this._client.abort();
        this._client.post(this._getManageMandateUrl(), JSON.stringify(data), content => this._openModal(content));
    }
    // Lifts the interception flag and performs the real order submission.
    _submitForm() {
        this.orderFormDisabled = false;
        document
            .getElementById('confirmOrderForm')
            .submit();
    }
    // Server response handler: show errors inline, skip the modal for an
    // already-active mandate, otherwise render the mandate modal content.
    _openModal(response) {
        response = JSON.parse(response);
        if (response.error) {
            const errorOutput = document.getElementById('errorOutput');
            errorOutput.innerHTML = response.error;
            errorOutput.style.display = 'block';
            PageLoadingIndicatorUtil.remove();
            return;
        }
        if (response.mandate.Status === 'active') {
            this._submitForm();
            return;
        }
        const pseudoModal = new PseudoModalUtil(response.modal_content);
        pseudoModal.open(this._onOpen.bind(this, pseudoModal));
    }
    // Modal-opened callback: style it, re-init plugins inside it, wire events.
    _onOpen(pseudoModal) {
        const modal = pseudoModal.getModal();
        modal.classList.add('payone-debit-mandate-modal');
        window.PluginManager.initializePlugins();
        this._registerEvents();
        PageLoadingIndicatorUtil.remove();
    }
    // Collects CSRF token and bank details from the checkout form.
    _getRequestData() {
        const csrfToken = document.getElementById('payoneCsrfTokenDebitManageMandate');
        const iban = document.getElementById('iban');
        const bic = document.getElementById('bic');
        return {
            '_csrf_token': csrfToken.value,
            'iban': iban.value,
            'bic': bic.value
        };
    }
    // Endpoint URL is provided by the page via a data attribute.
    _getManageMandateUrl() {
        const configuration = document.getElementById('payone-configuration');
        return configuration.getAttribute('data-manage-mandate-url');
    }
    _registerEvents() {
        document
            .getElementById('mandateSubmit')
            .addEventListener('click', this._onMandateSubmit.bind(this));
    }
    // Only submit the order once the mandate checkbox has been accepted.
    _onMandateSubmit() {
        const checkbox = document.getElementById('accept-mandate');
        if (checkbox.checked) {
            this._submitForm();
        }
    }
}
|
def check_shell_script(file_path):
    """Inspect a shell script's shebang line and print a diagnostic.

    Prints exactly one line:
      * zsh shebang  -> "no issues" confirmation
      * sh shebang   -> compatibility warning
      * other shell  -> informational note
      * no shebang   -> error

    :param file_path: path to the shell script to inspect
    """
    with open(file_path, 'r') as file:
        first_line = file.readline().strip()
    if first_line.startswith("#!"):
        if "#!/bin/zsh" in first_line:
            print("The script uses zsh as the executing shell. No issues found.")
        elif "#!/bin/sh" in first_line:
            print("Warning: The script uses sh as the executing shell. Consider using zsh for compatibility.")
        else:
            # A shebang exists but names some other interpreter.
            print("Note: The script uses an unrecognized shell:", first_line)
    else:
        # BUG FIX: previously this message was attached to the inner chain,
        # so it printed whenever the shebang was neither zsh nor sh — even
        # though a shebang line WAS present — and files with no shebang at
        # all produced no output.
        print("Error: Shebang line not found in the script.")
# Test the function with a sample file
# NOTE(review): this runs at import time and raises FileNotFoundError when
# script.sh is absent from the working directory — confirm that is intended.
check_shell_script("script.sh")
|
# Launch the ATM application from its self-contained ("jar-with-dependencies") build.
java -jar ../implementation/target/ATM-implementation-jar-with-dependencies.jar
|
# Parameters consumed by the shared build script sourced below.
SdkName="BrightScriptSDK"
targetSrc="brightscript"
delSrc=true
cd ..
# Source (not exec) shared_build.sh so it sees the variables set above.
. ./shared_build.sh
|
#!/bin/bash
# Copyright 2017 MSO4SC - javier.carnero@atos.net
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Refresh the manager VM's SSH host key (the VM may have been recreated).
ssh-keygen -R 192.168.56.20
ssh-keyscan -H 192.168.56.20 >> ~/.ssh/known_hosts
cd /opt/cfy/cloudify-manager-blueprints/
# Fill the blueprint inputs in place: manager IPs, SSH credentials and the
# admin account (the quoting dance escapes single quotes for sed).
sed -i -e 's/public_ip: '\'''\''/public_ip: '\''192\.168\.56\.20'\''/g' ./simple-manager-blueprint-inputs.yaml
sed -i -e 's/private_ip: '\'''\''/private_ip: '\''192\.168\.56\.20'\''/g' ./simple-manager-blueprint-inputs.yaml
sed -i -e 's/ssh_user: '\'''\''/ssh_user: '\''vagrant'\''/g' ./simple-manager-blueprint-inputs.yaml
sed -i -e 's/ssh_key_filename: '\'''\''/ssh_key_filename: '\''~\/\.ssh\/id_rsa'\''/g' ./simple-manager-blueprint-inputs.yaml
sed -i -e 's/#agents_user: ubuntu/agents_user: ubuntu/g' ./simple-manager-blueprint-inputs.yaml
sed -i -e 's/#admin_username: '\''admin'\''/admin_username: '\''admin'\''/g' ./simple-manager-blueprint-inputs.yaml
sed -i -e 's/#admin_password: '\'''\''/admin_password: '\''admin'\''/g' ./simple-manager-blueprint-inputs.yaml
# Bootstrap the Cloudify manager from the filled-in blueprint and verify.
cfy bootstrap simple-manager-blueprint.yaml -i simple-manager-blueprint-inputs.yaml
cfy status
|
from setuptools import setup, find_packages
# Packaging metadata for the Ennchan home-page application (Flask-based).
setup(name='Ennchan',
      version='0.0',
      description='The Ennchan home page.',
      author='Ennchan',
      author_email='<EMAIL>',
      url='http://www.python.org/sigs/distutils-sig/',
      packages=find_packages(),
      install_requires=['Flask'],
      )
|
#!/usr/bin/env bash
# Downloads and installs Apache Kafka under ${KAFKA_HOME}, preferring an
# Apache mirror and falling back to archive.apache.org.
set -e
KAFKA_HOME=${KAFKA_HOME:-"/opt/kafka"}
SCALA_VERSION=${SCALA_VERSION:-2.13}
KAFKA_VERSION=${KAFKA_VERSION:-3.0.0}
FILENAME="kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
echo "Installing kafka ${SCALA_VERSION}-${KAFKA_VERSION}"
# Ask the Apache mirror resolver for the preferred mirror URL.
url=$(curl --fail --silent --stderr /dev/null "https://www.apache.org/dyn/closer.cgi?path=/kafka/${KAFKA_VERSION}/${FILENAME}&as_json=1" | jq -r '"\(.preferred)\(.path_info)"')
# Test if mirror is valid. Fallback to archive.apache.org
if [[ ! $(curl --fail --silent --head "${url}") ]]; then
    url="https://archive.apache.org/dist/kafka/${KAFKA_VERSION}/${FILENAME}"
fi
echo "Downloading Kafka from $url"
curl --fail --silent --show-error --location "${url}" --output "/tmp/${FILENAME}"
echo "Installing Kafka to ${KAFKA_HOME}"
# ROBUSTNESS FIX: quote every expansion so an overridden KAFKA_HOME (or a
# path containing spaces/globs) cannot be word-split or glob-expanded.
tar --extract --gzip --verbose --file="/tmp/${FILENAME}" --directory=/opt
ln -s "/opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION}" "${KAFKA_HOME}"
ls -la /opt/
rm -rf "/tmp/${FILENAME}"
# Keep a pristine copy of the stock configuration for reference/reset.
cp -r "$KAFKA_HOME/config" "$KAFKA_HOME/config-backup"
|
<gh_stars>0
/* Serbian locals for flatpickr */
import { CustomLocale } from "types/locale";
import { FlatpickrFn } from "types/instance";
// Grab the global flatpickr instance in the browser, or a stub l10ns holder
// when imported in a non-browser (SSR/test) environment.
const fp: FlatpickrFn =
  typeof window !== "undefined" && window.flatpickr !== undefined
    ? window.flatpickr
    : {
        l10ns: {},
      } as FlatpickrFn;
/** Serbian (Latin) locale for flatpickr. */
export const Serbian: CustomLocale = {
  weekdays: {
    // BUG FIX: flatpickr weekday arrays must contain exactly 7 entries,
    // Sunday first; the previous arrays carried a duplicated 8th
    // "Ned"/"Nedelja" entry, violating the CustomLocale tuple type.
    shorthand: ["Ned", "Pon", "Uto", "Sre", "Čet", "Pet", "Sub"],
    longhand: [
      "Nedelja",
      "Ponedeljak",
      "Utorak",
      "Sreda",
      "Četvrtak",
      "Petak",
      "Subota",
    ],
  },
  months: {
    shorthand: [
      "Jan",
      "Feb",
      "Mar",
      "Apr",
      "Maj",
      "Jun",
      "Jul",
      "Avg",
      "Sep",
      "Okt",
      "Nov",
      "Dec",
    ],
    longhand: [
      "Januar",
      "Februar",
      "Mart",
      "April",
      "Maj",
      "Jun",
      "Jul",
      "Avgust",
      "Septembar",
      "Oktobar",
      "Novembar",
      "Decembar",
    ],
  },
  firstDayOfWeek: 1, // weeks start on Monday
  weekAbbreviation: "Ned.",
  rangeSeparator: " do ",
};
// Register the locale under the "sr" key.
fp.l10ns.sr = Serbian;
export default fp.l10ns;
|
#!/bin/bash
# Runs 100 geodesic polymer simulations, writing one stdout/stderr log pair per job.
nbonds=20
long=2
cfg=/Users/Arthur/stratt/polymer/configs/trial_job.cfg
out=/Users/Arthur/stratt/polymer/test/gd_trial_job/out/
for njob in {1..100}
do
    # The simulation id encodes bond count, chain length and job number.
    sim="${nbonds}_${long}_${njob}"
    log1="${out}/${sim}_gd_out.log"
    log2="${out}/${sim}_gd_err.log"
    echo "Running job ${njob}"
    # stdout -> *_gd_out.log, stderr -> *_gd_err.log
    bin/run_geodesic_simulation ${sim} ${cfg} 1>${log1} 2>${log2}
done
|
#!/usr/bin/env bash
{{!
Template adapted from here:
https://github.com/chriskempson/base16-builder/blob/master/templates/gnome-terminal/dark.sh.erb
}}
# Base16 dirtysea - Gnome Terminal color scheme install script
# Kahlil (Kal) Hodgson
# Allow env-var overrides for the profile name/slug and the tool binaries.
[[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="Base 16 dirtysea 256"
[[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="base-16-dirtysea-256"
[[ -z "$DCONF" ]] && DCONF=dconf
[[ -z "$UUIDGEN" ]] && UUIDGEN=uuidgen
# Write one key under the target dconf profile.
# NOTE(review): "$type" is never assigned inside this function, so the
# string-quoting branch only fires if a global $type leaks in; the callers
# below pre-quote their string values themselves. Confirm before relying on it.
dset() {
    local key="$1"; shift
    local val="$1"; shift
    if [[ "$type" == "string" ]]; then
        val="'$val'"
    fi
    "$DCONF" write "$PROFILE_KEY/$key" "$val"
}
# Because dconf still doesn't have "append"
# Appends $val to the dconf list at $key, dropping any existing occurrence
# of $val first so the list stays duplicate-free.
dlist_append() {
    local key="$1"; shift
    local val="$1"; shift
    local entries="$(
        {
            "$DCONF" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
            echo "'$val'"
        } | head -c-1 | tr "\n" ,
    )"
    "$DCONF" write "$key" "[$entries]"
}
# Newest versions of gnome-terminal use dconf
if which "$DCONF" > /dev/null 2>&1; then
    # Check that uuidgen is available
    type $UUIDGEN >/dev/null 2>&1 || { echo >&2 "Requires uuidgen but it's not installed. Aborting!"; exit 1; }
    [[ -z "$BASE_KEY_NEW" ]] && BASE_KEY_NEW=/org/gnome/terminal/legacy/profiles:
    if [[ -n "`$DCONF list $BASE_KEY_NEW/`" ]]; then
        # Modern profiles are keyed by UUID rather than by slug.
        if which "$UUIDGEN" > /dev/null 2>&1; then
            PROFILE_SLUG=`uuidgen`
        fi
        # Find the default profile so its settings can seed the new one.
        if [[ -n "`$DCONF read $BASE_KEY_NEW/default`" ]]; then
            DEFAULT_SLUG=`$DCONF read $BASE_KEY_NEW/default | tr -d \'`
        else
            DEFAULT_SLUG=`$DCONF list $BASE_KEY_NEW/ | grep '^:' | head -n1 | tr -d :/`
        fi
        DEFAULT_KEY="$BASE_KEY_NEW/:$DEFAULT_SLUG"
        PROFILE_KEY="$BASE_KEY_NEW/:$PROFILE_SLUG"
        # Copy existing settings from default profile
        $DCONF dump "$DEFAULT_KEY/" | $DCONF load "$PROFILE_KEY/"
        # Add new copy to list of profiles
        dlist_append $BASE_KEY_NEW/list "$PROFILE_SLUG"
        # Update profile values with theme options
        dset visible-name "'$PROFILE_NAME'"
        dset palette "['#e0e0e0', '#840000', '#730073', '#755B00', '#007300', '#000090', '#755B00', '#000000', '#707070', '#840000', '#730073', '#755B00', '#007300', '#000090', '#755B00', '#c4d9c4']"
        dset background-color "'#e0e0e0'"
        dset foreground-color "'#000000'"
        dset bold-color "'#000000'"
        dset bold-color-same-as-fg "true"
        dset cursor-colors-set "true"
        dset cursor-background-color "'#000000'"
        dset cursor-foreground-color "'#e0e0e0'"
        dset use-theme-colors "false"
        dset use-theme-background "false"
        unset PROFILE_NAME
        unset PROFILE_SLUG
        unset DCONF
        unset UUIDGEN
        # Success on the dconf path — skip the gconf fallback entirely.
        exit 0
    fi
fi
# Fallback for Gnome 2 and early Gnome 3
[[ -z "$GCONFTOOL" ]] && GCONFTOOL=gconftool
[[ -z "$BASE_KEY" ]] && BASE_KEY=/apps/gnome-terminal/profiles
PROFILE_KEY="$BASE_KEY/$PROFILE_SLUG"
# Set one typed key under the gconf profile.
gset() {
    local type="$1"; shift
    local key="$1"; shift
    local val="$1"; shift
    "$GCONFTOOL" --set --type "$type" "$PROFILE_KEY/$key" -- "$val"
}
# Because gconftool doesn't have "append"
# Appends $val to the gconf list at $key, dropping any existing occurrence
# of $val first so the list stays duplicate-free.
glist_append() {
    local type="$1"; shift
    local key="$1"; shift
    local val="$1"; shift
    local entries="$(
        {
            "$GCONFTOOL" --get "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
            echo "$val"
        } | head -c-1 | tr "\n" ,
    )"
    "$GCONFTOOL" --set --type list --list-type $type "$key" "[$entries]"
}
# Append the Base16 profile to the profile list
glist_append string /apps/gnome-terminal/global/profile_list "$PROFILE_SLUG"
gset string visible_name "$PROFILE_NAME"
gset string palette "#e0e0e0:#840000:#730073:#755B00:#007300:#000090:#755B00:#000000:#707070:#840000:#730073:#755B00:#007300:#000090:#755B00:#c4d9c4"
gset string background_color "#e0e0e0"
gset string foreground_color "#000000"
gset string bold_color "#000000"
gset bool bold_color_same_as_fg "true"
# NOTE(review): the next three keys use dconf-style hyphenated names and
# pre-quoted values, unlike the underscore-named gconf keys around them —
# they look copied from the dconf section above. Confirm they are honored
# by gconf-era gnome-terminal before relying on cursor colors here.
gset bool cursor-colors-set "true"
gset string cursor-background-color "'#000000'"
gset string cursor-foreground-color "'#e0e0e0'"
gset bool use_theme_colors "false"
gset bool use_theme_background "false"
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
#! /bin/sh
# Run the CSV report generator jar from the bin directory.
java -jar bin/csv-report.jar
|
#!/bin/bash
# Regression check: compare the fit log against the stored reference log
# (ignoring timing / library-load / path noise via -I patterns), and only if
# the logs match, compare the fit result files as well (&& short-circuits).
diff -w -I "Real time" -I "RooRealVar::" -I "mkdir" -I "libSM.so" -I "libASImage" -I "png file FitExampleMorphingDilep" LOG_MORPH_DILEP_f test/logs/FitExampleMorphingDilep/LOG_MORPH_DILEP_f && diff -w FitExampleMorphingDilep/Fits/FitExampleMorphingDilep.txt test/reference/FitExampleMorphingDilep/Fits/FitExampleMorphingDilep.txt
diff -w -I "Real time" -I "RooRealVar::" -I "mkdir" -I "libSM.so" -I "libASImage" -I "png file FitExampleMorphingDilep" LOG_MORPH_DILEP_f test/logs/FitExampleMorphingDilep/LOG_MORPH_DILEP_f && diff -w FitExampleMorphingDilep/Fits/FitExampleMorphingDilep_saturatedModel.txt test/reference/FitExampleMorphingDilep/Fits/FitExampleMorphingDilep_saturatedModel.txt
|
/* eslint-disable */
const createProxyMiddleware = require('http-proxy-middleware');
const morgan = require('morgan');
module.exports = (app) => {
app.use(createProxyMiddleware('/auth', {
target: process.env.API_BASEURL || 'http://localhost:9999/auth',
changeOrigin: true,
pathRewrite: {
'^/auth': '/',
},
}));
app.use(morgan('combined'));
};
/* eslint-enable */
|
<reponame>carlosdnba/serverless-stack
import {
DeleteItemCommand,
DescribeTableCommand,
DynamoDBClient,
GetItemCommand,
PutItemCommand,
QueryCommand,
ScanCommand,
ScanCommandInput,
ScanCommandOutput,
} from "@aws-sdk/client-dynamodb";
import {
useInfiniteQuery,
useMutation,
useQuery,
useQueryClient,
} from "react-query";
import { useClient } from "./client";
import { marshall } from "@aws-sdk/util-dynamodb";
// React-query hook wrapping DynamoDB DescribeTable for the given table name.
// Disabled until a name is provided; never refetches on window focus.
export function useDescribeTable(name?: string) {
  const dynamo = useClient(DynamoDBClient);
  return useQuery({
    queryKey: ["describeTable", name],
    queryFn: () => dynamo.send(new DescribeTableCommand({ TableName: name })),
    enabled: Boolean(name),
    refetchOnWindowFocus: false,
  });
}
// Options for useScanTable; bump `version` to force a fresh scan.
export interface ScanOpts {
  version: number;
  pk?: Filter; // partition-key condition; op "=" switches scan -> query
  sk?: Filter; // sort-key condition (only meaningful alongside pk)
  filters: Filter[]; // additional non-key filter conditions
}
// A single comparison applied to attribute `key` against literal `value`.
interface Filter {
  key: string;
  op: "=" | "<>" | "<" | "<=" | ">" | ">=";
  value: string;
}
/**
 * Infinite-query hook that pages through a DynamoDB table or index.
 * When the pk filter uses "=", a Query is issued (pk/sk become the key
 * condition); otherwise a Scan is issued and pk/sk act as plain filters.
 */
export function useScanTable(name?: string, index?: string, opts?: ScanOpts) {
  const dynamo = useClient(DynamoDBClient);
  return useInfiniteQuery({
    queryKey: ["scanTable", { name, index, opts }],
    queryFn: async (ctx) => {
      const isQuery = opts.pk?.op === "=";
      // For a Scan, pk/sk are ordinary filters; for a Query they are
      // excluded here and expressed via KeyConditionExpression below.
      const filters = [
        !isQuery && opts.pk,
        !isQuery && opts.sk,
        ...opts.filters,
      ].filter((item) => {
        return Boolean(item?.op);
      });
      const filterExpression = filters
        .filter(
          (item) => !isQuery || ![opts.pk?.key, opts.sk?.key].includes(item.key)
        )
        .map((item) => `#${item.key} ${item.op} :${item.key}`);
      // Values are JSON-parsed when possible (numbers, booleans, ...) and
      // fall back to the raw string; marshall converts to DynamoDB AttributeValue.
      const expressionAttributes = [opts.pk, opts.sk, ...opts.filters]
        .filter((item) => Boolean(item?.op))
        .map((item) => {
          let val = undefined;
          try {
            val = JSON.parse(item.value);
          } catch (e) {
            val = item.value;
          }
          return [
            `:${item.key}`,
            marshall({ val }, { removeUndefinedValues: true }).val,
          ];
        });
      // Attribute-name aliases (#key) avoid clashes with reserved words.
      const expressionAttributesNames = [opts.pk, opts.sk, ...opts.filters]
        .filter((item) => Boolean(item?.op))
        .map((item) => [`#${item.key}`, item.key]);
      const params: ScanCommandInput = {
        TableName: name,
        IndexName: index === "Primary" ? undefined : index,
        ExclusiveStartKey: ctx.pageParam,
        Limit: 50,
        FilterExpression: filterExpression.length
          ? filterExpression.join(" AND ")
          : undefined,
        ExpressionAttributeNames: expressionAttributesNames.length
          ? Object.fromEntries(expressionAttributesNames)
          : undefined,
        ExpressionAttributeValues: expressionAttributes.length
          ? Object.fromEntries(expressionAttributes)
          : undefined,
      };
      const response = await dynamo.send(
        isQuery
          ? new QueryCommand({
              ...params,
              KeyConditionExpression: (["pk", "sk"] as const)
                .map((key) => opts[key])
                .filter((item) => Boolean(item?.op))
                .map((item) => `#${item.key} ${item.op} :${item.key}`)
                .join(" AND "),
            })
          : new ScanCommand(params)
      );
      // if (!response.Count) throw new Error("No items");
      return response;
    },
    refetchOnWindowFocus: false,
    retry: false,
    enabled: Boolean(index) && Boolean(name) && Boolean(opts),
    // Paging cursor: DynamoDB's LastEvaluatedKey feeds the next ExclusiveStartKey.
    getNextPageParam: (page: ScanCommandOutput) => page.LastEvaluatedKey,
  });
}
/**
 * Mutation hook that deletes an item and optimistically removes it from any
 * cached scanTable pages (matched by object identity with `original`).
 */
export function useDeleteItem() {
  const dynamo = useClient(DynamoDBClient);
  const qc = useQueryClient();
  return useMutation({
    mutationFn: async (opts: {
      tableName: string;
      keys: any;      // primary-key attributes of the item to delete
      original: any;  // the cached (marshalled) item, used for cache eviction
    }) => {
      const response = await dynamo.send(
        new DeleteItemCommand({
          TableName: opts.tableName,
          Key: marshall(opts.keys, {
            removeUndefinedValues: true,
          }),
        })
      );
      // Drop the deleted item from every cached scan page in place.
      qc.setQueriesData(["scanTable"], (old: any) => {
        if (!old) return;
        return {
          ...old,
          pages: old.pages.map((page: any) => ({
            ...page,
            Items: page.Items.filter((item: any) => opts.original !== item),
          })),
        };
      });
      return response;
    },
  });
}
/**
 * Mutation hook that puts (creates or replaces) an item. For an edit it
 * swaps the cached item in place; for a create it invalidates the scans
 * so the new row is fetched.
 */
export function usePutItem() {
  const dynamo = useClient(DynamoDBClient);
  const qc = useQueryClient();
  return useMutation({
    mutationFn: async (opts: {
      tableName: string;
      item: any;      // plain JS item; marshalled before sending
      original: any;  // previously cached item (undefined/null => create)
    }) => {
      const marshalled = marshall(opts.item, { removeUndefinedValues: true });
      const response = await dynamo.send(
        new PutItemCommand({
          TableName: opts.tableName,
          Item: marshalled,
        })
      );
      // Creation: no cached row to patch, so refetch the scans instead.
      if (!opts.original) {
        qc.invalidateQueries(["scanTable"]);
        return;
      }
      // Edit: replace the old cached item (matched by identity) in place.
      qc.setQueriesData(["scanTable"], (old: any) => {
        if (!old) return;
        return {
          ...old,
          pages: old.pages.map((page: any) => ({
            ...page,
            Items: page.Items.map((item: any) => {
              if (opts.original !== item) return item;
              return marshalled;
            }),
          })),
        };
      });
      return response;
    },
  });
}
/**
 * Query hook fetching a single item by primary key.
 *
 * @param table table name to read from
 * @param keys  primary-key attributes of the item
 */
export function useGetItem(table: string, keys: Record<string, string>) {
  const dynamo = useClient(DynamoDBClient);
  return useQuery({
    // BUG FIX: include the table name in the cache key. Previously the key
    // was ["getItem", keys] only, so items from different tables that share
    // the same key attributes collided in the react-query cache.
    queryKey: ["getItem", table, keys],
    keepPreviousData: false,
    queryFn: async () => {
      const response = await dynamo.send(
        new GetItemCommand({
          TableName: table,
          Key: marshall(keys, {
            removeUndefinedValues: true,
          }),
        })
      );
      return response;
    },
    enabled: Boolean(keys),
    refetchOnWindowFocus: false,
  });
}
|
#!/usr/bin/env bash
# Runs the median-filter binary on the salt-and-pepper test image with a
# fixed radius/iteration count; -x echoes each command, -e stops on failure.
set -e
set -x
# Repository root = parent of this script's directory.
basepath=$(cd `dirname $0`/..; pwd)
RUN_MEDIAN_FILTERS=${basepath}/build_linux/run_median_filters
INPUT_PATH=${basepath}/images/jian20_salt_pepper_noise.jpg
FILTER_RADIUS=3
ITERATION_NUM=1
# The output prefix encodes the filter parameters for easy run comparison.
MEDIAN_FILTER_OUT_PATH_PREFIX=${basepath}/results/median_filter_radius${FILTER_RADIUS}_iter${ITERATION_NUM}
${RUN_MEDIAN_FILTERS} \
    ${INPUT_PATH} \
    ${FILTER_RADIUS} \
    ${ITERATION_NUM} \
    ${MEDIAN_FILTER_OUT_PATH_PREFIX}
|
<gh_stars>0
#ifndef FUNCTIONS_H
#define FUNCTIONS_H

/* BUG FIX: uint16_t is declared in <stdint.h>; without this include the
 * header only compiles when every includer happens to pull in stdint.h
 * first. Including it here makes the header self-contained. */
#include <stdint.h>

/* Blocking delay helpers; the parameter is the delay in the named unit. */
void Sleep_us(uint16_t delay);
void Sleep_ms(uint16_t delay);
void Sleep_sec(uint16_t delay);

#endif
|
#!/bin/sh
#Copyright (c) 2016.
#
#Juergen Key. Alle Rechte vorbehalten.
#
#Weiterverbreitung und Verwendung in nichtkompilierter oder kompilierter Form,
#mit oder ohne Veraenderung, sind unter den folgenden Bedingungen zulaessig:
#
# 1. Weiterverbreitete nichtkompilierte Exemplare muessen das obige Copyright,
#die Liste der Bedingungen und den folgenden Haftungsausschluss im Quelltext
#enthalten.
# 2. Weiterverbreitete kompilierte Exemplare muessen das obige Copyright,
#die Liste der Bedingungen und den folgenden Haftungsausschluss in der
#Dokumentation und/oder anderen Materialien, die mit dem Exemplar verbreitet
#werden, enthalten.
# 3. Weder der Name des Autors noch die Namen der Beitragsleistenden
#duerfen zum Kennzeichnen oder Bewerben von Produkten, die von dieser Software
#abgeleitet wurden, ohne spezielle vorherige schriftliche Genehmigung verwendet
#werden.
#
#DIESE SOFTWARE WIRD VOM AUTOR UND DEN BEITRAGSLEISTENDEN OHNE
#JEGLICHE SPEZIELLE ODER IMPLIZIERTE GARANTIEN ZUR VERFUEGUNG GESTELLT, DIE
#UNTER ANDEREM EINSCHLIESSEN: DIE IMPLIZIERTE GARANTIE DER VERWENDBARKEIT DER
#SOFTWARE FUER EINEN BESTIMMTEN ZWECK. AUF KEINEN FALL IST DER AUTOR
#ODER DIE BEITRAGSLEISTENDEN FUER IRGENDWELCHE DIREKTEN, INDIREKTEN,
#ZUFAELLIGEN, SPEZIELLEN, BEISPIELHAFTEN ODER FOLGENDEN SCHAEDEN (UNTER ANDEREM
#VERSCHAFFEN VON ERSATZGUETERN ODER -DIENSTLEISTUNGEN; EINSCHRAENKUNG DER
#NUTZUNGSFAEHIGKEIT; VERLUST VON NUTZUNGSFAEHIGKEIT; DATEN; PROFIT ODER
#GESCHAEFTSUNTERBRECHUNG), WIE AUCH IMMER VERURSACHT UND UNTER WELCHER
#VERPFLICHTUNG AUCH IMMER, OB IN VERTRAG, STRIKTER VERPFLICHTUNG ODER
#UNERLAUBTE HANDLUNG (INKLUSIVE FAHRLAESSIGKEIT) VERANTWORTLICH, AUF WELCHEM
#WEG SIE AUCH IMMER DURCH DIE BENUTZUNG DIESER SOFTWARE ENTSTANDEN SIND, SOGAR,
#WENN SIE AUF DIE MOEGLICHKEIT EINES SOLCHEN SCHADENS HINGEWIESEN WORDEN SIND.
# UUIDs of every registered VM: text between '{' and '}' in "VBoxManage list vms".
list="$(VBoxManage list vms 2>/dev/null|cut -d{ -f2 |cut -d} -f1)"
#quote=`$'\t'`
# Host bridge names from brctl, header row dropped (tabs mapped to '#' so cut can split).
# NOTE(review): appears unused in the visible dispatch below — confirm before removing.
sysbrdgs="$(brctl show |sed 's/\t/#/g'|cut -d# -f1|grep .|tail -n +2)"
# A literal single-quote character. NOTE(review): unused in the visible code.
quote=\'
# Unique host interfaces that any VM NIC is bridged onto ("Bridged Interface '<name>'").
# NOTE(review): also unused in the visible dispatch below.
brdgifaces="$(VBoxManage list vms --long 2>/dev/null|grep NIC|cut -d, -f 2 |grep 'Bridged Interface'|cut -d\' -f2 |sort|uniq)"
# Commented-out debug helpers: print the collected VM and interface lists.
#echo $list
#printf '%s\n' "$list" | while IFS= read -r machine
#do
#	echo "$machine"
#done
#printf '%s\n' "$brdgifaces" | while IFS= read -r iface
#do
#	echo "$iface"
#done
# Dispatch on the requested sub-command.
case "$1" in
intnet)
    # List every VM that has an enabled NIC mentioning the given network name.
    if [ $# -ne 2 ];
    then
        echo "Usage: $0 intnet <network-name>" >&2
        exit 1
    fi
    printf '%s\n' "$list" | while IFS= read -r machine
    do
        # Count enabled NIC lines on this VM that mention the network name.
        part="$(VBoxManage showvminfo "$machine" 2>/dev/null|grep NIC|grep -v disabled |grep -- "$2"|wc -l)"
        if [ "$part" -ne 0 ];
        then
            name="$(VBoxManage list vms 2>/dev/null|grep -- "$machine")"
            echo "$name"
        fi
    done
    ;;
unplug)
    # Detach the first NIC of VM <uuid> that is connected to <network-name>.
    if [ $# -ne 3 ];
    then
        echo "Usage: $0 unplug <uuid> <network-name>" >&2
        exit 1
    fi
    name="$(VBoxManage list vms 2>/dev/null|grep -- "$2")"
    # NIC number: first showvminfo line quoting '<network-name>', field before ':'.
    nic="$(VBoxManage showvminfo "$2" 2>/dev/null|grep NIC|grep "'$3'"|head -n 1|cut -d: -f1|cut -d' ' -f2)"
    echo "unplugging $name from $3 ($nic)"
    VBoxManage modifyvm "$2" "--nic$nic" none
    ;;
plug)
    # Attach the first disabled NIC of VM <uuid> to internal network <network-name>.
    if [ $# -ne 3 ];
    then
        echo "Usage: $0 plug <uuid> <network-name>" >&2
        exit 1
    fi
    name="$(VBoxManage list vms 2>/dev/null|grep -- "$2")"
    nic="$(VBoxManage showvminfo "$2" 2>/dev/null|grep NIC|grep disabled|head -n 1|cut -d: -f1|cut -d' ' -f2)"
    echo "plugging $name into $3 ($nic)"
    VBoxManage modifyvm "$2" "--nic$nic" intnet
    VBoxManage modifyvm "$2" "--intnet$nic" "$3"
    ;;
disconnect)
    # Detach every enabled NIC of VM <uuid>.
    if [ $# -ne 2 ];
    then
        echo "Usage: $0 disconnect <uuid>" >&2
        exit 1
    fi
    name="$(VBoxManage list vms 2>/dev/null|grep -- "$2")"
    nics="$(VBoxManage showvminfo "$2" 2>/dev/null|grep NIC|grep -v disabled|cut -d: -f1|cut -d' ' -f2)"
    printf '%s\n' "$nics" | while IFS= read -r nic
    do
        # Network name for this NIC (spaces mapped to '#' so cut can split).
        net="$(VBoxManage showvminfo "$2"|sed 's/ /#/g'|grep "NIC#$nic"|cut -d: -f4|cut -d\' -f2)"
        echo "unplugging $name from $net ($nic)"
        VBoxManage modifyvm "$2" "--nic$nic" none
    done
    ;;
*)
    echo "Usage: $0 {intnet|unplug|plug|disconnect}" >&2
    exit 1
    ;;
esac
|
//
// EndPointFramework.h
// EndPointFramework
//
// Created by <NAME> on 11/9/20.
//
// NOTE(review): this looks like the Xcode-generated umbrella header for the
// EndPointFramework target — confirm against the project settings.
#import <Foundation/Foundation.h>
//! Project version number for EndPointFramework.
FOUNDATION_EXPORT double EndPointFrameworkVersionNumber;
//! Project version string for EndPointFramework.
FOUNDATION_EXPORT const unsigned char EndPointFrameworkVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <EndPointFramework/PublicHeader.h>
|
#include <iostream>
#include <cstdio>
using namespace std;
// Returns true iff the three names are exactly "Zulian", "Razzashi", "Hakkari".
// Fixes the original, which did not compile (parameters b and c had no type)
// and used '=' (assignment) with multi-character char literals instead of
// string equality comparison.
bool set1(const std::string &a, const std::string &b, const std::string &c) {
    return a == "Zulian" && b == "Razzashi" && c == "Hakkari";
}
// Returns true iff the three names are exactly "Sandfury", "Skullsplitter",
// "Bloodscalp". Fixes the original non-compiling signature (untyped
// parameters) and the '=' / multi-char-literal comparison bug.
bool set2(const std::string &d, const std::string &e, const std::string &f) {
    return d == "Sandfury" && e == "Skullsplitter" && f == "Bloodscalp";
}
// Returns true iff the three names are exactly "Gurubashi", "Vilebranch",
// "Witherbark". Fixes the original non-compiling signature (untyped
// parameters) and the '=' / multi-char-literal comparison bug.
bool set3(const std::string &g, const std::string &h, const std::string &i) {
    return g == "Gurubashi" && h == "Vilebranch" && i == "Witherbark";
}
int main() {
    // Reads a pair of integers from stdin; only M is used (as the loop bound).
    int N,M;
    cin >> N >> M;
    // NOTE(review): the loop body is empty — the per-iteration processing
    // (presumably reading M lines and classifying them via set1/set2/set3)
    // appears to be unimplemented. Confirm intent before relying on this.
    for (int i=0; i <M; ++i){
    }
    return 0;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.