text stringlengths 1 1.05M |
|---|
<reponame>destenson/tensorflow--tensorflow
/*
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package tensorflow
// #include <stdlib.h>
// #include <string.h>
// #include "tensorflow/c/c_api.h"
import "C"
import (
"bytes"
"encoding/binary"
"fmt"
"io"
"reflect"
"runtime"
"unsafe"
)
// DataType holds the type for a scalar value. E.g., one slot in a tensor.
// It mirrors the C enum TF_DataType declared in tensorflow/c/c_api.h.
type DataType C.TF_DataType
// Types of scalar values in the TensorFlow type system.
// Each value corresponds one-to-one with a TF_* constant of the
// TF_DataType enum in tensorflow/c/c_api.h.
const (
	Float      DataType = C.TF_FLOAT
	Double     DataType = C.TF_DOUBLE
	Int32      DataType = C.TF_INT32
	Uint32     DataType = C.TF_UINT32
	Uint8      DataType = C.TF_UINT8
	Int16      DataType = C.TF_INT16
	Int8       DataType = C.TF_INT8
	String     DataType = C.TF_STRING
	Complex64  DataType = C.TF_COMPLEX64
	Complex    DataType = C.TF_COMPLEX
	Int64      DataType = C.TF_INT64
	Uint64     DataType = C.TF_UINT64
	Bool       DataType = C.TF_BOOL
	Qint8      DataType = C.TF_QINT8
	Quint8     DataType = C.TF_QUINT8
	Qint32     DataType = C.TF_QINT32
	Bfloat16   DataType = C.TF_BFLOAT16
	Qint16     DataType = C.TF_QINT16
	Quint16    DataType = C.TF_QUINT16
	Uint16     DataType = C.TF_UINT16
	Complex128 DataType = C.TF_COMPLEX128
	Half       DataType = C.TF_HALF
)
// Tensor holds a multi-dimensional array of elements of a single data type.
type Tensor struct {
	c     *C.TF_Tensor // owning handle to the C-side tensor; released by finalize
	shape []int64      // dimension sizes, one entry per dimension (nil for scalars)
}
// NewTensor converts from a Go value to a Tensor. Valid values are scalars,
// slices, and arrays. Every element of a slice must have the same length so
// that the resulting Tensor has a valid shape.
func NewTensor(value interface{}) (*Tensor, error) {
	val := reflect.ValueOf(value)
	shape, dataType, err := shapeAndDataTypeOf(val)
	if err != nil {
		return nil, err
	}
	nflattened := numElements(shape)
	// Fixed-width element types: buffer size is element-size * element-count.
	nbytes := typeOf(dataType, nil).Size() * uintptr(nflattened)
	if dataType == String {
		// TF_STRING tensors are encoded as an array of 8-byte offsets
		// followed by string data. See c_api.h.
		nbytes = uintptr(nflattened*8) + byteSizeOfEncodedStrings(value)
	}
	var shapePtr *C.int64_t
	if len(shape) > 0 {
		shapePtr = (*C.int64_t)(unsafe.Pointer(&shape[0]))
	}
	t := &Tensor{
		c:     C.TF_AllocateTensor(C.TF_DataType(dataType), shapePtr, C.int(len(shape)), C.size_t(nbytes)),
		shape: shape,
	}
	// Finalizer frees the C allocation even on the error paths below.
	runtime.SetFinalizer(t, (*Tensor).finalize)
	raw := tensorData(t.c)
	// Writes through buf land directly in the C-allocated buffer; capacity is
	// capped at len(raw) so bytes.Buffer can never grow past it.
	buf := bytes.NewBuffer(raw[:0:len(raw)])
	if dataType != String {
		if err := encodeTensor(buf, val, shape); err != nil {
			return nil, err
		}
		if uintptr(buf.Len()) != nbytes {
			return nil, bug("NewTensor incorrectly calculated the size of a tensor with type %v and shape %v as %v bytes instead of %v", dataType, shape, nbytes, buf.Len())
		}
	} else {
		// Offsets table occupies the first nflattened*8 bytes; encoded string
		// payloads follow.
		e := stringEncoder{offsets: buf, data: raw[nflattened*8 : len(raw)], status: newStatus()}
		if err := e.encode(reflect.ValueOf(value), shape); err != nil {
			return nil, err
		}
		if int64(buf.Len()) != nflattened*8 {
			return nil, bug("invalid offset encoding for TF_STRING tensor with shape %v (got %v, want %v)", shape, buf.Len(), nflattened*8)
		}
	}
	return t, nil
}
// ReadTensor constructs a Tensor with the provided type and shape from the
// serialized tensor contents in r.
//
// See also WriteContentsTo.
func ReadTensor(dataType DataType, shape []int64, r io.Reader) (*Tensor, error) {
	if err := isTensorSerializable(dataType); err != nil {
		return nil, err
	}
	// Total byte size of the flattened fixed-width element data.
	nbytes := typeOf(dataType, nil).Size() * uintptr(numElements(shape))
	var shapePtr *C.int64_t
	if len(shape) > 0 {
		shapePtr = (*C.int64_t)(unsafe.Pointer(&shape[0]))
	}
	t := &Tensor{
		c:     C.TF_AllocateTensor(C.TF_DataType(dataType), shapePtr, C.int(len(shape)), C.size_t(nbytes)),
		shape: shape,
	}
	runtime.SetFinalizer(t, (*Tensor).finalize)
	raw := tensorData(t.c)
	// Use io.ReadFull rather than a single r.Read: Read is allowed to return
	// fewer bytes than requested even when more data is available, which made
	// the previous implementation spuriously fail on short reads.
	n, err := io.ReadFull(r, raw)
	if err != nil {
		return nil, err
	}
	if uintptr(n) != nbytes {
		return nil, fmt.Errorf("expected serialized tensor to be %v bytes, read %v", nbytes, n)
	}
	return t, nil
}
// newTensorFromC takes ownership of c and returns the owning Tensor.
// The caller must not delete c; the returned Tensor's finalizer will.
func newTensorFromC(c *C.TF_Tensor) *Tensor {
	var shape []int64
	if ndims := int(C.TF_NumDims(c)); ndims > 0 {
		shape = make([]int64, ndims)
	}
	// Cache the dimensions so Shape() needs no cgo calls later.
	for i := range shape {
		shape[i] = int64(C.TF_Dim(c, C.int(i)))
	}
	t := &Tensor{c: c, shape: shape}
	runtime.SetFinalizer(t, (*Tensor).finalize)
	return t
}
// finalize releases the underlying C tensor. It is registered as a runtime
// finalizer so the C memory is freed once the Go Tensor becomes unreachable.
func (t *Tensor) finalize() { C.TF_DeleteTensor(t.c) }
// DataType returns the scalar datatype of the Tensor.
func (t *Tensor) DataType() DataType { return DataType(C.TF_TensorType(t.c)) }
// Shape returns the shape of the Tensor. The returned slice is the Tensor's
// internal cache and must not be mutated by callers.
func (t *Tensor) Shape() []int64 { return t.shape }
// Value converts the Tensor to a Go value. For now, not all Tensor types are
// supported, and this function may panic if it encounters an unsupported
// DataType.
//
// The type of the output depends on the Tensor type and dimensions.
// For example:
// Tensor(int64, 0): int64
// Tensor(float64, 3): [][][]float64
func (t *Tensor) Value() interface{} {
	typ := typeOf(t.DataType(), t.Shape())
	val := reflect.New(typ)
	raw := tensorData(t.c)
	if t.DataType() != String {
		if err := decodeTensor(bytes.NewReader(raw), t.Shape(), typ, val); err != nil {
			// A decode failure here means the C buffer and Go-side shape/type
			// disagree, which is a library bug, not a user error.
			panic(bug("unable to decode Tensor of type %v and shape %v - %v", t.DataType(), t.Shape(), err))
		}
	} else {
		// TF_STRING layout: 8-byte offsets table first, encoded bytes after.
		nflattened := numElements(t.Shape())
		d := stringDecoder{offsets: bytes.NewReader(raw[0 : 8*nflattened]), data: raw[8*nflattened:], status: newStatus()}
		if err := d.decode(val, t.Shape()); err != nil {
			panic(bug("unable to decode String tensor with shape %v - %v", t.Shape(), err))
		}
	}
	return reflect.Indirect(val).Interface()
}
// WriteContentsTo writes the serialized contents of t to w.
//
// Returns the number of bytes written. See ReadTensor for
// reconstructing a Tensor from the serialized form.
//
// WARNING: WriteContentsTo is not comprehensive and will fail
// if t.DataType() is non-numeric (e.g., String). See
// https://github.com/tensorflow/tensorflow/issues/6003.
func (t *Tensor) WriteContentsTo(w io.Writer) (int64, error) {
	if err := isTensorSerializable(t.DataType()); err != nil {
		return 0, err
	}
	// For numeric types the in-memory C buffer IS the serialized form.
	return io.Copy(w, bytes.NewReader(tensorData(t.c)))
}
// tensorData returns the raw contents of the C tensor as a Go byte slice
// aliasing the C memory (no copy). The slice is valid only while the
// owning Tensor is alive.
func tensorData(c *C.TF_Tensor) []byte {
	// See: https://github.com/golang/go/wiki/cgo#turning-c-arrays-into-go-slices
	cbytes := C.TF_TensorData(c)
	if cbytes == nil {
		return nil
	}
	length := int(C.TF_TensorByteSize(c))
	// The (*[1<<30]byte) cast is the standard trick to view a C pointer as a
	// Go slice; length/capacity are clamped to the real buffer size.
	slice := (*[1 << 30]byte)(unsafe.Pointer(cbytes))[:length:length]
	return slice
}
// types maps Go element types to TensorFlow scalar types. It is consulted in
// both directions: shapeAndDataTypeOf matches by reflect.Kind, and typeOf
// looks up the Go type for a DataType.
var types = []struct {
	typ      reflect.Type
	dataType C.TF_DataType
}{
	{reflect.TypeOf(float32(0)), C.TF_FLOAT},
	{reflect.TypeOf(float64(0)), C.TF_DOUBLE},
	{reflect.TypeOf(int32(0)), C.TF_INT32},
	{reflect.TypeOf(uint32(0)), C.TF_UINT32},
	{reflect.TypeOf(uint8(0)), C.TF_UINT8},
	{reflect.TypeOf(int16(0)), C.TF_INT16},
	{reflect.TypeOf(int8(0)), C.TF_INT8},
	{reflect.TypeOf(""), C.TF_STRING},
	{reflect.TypeOf(complex(float32(0), float32(0))), C.TF_COMPLEX64},
	{reflect.TypeOf(int64(0)), C.TF_INT64},
	{reflect.TypeOf(uint64(0)), C.TF_UINT64},
	{reflect.TypeOf(false), C.TF_BOOL},
	{reflect.TypeOf(uint16(0)), C.TF_UINT16},
	{reflect.TypeOf(complex(float64(0), float64(0))), C.TF_COMPLEX128},
	// TODO(apassos): support DT_RESOURCE representation in go.
	// TODO(keveman): support DT_VARIANT representation in go.
}
// shapeAndDataTypeOf returns the data type and shape of the Tensor
// corresponding to a Go type.
func shapeAndDataTypeOf(val reflect.Value) (shape []int64, dt DataType, err error) {
	typ := val.Type()
	// Descend through nested arrays/slices, recording one dimension length
	// per level of nesting.
	for typ.Kind() == reflect.Array || typ.Kind() == reflect.Slice {
		shape = append(shape, int64(val.Len()))
		if val.Len() > 0 {
			// Exhaustive per-slice length validation would require visiting
			// every element; encodeTensor() already does that walk, so the
			// check is deferred there. Here each dimension is taken from the
			// zero-index element at that depth.
			val = val.Index(0)
		}
		typ = typ.Elem()
	}
	// Match the innermost element kind against the supported scalar types.
	for _, entry := range types {
		if entry.typ.Kind() == typ.Kind() {
			return shape, DataType(entry.dataType), nil
		}
	}
	return shape, dt, fmt.Errorf("unsupported type %v", typ)
}
// typeOf converts from a DataType and Shape to the equivalent Go type.
// Each entry in shape adds one level of slice nesting; a DataType with no
// Go equivalent is a library bug and panics.
func typeOf(dt DataType, shape []int64) reflect.Type {
	var elem reflect.Type
	for _, entry := range types {
		if DataType(entry.dataType) == dt {
			elem = entry.typ
			break
		}
	}
	if elem == nil {
		panic(bug("DataType %v is not supported", dt))
	}
	// Wrap the scalar type in one slice per dimension.
	for range shape {
		elem = reflect.SliceOf(elem)
	}
	return elem
}
// numElements returns the number of scalar elements in a tensor with the
// given shape: the product of all dimension sizes. An empty (scalar) shape
// yields 1.
func numElements(shape []int64) int64 {
	result := int64(1)
	for i := range shape {
		result *= shape[i]
	}
	return result
}
// byteSizeOfEncodedStrings returns the size of the encoded strings in val.
// val MUST be a string, or a container (array/slice etc.) of strings.
// The per-string size comes from TF_StringEncodedSize, which accounts for
// the length prefix used by the TF_STRING wire format.
func byteSizeOfEncodedStrings(val interface{}) uintptr {
	if s, ok := val.(string); ok {
		return uintptr(C.TF_StringEncodedSize(C.size_t(len(s))))
	}
	// Otherwise must be an array or slice; sum the encoded sizes recursively.
	var size uintptr
	v := reflect.ValueOf(val)
	for i := 0; i < v.Len(); i++ {
		size += byteSizeOfEncodedStrings(v.Index(i).Interface())
	}
	return size
}
// encodeTensor writes v to the specified buffer using the format specified in
// c_api.h. Use stringEncoder for String tensors.
// It recurses through nested slices/arrays, validating each slice length
// against the expected shape, and writes scalars in native byte order.
func encodeTensor(w *bytes.Buffer, v reflect.Value, shape []int64) error {
	switch v.Kind() {
	case reflect.Bool:
		// Bools are stored as a single byte: 1 for true, 0 for false.
		b := byte(0)
		if v.Bool() {
			b = 1
		}
		if err := w.WriteByte(b); err != nil {
			return err
		}
	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint8, reflect.Uint16, reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128:
		// Fixed-width numeric types: write the raw native-endian bytes.
		if err := binary.Write(w, nativeEndian, v.Interface()); err != nil {
			return err
		}
	case reflect.Array, reflect.Slice:
		// If current dimension is a slice, verify that it has the expected size
		// Go's type system makes that guarantee for arrays.
		if v.Kind() == reflect.Slice {
			expected := int(shape[0])
			if v.Len() != expected {
				return fmt.Errorf("mismatched slice lengths: %d and %d", v.Len(), expected)
			}
		}
		// Recurse into each element with the remaining dimensions.
		subShape := shape[1:]
		for i := 0; i < v.Len(); i++ {
			err := encodeTensor(w, v.Index(i), subShape)
			if err != nil {
				return err
			}
		}
	default:
		return fmt.Errorf("unsupported type %v", v.Type())
	}
	return nil
}
// decodeTensor decodes the Tensor from the buffer to ptr using the format
// specified in c_api.h. Use stringDecoder for String tensors.
// ptr must be a pointer to the target value; slices are allocated per
// dimension as the recursion descends.
func decodeTensor(r *bytes.Reader, shape []int64, typ reflect.Type, ptr reflect.Value) error {
	switch typ.Kind() {
	case reflect.Bool:
		// Bools were encoded as a single byte (1 == true).
		b, err := r.ReadByte()
		if err != nil {
			return err
		}
		ptr.Elem().SetBool(b == 1)
	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint8, reflect.Uint16, reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128:
		// Fixed-width numerics: read raw native-endian bytes into place.
		if err := binary.Read(r, nativeEndian, ptr.Interface()); err != nil {
			return err
		}
	case reflect.Slice:
		// Allocate this dimension, then decode each element recursively.
		val := reflect.Indirect(ptr)
		val.Set(reflect.MakeSlice(typ, int(shape[0]), int(shape[0])))
		for i := 0; i < val.Len(); i++ {
			if err := decodeTensor(r, shape[1:], typ.Elem(), val.Index(i).Addr()); err != nil {
				return err
			}
		}
	default:
		return fmt.Errorf("unsupported type %v", typ)
	}
	return nil
}
// stringEncoder writes Go strings into the TF_STRING tensor wire format:
// a table of uint64 offsets followed by TF_StringEncode()d payloads.
type stringEncoder struct {
	offsets io.Writer // receives one native-endian uint64 offset per string
	data    []byte    // payload region of the tensor buffer (after the offsets table)
	offset  uint64    // next free position within data
	status  *status   // reusable TF_Status for TF_StringEncode calls
}
// encode recursively walks v (a string or nested slices/arrays of strings),
// appending each string's offset to e.offsets and its encoded bytes to
// e.data, while validating slice lengths against shape.
func (e *stringEncoder) encode(v reflect.Value, shape []int64) error {
	if v.Kind() == reflect.String {
		// Record where this string's payload begins within e.data.
		if err := binary.Write(e.offsets, nativeEndian, e.offset); err != nil {
			return err
		}
		var (
			s      = v.Interface().(string)
			src    = C.CString(s)
			srcLen = C.size_t(len(s))
			dst    = (*C.char)(unsafe.Pointer(&e.data[e.offset]))
			dstLen = C.size_t(uint64(len(e.data)) - e.offset)
		)
		// TF_StringEncode returns the number of bytes written; advance past them.
		e.offset += uint64(C.TF_StringEncode(src, srcLen, dst, dstLen, e.status.c))
		C.free(unsafe.Pointer(src))
		return e.status.Err()
	}
	// Validate this dimension's length (arrays are guaranteed by the type system).
	if v.Kind() == reflect.Slice {
		expected := int(shape[0])
		if v.Len() != expected {
			return fmt.Errorf("mismatched slice lengths: %d and %d", v.Len(), expected)
		}
	}
	subShape := shape[1:]
	for i := 0; i < v.Len(); i++ {
		if err := e.encode(v.Index(i), subShape); err != nil {
			return err
		}
	}
	return nil
}
// stringDecoder reads Go strings back out of the TF_STRING tensor wire
// format: a table of uint64 offsets followed by TF_StringEncode()d payloads.
type stringDecoder struct {
	offsets io.Reader // yields one native-endian uint64 offset per string
	data    []byte    // payload region of the tensor buffer (after the offsets table)
	status  *status   // reusable TF_Status for TF_StringDecode calls
}
// decode recursively fills ptr (a *string or pointer to nested slices of
// strings) from the decoder's offsets table and payload data, allocating
// one slice per remaining dimension in shape.
func (d *stringDecoder) decode(ptr reflect.Value, shape []int64) error {
	if len(shape) == 0 {
		var offset uint64
		if err := binary.Read(d.offsets, nativeEndian, &offset); err != nil {
			return err
		}
		// Validate offset BEFORE taking &d.data[offset]: the previous order
		// indexed the slice first, so a corrupt offset panicked instead of
		// returning an error. offset == len(d.data) is also out of bounds
		// (every encoded string occupies at least one byte), hence >=.
		if offset >= uint64(len(d.data)) {
			return fmt.Errorf("invalid offsets in String Tensor")
		}
		var (
			src    = (*C.char)(unsafe.Pointer(&d.data[offset]))
			srcLen = C.size_t(len(d.data)) - C.size_t(offset)
			dst    *C.char
			dstLen C.size_t
		)
		// TF_StringDecode points dst/dstLen at the string bytes inside src.
		C.TF_StringDecode(src, srcLen, &dst, &dstLen, d.status.c)
		if err := d.status.Err(); err != nil {
			return err
		}
		s := ptr.Interface().(*string)
		*s = C.GoStringN(dst, C.int(dstLen))
		return nil
	}
	// Non-scalar: allocate this dimension and recurse into each element.
	val := reflect.Indirect(ptr)
	val.Set(reflect.MakeSlice(typeOf(String, shape), int(shape[0]), int(shape[0])))
	for i := 0; i < val.Len(); i++ {
		if err := d.decode(val.Index(i).Addr(), shape[1:]); err != nil {
			return err
		}
	}
	return nil
}
// bug builds an error that asks the user to file a GitHub issue, embedding
// the library version and the formatted diagnostic message.
func bug(format string, args ...interface{}) error {
	msg := fmt.Sprintf(format, args...)
	return fmt.Errorf("BUG: Please report at https://github.com/tensorflow/tensorflow/issues with the note: Go TensorFlow %v: %v", Version(), msg)
}
// isTensorSerializable reports (as a nil error) whether dataType can be
// round-tripped through WriteContentsTo/ReadTensor.
//
// For numeric types, the serialized Tensor matches the in-memory
// representation. See the implementation of Tensor::AsProtoContent in
// https://www.tensorflow.org/code/tensorflow/core/framework/tensor.cc
//
// The more appropriate way to be in sync with Tensor::AsProtoContent
// would be to have the TensorFlow C library export functions for
// serialization and deserialization of Tensors. Till then capitalize
// on knowledge of the implementation for numeric types.
func isTensorSerializable(dataType DataType) error {
	switch dataType {
	case Float, Double, Int32, Uint8, Int16, Int8, Complex, Int64, Bool, Quint8, Qint32, Bfloat16, Qint16, Quint16, Uint16, Complex128, Half:
		return nil
	}
	return fmt.Errorf("serialization of tensors with the DataType %d is not yet supported, see https://github.com/tensorflow/tensorflow/issues/6003", dataType)
}
// nativeEndian is the byte order for the local platform. Used to send back and
// forth Tensors with the C API. We test for endianness at runtime because
// some architectures can be booted into different endian modes.
var nativeEndian binary.ByteOrder

func init() {
	// Store a known 16-bit pattern and inspect which byte lands first.
	probe := uint16(0xABCD)
	view := (*[2]byte)(unsafe.Pointer(&probe))
	switch {
	case view[0] == 0xCD && view[1] == 0xAB:
		nativeEndian = binary.LittleEndian
	case view[0] == 0xAB && view[1] == 0xCD:
		nativeEndian = binary.BigEndian
	default:
		panic("Could not determine native endianness.")
	}
}
|
#!/usr/bin/env bash
# Submit a KorQuAD open-domain QA fine-tuning job to NSML.
# Flags (assumed from common NSML usage — confirm against `nsml run --help`):
#   -m  run message/label, -d  dataset name, -g  GPU count, -c  CPU count,
#   -e  entry-point script, -a  arguments forwarded to that script.
# The entry script fine-tunes a KoELECTRA model already fine-tuned on
# KorQuAD, with SQuAD-v2-style unanswerable questions enabled.
nsml run \
-m 'kaist korquad open' \
-d korquad-open-ldbd \
-g 1 \
-c 1 \
-e run_squad_base_finetuned.py \
-a "--model_type electra
--model_name_or_path monologg/koelectra-base-v2-finetuned-korquad-384
--do_train
--do_eval
--data_dir train
--num_train_epochs 5
--per_gpu_train_batch_size 8
--per_gpu_eval_batch_size 16
--output_dir output
--verbose_logging
--overwrite_output_dir
--version_2_with_negative"
|
#!/bin/bash
# This is run on every commit that Azure Pipelines picks up. It assumes that docs have already been built
# via docs/build.sh. The push behavior differs depending on the nature of the commit:
# * Tag commit (e.g. v1.6.0): pushes docs to versioned location, e.g.
# https://www.envoyproxy.io/docs/envoy/v1.6.0/.
# * Master commit: pushes docs to https://www.envoyproxy.io/docs/envoy/latest/.
# * Otherwise: noop.
set -e
DOCS_DIR=generated/docs
CHECKOUT_DIR=envoy-docs
BUILD_SHA=$(git rev-parse HEAD)
MAIN_BRANCH="refs/heads/main"
RELEASE_TAG_REGEX="^refs/tags/v.*"
# Pick the publish directory from the CI-provided branch ref:
# release tags go to a versioned directory, main goes to /latest,
# everything else is skipped.
if [[ "${AZP_BRANCH}" =~ ${RELEASE_TAG_REGEX} ]]; then
PUBLISH_DIR="${CHECKOUT_DIR}"/docs/envoy/"${AZP_BRANCH/refs\/tags\//}"
elif [[ "$AZP_BRANCH" == "${MAIN_BRANCH}" ]]; then
PUBLISH_DIR="${CHECKOUT_DIR}"/docs/envoy/latest
else
echo "Ignoring docs push"
exit 0
fi
DOCS_MAIN_BRANCH="main"
echo 'cloning'
# Shallow clone of the website repo; built docs replace the target dir wholesale.
git clone git@github.com:envoyproxy/envoyproxy.github.io "${CHECKOUT_DIR}" -b "${DOCS_MAIN_BRANCH}" --depth 1
rm -fr "$PUBLISH_DIR"
mkdir -p "$PUBLISH_DIR"
cp -r "$DOCS_DIR"/* "$PUBLISH_DIR"
cd "${CHECKOUT_DIR}"
# Commit and push as the CI bot identity.
git config user.name "envoy-docs(Azure Pipelines)"
git config user.email envoy-docs@users.noreply.github.com
set -x
git add .
git commit -m "docs envoy@$BUILD_SHA"
git push origin "${DOCS_MAIN_BRANCH}"
|
<gh_stars>1-10
// Telegram airdrop bot: collects a user's Telegram handle, phone number,
// e-mail, and ETH wallet, then stores them in Firebase.
const TelegramBot = require('node-telegram-bot-api');
var config = require('./config')
var firebase = require('firebase')
var firebase_config = config.firebase_config;
var token = config.token;
firebase.initializeApp(firebase_config);
// Long-polling bot instance (no webhook required).
const bot = new TelegramBot(token, {polling: true});
const img_url = 'https://cdn-images-1.medium.com/max/1200/1*b708XUPLvguJNmrpbg8oXg.jpeg'
// Per-process (NOT per-chat) collected state. NOTE(review): concurrent users
// will overwrite each other's answers — confirm whether this is acceptable.
var t_username = '';
var u_email = '';
var e_wallet = '';
var t_mobileno='';
// /start: send the welcome photo, then present the 5-step task keyboard.
// (Caption text below contains mojibake from the original encoding; left
// byte-identical on purpose.)
bot.onText(/\/start/, (msg) => {
bot.sendPhoto(msg.chat.id,img_url,{caption : "Welcome to Era Swap Airdrop! ๐๐ \nJoin Era Swap Community on Telegram and earn 3 Era Swap Tokens\n \n "}).then(() => {
var option = {
"reply_markup": {
"keyboard": [["1. Join the Era Swap Telegram group", "2. Your Telegram Username", "3. Your Mobile Number"], ["4. E-mail address" , "5. ETH address (No exchange wallet!)"]]
}
};
bot.sendMessage(msg.chat.id,"Airdrop Rules โ๏ธโ๏ธ\n 1. Join the Era Swap Telegram group \n 2. Your Telegram Username \n 3. Mobile Number \n 4. E-mail address \n 5. ETH address (No exchange wallet!) \n Visit https://eraswaptoken.io for more\n",option);
})
})
// Greet messages that START with "Hi" (indexOf === 0, case-sensitive).
// NOTE(review): nearly identical to the "hi" handler that follows; they could
// be merged into one case-insensitive check, but both currently register
// independently, so merging only one copy would change behavior.
bot.on('message', (msg) => {
var send_text1 = msg.text;
var send_msg = "Hi";
if(send_text1.toString().indexOf(send_msg) === 0){
bot.sendMessage(msg.chat.id,"Hello i am smart bot from Era Swap, start the task list by replying /start");
}
});
// Greet messages that START with lowercase "hi" — duplicate of the "Hi"
// handler above with only the case changed. NOTE(review): candidate for a
// single case-insensitive handler; see note on the sibling block.
bot.on('message', (msg) => {
var send_text1 = msg.text;
var send_msg = "hi";
if(send_text1.toString().indexOf(send_msg) === 0){
bot.sendMessage(msg.chat.id,"Hello i am smart bot from Era Swap, start the task list by replying /start");
}
});
// Reply with a whitepaper link whenever a message mentions "project"
// (case-insensitive substring match).
bot.on('message', (msg) => {
    var incoming = msg.text;
    var mentionsProject = /project/i;
    if (mentionsProject.test(incoming)) {
        var markupJson = JSON.stringify({
            inline_keyboard: [
                [
                    {text:'View Whitepaper',url:'https://eraswaptoken.io/pdf/era-swap-whitepaper.pdf'}
                ]
            ]
        });
        var replyOptions = {reply_markup: JSON.parse(markupJson)};
        bot.sendMessage(msg.chat.id,"You can have a look on our whitepaper to know more about our project.",replyOptions);
    }
});
// Module-level e-mail regex. NOTE(review): this copy is shadowed by an
// identical `var re` inside the main message handler below and appears
// unused; the unescaped `.` before the TLD also matches any character.
var re = /[A-Z0-9._%+-]+@[A-Z0-9.-]+.[A-Z]{2,4}/igm;
// Main state machine: one handler inspects every message and matches it
// against each airdrop step by prefix/shape. Collected values are kept in
// the module-level t_username / t_mobileno / u_email / e_wallet variables.
// (Several string literals below contain mojibake and raw newlines from the
// original encoding; they are left byte-identical on purpose.)
bot.on('message', (msg) => {
var send_text = msg.text;
// Step 1: offer the invite link.
var step1_text = '1. Join the Era Swap Telegram group'
if (send_text.toString().indexOf(step1_text) === 0) {
var text = 'Era Swap Telegram Group';
var keyboardStr = JSON.stringify({
inline_keyboard: [
[
{text:'Join the chat',url:'https://t.me/eraswap'}
]
]
});
var keyboard = {reply_markup: JSON.parse(keyboardStr)};
bot.sendMessage(msg.chat.id,text,keyboard);
}
// Step 2: prompt for the username; any message starting with '@' is taken
// as the answer.
var step2_text = '2. Your Telegram Username';
if (send_text.toString().indexOf(step2_text) === 0) {
bot.sendMessage(msg.chat.id, "Please Enter Your Telegram Username (@username)")
}
if(send_text.toString().charAt(0) === '@') {
t_username = send_text;
var option = {
"reply_markup": {
"keyboard": [["1. Join the Era Swap Telegram group", "2. Your Telegram Username", "3. Your Mobile Number"], ["4. E-mail address" , "5. ETH address (No exchange wallet!)"]]
}
};
bot.sendMessage(msg.chat.id, "Hello "+send_text, option);
}
// Step 3: phone number; NOTE(review): "length === 13" only matches
// +91XXXXXXXXXX-style numbers — confirm for other country codes.
var step3_text = '3. Your Mobile Number';
if (send_text.toString().indexOf(step3_text) === 0) {
bot.sendMessage(msg.chat.id, "Please Enter Your Mobile Number, please maintain the format for example for India it will be like +91XXXXXXXXXX");
}
if(send_text.length === 13) {
t_mobileno = send_text;
var option = {
"reply_markup": {
"keyboard": [["1. Join the Era Swap Telegram group", "2. Your Telegram Username", "3. Your Mobile Number"], ["4. E-mail address" , "5. ETH address (No exchange wallet!)"]]
}
};
bot.sendMessage(msg.chat.id, "Your Number is "+send_text, option);
}
// Step 4: e-mail. NOTE(review): the /g flag makes re.test stateful
// (lastIndex persists across calls on the same regex object); here the
// regex is re-created per message so it behaves, but beware if hoisted.
var step4_text = '4. E-mail address';
if(send_text.toString().indexOf(step4_text) === 0) {
bot.sendMessage(msg.chat.id, "Enter your email address")
}
var re = /[A-Z0-9._%+-]+@[A-Z0-9.-]+.[A-Z]{2,4}/igm;
if(re.test(send_text)) {
u_email = send_text;
var option = {
"reply_markup": {
"keyboard": [["1. Join the Era Swap Telegram group", "2. Your Telegram Username", "3. Your Mobile Number"], ["4. E-mail address" , "5. ETH address (No exchange wallet!)"]]
}
};
bot.sendMessage(msg.chat.id, "Email address: "+send_text, option)
}
// Step 5: ETH address (strict 0x + 40 hex chars), then ask to confirm.
var step5_text = '5. ETH address (No exchange wallet!)';
if(send_text.toString().indexOf(step5_text) === 0) {
bot.sendMessage(msg.chat.id, "Make sure that you have an erc20 wallet (0x) ๐");
}
var re_eth = /^0x[a-fA-F0-9]{40}$/g
if(re_eth.test(send_text)) {
e_wallet = send_text;
bot.sendMessage(msg.chat.id, 'Confirmโ', {
reply_markup: {
keyboard: [
[{"text": "Yes โ
"}],
[{"text": "Cancel โ"}]
],
resize_keyboard: true
}
})
}
// Confirmation: write the collected record to Firebase keyed by the
// lower-cased wallet, rejecting wallets that were already registered.
var confirm = 'Yes โ
';
if(send_text.toString().indexOf(confirm) === 0) {
var db = firebase.database().ref();
db.child(e_wallet.toLocaleLowerCase()).once('value', snap => {
if(!snap.exists()) {
db.child(e_wallet.toLocaleLowerCase()).update({
telegram_username: t_username,
email: u_email,
mobilenumber: t_mobileno,
wallet: e_wallet.toLocaleLowerCase(),
status: 'pending',
createAt: Date.now()
}).then(() => {
bot.sendMessage(msg.chat.id, "Thank'you ๐๐ \n");
bot.sendMessage(msg.chat.id, `Telegram username: ${t_username} \n Email: ${u_email} \n Ethereum wallet: ${e_wallet} \n Visit https://eraswaptoken.io for more.\n`).then(() => {
})
}).catch((err) => {
console.log(err)
})
} else {
bot.sendMessage(msg.chat.id, "This wallet is already in use");
}
})
}
var calcel = 'Cancel โ';
if(send_text.toString().indexOf(calcel) === 0) {
bot.sendMessage(msg.chat.id, "Good bye โ๏ธโ๏ธ");
}
});
|
#!/bin/bash
set -e
cargo clean
# verify using KLEE
# this should detect an error
# (`|| true` keeps `set -e` from aborting, since a found error is expected.)
( cargo-verify --tests --verbose . > out1 || true )
cat out1
# Assert the verifier reported an ERROR for test t1.
grep -q "test t1 ... .*ERROR" out1
# replay input values
( cargo-verify --tests --replay . > out2 || true )
cat out2
# Assert the replay reproduced the expected concrete counterexample.
grep -q "Test values: a = 1000, b = 1000" out2
|
#ifndef INCLUDED_CORE_MAP_I_OUTPUT_H
#define INCLUDED_CORE_MAP_I_OUTPUT_H
#include "platform/auto_id.h"
#include "core/opt.h"
#include "boost/function.hpp"
#include "function_declarations.h"
namespace map {
// Abstract interface for map output slots: callers plug int_function_t
// callbacks into numbered (or id-addressed) slots and later drive them
// with values via DoOutput/DoOutputId.
class IOutput
{
public:
    virtual ~IOutput() {};
    // Attach node as the callback for the slot at the given ordinal position.
    virtual void PlugInNode( int32_t ordinal, int_function_t node ) = 0;
    // Attach node as the callback for the slot with the given id.
    virtual void PlugInNodeId( int32_t id, int_function_t node ) = 0;
    // Deliver val to the slot at the given ordinal position.
    virtual void DoOutput( int32_t ordinal, int32_t val ) = 0;
    // Deliver val to the slot with the given id.
    virtual void DoOutputId( int32_t id, int32_t val ) = 0;
protected:
    // Register an output slot id; for use by implementing classes only.
    virtual void AddOutputNodeId( int32_t id ) = 0;
};
} // namespace map
#endif//INCLUDED_CORE_MAP_I_OUTPUT_H
|
#!/bin/bash
# Remove the previously built serialService binary.
# NOTE(review): no -f flag, so this errors if the file is absent — confirm
# whether that is intended.
rm serialService
clear |
/**
 * @fileoverview
 *
 * Actions for base
 */
goog.provide("app.state.BaseActions");
/**
 * Namespace of Redux-style action creators for base application state.
 * @enum {Function}
 */
app.state.BaseActions = {};
/**
 * Builds an IS_LOADING action carrying the given loading flag.
 * @param {*} loading Current loading state.
 * @return {{type: string, payload: *}} The action object.
 */
app.state.BaseActions.isLoading = function ( loading ) {
    var action = {
        type: "IS_LOADING",
        payload: loading
    };
    return action
}
/**
 * Builds an IS_LOADED action carrying the given loaded flag.
 * @param {*} loaded Current loaded state.
 * @return {{type: string, payload: *}} The action object.
 */
app.state.BaseActions.isLoaded = function ( loaded ) {
    var action = {
        type: "IS_LOADED",
        payload: loaded
    };
    return action
}
/**
 * Builds an IS_USER action carrying the given auth payload.
 * @param {*} auth Current authentication state.
 * @return {{type: string, payload: *}} The action object.
 */
app.state.BaseActions.isAuthUser = function ( auth ) {
    var action = {
        type: "IS_USER",
        payload: auth
    };
    return action
}
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Print a message to stderr and exit with the given (default 1) status.
function error_exit {
echo "$1" >&2 ## Send message to stderr. Exclude >&2 if you don't want it that way.
exit "${2:-1}" ## Return a code specified by $2 or 1 by default.
}
# Both Hadoop and Hive installs are required to assemble the classpath.
if [ -z "${HADOOP_HOME}" ]; then
error_exit "Please make sure the environment variable HADOOP_HOME is setup"
fi
if [ -z "${HIVE_HOME}" ]; then
error_exit "Please make sure the environment variable HIVE_HOME is setup"
fi
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
#Ensure we pick the right jar even for hive11 builds
HUDI_HIVE_UBER_JAR=`ls -c $DIR/../../packaging/hudi-hive-sync-bundle/target/hudi-hive-sync-*.jar | grep -v source | head -1`
if [ -z "$HADOOP_CONF_DIR" ]; then
echo "setting hadoop conf dir"
HADOOP_CONF_DIR="${HADOOP_HOME}/etc/hadoop"
fi
## Include only specific packages from HIVE_HOME/lib to avoid version mismatches
HIVE_EXEC=`ls ${HIVE_HOME}/lib/hive-exec-*.jar | tr '\n' ':'`
HIVE_SERVICE=`ls ${HIVE_HOME}/lib/hive-service-*.jar | grep -v rpc | tr '\n' ':'`
HIVE_METASTORE=`ls ${HIVE_HOME}/lib/hive-metastore-*.jar | tr '\n' ':'`
HIVE_JDBC=`ls ${HIVE_HOME}/lib/hive-jdbc-*.jar | tr '\n' ':'`
if [ -z "${HIVE_JDBC}" ]; then
HIVE_JDBC=`ls ${HIVE_HOME}/lib/hive-jdbc-*.jar | grep -v handler | tr '\n' ':'`
fi
HIVE_JACKSON=`ls ${HIVE_HOME}/lib/jackson-*.jar | tr '\n' ':'`
LOG_JAR=`ls ${HIVE_HOME}/lib/log4j-*.jar | tr '\n' ':'`
HADOOP_COMMON_JAR=`ls ${HADOOP_HOME}/hadoop-common.jar | tr '\n' ':'`
HIVE_JARS=$HIVE_METASTORE:$HIVE_SERVICE:$HIVE_EXEC:$HIVE_JDBC:$HIVE_JACKSON:$LOG_JAR:$HADOOP_COMMON_JAR
HADOOP_HIVE_JARS=${HIVE_JARS}:${HADOOP_HOME}/share/hadoop/common/*:${HADOOP_HOME}/share/hadoop/mapreduce/*:${HADOOP_HOME}/share/hadoop/hdfs/*:${HADOOP_HOME}/share/hadoop/common/lib/*:${HADOOP_HOME}/share/hadoop/hdfs/lib/*:${HDFS_HOME}/*:${HADOOP_HOME}/*:${HADOOP_HOME}/lib/*:${MAPREDUCE_HOME}/*:${HIVE_HOME}/lib/*
# Fix: the echoed command previously listed the classpath in a different
# order than the command actually executed; keep the log truthful so the
# uber jar is known to win class-loading conflicts.
echo "Running Command : java -cp $HUDI_HIVE_UBER_JAR:${HADOOP_HIVE_JARS}:${HADOOP_CONF_DIR} org.apache.hudi.hive.HiveSyncTool $@"
java -cp $HUDI_HIVE_UBER_JAR:${HADOOP_HIVE_JARS}:${HADOOP_CONF_DIR} org.apache.hudi.hive.HiveSyncTool "$@"
|
#!/bin/bash
# PBS batch job: run the biGAN tester on one GPU node (cluster adan) and
# copy results back to permanent storage.
#PBS -q gpu
#PBS -l select=1:ncpus=1:mem=10gb:ngpus=1:scratch_local=3gb:cluster=adan
#PBS -l walltime=2:00:00
DATADIR=/storage/brno6/home/apprehension
cd $DATADIR
# Load the Python/TensorFlow/CUDA toolchain provided by the cluster.
module add python-3.6.2-gcc
module add python36-modules-gcc
module add tensorflow-1.13.1-gpu-python3
module add opencv-3.4.5-py36
module add cuda-10.0
module add cudnn-7.4.2-cuda10
# Stage code and data into fast node-local scratch space.
cp -R $DATADIR/bigan_tester.py $DATADIR/Models $DATADIR/DataBigan $SCRATCHDIR
cd $SCRATCHDIR
mkdir -p Graphs/{Accuracies,Losses,biGANScores,biGANReco}
mkdir -p Model_Saves/{Detailed,Weights}
python bigan_tester.py -e 500 -b 16 -m BasicBiganHF -t SE -d low_dim_
# Copy all produced graphs and model checkpoints back to storage.
cp -vr $SCRATCHDIR/Graphs/Accuracies/* $DATADIR/Graphs/Accuracies/
cp -vr $SCRATCHDIR/Graphs/Losses/* $DATADIR/Graphs/Losses/
cp -vr $SCRATCHDIR/Graphs/biGANScores/* $DATADIR/Graphs/biGANScores/
cp -vr $SCRATCHDIR/Graphs/biGANReco/* $DATADIR/Graphs/biGANReco/
cp -vr $SCRATCHDIR/Model_Saves/Detailed/* $DATADIR/Model_Saves/Detailed/
cp -vr $SCRATCHDIR/Model_Saves/Weights/* $DATADIR/Model_Saves/Weights/
# Release the scratch directory back to the scheduler.
clean_scratch
// ==UserScript==
// @name Blocker Monkey
// @namespace https://greasyfork.org/zh-TW/users/848949-kajipa-coder
// @version 1.0
// @description An addon that prevents children from visiting gaming websites. Made with JS.
// @license Blocker Monkey @ CopyRight 2022
// @author Jypa
// @author HahaLouisOMG
// @match *://www.youtube.com*
// @match *://krunker.io/*
// @match *://web.roblox.com/home*
// @match *://roblox.com*
// @match *://discord.com*
// @match *://www.arkadium.com/*
// @match *://www.twitch.tv/*
// @match *://www.crazygames.com*
// @match *://classic.minecraft.net/*
// @match *://freeonlinegames.com/*
// @match *://www.yiv.com/*
// @match *://www.agame.com/*
// @match *://poki.com/*
// @match *://m.fog.com*
// @match *://www.bgames.com/*
// @match *://games.aarp.org/*
// @match *://slither.io/*
// @match *://viu.tv*
// @match *://www.instagram.com/*
// @match *://zh-hk.facebook.com/*
// @match *://faceit.com/*
// @match *://youtu.be/*
// @grant none
// @icon https://i.imgur.com/7sKkNi0.jpg
// @support https://discord.gg/F3wwA64TxX
// @support https://coding-club-forum.000webhostapp.com/
// @run-at document-start
// ==/UserScript==
//Loop Alert x253
//Connect the addons
// The "blocking" mechanism is simply an avalanche of modal alert() calls
// that makes the matched pages unusable. The three IIFEs below show the
// introductory messages before the long run of blank alerts.
(function() {
'use strict';
alert("BLOCKING WEB LTD")
})();
(function() {
'use strict';
alert("Blocking, Start Document")
})();
(function() {
'use strict';
alert("Looping x253...")
})();
// The original unrolled 119 identical IIFEs, each doing nothing but
// `'use strict'; alert(...)`. Replace the duplication with one loop that
// fires the exact same alert sequence: every message is "" except four
// occurrences that were a single space (positions 6, 63, 64 and 69 in the
// original order).
// NOTE(review): the alert spam itself is the script's intended "blocking"
// mechanism ("Loop Alert x253"), so the sequence is preserved, not removed.
(function () {
    'use strict';
    var spaceAt = { 6: true, 63: true, 64: true, 69: true };
    for (var i = 1; i <= 119; i++) {
        alert(spaceAt[i] ? " " : "");
    }
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("Looping x200 ")
})();
(function() {
'use strict';
alert(" ")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert(" ")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert(" ")
})();
(function() {
'use strict';
alert(" ")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert(" ")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
(function() {
'use strict';
alert("")
})();
|
-- Per-user progress state, keyed by an externally supplied user string.
CREATE TABLE user_progress (
    id          INTEGER PRIMARY KEY,
    user_string VARCHAR(40) UNIQUE NOT NULL,  -- external user identifier
    display     INTEGER NOT NULL,             -- display state/flag
    last_modif  DATE                          -- last modification date
);
|
<filename>src-ts/button/utils.ts
/*
Copyright (c) 2018-2020 Uber Technologies, Inc.
This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
*/
import {ButtonProps} from './types';
/**
 * Map the public button props onto their `$`-prefixed equivalents so that
 * styled components consume them instead of forwarding them to the DOM node.
 */
export function getSharedProps(props: ButtonProps) {
  return {
    $disabled: props.disabled,
    $isLoading: props.isLoading,
    $isSelected: props.isSelected,
    $kind: props.kind,
    $shape: props.shape,
    $size: props.size,
  };
}
|
#!/bin/bash
# Uploads payload.json (and its .sig) to an AWS S3 bucket, then registers a
# new package for that payload on a CoreOS Core Update server.
#
# Requirements: aws CLI (or $AWSCLI), jq, and updateservicectl
# (https://github.com/coreos/updateservicectl).
# Expects payload.json and payload.json.sig next to this script.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

function print_usage() {
    echo "Usage:"
    echo "export AWS_ACCESS_KEY_ID=<id>"
    echo "export AWS_SECRET_ACCESS_KEY=<key>"
    echo "export COREUPDATE_USR=<user@coreos.com>"
    echo "export COREUPDATE_KEY=<coreupdate_key>"
    echo "$0"
    exit 1
}

# All four credentials are mandatory.
if [[ ${AWS_ACCESS_KEY_ID} == "" || ${AWS_SECRET_ACCESS_KEY} == "" || ${COREUPDATE_USR} == "" || ${COREUPDATE_KEY} == "" ]]; then
    print_usage
fi

# Prefer `command -v` over `which` + $? inspection.
if ! command -v jq > /dev/null; then
    echo "Require jq"
    exit 1
fi
if command -v updateservicectl > /dev/null; then
    UPDATESERVICECTL=$(command -v updateservicectl)
fi
if [[ ${UPDATESERVICECTL} == "" ]]; then
    echo "Require updateservicectl (https://github.com/coreos/updateservicectl)"
    exit 1
fi

AWSCLI=${AWSCLI:-"aws"}

set -e

payload=${DIR}/payload.json
for f in "${payload}" "${payload}.sig"; do
    if [[ ! -f "${f}" ]]; then
        echo "Expecting ${f} in the current directory" >&2
        exit 1
    fi
done

# Version comes from the payload itself unless overridden via $VERSION.
VERSION=${VERSION:-$(jq -r .version "${payload}")}
if [[ ${VERSION} == "" ]]; then
    echo "Invalid payload format"
    exit 1
fi

DESTINATION=${DESTINATION:-"${VERSION}.json"}
BUCKET=${BUCKET:-"tectonic-update-payload"}
PAYLOAD_URL="https://s3-us-west-2.amazonaws.com/${BUCKET}/${DESTINATION}"

echo "Uploading payload to \"${PAYLOAD_URL}\", version: \"${VERSION}\""
${AWSCLI} s3 cp "${payload}" "s3://${BUCKET}/${DESTINATION}"
${AWSCLI} s3 cp "${payload}.sig" "s3://${BUCKET}/${DESTINATION}.sig"

SERVER=${SERVER:-"https://tectonic.update.core-os.net"}
APPID=${APPID:-"6bc7b986-4654-4a0f-94b3-84ce6feb1db4"}
echo "Payload successfully uploaded"

echo "Creating package ${VERSION} on Core Update server ${SERVER} for ${APPID}"
${UPDATESERVICECTL} --server "${SERVER}" \
    --key "${COREUPDATE_KEY}" \
    --user "${COREUPDATE_USR}" \
    package create \
    --app-id "${APPID}" \
    --url "${PAYLOAD_URL}" \
    --version "${VERSION}" \
    --file "${payload}"
echo "Package successfully created"
|
def linearSearch(arr, x):
for i in range (len(arr)):
if arr[i] == x:
return i
return -1 |
#!/bin/bash
# Shadowsocks multi-user traffic accounting: counts per-port traffic via
# dedicated iptables chains, keeps running totals, and bans ports that
# exceed their configured byte limit.
export PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
# Copyright (c) 2014 hellofwy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# Traffic sampling interval, in seconds.
INTERVEL=10

# NOTE(review): $DIR is never assigned in this chunk; presumably it is set by
# the caller or an earlier part of the file -- confirm before running standalone.
TMPDIR=$DIR/tmp
if [ ! -e $TMPDIR ]; then
    mkdir $TMPDIR;
    chmod 777 $TMPDIR;
fi

USER_FILE=$DIR/ssusers             # per-port config: field 1 = port, field 3 = byte limit
JSON_FILE=$DIR/ssmlt.json          # not referenced in this chunk
TRAFFIC_FILE=$DIR/sstraffic        # human-readable usage report (see calc_remaining)
SSCOUNTER_PID=$TMPDIR/sscounter.pid
TRA_FORMAT='%-5d\t%.0f\n'          # "port<TAB>bytes" line format shared by the awk scripts
TRAFFIC_LOG=$DIR/traffic.log       # accumulated bytes per port
IPT_TRA_LOG=$TMPDIR/ipt_tra.log    # previous iptables counter snapshot
MIN_TRA_LOG=$TMPDIR/min_tra.log    # bytes transferred during the last interval
PORTS_ALREADY_BAN=$TMPDIR/ports_already_ban.tmp
SS_IN_RULES=ssinput                # custom iptables chain for inbound traffic
SS_OUT_RULES=ssoutput              # custom iptables chain for outbound traffic
# Tear down the custom iptables chains: flush them, unlink them from the
# built-in INPUT/OUTPUT chains, then delete them.
del_ipt_chains () {
    iptables -F $SS_IN_RULES
    iptables -F $SS_OUT_RULES
    iptables -D INPUT -j $SS_IN_RULES
    iptables -D OUTPUT -j $SS_OUT_RULES
    iptables -X $SS_IN_RULES
    iptables -X $SS_OUT_RULES
}
# (Re)create the custom chains and hook them into INPUT/OUTPUT. Stale chains
# from a previous run are removed first (errors silenced on a clean start).
init_ipt_chains () {
    del_ipt_chains 2> /dev/null
    iptables -N $SS_IN_RULES
    iptables -N $SS_OUT_RULES
    iptables -A INPUT -j $SS_IN_RULES
    iptables -A OUTPUT -j $SS_OUT_RULES
}
# Start counting traffic for one port: append ACCEPT rules (inbound by
# destination port, outbound by source port) for TCP, then UDP.
add_rules () {
    PORT=$1;
    for proto in tcp udp; do
        iptables -A $SS_IN_RULES -p $proto --dport $PORT -j ACCEPT
        iptables -A $SS_OUT_RULES -p $proto --sport $PORT -j ACCEPT
    done
}
# Ban one port: append REJECT rules (inbound by destination port, outbound
# by source port) for TCP, then UDP.
add_reject_rules () {
    PORT=$1;
    for proto in tcp udp; do
        iptables -A $SS_IN_RULES -p $proto --dport $PORT -j REJECT
        iptables -A $SS_OUT_RULES -p $proto --sport $PORT -j REJECT
    done
}
# Stop counting traffic for one port: delete the ACCEPT rules added by
# add_rules, in the same tcp/udp order.
del_rules () {
    PORT=$1;
    for proto in tcp udp; do
        iptables -D $SS_IN_RULES -p $proto --dport $PORT -j ACCEPT
        iptables -D $SS_OUT_RULES -p $proto --sport $PORT -j ACCEPT
    done
}
# Unban one port: delete the REJECT rules added by add_reject_rules, in the
# same tcp/udp order.
del_reject_rules () {
    PORT=$1;
    for proto in tcp udp; do
        iptables -D $SS_IN_RULES -p $proto --dport $PORT -j REJECT
        iptables -D $SS_OUT_RULES -p $proto --sport $PORT -j REJECT
    done
}
# Print both custom chains with exact packet/byte counters (-v verbose,
# -n numeric, -x exact byte counts).
list_rules () {
    iptables -vnx -L $SS_IN_RULES
    iptables -vnx -L $SS_OUT_RULES
}
# Install counting rules for every port listed in $USER_FILE; comment and
# blank lines are skipped, field 1 is the port number.
add_new_rules () {
    ports=`awk '
    {
        if($0 !~ /^#|^\s*$/) print $1
    }
    ' $USER_FILE`
    for port in $ports
    do
        add_rules $port
    done
}
# Rebuild $TRAFFIC_LOG from the user file: keep accumulated byte counts for
# ports that still exist, add new ports with a zero count, drop removed ones.
# A lock file serializes access with the other $TRAFFIC_LOG writers.
update_or_create_traffic_file_from_users () {
    while [ -e $TRAFFIC_LOG.lock ]; do
        sleep 1
    done
    touch $TRAFFIC_LOG.lock
    if [ ! -f $TRAFFIC_LOG ]; then
        # First run: one "port<TAB>0" line per configured port.
        awk '{if($1 > 0) printf("%-5d\t0\n", $1)}' $USER_FILE > $TRAFFIC_LOG
    else
        # Pass 1 collects configured ports; pass 2 collects existing totals;
        # END re-emits ports in user-file order with their old totals (or 0).
        awk '
        BEGIN {
            i=1;
        }
        {
            if(FILENAME=="'$USER_FILE'"){
                if($0 !~ /^#|^\s*$/){
                    port=$1;
                    user[i++]=port;
                }
            }
            if(FILENAME=="'$TRAFFIC_LOG'"){
                uport=$1;
                utra=$2;
                uta[uport]=utra;
            }
        }
        END {
            for(j=1;j<i;j++) {
                port=user[j];
                if(uta[port]>0) {
                    printf("'$TRA_FORMAT'", port, uta[port])
                } else {
                    printf("%-5d\t0\n", port)
                }
            }
        }' $USER_FILE $TRAFFIC_LOG > $TRAFFIC_LOG.tmp
        mv -f $TRAFFIC_LOG.tmp $TRAFFIC_LOG
    fi
    rm $TRAFFIC_LOG.lock
}
# Render a human-readable per-port report (limit / used / remaining, plus a
# grand total) into $TRAFFIC_FILE, guarded by a lock file.
#
# Fixes: the original's "total" printf label was a mojibake Chinese string
# that had additionally been broken across two lines (a syntax error inside
# the awk program); the column header was mojibake as well. Both are now
# plain English.
calc_remaining () {
    # Wait for any concurrent writer, then take the lock.
    while [ -e $TRAFFIC_FILE.lock ]; do
        sleep 1
    done
    touch $TRAFFIC_FILE.lock
    awk '
    # Pretty-print a byte count in the largest suitable unit (TB/GB/MB/KB).
    function print_in_gb(bytes) {
        tb=bytes/(1024*1024*1024*1024*1.0);
        if(tb>=1||tb<=-1) {
            printf("(%.2fTB)", tb);
        } else {
            gb=bytes/(1024*1024*1024*1.0);
            if(gb>=1||gb<=-1) {
                printf("(%.2fGB)", gb);
            } else {
                mb=bytes/(1024*1024*1.0);
                if(mb>=1||mb<=-1) {
                    printf("(%.2fMB)", mb);
                } else {
                    kb=bytes/(1024*1.0);
                    printf("(%.2fKB)", kb);
                }
            }
        }
    }
    BEGIN {
        i=1;
        totallim=0;
        totalused=0;
        totalrem=0;
    }
    {
        # Pass 1: configured ports and their limits from the user file.
        if(FILENAME=="'$USER_FILE'"){
            if($0 !~ /^#|^\s*$/){
                port=$1;
                user[i++]=port;
                limit=$3;
                limits[port]=limit
            }
        }
        # Pass 2: accumulated usage per port from the traffic log.
        if(FILENAME=="'$TRAFFIC_LOG'"){
            uport=$1;
            utra=$2;
            uta[uport]=utra;
        }
    }
    END {
        printf("#Port\tLimit\tUsed\tRemaining\n");
        for(j=1;j<i;j++) {
            port=user[j];
            printf("%-5d\t", port);
            limit=limits[port]
            print_in_gb(limit);
            printf("\t");
            totallim+=limit;
            used=uta[port];
            print_in_gb(used);
            printf("\t");
            totalused+=used;
            remaining=limits[port]-uta[port];
            print_in_gb(remaining);
            printf("\n");
            totalrem+=remaining;
        }
        printf("%s\t", "Total");
        print_in_gb(totallim);
        printf("\t");
        print_in_gb(totalused);
        printf("\t");
        print_in_gb(totalrem);
        printf("\n");
    }' $USER_FILE $TRAFFIC_LOG > $TRAFFIC_FILE.tmp
    mv $TRAFFIC_FILE.tmp $TRAFFIC_FILE
    rm $TRAFFIC_FILE.lock
}
check_traffic_against_limits () {
#ๆ นๆฎ็จๆทๆไปถๆฅ็ๆต้ๆฏๅฆ่ถ
้
ports_2ban=`awk '
BEGIN {
i=1;
}
{
if(FILENAME=="'$USER_FILE'"){
if($0 !~ /^#|^\s*$/){
port=$1;
user[i++]=port;
limit=$3;
limits[port]=limit
}
}
if(FILENAME=="'$TRAFFIC_LOG'"){
uport=$1;
utra=$2;
uta[uport]=utra;
}
}
END {
for(j=1;j<i;j++) {
port=user[j];
remaining=limits[port]-uta[port];
if(remaining<=0) print port;
}
}' $USER_FILE $TRAFFIC_LOG`
for p in $ports_2ban; do
if grep -q $p $PORTS_ALREADY_BAN; then
continue;
else
del_rules $p
add_reject_rules $p
echo $p >> $PORTS_ALREADY_BAN
fi
done
}
# Read cumulative per-port byte counters from both custom chains. The sed
# expression keeps only rule lines ending in "spt:PORT"/"dpt:PORT" and strips
# the prefix so the port number becomes the last field; awk then sums
# inbound + outbound bytes (field 2) per port.
get_traffic_from_iptables () {
    echo "$(iptables -nvx -L $SS_IN_RULES)" "$(iptables -nvx -L $SS_OUT_RULES)" |
    sed -nr '/ [sd]pt:[0-9]{1,5}$/ s/[sd]pt:([0-9]{1,5})/\1/p' |
    awk '
    {
        trans=$2;
        port=$NF;
        tr[port]+=trans;
    }
    END {
        for(port in tr) {
            printf("'$TRA_FORMAT'", port, tr[port])
        }
    }
    '
}
# Take the baseline counter snapshot at startup.
get_traffic_from_iptables_first_time () {
    get_traffic_from_iptables > $IPT_TRA_LOG
}
# Take a fresh counter snapshot for the current sampling interval.
get_traffic_from_iptables_now () {
    get_traffic_from_iptables > $IPT_TRA_LOG.tmp
}
# Diff the current counter snapshot against the previous one to get the bytes
# transferred during the last interval (clamped at zero in case counters were
# reset), write the result to $MIN_TRA_LOG, then promote the current snapshot
# to "previous".
calc_traffic_between_intervel () {
    awk '
    {
        if(FILENAME=="'$IPT_TRA_LOG.tmp'") {
            port=$1;
            tras=$2;
            tr[port]=tras;
        }
        if(FILENAME=="'$IPT_TRA_LOG'") {
            port=$1;
            tras=$2;
            pretr[port]=tras;
        }
    }
    END {
        for(port in tr) {
            min_tras=tr[port]-pretr[port];
            if(min_tras<0) min_tras=0;
            printf("'$TRA_FORMAT'", port, min_tras);
        }
    }
    ' $IPT_TRA_LOG.tmp $IPT_TRA_LOG > $MIN_TRA_LOG
    mv $IPT_TRA_LOG.tmp $IPT_TRA_LOG
}
# Add the last interval's per-port byte counts ($MIN_TRA_LOG) to the running
# totals in $TRAFFIC_LOG, preserving the existing line order; guarded by the
# shared lock file.
update_traffic_record () {
    while [ -e $TRAFFIC_LOG.lock ]; do
        sleep 1
    done
    touch $TRAFFIC_LOG.lock
    awk '
    BEGIN {
        i=1;
    }
    {
        if(FILENAME=="'$MIN_TRA_LOG'"){
            trans=$2;
            port=$1;
            ta[port]+=trans;
        }
        if(FILENAME=="'$TRAFFIC_LOG'"){
            uport=$1;
            utra=$2;
            uta[uport]=utra;
            useq[i++]=uport;
        }
    }
    END {
        for (j=1;j<i;j++) {
            pt=useq[j];
            printf("'$TRA_FORMAT'", pt, uta[pt]+ta[pt]);
        }
    }' $MIN_TRA_LOG $TRAFFIC_LOG > $TRAFFIC_LOG.tmp
    mv $TRAFFIC_LOG.tmp $TRAFFIC_LOG
    rm $TRAFFIC_LOG.lock
}
|
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <headingcell level=1>
# Exploring the Lorenz System of Differential Equations
# <markdowncell>
# In this Notebook we explore the Lorenz system of differential equations:
#
# $$
# \begin{aligned}
# \dot{x} & = \sigma(y-x) \\
# \dot{y} & = \rho x - y - xz \\
# \dot{z} & = -\beta z + xy
# \end{aligned}
# $$
#
# This is one of the classic systems in non-linear differential equations. It exhibits a range of different behaviors as the parameters ($\sigma$, $\beta$, $\rho$) are varied.
# <headingcell level=2>
# Imports
# <markdowncell>
# First, we import the needed things from IPython, NumPy, Matplotlib and SciPy.
# <codecell>
%matplotlib inline
# <codecell>
from IPython.html.widgets import interact, interactive
from IPython.display import clear_output, display, HTML
# <codecell>
import numpy as np
from scipy import integrate
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.colors import cnames
from matplotlib import animation
# <headingcell level=2>
# Computing the trajectories and plotting the result
# <markdowncell>
# We define a function that can integrate the differential equations numerically and then plot the solutions. This function has arguments that control the parameters of the differential equation ($\sigma$, $\beta$, $\rho$), the numerical integration (`N`, `max_time`) and the visualization (`angle`).
# <codecell>
def solve_lorenz(N=10, angle=0.0, max_time=4.0, sigma=10.0, beta=8./3, rho=28.0):
    """Integrate and plot N random trajectories of the Lorenz system.

    Parameters control the ODE (sigma, beta, rho), the integration
    (N trajectories, max_time) and the 3D view (angle, degrees).
    Returns (t, x_t) where t has shape (int(250*max_time),) and x_t has
    shape (N, len(t), 3).
    """
    fig = plt.figure()
    ax = fig.add_axes([0, 0, 1, 1], projection='3d')
    ax.axis('off')

    # prepare the axes limits
    ax.set_xlim((-25, 25))
    ax.set_ylim((-35, 35))
    ax.set_zlim((5, 55))

    def lorenz_deriv(xyz, t0, sigma=sigma, beta=beta, rho=rho):
        """Compute the time-derivative of a Lorenz system."""
        # Python 3 removed tuple parameter unpacking (PEP 3113), so the
        # state vector is unpacked explicitly instead of in the signature.
        x, y, z = xyz
        return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]

    # Choose random starting points, uniformly distributed from -15 to 15
    np.random.seed(1)
    x0 = -15 + 30 * np.random.random((N, 3))

    # Solve for the trajectories
    t = np.linspace(0, max_time, int(250*max_time))
    x_t = np.asarray([integrate.odeint(lorenz_deriv, x0i, t)
                      for x0i in x0])

    # choose a different color for each trajectory
    colors = plt.cm.jet(np.linspace(0, 1, N))

    for i in range(N):
        x, y, z = x_t[i, :, :].T
        lines = ax.plot(x, y, z, '-', c=colors[i])
        plt.setp(lines, linewidth=2)

    ax.view_init(30, angle)
    plt.show()

    return t, x_t
# <markdowncell>
# Let's call the function once to view the solutions. For this set of parameters, we see the trajectories swirling around two points, called attractors.
# <codecell>
t, x_t = solve_lorenz(angle=0, N=10)
# <markdowncell>
# Using IPython's `interactive` function, we can explore how the trajectories behave as we change the various parameters.
# <codecell>
w = interactive(solve_lorenz, angle=(0.,360.), N=(0,50), sigma=(0.0,50.0), rho=(0.0,50.0))
display(w)
# <markdowncell>
# The object returned by `interactive` is a `Widget` object and it has attributes that contain the current result and arguments:
# <codecell>
t, x_t = w.result
# <codecell>
w.kwargs
# <markdowncell>
# After interacting with the system, we can take the result and perform further computations. In this case, we compute the average positions in $x$, $y$ and $z$.
# <codecell>
# x_t has shape (N, len(t), 3); averaging over axis 1 (time) yields one
# mean (x, y, z) position per trajectory, shape (N, 3).
xyz_avg = x_t.mean(axis=1)
# <codecell>
xyz_avg.shape
# <markdowncell>
# Creating histograms of the average positions (across different trajectories) show that on average the trajectories swirl about the attractors.
# <codecell>
plt.hist(xyz_avg[:,0])
plt.title('Average $x(t)$')
# <codecell>
plt.hist(xyz_avg[:,1])
plt.title('Average $y(t)$')
|
import { camelCase } from 'lodash';
/**
 * Derive the project key from a package name: strip any npm scope
 * (e.g. `@umijs/plugin-foo` -> `plugin-foo`), then camel-case each
 * dot-separated segment via nameToKey.
 * @param pkgName the package name, possibly scoped
 */
export function getProjectName(pkgName: string) {
  // Strip the npm scope prefix if present. Guard against malformed scoped
  // names without a "/" (the old `split('/')[1]` yielded undefined there).
  if (pkgName.charAt(0) === '@' && pkgName.includes('/')) {
    pkgName = pkgName.slice(pkgName.indexOf('/') + 1);
  }
  return nameToKey(pkgName);
}
/**
 * Camel-case every dot-separated segment of a name while keeping the dots.
 */
function nameToKey(name: string) {
  const segments = name.split('.');
  const camelSegments = segments.map((segment) => camelCase(segment));
  return camelSegments.join('.');
}
|
<reponame>enviroCar/track-count-service
package org.envirocar.trackcount.mapmatching;
import static java.util.stream.Collectors.toList;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.stream.IntStream;
import org.envirocar.trackcount.model.Feature;
import org.envirocar.trackcount.model.FeatureCollection;
import org.locationtech.jts.geom.Geometry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import retrofit2.Call;
import retrofit2.Response;
/**
 * {@link MapMatcher} implementation that delegates to a remote map-matching
 * HTTP service via Retrofit and writes the matched geometries back onto the
 * input features in place.
 */
@Service
public class MapMatcherImpl implements MapMatcher {
    private final MapMatchingService service;

    /**
     * Creates the map matcher.
     *
     * @param service the Retrofit client for the map-matching service; never {@code null}
     */
    @Autowired
    public MapMatcherImpl(MapMatchingService service) {
        this.service = Objects.requireNonNull(service);
    }

    /**
     * Map-matches every feature of the collection, replacing each feature's
     * geometry with its matched point-on-road geometry (mutates the input).
     *
     * @param featureCollection the features to match
     * @return the same collection with matched geometries, or an empty
     *         collection if the service could not be reached
     * @throws MapMatchingException if the service response has no usable body
     *         or the geometry count does not match the feature count
     */
    @Override
    public FeatureCollection mapMatch(FeatureCollection featureCollection) throws MapMatchingException {
        Call<MapMatchingResult> result = service.mapMatch(featureCollection);
        Response<MapMatchingResult> response;
        try {
            response = result.execute();
        } catch (IOException e) {
            // NOTE(review): transport failures are silently mapped to an empty
            // result; confirm callers rely on this best-effort behavior.
            return new FeatureCollection();
        }
        // Retrofit returns a null body for non-2xx responses; the original
        // dereferenced it unconditionally and threw a NullPointerException.
        MapMatchingResult body = response.body();
        if (body == null) {
            throw new MapMatchingException(String.format("service responded with HTTP %d and no body", response.code()));
        }
        List<Geometry> geometries = body.getMatchedPoints().stream()
                .map(MatchedPoint::getPointOnRoad)
                .map(Feature::getGeometry)
                .collect(toList());
        List<Feature> features = featureCollection.getFeatures();
        if (geometries.size() != features.size()) {
            throw new MapMatchingException(String.format("service returned wrong number of geometries, expected %d but was %d",
                    features.size(), geometries.size()));
        }
        IntStream.range(0, features.size()).forEach(i -> features.get(i).setGeometry(geometries.get(i)));
        return featureCollection;
    }
}
|
<gh_stars>0
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { ProductFormComponent } from './components/product-form/product-form.component';
import { NavComponent } from './components/nav/nav.component';
import { ProductsTableComponent } from './components/products-table/products-table.component';
import { ProductsDashboardComponent } from './components/products-dashboard/products-dashboard.component';
import { ProductsListComponent } from './components/products-list/products-list.component';
import { ProductFormularioComponent } from './components/product-formulario/product-formulario.component';
import { ProductEditComponent } from './components/product-edit/product-edit.component';
// Admin area route table: every child view renders inside the NavComponent
// shell. NOTE(review): both 'create' and 'products/create' point at product
// form components -- confirm which one is current before removing either.
const routes: Routes = [
  {
    path: '',
    component: NavComponent,
    children: [
      {
        // Product creation form.
        path: 'create',
        component: ProductFormComponent
      },
      {
        // Tabular product listing.
        path: 'list',
        component: ProductsTableComponent
      },
      {
        // Products overview dashboard.
        path: 'dashboard',
        component: ProductsDashboardComponent
      },
      {
        // Product list view.
        path: 'products',
        component: ProductsListComponent
      },
      {
        // Product creation form ("formulario" variant).
        path: 'products/create',
        component: ProductFormularioComponent
      },
      {
        // Edit an existing product selected by its id.
        path: 'products/edit/:id',
        component: ProductEditComponent
      }
    ]
  }
];
/**
 * Routing module for the admin feature area; registered with
 * `RouterModule.forChild` so it composes with the application's root router.
 */
@NgModule({
  imports: [RouterModule.forChild(routes)],
  exports: [RouterModule]
})
export class AdminRoutingModule { }
|
#!/bin/sh
# Launcher for the bundled vaa3d binary inside a macOS .app bundle.
MACOS_DIR=$(dirname "$0")
# Setting DYLD_FRAMEWORK_PATH allows us to avoid
# running install_name_tool on plugins.
export DYLD_FRAMEWORK_PATH=${MACOS_DIR}/../Frameworks
export DYLD_LIBRARY_PATH=${MACOS_DIR}/../Frameworks
# "$@" (quoted) preserves arguments containing spaces; the original's bare $@
# re-split them.
"${MACOS_DIR}/vaa3d" "$@"
|
def remove_long_strings(strings):
    """Return only the strings that are at most four characters long."""
    return [s for s in strings if len(s) <= 4]
# Demo: filter the sample list; only "cat" and "dog" have <= 4 characters.
strings = ["cat", "dog", "elephant", "monkey"]
short_strings = remove_long_strings(strings)
print(short_strings) # Output: ['cat', 'dog']
# CI helper: record the installed package versions, then run the
# moban_handlebars test suite (unit tests + doc examples) with coverage.
pip freeze
nosetests --with-coverage --cover-package moban_handlebars --cover-package tests tests docs/source moban_handlebars
|
#!/bin/bash

## Copyright (C) 2016 International Business Machines Corporation
## All Rights Reserved

# Compiles the sample::LiveDNSPacketDPDKSourceBasic SPL composite as a
# standalone application, statically linked against DPDK, and runs it.

#set -o xtrace
#set -o pipefail

################### functions used in this script #############################

# Defined before first use: the original invoked die() while resolving
# toolkitDirectory, before the function definition existed.
die() { echo ; echo -e "\e[1;31m$*\e[0m" >&2 ; exit 1 ; }
step() { echo ; echo -e "\e[1;34m$*\e[0m" ; }

# Remove stale DPDK hugepage mappings. A glob is NOT expanded inside
# [[ -f ... ]], so the original tested a literal '*' filename and the cleanup
# never ran; compgen -G correctly tests whether any file matches the pattern.
clean_hugepages() {
    if compgen -G "/dev/hugepages/rtemap_*" > /dev/null; then
        sudo rm /dev/hugepages/rtemap_* || die "sorry, could not delete /dev/hugepages/rtemap_* files, $?"
    fi
}

################### parameters used in this script ############################

namespace=sample
composite=LiveDNSPacketDPDKSourceBasic

here=$( cd ${0%/*} ; pwd )
projectDirectory=$( cd $here/.. ; pwd )

# Locate the com.ibm.streamsx.network toolkit (installed copy or a relative checkout).
[[ -f $STREAMS_INSTALL/toolkits/com.ibm.streamsx.network/info.xml ]] && toolkitDirectory=$STREAMS_INSTALL/toolkits
[[ -f $here/../../../../toolkits/com.ibm.streamsx.network/info.xml ]] && toolkitDirectory=$( cd $here/../../../../toolkits ; pwd )
[[ -f $here/../../../com.ibm.streamsx.network/info.xml ]] && toolkitDirectory=$( cd $here/../../.. ; pwd )
[[ $toolkitDirectory ]] || die "sorry, could not find 'toolkits' directory"

buildDirectory=$projectDirectory/output/build/$composite
unbundleDirectory=$projectDirectory/output/unbundle/$composite
dataDirectory=$projectDirectory/data
dpdkDirectory=$RTE_SDK/build/lib
coreCount=$( cat /proc/cpuinfo | grep processor | wc -l )

toolkitList=(
$toolkitDirectory/com.ibm.streamsx.network
)

compilerOptionsList=(
--verbose-mode
--rebuild-toolkits
--spl-path=$( IFS=: ; echo "${toolkitList[*]}" )
--standalone-application
--optimized-code-generation
--static-link
--main-composite=$namespace::$composite
--output-directory=$buildDirectory
--data-directory=data
--num-make-threads=$coreCount
)

gccOptions="-g3"

# Link DPDK with --whole-archive so its static constructors register drivers.
ldOptions="-Wl,-L -Wl,$dpdkDirectory -Wl,--no-as-needed -Wl,-export-dynamic -Wl,--whole-archive -Wl,-ldpdk -Wl,-libverbs -Wl,-lrt -Wl,-lm -Wl,-ldl -Wl,--no-whole-archive"

compileTimeParameterList=(
)

submitParameterList=(
nicPort=0
nicQueue=0
timeoutInterval=10.0
)

traceLevel=3 # ... 0 for off, 1 for error, 2 for warn, 3 for info, 4 for debug, 5 for trace

################################################################################

cd $projectDirectory || die "Sorry, could not change to $projectDirectory, $?"

step "checking for DPDK libraries ..."
[[ -d $dpdkDirectory ]] || die "sorry, could not find DPDK directory '$dpdkDirectory'"
[[ -f $dpdkDirectory/libdpdk.a ]] || die "sorry, could not find DPDK library '$dpdkDirectory/libdpdk.a'"
[[ -d $toolkitDirectory/com.ibm.streamsx.network/impl/src/source/dpdk/build/lib ]] || die "sorry, could not find DPDK glue library directory '$toolkitDirectory/com.ibm.streamsx.network/impl/src/source/dpdk/build/lib'"
[[ -f $toolkitDirectory/com.ibm.streamsx.network/impl/src/source/dpdk/build/lib/libstreams_source.a ]] || die "sorry, could not find DPDK glue library '$toolkitDirectory/com.ibm.streamsx.network/impl/src/source/dpdk/build/lib/libstreams_source.a'"

step "checking for 'dpdk' user group ..."
# Match 'dpdk' as a whole word rather than as a substring of another group name.
[[ " $( groups ) " == *" dpdk "* ]] || die "sorry, user '$USER' not in group 'dpdk'"

#[ ! -d $buildDirectory ] || rm -rf $buildDirectory || die "Sorry, could not delete old '$buildDirectory', $?"
[ -d $dataDirectory ] || mkdir -p $dataDirectory || die "Sorry, could not create '$dataDirectory', $?"

step "configuration for standalone application '$namespace.$composite' ..."
( IFS=$'\n' ; echo -e "\nStreams toolkits:\n${toolkitList[*]}" )
( IFS=$'\n' ; echo -e "\nStreams compiler options:\n${compilerOptionsList[*]}" )
echo -e "\nGNU compiler parameters:\n$gccOptions"
echo -e "\nGNU linker parameters:\n$ldOptions"
( IFS=$'\n' ; echo -e "\n$composite compile-time parameters:\n${compileTimeParameterList[*]}" )
( IFS=$'\n' ; echo -e "\n$composite submission-time parameters:\n${submitParameterList[*]}" )
echo -e "\ntrace level: $traceLevel"

step "building standalone application '$namespace.$composite' ..."
sc ${compilerOptionsList[*]} "--cxx-flags=$gccOptions" "--ld-flags=$ldOptions" -- "${compileTimeParameterList[*]}" || die "Sorry, could not build '$composite', $?"

step "deleting old '/dev/hugepages/rtemap_*' files, if necessary ..."
clean_hugepages

step "executing standalone application '$namespace.$composite' ..."
executable=$buildDirectory/bin/$namespace.$composite
$executable -t $traceLevel ${submitParameterList[*]}

step "deleting '/dev/hugepages/rtemap_*' files ..."
clean_hugepages

exit 0
|
import datetime
def totaltimer(times):
    """Sum a sequence of (minutes, seconds) pairs into one timedelta."""
    total = datetime.timedelta(0)
    for minutes, seconds in times:
        total += datetime.timedelta(minutes=minutes, seconds=seconds)
    return total
if __name__== '__main__': # test when module run as main script
    times1 = [(2, 36), # list containing tuples (minutes, seconds)
              (3, 35),
              (3, 45),]
    times2 = [(3, 0),
              (5, 13),
              (4, 12),
              (1, 10),]
    # Expected totals: 9m56s (596 s) and 13m35s (815 s).
    assert totaltimer(times1) == datetime.timedelta(0, 596)
    assert totaltimer(times2) == datetime.timedelta(0, 815)
    print ("Tests passed.\n"
           "First test total: %s\n"
           "Second test total: %s" % (
           totaltimer(times1), totaltimer(times2)))
|
#!/bin/bash
# SLURM job: train Double DDPG (hard target copy, epsilon-greedy exploration)
# on RoboschoolHopper-v1 with random seed 5, run 8.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=23:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolHopper-v1_doule_ddpg_hardcopy_epsilon_greedy_seed5_run8_%N-%j.out # %N for node name, %j for jobID

# Load the toolchain modules and activate the CPU TensorFlow virtualenv.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate

python ./ddpg_discrete_action.py --env RoboschoolHopper-v1 --random-seed 5 --exploration-strategy epsilon_greedy --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolHopper-v1/doule_ddpg_hardcopy_epsilon_greedy_seed5_run8 --continuous-act-space-flag --target-hard-copy-flag
|
/// <summary>
/// The top-level administration views the window can display.
/// </summary>
public enum UiState
{
    UserManagement,
    PluginManagement,
    SourceManagement
}
/// <summary>
/// Switches the window between the administration views and keeps the
/// header title in sync with the selected <see cref="UiState"/>.
/// </summary>
public class UIManager
{
    /// <summary>
    /// Shows the header with the title for <paramref name="state"/> and
    /// makes the corresponding management view visible.
    /// </summary>
    public void UpdateUI(UiState state)
    {
        HeaderView.Visibility = Visibility.Visible;
        HeaderView.UiTitel = GetUiTitle(state);
        switch (state)
        {
            case UiState.UserManagement:
                UserManagementView.Visibility = Visibility.Visible;
                break;
            case UiState.PluginManagement:
            case UiState.SourceManagement:
                // Both management views use an enlarged window.
                Width = 1100;
                Height = 600;
                // NOTE(review): only PluginManagementView is toggled here;
                // nothing makes a source-management view visible for
                // UiState.SourceManagement -- confirm whether that is handled
                // elsewhere or is an omission.
                PluginManagementView.Visibility = state == UiState.PluginManagement ? Visibility.Visible : Visibility.Collapsed;
                break;
        }
    }

    /// <summary>
    /// Maps a <see cref="UiState"/> to its human-readable header title.
    /// </summary>
    private string GetUiTitle(UiState state)
    {
        switch (state)
        {
            case UiState.UserManagement:
                return "User Management";
            case UiState.PluginManagement:
                return "Plugin Management";
            case UiState.SourceManagement:
                return "Source Management";
            default:
                return string.Empty;
        }
    }

    // Define properties for HeaderView, UserManagementView, PluginManagementView, Width, and Height
}
<gh_stars>1-10
const request = require('node-superfetch'),
{ MessageEmbed } = require('discord.js');
// Command entry point: resolves a company name to a ticker symbol, fetches the
// latest intraday quote, and replies with an embed; any thrown error is
// reported back to the channel as a red "report this" embed.
exports.run = async (client, msg, args) => {
  try {
    if (args.length < 1) return msg.channel.send(`${client.settings.prefix}${exports.help.usage}`);
    const query = args.join(' ');
    const company = await search(query);
    if (!company) return msg.channel.send('Could not find any results.');
    const stocks = await fetchStocks(company.symbol, client.settings.alphavantage_key);
    if (!stocks) return msg.channel.send('Could not find any results.');
    return msg.channel.send(new MessageEmbed()
      .setTitle(`Stocks for ${company.name} (${stocks.symbol.toUpperCase()})`)
      .setColor(0x9797FF)
      .setFooter('Last Updated')
      .setTimestamp(stocks.lastRefresh)
      .addField('โฏ Open', `$${client.formatNumber(stocks.open)}`, true)
      .addField('โฏ Close', `$${client.formatNumber(stocks.close)}`, true)
      .addField('โฏ Volume', client.formatNumber(stocks.volume), true)
      .addField('โฏ High', `$${client.formatNumber(stocks.high)}`, true)
      .addField('โฏ Low', `$${client.formatNumber(stocks.low)}`, true)
      .addField('\u200B', '\u200B', true)
    );
  } catch (err) {
    // Error path: surface the stack trace and the offending command.
    return msg.channel.send(new MessageEmbed()
      .setColor('RED')
      .setTimestamp()
      .setTitle('Please report this on GitHub')
      .setURL('https://github.com/william5553/triv/issues')
      .setDescription(`**Stack Trace:**\n\`\`\`${err.stack}\`\`\``)
      .addField('**Command:**', `${msg.content}`)
    );
  }
};
// Fetch the most recent 1-minute intraday bar for `symbol` from Alpha Vantage.
// Returns null when the API reports an error or has no time-series data.
async function fetchStocks(symbol, key) {
  const response = await request
    .get('https://www.alphavantage.co/query')
    .query({
      function: 'TIME_SERIES_INTRADAY',
      symbol,
      interval: '1min',
      apikey: key
    });
  const body = response.body;
  const series = body['Time Series (1min)'];
  if (body['Error Message'] || !series) return null;
  // The first entry of the series is the most recent minute bar.
  const latest = Object.values(series)[0];
  return {
    symbol,
    open: latest['1. open'],
    high: latest['2. high'],
    low: latest['3. low'],
    close: latest['4. close'],
    volume: latest['5. volume'],
    lastRefresh: new Date(body['Meta Data']['3. Last Refreshed'])
  };
}
// Resolve a free-text company query via Yahoo Finance autocomplete.
// Returns the top match object (with .symbol and .name) or null.
async function search(query) {
  const response = await request
    .get('http://d.yimg.com/autoc.finance.yahoo.com/autoc')
    .query({ query, region: 1, lang: 'en' });
  const matches = response.body.ResultSet.Result;
  return matches.length ? matches[0] : null;
}
// Command configuration consumed by the command loader.
exports.conf = {
  enabled: true,      // command is active
  guildOnly: false,   // also usable in DMs
  aliases: ['stock', 'stonk', 'stonks'],
  permLevel: 0        // minimum permission level required
};
// Help metadata shown by the bot's help command.
exports.help = {
  name: 'stocks',
  description: 'Responds with the current stocks for a company',
  usage: 'stocks [company]'
};
|
<reponame>Hyperparticle/lct-master<gh_stars>1-10
#!/usr/bin/env python3
import pandas as pd
import numpy as np
if __name__ == "__main__":
    # Load the 28 PCA features plus Amount and the fraud label from the
    # credit-card dataset.
    data = pd.read_csv('creditcard.csv')[['V' + str(i+1) for i in range(28)] + ['Amount', 'Class']]
    true = data[data.Class == 1]
    # Cap the combined sample at 5000 rows by trimming the majority class.
    false = data[data.Class == 0][:5000 - len(true)]
    # 80/20 train/test split within each class to preserve the class ratio.
    s0, s1 = int(len(true) * 0.8), int(len(false) * 0.8)
    true_train, true_test = true[:s0], true[s0:]
    false_train, false_test = false[:s1], false[s1:]
    # DataFrame.append was deprecated and removed in pandas 2.0;
    # pd.concat is the supported, behavior-identical replacement.
    train = pd.concat([true_train, false_train]).sample(frac=1).reset_index(drop=True)
    test = pd.concat([true_test, false_test]).sample(frac=1).reset_index(drop=True)
    train.to_csv('train.txt', header=False, index=False)
    test.to_csv('test.txt', header=False, index=False)
|
#!/usr/bin/env bash
# Copyright Monwoo 2017, service@monwoo.com, code by Miguel Monwoo
# ANSI escape codes used to echo traced commands (green) and errors (red).
GREEN='\033[32m'
RED='\033[41m'
COLOR_RESET='\033[0m'
# set -x # echo commandes
# echo commandes in green colors
# DEBUG trap fires before every command: print it in green and remember it
# in DEBUGTRAP so the error handler can report which command failed.
trap 'echo -e "${GREEN}# $BASH_COMMAND$COLOR_RESET";export DEBUGTRAP=$BASH_COMMAND' DEBUG
# err_handler [code]
# Print the last traced command (from DEBUGTRAP) in red together with the
# failing exit code, then terminate the script with that code.
err_handler() {
    local code="${1:-$?}"
    echo -e "${RED}# [ERR: ${code}] ${DEBUGTRAP}${COLOR_RESET}"
    exit "${code}"
}
# Route errors and interrupts through the red error reporter above.
trap err_handler ERR INT TERM
set -e # exit on errors
# $(exit 42) # => throw error code 42 if you use this commande
# starter inspired from : https://www.npmjs.com/package/graphql-server-lambda
# One-time setup: install node deps and configure AWS credentials.
if [ "$1" = "install" ]; then
# bootstrap package :
yarn install
aws configure
# TODO : try with miguel2, to fine tune only needable access
# AWS Access Key ID [None]: your key ID
# AWS Secret Access Key [None]: your key access
# Default region name [None]: us-east-1
# Default output format [None]: json
fi
# We use the stack-name prod to mean production but any stack name can be used.
STACK_NAME=prod
# Bonus config, not working yet :
STACK_POLICY_FILE="file://$PWD/bonus/MiguelStackPolicy.json"
EMPTY_STACK_TEMPLATE="file://$PWD/bonus/EmptyStackTemplate.yml"
STACK_PARAMETERS="file://$PWD/bonus/stack-parameters.dist.json"
# http://docs.aws.amazon.com/fr_fr/AmazonS3/latest/dev/UsingBucket.html#access-bucket-intro
# https://t4n3k2xzbe.execute-api.us-east-1.amazonaws.com/prod/graphiql
# https://t4n3k2xzbe.execute-api.us-east-1.amazonaws.com/prod/graphql
# Read and transform the template, created in previous step.
# Package and upload the artifact to the S3 bucket
# and generate another template for the deployment.
# need IAM user with cloudformation access,
# check CloudWatch logs to see access errors if troubles
# check uploaded packages at :
# https://console.aws.amazon.com/s3/buckets/demo.awslambda.monwoo.com/?region=us-east-1&tab=overview#
aws cloudformation package \
--template-file template.yaml \
--output-template-file serverless-output.yaml \
--s3-bucket demo.awslambda.monwoo.com
#################### => instead, use IAM user with IAMFullAccess roles
# ## NOT WORKING YET, may be need user with specific roles to do it...
# ## for now only need code in prod
# # adpat permission for the bucket (can be associated by user too if prefered)
# # better to use stack policy for quick dev tests
#
# # validate stack template
# aws cloudformation validate-template --template-body "$EMPTY_STACK_TEMPLATE"
# # create stack
# aws cloudformation create-stack --stack-name $STACK_NAME \
# --template-body "$EMPTY_STACK_TEMPLATE" \
# --parameters "$STACK_PARAMETERS"
#
# # list all stack and ensure our's is inside
# aws cloudformation list-stacks --region us-east-1 --output table \
# --query '"StackSummaries[*].StackName";"StackSummaries[*].StackStatus" "StackSummaries[*].CreationTime"'
# # --query '{name:StackSummaries[*].StackName,status:StackSummaries[*].StackStatus,date:StackSummaries[*].CreationTime}'
# # --query '[StackSummaries[*].StackName,[StackSummaries[*].StackStatus,[StackSummaries[*].CreationTime' \
# # --stack-status-filter 'CREATE_COMPLETE'
#
# # validate and update stack policy to Amazon (you can use --debug to see http debugs...)
# aws cloudformation set-stack-policy --stack-name $STACK_NAME --stack-policy-body "$STACK_POLICY_FILE"
#
# # ensure our policy have been updated :
# aws cloudformation get-stack-policy --stack-name $STACK_NAME
####################
# create the Lambda Function and API Gateway for GraphQL.
aws cloudformation deploy \
--template-file serverless-output.yaml \
--stack-name $STACK_NAME \
--capabilities CAPABILITY_IAM
# list all available lambda function
aws lambda list-functions
# configure the test acordingly to generated function name
LAMBDA_TEST_FUNCTION_NAME=prod-GraphQL-96TD40MRFT5Z
# LAMBDA_TEST_PAYLOAD='{"key1":"value1", "key2":"value2", "key3":"value3"}'
# LAMBDA_TEST_PAYLOAD=$(cat bonus/event-fetch-users.js | tr '\n' ' ' | sed -E 's, +, ,g')
LAMBDA_TEST_PAYLOAD='{"queryStringParameters":{"query":"{ users { firstname } }"},"httpMethod":"GET","method":"GET"}'
# test the lambda function :
aws lambda invoke \
--invocation-type RequestResponse \
--function-name "$LAMBDA_TEST_FUNCTION_NAME" \
--region us-east-1 \
--log-type Tail \
--payload "$LAMBDA_TEST_PAYLOAD" \
outputfile.txt && cat outputfile.txt
# it may output some json object with LogResult as base64 content.
# Can be decoded online or with commande line :
# B64CONTENT=<your b64 content>
# echo "$B64CONTENT" | base64 --decode
# access the lambda function via get :
# => go in Amazon console => API Gateway service
# => you should see lambda api ready to deploy on the uploaded package
# checking in get :
# https://t4n3k2xzbe.execute-api.us-east-1.amazonaws.com/prod/graphql
# transform the stack in api ready for frontend :
# https://console.aws.amazon.com/apigateway/home?region=us-east-1#/apis
# liste errors : CloudWatch
# https://console.aws.amazon.com/cloudwatch/home?region=us-east-1#logs:
# Checking for uploaded packages :
# https://console.aws.amazon.com/s3/buckets/demo.awslambda.monwoo.com/?region=us-east-1&tab=overview#
# Checking for get acces in API builder => direclty launch test from Ressource
# https://console.aws.amazon.com/apigateway/home?region=us-east-1#/apis/ghcekje7sa/resources/gnu4zv/methods/GET
# Checking for available lambda :
# https://console.aws.amazon.com/lambda/home?region=us-east-1#/functions
# Checking for available stacks :
# https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks?filter=active
# test lambda locally :
# alias lambda-local="$PWD/node_modules/.bin/lambda-local"
lambdaLocal="$PWD/node_modules/.bin/lambda-local"
$lambdaLocal -l graphql.js -h graphqlHandler -e bonus/event-fetch-users.js
|
import FieldEnum from 'types/enums/field-enum';
// A field description: the set of field types this field accepts.
export default interface Field {
  // Accepted field types (see FieldEnum for the possible values).
  types: Array<FieldEnum>;
}
|
<reponame>ChristopherChudzicki/mathbox<gh_stars>1-10
// GLSL snippet: declares the `dataTexture` uniform and a `sample2D` helper
// that reads it at the given UV coordinate. The template literal content is
// shader source and must not be reformatted.
export default /* glsl */ `uniform sampler2D dataTexture;
vec4 sample2D(vec2 uv) {
return texture2D(dataTexture, uv);
}
`;
|
#!/bin/bash
# Evaluate the UC2 model on xFlickrCO (German) retrieval using the best
# learning rate found from the few-shot training logs.
TASK=8
SHOT=1
LANG=de
MODEL=uc2
MODEL_CONFIG=uc2_base
TASKS_CONFIG=iglue_test_tasks_boxes36.dtu
TRTASK=RetrievalxFlickrCO${LANG}_${SHOT}
TETASK=RetrievalxFlickrCO${LANG}
TEXT_PATH=/home/projects/ku_00062/data/xFlickrCO/annotations/${LANG}/test.jsonl
FEAT_PATH=/home/projects/ku_00062/data/xFlickrCO/features/xflickrco-test_boxes36.lmdb
here=$(pwd)
source /home/projects/ku_00062/envs/iglue/bin/activate
cd ../../../../../../volta
# Pick the learning rate whose training log reports the highest final score.
best=-1
best_lr=-1
for lr in 1e-4 5e-5 1e-5; do
  f=${here}/train.${lr}.log
  s=`tail -n1 $f | cut -d ' ' -f 4`
  d=$(echo "$s>$best" | bc)   # bc handles the float comparison
  if [[ $d -eq 1 ]]; then
    best=$s
    best_lr=$lr
  fi
done
echo "Best lr: " $best_lr
PRETRAINED=/home/projects/ku_00062/checkpoints/iglue/few_shot/xflickrco/${TRTASK}/${MODEL}/${best_lr}/RetrievalFlickr30k_${MODEL_CONFIG}/pytorch_model_best.bin
OUTPUT_DIR=/home/projects/ku_00062/results/iglue/few_shot/xflickrco/${MODEL}/${best_lr}/${TRTASK}_${MODEL_CONFIG}/$TETASK/test
python eval_retrieval.py \
  --bert_model /home/projects/ku_00062/huggingface/xlm-roberta-base --config_file config/${MODEL_CONFIG}.json \
  --from_pretrained ${PRETRAINED} \
  --tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK \
  --split test_${LANG} --batch_size 1 \
  --caps_per_image 1 --val_annotations_jsonpath ${TEXT_PATH} --val_features_lmdbpath ${FEAT_PATH} \
  --output_dir ${OUTPUT_DIR}
# BUG FIX: the original left a trailing backslash after --output_dir, which
# turned `deactivate` into an extra argument passed to eval_retrieval.py
# instead of deactivating the virtualenv after the run.
deactivate
|
#!/bin/bash
##
## This file is part of the `src-run/bash-writer-library` project.
##
## (c) https://github.com/src-run/bash-writer-library/graphs/contributors
##
## For the full copyright and license information, please view the LICENSE.md
## file that was distributed with this source code.
##
#
# write out command status
#
# write_status <index> <value> <fg-color> <bg-color> [bold=0] [custom-style=x]
# Render a status badge: reversed fg/bg colors, optionally bold or with a
# caller-supplied extra style, framed by write_section_init/stop.
function write_status()
{
    local index="${1}"
    local value="${2}"
    local color_fb="${3}"
    local color_bg="${4}"
    local style_bold="${5:-0}"
    local style_custom="${6:-x}"
    write_section_init "${index}"
    # BUG FIX: use "!=" (string compare). The original "-ne" forces arithmetic
    # evaluation, which treats any non-numeric style name as 0 and therefore
    # never matched a custom style.
    if [[ "${style_custom}" != "x" ]]; then
        out_custom " ${value} " "fg:${color_fb} style:reverse bg:${color_bg} style:${style_custom}"
    elif [[ ${style_bold} -eq 1 ]]; then
        # BUG FIX: bold when explicitly requested. The original test was
        # inverted (-eq 0), bolding the default and un-bolding callers that
        # passed 1 (write_status_skip).
        out_custom " ${value} " "fg:${color_fb} style:reverse bg:${color_bg} style:bold"
    else
        out_custom " ${value} " "fg:${color_fb} style:reverse bg:${color_bg}"
    fi
    write_section_stop
}
#
# write out command status okay
#
# write_status_okay <index> [label=okay]
# Green-on-black status badge.
function write_status_okay()
{
    local index="${1}"
    local label="${2:-okay}"
    write_status "${index}" "${label}" green black
}
#
# write out command status warn
#
# write_status_warn <index> [label=warn]
# Red-on-white status badge.
function write_status_warn()
{
    local index="${1}"
    local label="${2:-warn}"
    write_status "${index}" "${label}" red white
}
#
# write out command status skip
#
# write_status_skip <index> [label=skip]
# Blue-on-black status badge with the bold flag set.
function write_status_skip()
{
    local index="${1}"
    local label="${2:-skip}"
    write_status "${index}" "${label}" blue black 1
}
|
package com.balceda.reservationsapp.controller;
import java.util.List;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.balceda.reservationsapp.controller.vo.ClientVO;
import com.balceda.reservationsapp.model.Client;
import com.balceda.reservationsapp.service.ClientService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
@RestController
@RequestMapping("/api/client")
@Api(tags = "client")
public class ClientController {

	private final ClientService clientService;

	public ClientController(ClientService clientService) {
		this.clientService = clientService;
	}

	/** Creates a new client from the request body. */
	@PostMapping
	@ApiOperation(value = "Create a client", notes = "This service creates a new client")
	@ApiResponses(value = { @ApiResponse(code = 201, message = "Client created successfully"),
			@ApiResponse(code = 400, message = "Invalid request") })
	public ResponseEntity<Client> createClient(@RequestBody ClientVO clientVO) {
		Client client = new Client();
		client.setIdNumber(clientVO.getIdNumber());
		client.setFirstName(clientVO.getFirstName());
		client.setLastName(clientVO.getLastName());
		client.setAddress(clientVO.getAddress());
		client.setPhone(clientVO.getPhone());
		client.setEmail(clientVO.getEmail());
		return new ResponseEntity<>(this.clientService.create(client), HttpStatus.CREATED);
	}

	/** Updates an existing client identified by its idNumber. */
	@PutMapping("/{idNumber}")
	@ApiOperation(value = "Update a client", notes = "This service updates an existing client")
	@ApiResponses(value = { @ApiResponse(code = 200, message = "Client updated successfully"),
			@ApiResponse(code = 404, message = "Client not found") })
	// BUG FIX: @RequestBody was missing, so the JSON payload was never bound
	// to clientVO (createClient already annotates its parameter).
	public ResponseEntity<Client> updateClient(@PathVariable("idNumber") String idNumber,
			@RequestBody ClientVO clientVO) {
		Client client = this.clientService.findByIdNumber(idNumber);
		if (client == null) {
			return new ResponseEntity<>(HttpStatus.NOT_FOUND);
		} else {
			client.setFirstName(clientVO.getFirstName());
			client.setLastName(clientVO.getLastName());
			client.setAddress(clientVO.getAddress());
			client.setPhone(clientVO.getPhone());
			client.setEmail(clientVO.getEmail());
			return new ResponseEntity<>(this.clientService.update(client), HttpStatus.OK);
		}
	}

	/** Deletes a client if it exists; silently does nothing otherwise. */
	@DeleteMapping("/{idNumber}")
	@ApiOperation(value = "Delete a client", notes = "This service deletes an existing client")
	@ApiResponses(value = { @ApiResponse(code = 200, message = "Client deleted successfully"),
			@ApiResponse(code = 404, message = "Client not found") })
	public void removeClient(@PathVariable("idNumber") String idNumber) {
		Client client = this.clientService.findByIdNumber(idNumber);
		if (client != null) {
			this.clientService.delete(client);
		}
	}

	/** Lists every client; 204 when the service returns null. */
	@GetMapping
	@ApiOperation(value = "List all clients", notes = "This service lists all existing clients")
	@ApiResponses(value = { @ApiResponse(code = 200, message = "Clients listed successfully"),
			@ApiResponse(code = 404, message = "Clients not found") })
	public ResponseEntity<List<Client>> findAllClients() {
		List<Client> clients = this.clientService.findAll();
		if (clients != null) {
			return new ResponseEntity<>(clients, HttpStatus.OK);
		}
		return new ResponseEntity<>(HttpStatus.NO_CONTENT);
	}

	/** Looks a client up by id number; 404 with null body when absent. */
	@GetMapping("/{idNumber}")
	@ApiOperation(value = "Find a client by id", notes = "This service gets an existing client by a given id_number")
	@ApiResponses(value = { @ApiResponse(code = 200, message = "Client found"),
			@ApiResponse(code = 404, message = "Client not found") })
	public ResponseEntity<Client> findByIdNumberClient(@PathVariable("idNumber") String idNumber) {
		Client client = this.clientService.findByIdNumber(idNumber);
		if (client != null) {
			return new ResponseEntity<>(client, HttpStatus.OK);
		}
		return new ResponseEntity<>(client, HttpStatus.NOT_FOUND);
	}

	/** Finds clients by last name; 204 when none match. */
	@GetMapping("/lastname/{lastName}")
	@ApiOperation(value = "Find a client by lastName", notes = "This service gets an existing client by a given lastName")
	@ApiResponses(value = { @ApiResponse(code = 200, message = "Client found"),
			@ApiResponse(code = 404, message = "Client not found") })
	public ResponseEntity<List<Client>> findByLastNameClient(@PathVariable("lastName") String lastName) {
		List<Client> clients = this.clientService.findByLastName(lastName);
		if (!clients.isEmpty()) {
			return new ResponseEntity<>(clients, HttpStatus.OK);
		}
		return new ResponseEntity<>(HttpStatus.NO_CONTENT);
	}

	/** Finds a client by phone; 404 with null body when absent. */
	@GetMapping("/phone/{phone}")
	@ApiOperation(value = "Find a client by phone", notes = "This service gets an existing client by a given phone")
	@ApiResponses(value = { @ApiResponse(code = 200, message = "Client found"),
			@ApiResponse(code = 404, message = "Client not found") })
	public ResponseEntity<Client> findByPhoneClient(@PathVariable("phone") String phone) {
		Client client = this.clientService.findByPhone(phone);
		if (client != null) {
			return new ResponseEntity<>(client, HttpStatus.OK);
		}
		return new ResponseEntity<>(client, HttpStatus.NOT_FOUND);
	}

	/** Finds a client by email; 404 with null body when absent. */
	@GetMapping("/email/{email}")
	@ApiOperation(value = "Find a client by email", notes = "This service gets an existing client by a given email")
	@ApiResponses(value = { @ApiResponse(code = 200, message = "Client found"),
			@ApiResponse(code = 404, message = "Client not found") })
	public ResponseEntity<Client> findByEmailClient(@PathVariable("email") String email) {
		Client client = this.clientService.findByEmail(email);
		if (client != null) {
			return new ResponseEntity<>(client, HttpStatus.OK);
		}
		return new ResponseEntity<>(client, HttpStatus.NOT_FOUND);
	}
}
|
#!/bin/bash
# Container entrypoint: tailors the supervisord/x11vnc/OpenBox configuration
# from environment variables, aligns UID/GID with the host, then starts
# supervisord under tini.
DEVELOPER_NAME=developer
DEV_HOME=/home/${DEVELOPER_NAME}
# Extra arguments for x11vnc
if [ -n "$X11VNC_ARGS" ]; then
sed -i "s/^command=gosu {{USER}} x11vnc.*/& ${X11VNC_ARGS}/" /etc/supervisor/conf.d/supervisord.conf
fi
# Configure the VNC password
if [ -n "$PASSWORD" ]; then
mkdir -p ${DEV_HOME}/.x11vnc
echo -n "$PASSWORD" > ${DEV_HOME}/.x11vnc/password1
x11vnc -storepasswd $(cat ${DEV_HOME}/.x11vnc/password1) ${DEV_HOME}/.x11vnc/password2
chmod 400 ${DEV_HOME}/.x11vnc/password*
sed -i "s!command=gosu {{USER}} x11vnc.*!& -rfbauth ${DEV_HOME}/.x11vnc/password2!" /etc/supervisor/conf.d/supervisord.conf
# Clear the password from the environment once stored.
export PASSWORD=
fi
# Configure the VNC resolution
if [ -n "$RESOLUTION" ]; then
sed -i "s/1024x768/$RESOLUTION/" /usr/local/bin/xvfb.sh
fi
# Extra arguments for OpenBox
if [ -n "$OPENBOX_ARGS" ]; then
sed -i "s#^command=/usr/bin/openbox\$#& ${OPENBOX_ARGS}#" /etc/supervisor/conf.d/supervisord.conf
fi
# Substitute the user/home placeholders in the supervisord config.
sed -i -e "s|{{USER}}|${DEVELOPER_NAME}|" -e "s|{{HOME}}|${DEV_HOME}|" /etc/supervisor/conf.d/supervisord.conf
# home folder
if [ ! -x "${DEV_HOME}/.config/pcmanfm/LXDE/" ]; then
mkdir -p ${DEV_HOME}/.config/pcmanfm/LXDE/
ln -sf /usr/local/share/lxde/desktop-items-0.conf ${DEV_HOME}/.config/pcmanfm/LXDE/
chown -R ${DEVELOPER_NAME}:${DEVELOPER_NAME} ${DEV_HOME}
fi
if [ "$(id -u)" == "0" ]; then
if [ "${HOST_USER_ID}" != "$(gosu ${DEVELOPER_NAME} id -u)" ]; then
# Align the container user's UID/GID with the host PC so files on bind
# mounts remain accessible.
usermod -u ${HOST_USER_ID} -o -m -d /home/${DEVELOPER_NAME} ${DEVELOPER_NAME}
groupmod -g ${HOST_GROUP_ID} ${DEVELOPER_NAME}
chown -R ${DEVELOPER_NAME}:${DEVELOPER_NAME} /home/${DEVELOPER_NAME}
else
chown -R ${DEVELOPER_NAME}:${DEVELOPER_NAME} /home/${DEVELOPER_NAME}
fi
# NOTE(review): `su - ${DEVELOPER_NAME}` opens an interactive shell before
# exec'ing supervisord — confirm this blocking call is intentional.
su - ${DEVELOPER_NAME}
fi
exec /bin/tini -- supervisord -n -c /etc/supervisor/supervisord.conf
|
/**
* Module dependencies.
**/
var express = require('express')
, routes = require('./routes')
, http = require('http')
, path = require('path')
, calculator = require('./routes/calculator')
, home = require('./routes/home')
, userProfile = require('./routes/userProfile')
, session = require('client-sessions')
, products = require('./routes/products')
, mongo = require('./routes/mongo');
var passport = require('passport');
require('./routes/passport')(passport);
//var session = require('client-sessions');
var expressSessions = require("express-session");
var mongoStore = require("connect-mongo/es5")(expressSessions);
var mongoSessionConnectURL = "mongodb://localhost:27017/ebay";
var app = express();
// all environments
//configure the sessions with our application
// client-sessions cookie-based session middleware (see `session` require above).
app.use(session({
  cookieName: 'session',
  secret: 'cmpe273_test_string',
  duration: 30 * 60 * 1000, //setting the time for active session
  activeDuration: 5 * 60 * 1000, })); // setting time for the session to be active when the window is open // 5 minutes set currently
app.set('port', process.env.PORT || 3000);
app.set('views', __dirname + '/views');
app.set('view engine', 'ejs');
//add middleware
//app.use(favicon(path.join(__dirname,'public','images','favicon.ico')));
app.use(express.favicon());
//app.use(express.logger('dev'));
//parse json
app.use(express.bodyParser());
app.use(app.router);
app.use(express.static(path.join(__dirname, 'public')));
/*
app.use(expressSessions({
  secret: "CMPE273_passport",
  resave: false,
  saveUninitialized: false,
  duration: 30 * 60 * 1000,
  activeDuration: 5 * 6 * 1000,
  store: new mongoStore({
    url: mongoSessionConnectURL
  })
}));
*/
// development only // default error handler
if ('development' == app.get('env')) {
  app.use(express.errorHandler());
}
// Route table: home/signup/signin flows.
//app.get('/', products.getProductsPage);
app.get('/', home.redirectToHome);
app.get('/signup',home.signup);
app.get('/signin',home.signin);
app.post('/checksignup',home.checksignup);
app.post('/checksignupWithConnectionPool',home.checksignupWithConnectionPool);
app.post('/checksignupWithoutRabbitMQ',home.checksignupWithoutRabbitMQ);
app.post('/afterSignup',home.afterSignup);
//app.post('/checklogin',home.checklogin);// change this method.
app.use(passport.initialize());
///***************passport****/
// POST /checklogin — authenticate via the passport "login" strategy.
// Replies {statusCode: 200} on success, {statusCode: 401} on bad credentials.
app.post('/checklogin', function(req, res, next) {
  passport.authenticate('login', function(err, user, info) {
    if (err) {
      return next(err);
    }
    if (!user) {
      // BUG FIX: `response` was an implicit global, and the original called
      // res.redirect('/') AFTER res.send(), attempting a second response on
      // the same request ("headers already sent").
      var json_responses = {"statusCode" : 401};
      return res.send(json_responses);
    }
    req.logIn(user, {session: false}, function(err) {
      if (err) {
        return next(err);
      }
      req.session.userid = user.EmailId; //userid = EmailId
      console.log("session initilized");
      //change to json responce
      var json_responses = {"statusCode": 200};
      res.send(json_responses);
    });
  })(req, res, next);
});
// NOTE(review): a GET '/' route is already registered above
// (home.redirectToHome), so Express never reaches this handler — confirm
// which of the two '/' handlers is intended.
app.get('/', isAuthenticated, function(req, res) {
  //res.render('successLogin', {user:{username: req.session.userid}});
  //change to json responce
  var json_responses = {"statusCode": 200};
  res.send(json_responses);
});
// Middleware: pass the request through when a login session exists,
// otherwise redirect to the home page.
function isAuthenticated(req, res, next) {
  // BUG FIX: was `Console.log` (capital C) — a ReferenceError thrown on every
  // request that reached this middleware.
  console.log('inside isAuthenticated.');
  if (req.session.userid) {
    console.log(req.session.userid);
    return next();
  }
  res.redirect('/');
};
// catch 404 and forward to error handler
/*app.use(function(req, res, next) {
Console.log('inside app.use.');
var err = new Error('Not Found');
err.status = 404;
next(err);
});*/
// error handlers
// development error handler
// will print stacktrace
/*if (app.get('env') === 'development') {
app.use(function(err, req, res, next) {
res.status(err.status || 500);
res.render('error', {
message: err.message,
error: err
});
});
}*/
// production error handler
// no stacktraces leaked to user
/*
app.use(function(err, req, res, next) {
res.status(err.status || 500);
res.render('error', {
message: err.message,
error: {}
});
});
*/
/*******/
// User-profile and account routes.
app.post('/signout',home.signout);
app.get('/userProfile',userProfile.accountdetails);
app.post('/getUserAccountDetails',userProfile.getUserAccountDetails);
app.post('/getAllProductsInCart',userProfile.getAllProductsInCart);
app.post('/removeItemFromCart',userProfile.removeItemFromCart);
app.post('/buyItemsInCart',userProfile.buyItemsInCart);
app.post('/getAllUserDirectBuyingActivities',userProfile.getAllUserDirectBuyingActivities);
app.post('/getAllSoldProducts',userProfile.getAllSoldProducts);
app.post('/getAllUserBiddingActivity',userProfile.getAllUserBiddingActivity);
app.post('/getAllWonAuctions',userProfile.getAllWonAuctions);
app.post('/updatePaymentDetailsForAuction',userProfile.updatePaymentDetailsForAuction);
app.post('/getAllAuctionProductHistory',userProfile.getAllAuctionProductHistory);
//testing
/*
app.post('/getUserAccountDetailsWithConnetionPool',userProfile.getUserAccountDetailsWithConnetionPool);
app.post('/getUserAccountDetailsWithoutConnetionPool',userProfile.getUserAccountDetailsWithoutConnetionPool);
*/
// Product and auction routes.
app.get('/products',products.getProductsPage);
app.post('/getAllProducts',products.getAllProducts);
app.post('/getAllProductsForAuction',products.getAllProductsForAuction);
app.post('/userAddToCart',products.userAddToCart);
app.post('/addBidOnProduct',products.addBidOnProduct);
/*app.post('/getItemType',products.getItemType);*/
app.post('/addProduct',products.addProduct);
app.post('/labProducts',products.labProducts);
app.get('/accountDetails', function (req, res) {
  res.sendfile(__dirname +'/public/templates/userProfile/accountDetails.html');
});
// production error handler
// no stacktraces leaked to user
app.use(function(err, req, res, next) {
  res.status(err.status || 500);
  res.render('error', {
    message: err.message,
    error: {}
  });
});
/*
http.createServer(app).listen(app.get('port'), function(){
console.log('Express server listening on port ' + app.get('port'));
});
*/
//connect to the mongo collection session and then createServer
mongo.connect(mongoSessionConnectURL, function(){
  console.log('Connected to mongo at: ' + mongoSessionConnectURL);
  http.createServer(app).listen(app.get('port'), function(){
    console.log('Express server listening on port ' + app.get('port'));
  });
});
|
from sage.coding.decoder import DecodingError
def linear_code_decoder(code):
    """Decode a linear code word, returning the message or "Decoding Error".

    NOTE(review): `decoding_algorithm` is not defined anywhere in this file,
    so calling this raises NameError before DecodingError can ever be caught —
    confirm where the real decoder is meant to come from.
    """
    try:
        # Implement the decoding algorithm here
        decoded_message = decoding_algorithm(code) # Placeholder for the actual decoding algorithm
        return decoded_message
    except DecodingError:
        # Sentinel string returned on decoding failure (mixed return types).
        return "Decoding Error"
<reponame>nabeelkhan/Oracle-DBA-Life
/*
* pseudoRecords.sql
* Chapter 10, Oracle10g PL/SQL Programming
* by <NAME>, <NAME> and <NAME>
*
* This script demonstrates user defined triggers.
*/
SET ECHO ON
-- Demo trigger: deliberately shows that :old cannot be assigned to a ROWTYPE
-- record as a whole; the trailing "show errors" surfaces the expected
-- compilation error, then the field-by-field workaround is shown.
CREATE OR REPLACE TRIGGER TempDelete
BEFORE DELETE ON temp_table
FOR EACH ROW
DECLARE
v_TempRec temp_table%ROWTYPE;
BEGIN
/* This is not a legal assignment, since :old is not truly
a record. */
v_TempRec := :old;
/* We can accomplish the same thing, however, by assigning
the fields individually. */
v_TempRec.char_col := :old.char_col;
v_TempRec.num_col := :old.num_col;
END TempDelete;
/
show errors
|
# Import KMeans
from sklearn.cluster import KMeans
# Create KMeans object (3 clusters; fixed random_state for reproducible runs)
kmeans = KMeans(n_clusters=3, random_state=0)
# Train the model
# NOTE(review): `X` must be provided by the surrounding context
# (array-like, n_samples x n_features) — confirm where it is defined.
kmeans.fit(X)
# Get the labels (cluster index assigned to each training sample)
labels = kmeans.labels_
# Get the cluster centroids (one coordinate row per cluster)
centroids = kmeans.cluster_centers_
// Print the first ten Fibonacci numbers (0, 1, 1, 2, 3, 5, 8, 13, 21, 34).
let previous = 0;
let current = 1;
// The two seed values are printed directly.
console.log(previous);
console.log(current);
// Eight more terms complete the run of ten.
for (let step = 0; step < 8; step++) {
  const next = previous + current;
  console.log(next);
  previous = current;
  current = next;
}
# Run
# Serve the Slate docs locally on port 4567, mounting ./source into the
# container; --rm removes the container when it exits.
docker run --rm --name slate -p 4567:4567 -v $(pwd)/source:/srv/slate/source slatedocs/slate serve
import { ClientApplication } from '@medplum/core';
import express from 'express';
import request from 'supertest';
import { initApp } from '../app';
import { loadConfig } from '../config';
import { closeDatabase, initDatabase } from '../database';
import { isOk, repo } from '../fhir';
import { initKeys } from '../oauth';
import { seedDatabase } from '../seed';
const app = express();
// ClientApplication created in beforeAll and shared by every test below.
let client: ClientApplication;
beforeAll(async () => {
  // Boot a throwaway sqlite-backed server and register one OAuth client.
  const config = await loadConfig('file:medplum.config.json');
  await initDatabase({ client: 'sqlite3' });
  await seedDatabase();
  await initApp(app);
  await initKeys(config);
  const [outcome, result] = await repo.createResource({
    resourceType: 'ClientApplication',
    secret: 'big-long-string',
    redirectUri: 'https://example.com'
  } as ClientApplication);
  if (!isOk(outcome) || !result) {
    console.log(JSON.stringify(outcome, undefined, 2));
    throw new Error('Error creating application');
  }
  client = result;
});
afterAll(async () => {
  await closeDatabase();
});
// Full scope: authorize -> exchange code for token -> userinfo must include
// every profile/email/phone/address claim.
// BUG FIX: the callback was `async (done)` — Jest (27+) rejects test functions
// that both take a `done` callback and return a promise; nothing here awaits,
// so `async` is dropped.
test('Get userinfo with profile email phone address', (done) => {
  const scope = 'openid profile email phone address';
  request(app)
    .post('/oauth2/authorize?response_type=code&client_id=' + client.id + '&redirect_uri=https://example.com&scope=' + encodeURIComponent(scope))
    .type('form')
    .send({
      email: '<EMAIL>',
      password: '<PASSWORD>',
      nonce: 'asdf',
      state: 'xyz'
    })
    .expect(302)
    .end((err, res) => {
      expect(res.status).toBe(302);
      expect(res.headers.location).not.toBeUndefined();
      const location = new URL(res.headers.location);
      expect(location.searchParams.get('error')).toBeNull();
      request(app)
        .post('/oauth2/token')
        .type('form')
        .send({
          grant_type: 'authorization_code',
          code: location.searchParams.get('code')
        })
        .expect(200)
        .end((err2, res2) => {
          expect(res2.status).toBe(200);
          expect(res2.body.access_token).not.toBeUndefined();
          request(app)
            .get(`/oauth2/userinfo`)
            .set('Authorization', 'Bearer ' + res2.body.access_token)
            .expect(200)
            .end((err3, res3) => {
              expect(res3.status).toBe(200);
              expect(res3.body.sub).not.toBeUndefined();
              expect(res3.body.profile).not.toBeUndefined();
              expect(res3.body.name).toBe('Medplum Admin');
              expect(res3.body.given_name).toBe('Medplum');
              expect(res3.body.family_name).toBe('Admin');
              expect(res3.body.email).toBe('<EMAIL>');
              expect(res3.body.phone).toBe('415-867-5309');
              expect(res3.body.address).toBe('742 Evergreen Terrace, Springfield, OR, 97403');
              done();
            });
        });
    });
});
// Minimal scope: with only "openid", userinfo must expose `sub` and omit all
// profile/email/phone/address claims.
// BUG FIX: dropped `async` from the `(done)` callback — Jest (27+) rejects
// test functions that both take `done` and return a promise.
test('Get userinfo with only openid', (done) => {
  const scope = 'openid';
  request(app)
    .post('/oauth2/authorize?response_type=code&client_id=' + client.id + '&redirect_uri=https://example.com&scope=' + encodeURIComponent(scope))
    .type('form')
    .send({
      email: '<EMAIL>',
      password: '<PASSWORD>',
      nonce: 'asdf',
      state: 'xyz'
    })
    .expect(302)
    .end((err, res) => {
      expect(res.status).toBe(302);
      expect(res.headers.location).not.toBeUndefined();
      const location = new URL(res.headers.location);
      expect(location.searchParams.get('error')).toBeNull();
      request(app)
        .post('/oauth2/token')
        .type('form')
        .send({
          grant_type: 'authorization_code',
          code: location.searchParams.get('code')
        })
        .expect(200)
        .end((err2, res2) => {
          expect(res2.status).toBe(200);
          expect(res2.body.access_token).not.toBeUndefined();
          request(app)
            .get(`/oauth2/userinfo`)
            .set('Authorization', 'Bearer ' + res2.body.access_token)
            .expect(200)
            .end((err3, res3) => {
              expect(res3.status).toBe(200);
              expect(res3.body.sub).not.toBeUndefined();
              expect(res3.body.profile).toBeUndefined();
              expect(res3.body.name).toBeUndefined();
              expect(res3.body.given_name).toBeUndefined();
              expect(res3.body.family_name).toBeUndefined();
              expect(res3.body.email).toBeUndefined();
              expect(res3.body.phone).toBeUndefined();
              expect(res3.body.address).toBeUndefined();
              done();
            });
        });
    });
});
|
# Import necessary modules
from django.core.management.base import BaseCommand
from django.apps import apps
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from django.db.models import FileField
class Command(BaseCommand):
    help = 'Set default file storage for all FileField instances to FileSystemStorage'

    def handle(self, *args, **options):
        """Point MEDIA_ROOT at the temp dir and force FileSystemStorage on
        every FileField of every installed model."""
        # Set the default file storage for tests
        # NOTE(review): `settings._temp_media_dir` is not a standard Django
        # setting — presumably injected by the test harness; confirm it exists
        # before this command runs.
        settings.MEDIA_ROOT = settings._temp_media_dir
        # NOTE(review): mutating DEFAULT_FILE_STORAGE at runtime does not
        # reconfigure storages already bound to fields; the loop below handles
        # those explicitly.
        settings.DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
        # Iterate through all models in the app
        for model in apps.get_models():
            # Identify all FileField instances within each model
            fields = [f for f in model._meta.fields if isinstance(f, FileField)]
            for field in fields:
                # Set the default file storage for each FileField instance to use the FileSystemStorage
                field.storage = FileSystemStorage()
        self.stdout.write(self.style.SUCCESS('Default file storage for all FileField instances set to FileSystemStorage'))
# @author <NAME>
module Alimento
  # Environmental extension of Plato: in addition to the plate data it
  # tracks total greenhouse-gas emissions and land use of its foods.
  class PlatoAmbiental < Plato
    include Comparable
    include Enumerable

    # Totals computed once in the constructor.
    attr_reader :emisiones, :terreno

    # @note Constructor; delegates to Plato and caches the totals
    # @param nombre plate identifier
    # @param lista collection of foods (nodes exposing +value+)
    # @param cantidades amounts of each food, aligned with +lista+
    def initialize(nombre, lista, cantidades)
      super(nombre, lista, cantidades)
      @emisiones = emisiones_total
      @terreno = terreno_total
    end

    # @note Orders plates by their total environmental emissions
    # @param otro object of the same type to compare against
    # @return [Integer] -1, 0 or 1 (Comparable contract)
    def <=>(otro)
      emisiones_total <=> otro.emisiones_total
    end

    # @note Total gas emissions of the plate
    # @return [Float] sum of each food's +gei+ weighted by quantity/100
    def emisiones_total
      total_emisiones = 0
      i = 0
      @lista.each{|iter|
        # NOTE(review): if @cantidades holds Integers, /100 truncates —
        # confirm the quantities are Floats.
        total_emisiones += (iter.value.gei * @cantidades[i]/100)
        i += 1
      }
      return total_emisiones.round(1)
    end

    # @note Land area occupied by the plate's foods
    # @return [Float] quantity-weighted land total, rounded to 1 decimal
    def terreno_total
      ter_array = @lista.collect{|iter| iter.value.terreno}
      total_terreno = []
      i = 0
      while i < @cantidades.length do
        total_terreno.push(ter_array[i] * (@cantidades[i]/100))
        i +=1
      end
      return (total_terreno.sum).round(1)
    end

    # @note Energy-footprint index derived from the caloric value (VCT)
    # @return [Integer] 1 (VCT < 670), 2 (670..830) or 3 (above)
    def huella_energia
      if VCT() < 670
        return 1
      elsif (670..830) === VCT()
        return 2
      else
        return 3
      end
    end

    # @note Carbon-footprint index from emissions scaled to a daily value
    # @return [Integer] 1 (< 800), 2 (800..1200) or 3 (above)
    def huella_carbono
      # Yearly emissions (kg) -> grams per day.
      emi_total_dia = ((emisiones_total()*1000)/365)
      if emi_total_dia < 800
        return 1
      elsif (800..1200) === emi_total_dia
        return 2
      else
        return 3
      end
    end

    # @note Nutritional-footprint index: floored mean of the two indices
    # @return [Integer] combined index
    def huella_nutricional
      return ((huella_carbono + huella_energia) / 2).floor
    end

    # @note Plate formatting
    # @return [String] plate name and total energy value
    def to_s
      string = "La eficiencia energetica de '#{nombre}' es: #{VCT()}"
      return string
    end
  end
end
|
"""
Create an algorithm to calculate the median of a given list of numbers
"""
def median(nums):
    """Return the median of a list of numbers.

    Args:
        nums: a non-empty iterable of numbers.

    Returns:
        The middle value for an odd-length input, or the mean of the two
        middle values for an even-length input.

    Raises:
        ValueError: if ``nums`` is empty (the original raised an opaque
            IndexError here).
    """
    ordered = sorted(nums)
    if not ordered:
        raise ValueError("median() arg is an empty sequence")
    mid = len(ordered) // 2
    if len(ordered) % 2 == 0:
        # Even count: average the two central values.
        return (ordered[mid - 1] + ordered[mid]) / 2
    # Odd count: the single central value.
    return ordered[mid]


# Test (guarded so importing this module no longer prints).
if __name__ == "__main__":
    nums = [1, 5, 2, 8, 4]
    print(median(nums))
<reponame>yandld/lpc_uart_server<gh_stars>1-10
#include <pthread.h>
#include <stdio.h>
#include <sys/time.h>
#include <string.h>
#include <unistd.h>
#include <termios.h>
#include <stdlib.h>
#include <fcntl.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/select.h>
#include <errno.h>
#include <signal.h>
/* Maximum number of serial ports handled simultaneously. */
#define PORT_NUMBERS 10
/* Chunk sizes (bytes) for a single read from the device / input file. */
#define BULK_IN_SIZE 253
#define BULK_OUT_SIZE 253
/* Unit (KiB) used by the progress bookkeeping in writePort. */
#define EVERY_PRINT 64

/* Arguments for a reader thread driving one serial port. */
typedef struct
{
    int index;                    /* port number (ttyUSB<index>) */
    int baud;                     /* requested baud rate */
    int loopback;                 /* non-zero: enable driver loopback */
    const char* readFileName;     /* file whose contents are sent */
    const char* writeFileName;    /* file that receives read-back data */
    const char* devName;          /* device node, e.g. /dev/ttyUSB0 */
    const char* loopbackAttrName; /* sysfs loopback attribute path */
}ReadWriteThreadArgument;

/* Arguments for the thread that streams a file out to a port. */
typedef struct
{
    int fd;                   /* already-open device descriptor */
    int index;                /* port number, used in log messages */
    const char* sendFileName; /* file whose contents are sent */
}WriteThreadArgument;

/* Spawns writePort (isWrite != 0) or readWritePort (isWrite == 0). */
void startThread(void* p,int isWrite);
/*
 * Configure an open serial descriptor for raw 8-data-bit I/O at the
 * given baud rate. Supported rates: 4800/9600/38400/115200; any other
 * value falls back to 115200.
 */
void serial_init(int fd,int baud)
{
    struct termios options;
    int realbaud = 0;

    tcgetattr(fd,&options);
    /* Enable receiver, ignore modem control lines. */
    options.c_cflag |= ( CLOCAL | CREAD);
    options.c_cflag &= ~CSIZE;
    options.c_cflag &= ~CRTSCTS;      /* no hardware flow control */
    options.c_cflag |= CS8;           /* 8 data bits */
    options.c_cflag &= ~CSTOPB;       /* 1 stop bit */
    /* BUG FIX: IGNPAR is an input-mode flag (c_iflag); the original
     * OR-ed it into c_cflag, where its bit value is meaningless and
     * could set an unrelated control flag. */
    options.c_iflag |= IGNPAR;
    options.c_iflag &= ~(BRKINT | ICRNL | INPCK| ISTRIP | IXON);
    options.c_oflag = 0;              /* raw output */
    options.c_lflag = 0;              /* non-canonical, no echo/signals */
    switch (baud) {
    case 4800:
        realbaud = B4800;
        break;
    case 9600:
        realbaud = B9600;
        break;
    case 38400:
        realbaud = B38400;
        break;
    case 115200:
        realbaud = B115200;
        break;
    default:
        realbaud = B115200;
    }
    cfsetispeed(&options,realbaud);
    cfsetospeed(&options,realbaud);
    tcsetattr(fd,TCSANOW,&options);
}
/*
 * Thread body: stream the file p->sendFileName out to the already-open
 * serial descriptor p->fd, printing progress as a percentage.
 *
 * Fixes over the original:
 *  - short writes now advance through the buffer (the old code resent
 *    the start of the buffer, corrupting the output stream);
 *  - the fd_set is re-armed before every select() (select() clears
 *    non-ready descriptors on return);
 *  - the stale FD_SET on an unopened read descriptor is gone;
 *  - a read() error (-1) terminates the loop instead of corrupting
 *    the byte counters;
 *  - the input file descriptor is closed on every exit path.
 */
void * writePort(void* data)
{
    WriteThreadArgument* p = (WriteThreadArgument*) data;
    int writefd = p->fd;
    int index = p->index;
    const char* sendfile = p->sendFileName;
    unsigned char buffer[BULK_OUT_SIZE];
    int length = 0;
    int writelength = 0;
    int readsum = 0;
    int writesum = 0;
    int filesize = 0;
    int readfd = 0;
    fd_set wset;

    readfd = open(sendfile,O_RDWR);
    if(-1 == readfd)
    {
        printf("can't open the %s file\n",sendfile);
        pthread_exit(NULL);
    }
    /* File size is used only for the progress percentage. */
    filesize = lseek(readfd,0,SEEK_END);
    lseek(readfd,0,SEEK_SET);
    while((length = read(readfd,buffer,sizeof(buffer))) > 0)
    {
        int offset = 0;
        readsum += length;
        while(length)
        {
            FD_ZERO(&wset);
            FD_SET(writefd,&wset);
            select(writefd + 1,NULL,&wset,NULL,NULL);
            if(FD_ISSET(writefd,&wset))
            {
                writelength = write(writefd,buffer + offset,length);
                if(writelength == -1)
                {
                    if(errno == EAGAIN)
                    {
                        continue;
                    }
                    printf("--%d--port write error\n",index);
                    close(readfd);
                    pthread_exit(NULL);
                }
                offset += writelength;
                length -= writelength;
                writesum += writelength;
            }
        }
        fflush(stdout);
        printf("have send data percent %d%%\r",(int)((readsum*100.0)/filesize));
    }
    close(readfd);
    printf("--%d--port read sum is %d -- send sum is %d\n",index,readsum,writesum);
    pthread_exit(NULL);
}
/*
 * Thread body: open and configure the serial device, set the driver
 * loopback attribute, spawn a writer thread that feeds the port, and
 * copy everything read from the port into the receive file.
 *
 * Fixes over the original: fd_sets are re-armed before each select()
 * call (select() mutates them), short writes advance through the
 * buffer instead of resending its start, read() errors are handled,
 * and descriptors are closed when the copy finishes.
 */
void * readWritePort(void* data)
{
    ReadWriteThreadArgument* p = (ReadWriteThreadArgument*) data;
    WriteThreadArgument writeThreadArguments;
    const char* devname = p->devName;
    const char* sendfile = p->readFileName;
    const char* recvfile = p->writeFileName;
    const char* loopattrname = p->loopbackAttrName;
    int index = p->index;
    int loopback = p->loopback;
    int devfd = 0;
    int loopfd = 0;
    int recvfd = 0;
    int readsum = 0;
    int recvsum = 0;
    int writelength = 0;
    int length = 0;
    fd_set rset, wset;
    char loopbool[2] = {'0','1'};
    unsigned char buffer[BULK_IN_SIZE] = {0};

    devfd = open(devname,O_RDWR | O_NOCTTY | O_NDELAY);
    if(-1 == devfd)
    {
        printf("can't open the %s port\n",devname);
        pthread_exit(NULL);
    }
    serial_init(devfd,p->baud);
    /* Enable or disable the driver's loopback mode via its sysfs attribute. */
    loopfd = open(loopattrname,O_RDWR);
    if(-1 == loopfd)
    {
        printf("can't open the loopback attribute %s\n",loopattrname);
        pthread_exit(NULL);
    }
    length = write(loopfd,&loopbool[loopback ? 1 : 0],1);
    if(-1 == length)
    {
        printf("write the loopback attribute failed\n");
        pthread_exit(NULL);
    }
    close(loopfd);
    recvfd = open(recvfile, O_RDWR | O_CREAT | O_TRUNC,0666);
    if(-1 == recvfd)
    {
        printf("can't open the %s file\n",recvfile);
        pthread_exit(NULL);
    }
    /* Spawn the companion thread that sends data to this port. */
    writeThreadArguments.fd = devfd;
    writeThreadArguments.index = index;
    writeThreadArguments.sendFileName = sendfile;
    startThread(&writeThreadArguments,1);
    while(1)
    {
        FD_ZERO(&rset);
        FD_SET(devfd,&rset);
        if(select(devfd+1,&rset,NULL,NULL,NULL) < 0)
        {
            printf("--%d-- port select error\n",index);
            pthread_exit(NULL);
        }
        if(FD_ISSET(devfd,&rset))
        {
            int offset = 0;
            length = read(devfd,buffer,sizeof(buffer));
            /* O_NDELAY: a spurious wakeup can yield EAGAIN; retry. */
            if(length == -1 && errno == EAGAIN)
            {
                continue;
            }
            if(length <= 0)
            {
                break;
            }
            readsum += length;
            while(length)
            {
                FD_ZERO(&wset);
                FD_SET(recvfd,&wset);
                select(recvfd + 1,NULL,&wset,NULL,NULL);
                if(FD_ISSET(recvfd,&wset))
                {
                    writelength = write(recvfd,buffer + offset,length);
                    if(writelength == -1)
                    {
                        printf("--%d-- file write error\n",index);
                        pthread_exit(NULL);
                    }
                    offset += writelength;
                    length -= writelength;
                    recvsum += writelength;
                }
            }
        }
    }
    close(recvfd);
    close(devfd);
    printf("--%d--port read sum is %d -- recv sum is %d\n",index,readsum,recvsum);
    pthread_exit(NULL);
}
/*
 * Launch either readWritePort (isWrite == 0) or writePort
 * (isWrite != 0) as a detached pthread.
 *
 * Fixes over the original: the thread is detached (nobody ever joins
 * it, so its resources were leaked), and the failure path no longer
 * calls pthread_exit(&ret) — that handed the joiner a pointer into
 * this function's soon-dead stack frame.
 */
void startThread(void* p,int isWrite)
{
    pthread_t tid;
    int index = 0;
    int ret = 0;

    if(!isWrite)
    {
        index = ((ReadWriteThreadArgument*)p)->index;
        ret = pthread_create(&tid, NULL, readWritePort, p);
    }
    else
    {
        index = ((WriteThreadArgument*)p)->index;
        ret = pthread_create(&tid, NULL, writePort, p);
    }
    if(ret)
    {
        printf("create the %d thread failed\n",index);
        pthread_exit(NULL);
    }
    pthread_detach(tid);
}
/*
 * Entry point.
 * Usage: <base> <numbers> <sendfilename> <recvfilename> <baud> <loopback>
 * Spawns one reader thread per port, then blocks until the user types
 * "exit", at which point the process signals itself with SIGINT.
 */
int main(int argc, const char* argv[])
{
    int i = 0;
    int loopback = 0;
    int base = 0;
    int numbers = 0;
    int baud = 0;
    char devnames[PORT_NUMBERS][13] = {0};
    char recvfiles[PORT_NUMBERS][20] = {0};
    char loopbackattr[PORT_NUMBERS][64] = {0};
    char mainloop[5];
    ReadWriteThreadArgument arguments[PORT_NUMBERS] = {0};

    if(argc < 7)
    {
        printf("format is %s <base> <numbers> <sendfilename> <recvfilename> <baud0|1> <loopback>\n",argv[0]);
        exit(-1);
    }
    base = atoi(argv[1]);
    numbers = atoi(argv[2]);
    baud = atoi(argv[5]);
    loopback = atoi(argv[6]);
    if(numbers > 10 || numbers < 1)
    {
        printf("please input the reasonable digital from 1 to 10\n");
        exit(-1);
    }
    /* NOTE(review): the loop runs for ports base..numbers-1; if <base>
     * is meant as an offset it should probably run to base + numbers —
     * confirm against the intended command-line semantics. */
    for(i = base; i < numbers ; i++)
    {
        snprintf(devnames[i],sizeof(devnames[i]),"/dev/ttyUSB%d",i);
        /* BUG FIX: bounded snprintf replaces the unbounded sprintf on
         * the user-supplied receive-file prefix. */
        snprintf(recvfiles[i],sizeof(recvfiles[i]),"%s%d",argv[4],i);
        snprintf(loopbackattr[i],sizeof(loopbackattr[i]),"/sys/class/tty/ttyUSB%d/device/loopback",i);
        arguments[i].index = i;
        arguments[i].baud = baud;
        arguments[i].loopback = loopback;
        arguments[i].devName = devnames[i];
        arguments[i].readFileName = argv[3];
        arguments[i].writeFileName = recvfiles[i];
        arguments[i].loopbackAttrName = loopbackattr[i];
        startThread(&arguments[i],0);
    }
    /* Keep the main thread alive until the user asks to exit. */
    for(;;)
    {
        /* BUG FIX: the field width caps input at 4 chars + NUL, so a
         * longer word can no longer overflow mainloop[5]. */
        if(scanf("%4s",mainloop) == EOF)
        {
            break;
        }
        if(strncmp(mainloop,"exit",4) == 0)
        {
            kill(getpid(),SIGINT);
        }
    }
    return 0;
}
|
-- Return the name and e-mail address of every employee whose salary
-- exceeds 80,000.
SELECT name, email
FROM Employee
WHERE salary > 80000;
<gh_stars>0
// Knex migration: create the "computers" inventory table.
// Column names are Portuguese (codigo = asset code, fabricante =
// manufacturer, chave = license key, obs = free-form notes).
exports.up = function (knex) {
  return knex.schema.createTable('computers', (table) => {
    table.string('codigo').primary().notNullable(); // asset code, primary key
    table.string('ip').notNullable();
    table.string('dominio').notNullable();
    table.string('fabricante').notNullable();
    table.string('modelo').notNullable();
    table.string('numSerie').notNullable();
    table.string('versaoOS').notNullable();
    table.string('chaveWindows').notNullable();
    table.string('versaoOffice').notNullable();
    table.string('chaveOffice').notNullable();
    table.string('obs').nullable(); // only optional column
    table.string('status').notNullable();
  });
};

// Rollback: drop the table created above.
exports.down = function (knex) {
  return knex.schema.dropTable('computers');
};
|
package workspace_th.day06.ex01.hashmapEx;
import java.util.*;
/**
 * Demonstrates basic HashMap usage: insertion (a duplicate key overwrites
 * the earlier value), entry iteration, key and value views, and simple
 * aggregate statistics over the values.
 */
public class HashMapEx2 {
    public static void main(String[] args) {
        HashMap<String, Integer> map = new HashMap<String, Integer>();
        map.put("๊น์๋ฐ", 90);
        // Integer.valueOf replaces the deprecated new Integer(...) calls.
        map.put("์ด์์ ", Integer.valueOf(900));
        map.put("์ ๋ํ", Integer.valueOf(190));
        map.put("๋ฐ์ ํฌ", Integer.valueOf(1000));
        // Same key as the first put: overwrites 90 with 80.
        map.put("๊น์๋ฐ", Integer.valueOf(80));

        // Generic entry iteration (the original used raw Set/Iterator
        // types and unchecked casts).
        Set<Map.Entry<String, Integer>> entries = map.entrySet();
        for (Map.Entry<String, Integer> e : entries) {
            System.out.println("์ด๋ฆ : " + e.getKey() + ", ์ ์ : " + e.getValue());
        }

        Set<String> keys = map.keySet();
        // BUG FIX: this string literal was split across two source lines
        // in the original and did not compile.
        System.out.println("์ฐธ๊ฐ์ ๋ช๋จ : " + keys);

        Collection<Integer> values = map.values();
        System.out.println(values);

        int total = 0;
        for (Integer score : values) {
            total += score.intValue();
        }
        System.out.println("์ด์ : " + total);
        System.out.println("ํ๊ท : " + (float) total / keys.size());
        System.out.println("์ต๊ณ ์ ์ : " + Collections.max(values));
        System.out.println("์ต์ ์ ์ : " + Collections.min(values));
    }
}
|
import React from 'react';
import axios from 'axios';
// Top-level component: fetches the product catalogue once on mount and
// renders it as an unordered list.
class App extends React.Component {
  constructor(props) {
    super(props);
    // products holds the array returned by GET /api/products.
    this.state = {
      products: []
    };
  }
  componentDidMount() {
    // Load the product list once after the first render.
    // NOTE(review): no .catch — a failed request silently leaves the
    // list empty; confirm whether an error state is wanted.
    axios.get('/api/products')
      .then(response => this.setState({ products: response.data }));
  }
  render() {
    return (
      <div>
        <h1>Products</h1>
        <ul>
          {this.state.products.map(product => (
            <li key={product.id}>
              <h2>{product.name}</h2>
              <p>{product.description}</p>
              <p>${product.price}</p>
            </li>
          ))}
        </ul>
      </div>
    );
  }
}
export default App;
/*
*
*/
package net.community.chest.svnkit.core.wc;
import java.util.ArrayList;
import java.util.Collection;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.wc.ISVNInfoHandler;
import org.tmatesoft.svn.core.wc.SVNInfo;
/**
* <P>Copyright as per GPLv2</P>
* @author <NAME>.
* @since Nov 8, 2010 3:12:32 PM
*
*/
/**
 * An {@link ArrayList} of {@link SVNInfo} records that doubles as an
 * {@link ISVNInfoHandler}: every non-null info record reported by
 * SVNKit is appended to the list.
 */
public class SVNInfoCollection extends ArrayList<SVNInfo> implements ISVNInfoHandler {
    private static final long serialVersionUID = -7825144260277851890L;

    /** Creates an empty collection. */
    public SVNInfoCollection ()
    {
        super();
    }

    /** Creates a collection pre-populated from {@code c}. */
    public SVNInfoCollection (Collection<? extends SVNInfo> c)
    {
        super(c);
    }

    /** Creates an empty collection with the given initial capacity. */
    public SVNInfoCollection (int initialCapacity)
    {
        super(initialCapacity);
    }

    /*
     * @see org.tmatesoft.svn.core.wc.ISVNInfoHandler#handleInfo(org.tmatesoft.svn.core.wc.SVNInfo)
     */
    @Override
    public void handleInfo (SVNInfo info) throws SVNException
    {
        // Null reports are ignored; everything else is collected.
        if (info == null)
            return;
        add(info);
    }
}
|
#!/bin/bash
# Kaldi ASR baseline for the CHiME-4 Challenge (1ch track: single channel track)
#
# Copyright 2016 University of Sheffield (Jon Barker, Ricard Marxer)
# Inria (Emmanuel Vincent)
# Mitsubishi Electric Research Labs (Shinji Watanabe)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
. ./path.sh
. ./cmd.sh

# Config:
stage=0 # resume training with --stage=N
flatstart=false

. utils/parse_options.sh || exit 1;

# Set bash to 'debug' mode, it will exit on :
# -e 'error', -u 'undefined variable', -o ... 'error in pipeline', -x 'print commands',
set -e
set -u
set -o pipefail

#####check data and model paths################
# Set a main root directory of the CHiME4 data
# If you use scripts distributed in the CHiME4 package,
chime4_data=`pwd`/../..
# Otherwise, please specify it, e.g.,
chime4_data=/db/laputa1/data/processed/public/CHiME4
if [ ! -d $chime4_data ]; then
  echo "$chime4_data does not exist. Please specify chime4 data root correctly" && exit 1
fi
# Set a model directory for the CHiME4 data.
modeldir=$chime4_data/tools/ASR_models
for d in $modeldir $modeldir/data/{lang,lang_test_tgpr_5k,lang_test_5gkn_5k,lang_test_rnnlm_5k_h300,local} \
    $modeldir/exp/{tri3b_tr05_multi_noisy,tri4a_dnn_tr05_multi_noisy,tri4a_dnn_tr05_multi_noisy_smbr_i1lats}; do
  # BUG FIX: the test used to be "[ ! -d ]" (operand missing), which
  # always evaluated to false, so missing model dirs were never caught.
  [ ! -d "$d" ] && echo "$0: no such directory $d. specify models correctly or execute './run.sh --flatstart true' first" && exit 1;
done
#####check data and model paths finished#######

#####main program start################
# You can execute run_init.sh only "once"
# This creates 3-gram LM, FSTs, and basic task files
if [ $stage -le 0 ] && $flatstart; then
  local/run_init.sh $chime4_data
fi

# In this script, we use non-enhanced 6th microphone signals.
enhancement_method=isolated_1ch_track
enhancement_data=$chime4_data/data/audio/16kHz/$enhancement_method
#if [ $stage -le 1 ]; then
# put your single channel enhancement
#fi

# GMM based ASR experiment without "retraining"
# Please set a directory of your speech enhancement method.
# run_gmm_recog.sh can be done every time when you change a speech enhancement technique.
# The directory structure and audio files must follow the attached baseline enhancement directory
if [ $stage -le 2 ]; then
  if $flatstart; then
    local/run_gmm.sh $enhancement_method $enhancement_data $chime4_data
  else
    local/run_gmm_recog.sh $enhancement_method $enhancement_data $modeldir
  fi
fi

# DNN based ASR experiment
# Since it takes time to evaluate DNN, we make the GMM and DNN scripts separately.
# You may execute it after you would have promising results using GMM-based ASR experiments
if [ $stage -le 3 ]; then
  if $flatstart; then
    local/run_dnn.sh $enhancement_method
  else
    local/run_dnn_recog.sh $enhancement_method $modeldir
  fi
fi

# LM-rescoring experiment with 5-gram and RNN LMs
# It takes a few days to train a RNNLM.
if [ $stage -le 4 ]; then
  if $flatstart; then
    local/run_lmrescore.sh $chime4_data $enhancement_method
  else
    local/run_lmrescore_recog.sh $enhancement_method $modeldir
  fi
fi

echo "Done."
|
<gh_stars>0
package api
import (
"context"
"errors"
"github.com/ONSdigital/dp-collection-api/collections"
"github.com/ONSdigital/dp-collection-api/models"
"github.com/ONSdigital/dp-collection-api/pagination"
"github.com/ONSdigital/log.go/v2/log"
"net/http"
)
var (
	// errors that should return a 400 status
	badRequest = map[error]bool{
		pagination.ErrInvalidLimitParameter:  true,
		pagination.ErrInvalidOffsetParameter: true,
		pagination.ErrLimitOverMax:           true,
		collections.ErrInvalidOrderBy:        true,
		collections.ErrNameSearchTooLong:     true,
		collections.ErrCollectionNameEmpty:   true,
		collections.ErrInvalidID:             true,
		collections.ErrNoIfMatchHeader:       true,
		ErrUnableToParseJSON:                 true,
	}
	// errors that should return a 404 status
	notFound = map[error]bool{
		collections.ErrCollectionNotFound: true,
	}
	// errors that should return a 409 status
	conflictRequest = map[error]bool{
		collections.ErrCollectionNameAlreadyExists: true,
		collections.ErrCollectionConflict:          true,
	}
	// ErrUnableToParseJSON is returned when a request body cannot be
	// decoded as JSON.
	ErrUnableToParseJSON = errors.New("failed to parse json body")
)
// handleError maps err onto an HTTP status code (400/404/409, defaulting
// to 500), writes a JSON errors response with that status, and logs the
// failure with the supplied log data.
func handleError(ctx context.Context, err error, w http.ResponseWriter, logData log.Data) {
	status := http.StatusInternalServerError
	switch {
	case badRequest[err]:
		status = http.StatusBadRequest
	case notFound[err]:
		status = http.StatusNotFound
	case conflictRequest[err]:
		status = http.StatusConflict
	}
	if logData == nil {
		logData = log.Data{}
	}
	body := models.ErrorsResponse{
		Errors: []models.ErrorResponse{
			{Message: err.Error()},
		},
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	w.WriteHeader(status)
	WriteJSONBody(ctx, body, w, logData)
	log.Error(ctx, "request unsuccessful", err, logData)
}
|
#!/bin/bash
# Stress-test harness: repeatedly generate a random test case and diff
# the main solution against a brute-force reference until they disagree.
g++ -std=c++17 gen.cpp -o gen
cp-gcc h.cpp -o a          # main solution (cp-gcc: local compiler wrapper)
cp-gcc brute.cpp -o brute  # brute-force reference
for((i=1;;i++)); do
  echo $i
  ./gen $i > in            # iteration number seeds the generator
  ./a < in > o1
  ./brute < in > o2
  # Stop at the first input on which the two programs differ.
  diff <(./a < in) <(./brute < in) || break
done
cat in                     # show the failing input
#sed -i 's/\r$//' filename ----- remover \r do txt
|
"""Steps for features of Onezone login page.
"""
__author__ = "<NAME>"
__copyright__ = "Copyright (C) 2016 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from pytest_bdd import given, when, then, parsers
from tests.gui.utils.generic import parse_seq
@then(parsers.parse('user of {browser_id} should see login button '
                    'for {provider_name}'))
def login_provider_buttons(selenium, browser_id, provider_name):
    """Assert the login page shows a button for *provider_name*."""
    driver = selenium[browser_id]
    assert driver.find_element_by_css_selector(
        '.login-box a.login-icon-box.idp-{name}'.format(name=provider_name)
    ), 'login for provider {} not found'.format(provider_name)


def _click_login_provider_button(driver, provider_name):
    # Shared helper: click the given provider's login button.
    driver.find_element_by_css_selector(
        '.login-box a.login-icon-box.idp-{:s}'.format(provider_name)
    ).click()


@given(parsers.re('users? of (?P<browser_id_list>.*) clicked on the '
                  '"(?P<provider_name>.*)" login button'))
def g_click_login_provider_button(selenium, browser_id_list, provider_name):
    """Given-step: each listed browser clicks the provider login button."""
    for browser_id in parse_seq(browser_id_list):
        driver = selenium[browser_id]
        _click_login_provider_button(driver, provider_name)


@when(parsers.re('users? of (?P<browser_id_list>.*) clicks on the '
                 '"(?P<provider_name>.*)" login button'))
def w_click_login_provider_button(selenium, browser_id_list, provider_name):
    """When-step counterpart of the given-step above."""
    for browser_id in parse_seq(browser_id_list):
        driver = selenium[browser_id]
        _click_login_provider_button(driver, provider_name)


@given(parsers.re('users? of (?P<browser_id_list>.*) logged '
                  'as (?P<user_id_list>.*)'))
def log_to_user_in_each_browser(selenium, browser_id_list,
                                user_id_list):
    """Log each browser in as the user paired with it by position."""
    for browser_id, user_id in zip(parse_seq(browser_id_list),
                                   parse_seq(user_id_list)):
        driver = selenium[browser_id]
        driver.find_element_by_link_text(user_id).click()
|
<reponame>droideparanoico/gameoflife
package main.java.model;
import java.util.Random;
/**
 * A square Game-of-Life universe: a boolean matrix where true means a
 * living cell, plus a running count of living cells.
 */
public class Universe {

    private final int universeSize;
    private int livingCells;
    private boolean[][] matrix;
    // PERF FIX: one RNG reused for the whole grid; the original
    // constructed a new Random() for every single cell.
    private final Random random = new Random();

    /** Creates a universeSize x universeSize grid with random initial life. */
    public Universe(int universeSize) {
        this.universeSize = universeSize;
        matrix = new boolean[universeSize][universeSize];
        initializeUniverse();
    }

    /** Randomizes every cell and recounts the living cells. */
    public void initializeUniverse() {
        livingCells = 0;
        for (int i = 0; i < universeSize; i++) {
            for (int j = 0; j < universeSize; j++) {
                matrix[i][j] = random.nextBoolean();
                if (matrix[i][j]) {
                    livingCells++;
                }
            }
        }
    }

    /** @return the edge length of the square grid */
    public int getUniverseSize() {
        return universeSize;
    }

    /** @return the number of cells currently alive */
    public int getLivingCells() {
        return livingCells;
    }

    /** Overrides the living-cell counter (callers keep it in sync with the matrix). */
    public void setLivingCells(int livingCells) {
        this.livingCells = livingCells;
    }

    /** @return the backing grid (not a copy; mutations are visible) */
    public boolean[][] getMatrix() {
        return matrix;
    }

    /** Replaces the backing grid (callers keep livingCells in sync). */
    public void setMatrix(boolean[][] matrix) {
        this.matrix = matrix;
    }
}
|
#!/bin/bash
# SLURM batch script: build noisy dataset variant 7 inside the NeMo
# singularity container on a single 80-CPU node.
#SBATCH --nodes=1
#SBATCH --gres=gpu:v100:0
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=80
#SBATCH --mem=400G
#SBATCH --time=48:00:00
#SBATCH --signal=SIGUSR1@300
#SBATCH --output=host/job/dp/log/mnd_i7-%j.out
module load singularity
# Verbose NCCL / Hydra diagnostics for debugging distributed failures.
export NCCL_DEBUG=INFO
export HYDRA_FULL_ERROR=1
export NCCL_ASYNC_ERROR_HANDLING=1
export KALDI_ROOT="kaldi"
# Synchronous CUDA launches make error stack traces point at the real call.
export CUDA_LAUNCH_BLOCKING=1
srun singularity exec --nv nemo_v6.sif python3 host/job/dp/make_noisy_dataset_no_tar.py --run_index 7
|
<filename>src/test/java/com/labkit/test/personapi/patch/jax/rs/EntityStorageBeanTest.java<gh_stars>0
package com.labkit.test.personapi.patch.jax.rs;
import org.junit.Test;
import com.vidhya.java.http.patch.jax.rs.EntityStorageBean;
import com.vidhya.java.http.patch.jax.rs.entity.Person;
import com.vidhya.java.http.patch.jax.rs.entity.PersonUtil;
import junit.framework.Assert;
/** Smoke tests for EntityStorageBean's person accessors. */
public class EntityStorageBeanTest {

    // Single bean instance shared by both tests.
    static EntityStorageBean bean = new EntityStorageBean();

    /** Stores the default person; passes if no exception is thrown. */
    @Test
    public void setPersonTest(){
        bean.setPerson(PersonUtil.getDefaultPerson());
    }

    /** Reads the stored person; passes if no exception is thrown. */
    @Test
    public void getPersonTest(){
        bean.getPerson();
    }
}
|
<html>
<body>
  <!-- Login form; submission is intercepted by the script below and
       sent to the server via fetch instead of a full page post. -->
  <form action="#" method="post">
    <label for="username">Username:</label>
    <input type="text" name="username" id="username" />
    <br><br>
    <label for="password">Password:</label>
    <input type="password" name="password" id="password" />
    <br><br>
    <input type="submit" value="Submit" />
  </form>
  <script>
    const form = document.querySelector('form');
    form.addEventListener('submit', (e) => {
      // Stop the browser's default form submission.
      e.preventDefault();
      const username = document.getElementById('username').value;
      const password = document.getElementById('password').value;
      // Send data to server for validation.
      // BUG FIX: declare the JSON content type so the server's body
      // parser decodes the payload (it was posted as text/plain).
      fetch('/validate', {
        method: 'POST',
        headers: {'Content-Type': 'application/json'},
        body: JSON.stringify({username, password})
      })
      .then(response => response.json())
      .then(data => {
        if (data.status === 'ok') {
          // Show success message
        } else {
          // Show error message
        }
      });
    });
  </script>
</body>
</html>
<gh_stars>1-10
// import mongoose from 'mongoose'
const mongoose = require('mongoose');

/**
 * Opens the MongoDB connection named in process.env.DBSERVER.
 *
 * BUG FIX: mongoose.connect returns a promise; it is now awaited so a
 * connection failure is caught by the try/catch instead of surfacing
 * as an unhandled promise rejection.
 */
const DBConnection = async () => {
    try {
        await mongoose.connect(`${process.env.DBSERVER}`, {
            useNewUrlParser: true,
            useUnifiedTopology: true,
            // useCreateIndex: true
        });
        mongoose.connection.once('open', async function () {
            console.log('Database connection was successful')
        })
    } catch (error) {
        console.error(error)
    }
}

export { DBConnection };
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file is sourced when running various Spark programs.
# Copy it as spark-env.sh and edit that to configure Spark for your site.
# Options read when launching programs locally with
# ./bin/run-example or ./bin/spark-submit
# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
# - SPARK_PUBLIC_DNS, to set the public dns name of the driver program
# - SPARK_CLASSPATH, default classpath entries to append
# Options read by executors and drivers running inside the cluster
# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
# - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program
# - SPARK_CLASSPATH, default classpath entries to append
# - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data
# - MESOS_NATIVE_JAVA_LIBRARY, to point to your libmesos.so if you use Mesos
# Options read in YARN client mode
# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
# - SPARK_EXECUTOR_INSTANCES, Number of executors to start (Default: 2)
# - SPARK_EXECUTOR_CORES, Number of cores for the executors (Default: 1).
# - SPARK_EXECUTOR_MEMORY, Memory per Executor (e.g. 1000M, 2G) (Default: 1G)
# - SPARK_DRIVER_MEMORY, Memory for Driver (e.g. 1000M, 2G) (Default: 1G)
# Options for the daemons used in the standalone deploy mode
# - SPARK_MASTER_HOST, to bind the master to a different IP address or hostname
# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports for the master
# - SPARK_MASTER_OPTS, to set config properties only for the master (e.g. "-Dx=y")
# - SPARK_WORKER_CORES, to set the number of cores to use on this machine
# - SPARK_WORKER_MEMORY, to set how much total memory workers have to give executors (e.g. 1000m, 2g)
# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT, to use non-default ports for the worker
# - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
# - SPARK_WORKER_DIR, to set the working directory of worker processes
# - SPARK_WORKER_OPTS, to set config properties only for the worker (e.g. "-Dx=y")
# - SPARK_DAEMON_MEMORY, to allocate to the master, worker and history server themselves (default: 1g).
# - SPARK_HISTORY_OPTS, to set config properties only for the history server (e.g. "-Dx=y")
# - SPARK_SHUFFLE_OPTS, to set config properties only for the external shuffle service (e.g. "-Dx=y")
# - SPARK_DAEMON_JAVA_OPTS, to set config properties for all daemons (e.g. "-Dx=y")
# - SPARK_PUBLIC_DNS, to set the public dns name of the master or workers
# Generic options for the daemons used in the standalone deploy mode
# - SPARK_CONF_DIR Alternate conf dir. (Default: ${SPARK_HOME}/conf)
# - SPARK_LOG_DIR Where log files are stored. (Default: ${SPARK_HOME}/logs)
# - SPARK_PID_DIR Where the pid file is stored. (Default: /tmp)
# - SPARK_IDENT_STRING A string representing this instance of spark. (Default: $USER)
# - SPARK_NICENESS The scheduling priority for daemons. (Default: 0)
# Toolchain locations for this node.
export JAVA_HOME=/usr/local/src/jdk1.8.0_141
export SCALA_HOME=/usr/local/src/scala-2.11.8
export HADOOP_HOME=/usr/local/src/hadoop-2.6.5
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
# Standalone-mode master host, local scratch dir and driver memory.
SPARK_MASTER_IP=master
SPARK_LOCAL_DIRS=/usr/local/src/spark-2.3.3-bin-hadoop2.6
SPARK_DRIVER_MEMORY=1G
|
/**
* Copyright(c) 2004-2018 bianfeng
*/
package com.shareyi.molicode.manager.conf.impl;
import com.shareyi.molicode.common.enums.columns.AcProjectColumn;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import com.shareyi.molicode.domain.conf.AcProject;
import com.shareyi.molicode.dao.conf.AcProjectDao;
import com.shareyi.molicode.manager.conf.AcProjectManager;
import com.shareyi.molicode.manager.AbstractManager;
import com.shareyi.molicode.builder.impl.AcProjectBuilder;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* ้กน็ฎManagerๅฎ็ฐ็ฑป
* @author david
* @date 2018-08-22
*/
@Component("acProjectManager")
public class AcProjectManagerImpl extends AbstractManager<AcProject> implements AcProjectManager {

    @Resource
    private AcProjectDao acProjectDao;

    @Resource(name="acProjectBuilder")
    AcProjectBuilder acProjectBuilder;

    /**
     * Looks up a project by its project key.
     *
     * @param projectKey value matched against the projectKey column
     * @return the first matching project, or null when none exists
     */
    @Override
    public AcProject getByProjectKey(String projectKey) {
        Map<String, Object> queryParam = new HashMap<>();
        queryParam.put(AcProjectColumn.projectKey.name(), projectKey);
        List<AcProject> list = acProjectDao.getListByExample(queryParam);
        return CollectionUtils.isEmpty(list) ? null : list.get(0);
    }

    /******* getter and setter ***/
    public AcProjectBuilder getBuilder() {
        return acProjectBuilder;
    }

    public AcProjectDao getDao() {
        return acProjectDao;
    }
}
|
import torch
from math import sqrt
# Currently, this code is using transpose function several times by 'channels_last'.
# This is to sustain the compatibility with Nvidia Location sensitive attention module.
# If you want to apply this code to other model, consider and modify that.
class Location_Sensitive_Stepwise_Monotonic_Attention(torch.nn.Module):
def __init__(
self,
attention_rnn_channels,
memory_size,
attention_size,
attention_location_channels,
attention_location_kernel_size,
sigmoid_noise= 2.0,
score_bias= False,
normalize= False,
channels_last= False,
**kwargs
):
super(Location_Sensitive_Stepwise_Monotonic_Attention, self).__init__()
self.sigmoid_noise = sigmoid_noise
self.normalize = normalize
self.channels_last = channels_last
self.layer_Dict = torch.nn.ModuleDict()
self.layer_Dict['Query'] = ConvNorm(
in_channels= attention_rnn_channels,
out_channels= attention_size,
kernel_size= 1,
bias= False,
w_init_gain= 'tanh'
)
self.layer_Dict['Memory'] = ConvNorm(
in_channels= memory_size,
out_channels= attention_size,
kernel_size= 1,
bias= False,
w_init_gain= 'tanh'
)
self.layer_Dict['Location'] = torch.nn.Sequential()
self.layer_Dict['Location'].add_module('Conv', ConvNorm(
in_channels= 2,
out_channels= attention_location_channels,
kernel_size= attention_location_kernel_size,
padding= (attention_location_kernel_size - 1) // 2,
bias= False
))
self.layer_Dict['Location'].add_module('Conv1x1', ConvNorm(
in_channels= attention_location_channels,
out_channels= attention_size,
kernel_size= 1,
bias= False,
w_init_gain= 'tanh'
))
self.layer_Dict['Score'] = torch.nn.Sequential()
self.layer_Dict['Score'].add_module('Tanh', torch.nn.Tanh())
self.layer_Dict['Score'].add_module('Conv', ConvNorm(
in_channels= attention_size,
out_channels= 1,
kernel_size= 1,
bias= score_bias
))
if normalize:
torch.nn.utils.weight_norm(self.layer_Dict['Score'])
def forward(self, queries, memories, processed_memories, previous_alignments, cumulated_alignments, masks= None):
'''
queries: [Batch, Att_RNN_dim]
memories: [Batch, Enc_dim, Memory_t] or [Batch, Memory_t, Enc_dim] (when channels_list is True)
processed_memories: [Batch, Att_dim, Memory_t] or [Batch, Memory_t, Att_dim] (when channels_last is True)
attention_weights_cats: [Batch, 2, Memory_t]
mask: None or [Batch, Memory_t]
'''
if self.channels_last:
memories = memories.transpose(2, 1)
processed_memories = processed_memories.transpose(2, 1)
scores = self.Calc_Score( # [Batch, Mem_t]
queries= queries,
memories= processed_memories,
attention_weights_cats= torch.cat([previous_alignments.unsqueeze(1), cumulated_alignments.unsqueeze(1)], dim= 1)
)
contexts, alignments = self.Apply_Score(scores, memories, previous_alignments, masks) # [Batch, Att_dim], [Batch, Att_dim]
return contexts, alignments
def Calc_Score(self, queries, memories, attention_weights_cats):
queries = self.layer_Dict['Query'](queries.unsqueeze(2)) # [Batch, Att_dim, 1]
locations = self.layer_Dict['Location'](attention_weights_cats) # [Batch, Att_dim, Mem_t]
return self.layer_Dict['Score'](queries + memories + locations).squeeze(1)
def Apply_Score(self, scores, memories, previous_alignments, masks= None):
previous_alignments = previous_alignments.unsqueeze(1)
if self.sigmoid_noise > 0.0:
scores += self.sigmoid_noise * torch.randn_like(scores)
if not masks is None:
scores.data.masked_fill_(masks, -torch.finfo(scores.dtype).max)
p_choose_i = torch.sigmoid(scores).unsqueeze(1) # [Batch, 1, Mem_t]
pad = torch.zeros(p_choose_i.size(0), 1, 1).to(device= p_choose_i.device, dtype= p_choose_i.dtype) # [Batch, 1, 1]
alignments = previous_alignments * p_choose_i + torch.cat(
[pad, previous_alignments[..., :-1] * (1.0 - p_choose_i[..., :-1])],
dim= -1
) # [Batch, 1, Mem_t]
contexts = alignments @ memories.transpose(2, 1) # [Batch, 1, Att_dim]
return contexts.squeeze(1), alignments.squeeze(1)
def Get_Processed_Memory(self, memories):
if self.channels_last:
return self.layer_Dict['Memory'](memories.transpose(2, 1)).transpose(2, 1)
return self.layer_Dict['Memory'](memories)
def Get_Initial_Alignment(self, memories):
    '''Build the initial alignment: a one-hot distribution attending to memory position 0.'''
    # Memory-time axis depends on the layout flag (see class docstring).
    memory_length = memories.size(1) if self.channels_last else memories.size(2)
    first_position = memories.new_zeros(memories.size(0)).long()
    one_hot = torch.nn.functional.one_hot(first_position, num_classes= memory_length)
    return one_hot.to(dtype= memories.dtype)
class ConvNorm(torch.nn.Conv1d):
    # Conv1d whose weights are Xavier-uniform initialized with a gain matched to
    # the downstream nonlinearity (Tacotron-style "ConvNorm" layer).
    def __init__(self, w_init_gain='linear', *args, **kwargs):
        # w_init_gain: nonlinearity name for torch.nn.init.calculate_gain
        # (e.g. 'linear', 'relu', 'tanh'); all other args go straight to Conv1d.
        super(ConvNorm, self).__init__(*args, **kwargs)
        torch.nn.init.xavier_uniform_(self.weight, gain= torch.nn.init.calculate_gain(w_init_gain))
<reponame>surfliner/surfliner-mirror
# frozen_string_literal: true
# Specs for CometObjectShowPresenter#member_presenters: resolves presenters for the
# documents whose ids appear in the Solr member_ids_ssim field.
RSpec.describe CometObjectShowPresenter do
  subject(:presenter) { described_class.new(document, ability) }

  # Bare index document with no members by default.
  let(:document) { SolrDocument.new({}) }
  # The ability is never exercised in these examples, so a placeholder symbol suffices.
  let(:ability) { :FAKE_ABILITY }

  describe "#member_presenters" do
    # Document whose member ids point at the resources defined below.
    let(:document) { SolrDocument.new(member_ids_ssim: member_ids) }
    let(:member_ids) { members.map(&:id) }
    let(:members) { [] }

    it "is empty" do
      expect(presenter.member_presenters).to be_none
    end

    context "with members" do
      let(:members) do
        [Hyrax::FileSet.new(id: "fs_1", title: ["first title"]),
         Hyrax::FileSet.new(id: "fs_2", title: ["second title"])]
      end

      # Persist the file sets to the test index so the presenter can look them up.
      before { Hyrax.index_adapter.save_all(resources: members) }

      it "lists the file sets" do
        expect(presenter.member_presenters)
          .to contain_exactly(
            have_attributes(id: "fs_1", title: ["first title"]),
            have_attributes(id: "fs_2", title: ["second title"])
          )
      end
    end
  end
end
|
package elasta.composer.flow.builder.impl;
import elasta.composer.Events;
import elasta.composer.MsgEnterEventHandlerP;
import elasta.composer.States;
import elasta.composer.flow.builder.FindAllChildFlowBuilder;
import elasta.core.flow.Flow;
import java.util.Objects;
/**
* Created by sohan on 5/28/2017.
*/
/**
 * Builds the "find all children" flow: start -> authorize -> convert query to
 * criteria -> find all -> end, with authorization failures short-circuiting to end.
 *
 * Created by sohan on 5/28/2017.
 */
final public class FindAllChildFlowBuilderImpl implements FindAllChildFlowBuilder {
    final MsgEnterEventHandlerP startHandler;
    final MsgEnterEventHandlerP authorizeHandler;
    final MsgEnterEventHandlerP queryAllChildConversionToCriteriaHandler;
    final MsgEnterEventHandlerP queryAllChildFindAllHandler;
    final MsgEnterEventHandlerP endHandler;

    public FindAllChildFlowBuilderImpl(MsgEnterEventHandlerP startHandler, MsgEnterEventHandlerP authorizeHandler, MsgEnterEventHandlerP queryAllChildConversionToCriteriaHandler, MsgEnterEventHandlerP queryAllChildFindAllHandler, MsgEnterEventHandlerP endHandler) {
        // requireNonNull returns its argument, so null-check and assignment fold into one step.
        this.startHandler = Objects.requireNonNull(startHandler);
        this.authorizeHandler = Objects.requireNonNull(authorizeHandler);
        this.queryAllChildConversionToCriteriaHandler = Objects.requireNonNull(queryAllChildConversionToCriteriaHandler);
        this.queryAllChildFindAllHandler = Objects.requireNonNull(queryAllChildFindAllHandler);
        this.endHandler = Objects.requireNonNull(endHandler);
    }

    /**
     * Assembles and returns the state-machine flow with one handler per state.
     */
    @Override
    public Flow build() {
        return Flow.builder()
            .when(States.start, Flow.on(Events.next, States.authorize))
            .when(
                States.authorize,
                Flow.on(Events.next, States.conversionToCriteria),
                Flow.on(Events.authorizationError, States.end)
            )
            .when(States.conversionToCriteria, Flow.on(Events.next, States.findAll))
            .when(States.findAll, Flow.on(Events.next, States.end))
            .when(States.end, Flow.end())
            .handlersP(States.start, startHandler)
            .handlersP(States.authorize, authorizeHandler)
            .handlersP(States.conversionToCriteria, queryAllChildConversionToCriteriaHandler)
            .handlersP(States.findAll, queryAllChildFindAllHandler)
            .handlersP(States.end, endHandler)
            .initialState(States.start)
            .build();
    }
}
|
<filename>core/src/main/java/org/hisp/dhis/android/core/systeminfo/DHISVersion.java<gh_stars>0
/*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.systeminfo;
/**
 * Supported DHIS2 server versions. Each constant carries its "major.minor"
 * string prefix so version matching is data-driven instead of a hand-written
 * if/else chain per constant.
 */
public enum DHISVersion {
    V2_29("2.29"),
    V2_30("2.30"),
    V2_31("2.31"),
    V2_32("2.32"),
    V2_33("2.33"),
    V2_34("2.34"),
    V2_35("2.35"),
    V2_36("2.36");

    /** The "major.minor" prefix identifying this server version. */
    private final String prefix;

    DHISVersion(String prefix) {
        this.prefix = prefix;
    }

    /**
     * Maps a raw version string (e.g. "2.34.1") to its enum constant by prefix.
     * Returns null when the version is not supported.
     */
    public static DHISVersion getValue(String versionStr) {
        for (DHISVersion version : values()) {
            if (versionStr.startsWith(version.prefix)) {
                return version;
            }
        }
        return null;
    }

    /** Whether the given version string maps to a supported version. */
    public static boolean isAllowedVersion(String versionStr) {
        return getValue(versionStr) != null;
    }

    /** All supported version prefixes, in declaration order. */
    public static String[] allowedVersionsAsStr() {
        DHISVersion[] versions = values();
        String[] result = new String[versions.length];
        for (int i = 0; i < versions.length; i++) {
            result[i] = versions[i].prefix;
        }
        return result;
    }
}
#!/bin/bash
# Create a marker file named "temp" and exit with a caller-supplied status.
touch temp
# Check if the file creation was successful
if [ $? -eq 0 ]; then
    # Exit with the value of $status as documented. Default to 0 when the
    # variable was never set — the original bare `exit $status` would expand to
    # plain `exit` and silently reuse the previous command's status instead.
    exit "${status:-0}"
else
    # File creation failed: report a distinct, non-zero error status.
    exit 1
fi
def square_numbers(input_string):
    """Square each digit character of input_string and return the results as a list."""
    return [int(digit) ** 2 for digit in input_string]


# test
squared_numbers = square_numbers("123456")
print(squared_numbers)  # [1, 4, 9, 16, 25, 36]
/* **** Notes
Convert into an array.
Remarks:
After calling fn. cv_wv, please call fn. rl to unmap the buffer allocated on the RAM.
Based on UTF-16
*/
# define CAR
# include "./../../../incl/config.h"
/*
 * cv_wv: walks the NULL-terminated vector of wide strings *si and appends each
 * entry to the destination buffer *di via concatenate_w, followed by the
 * optional separator string `cache` when one is supplied.
 * Returns the number of entries processed (recursion depth); returns 0 both on
 * the terminating NULL entry and on the first concatenate_w failure, so a
 * mid-run failure is indistinguishable from normal termination by the count
 * alone — NOTE(review): confirm callers tolerate this.
 * Per the header note, the caller must unmap the buffer with fn. rl afterwards.
 */
signed(__cdecl cv_wv(signed short(*cache),signed short(**di),signed short(**si))) {
    auto signed r;
    /* Reject a missing destination or source vector. */
    if(!di) return(0x00);
    if(!si) return(0x00);
    /* A NULL slot terminates the vector: nothing (more) to convert. */
    if(!(*si)) return(0x00);
    r = concatenate_w(0x00,di,*si);
    if(!r) return(0x00);
    if(cache) {
        /* Append the separator after this element. */
        if(!(concatenate_w(0x00,di,cache))) return(0x00);
    }
    /* Advance to the next slot and recurse, counting this element. */
    si++;
    return(0x01+(cv_wv(cache,di,si)));
}
|
#!/bin/bash
# Report the largest element of the array "arr" (assumed to be defined by the
# caller/environment — TODO confirm where arr is populated).
max=${arr[0]}
for value in "${arr[@]}"; do
    if [[ $value -gt $max ]]; then
        max=$value
    fi
done
echo "The largest number from given array is: $max"
# Author: <NAME>
# Date: 12/25/17
# Output names from a list by accessing each element
names = ['Adomous', 'Niles', 'Ryan', 'Bob']
# Print each name on its own line (same output as indexing elements 0..3 by hand).
for name in names:
    print(name)
|
/* eslint-disable no-param-reassign */
/* eslint-disable @typescript-eslint/ban-ts-comment */
import {EnhancerResult, EnhancerContext, isFunction, PROPS} from '@truefit/bach';
import {useWatch} from 'react-hook-form';
import {UseWatchProps} from '../types';
// Generator form: derives the useWatch options from the component's props at render time.
type UseWatchPropsGenerator<P> = (props: P) => UseWatchProps;
// Accepted shapes for the enhancer's options argument: static object, generator, or omitted.
type WatchPropsParameter<P> = UseWatchProps | UseWatchPropsGenerator<P> | undefined;
// useWatch yields a single value (string name) or a keyed record (array name / no name).
type UseWatchResult = unknown | Record<string, unknown>;

// options
// Handlers for each accepted options shape, dispatched by matchOptions below.
interface PropsPattern<P> {
  Static: (options: UseWatchProps) => UseWatchProps;
  Dynamic: (options: UseWatchPropsGenerator<P>, props: P) => UseWatchProps;
  undefined: () => undefined;
}
// Resolve the options argument to a concrete UseWatchProps value:
// call the generator with props, pass a static object through, or fall back
// to the `undefined` handler when no options were given.
const matchOptions =
  <P>(pattern: PropsPattern<P>) =>
  (options: WatchPropsParameter<P>) =>
  (props: P): UseWatchProps => {
    if (options !== undefined) {
      return isFunction(options)
        ? pattern.Dynamic(options as UseWatchPropsGenerator<P>, props)
        : pattern.Static(options as UseWatchProps);
    }
    return pattern.undefined();
  };
// Handlers for assigning watch results onto props, keyed by the shape of
// options.name: a single field name, an array of names, or absent.
interface AssignmentPattern<P> {
  String: (options: UseWatchProps, props: P, watchValues: unknown) => void;
  Array: (options: UseWatchProps, props: P, watchValues: Record<string, unknown>) => void;
  undefined: (props: P, watchValues: Record<string, unknown>) => void;
}
// assignment
// Dispatch on options.name to decide how the useWatch result maps onto props.
const matchAssignment =
  <P>(pattern: AssignmentPattern<P>) =>
  (options: UseWatchProps, props: P, watchValues: UseWatchResult): void => {
    const name = options?.name;
    if (name === undefined) {
      return pattern.undefined(props, watchValues as Record<string, unknown>);
    }
    return Array.isArray(name)
      ? pattern.Array(options, props, watchValues as Record<string, unknown>)
      : pattern.String(options, props, watchValues);
  };
// enhancer
// Prop name used when no explicit field name(s) were requested.
const WATCH_VALUES = 'watchValues';

// Bach enhancer wiring react-hook-form's useWatch into the enhanced component's
// props. Generates code (via `initialize`) that resolves the options, calls
// useWatch, and copies the watched value(s) onto the props object.
export default <P>(options?: WatchPropsParameter<P>) =>
  ({generateNewVariable}: EnhancerContext): EnhancerResult => {
    // variables
    const optionsAlias = generateNewVariable();
    const resolveAlias = generateNewVariable();
    const assignmentAlias = generateNewVariable();
    const watchValuesAlias = generateNewVariable();

    // match setup
    // resolve: options object passed through, generator invoked with props.
    const resolve = matchOptions({
      Static: (o) => o,
      Dynamic: (o, p) => o(p),
      undefined: () => undefined,
    })(options);
    // assignment: single name -> one prop; name array -> one prop per name;
    // no name -> the whole record under `watchValues`.
    const assignment = matchAssignment({
      String: (o, p, w) => {
        // @ts-ignore
        p[o.name as string] = w;
      },
      Array: (o, p, w) => {
        (o.name as string[]).forEach((s) => {
          // @ts-ignore
          p[s] = w[s];
        });
      },
      undefined: (p, w) => {
        // @ts-ignore
        p[WATCH_VALUES] = w;
      },
    });

    // The initialize snippet below is injected verbatim into the generated component.
    return {
      dependencies: {
        useWatch,
        [resolveAlias]: resolve,
        [assignmentAlias]: assignment,
      },
      initialize: `
        const ${optionsAlias} = ${resolveAlias}(${PROPS});
        const ${watchValuesAlias} = useWatch(${optionsAlias});

        ${assignmentAlias}(${optionsAlias}, ${PROPS}, ${watchValuesAlias});
      `,
      props: [],
    };
  };
|
#!/bin/bash
# Regenerates lib/version.ts with the package version plus the short git commit hash.
set -euo pipefail

commit=${CODEBUILD_RESOLVED_SOURCE_VERSION:-}
# CODEBUILD_RESOLVED_SOURCE_VERSION is not defined (i.e. local build or CodePipeline build),
# use the HEAD commit hash
if [ -z "${commit}" ]; then
  commit="$(git rev-parse --verify HEAD)"
fi

# Strip any trailing "+<hex>" build-metadata suffix from the package.json version.
VERSION=$(node -p "require('./package.json').version.replace(/\\+[0-9a-f]+\$/, '')")

cat > lib/version.ts <<HERE
// Generated at $(date -u +"%Y-%m-%dT%H:%M:%SZ") by generate.sh

/** The qualified version number for this JSII compiler. */
export const VERSION = '${VERSION} (build ${commit:0:7})';
HERE
|
# %%
from xgboost import XGBRegressor
from sklearn import metrics
from sklearn.model_selection import train_test_split
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import pickle
# %%
# Load the training and testing spreadsheets and combine them into one frame.
training = pd.read_excel('data/train.xlsx', skiprows=1)
testing = pd.read_excel('data/test.xlsx', skiprows=1)
# pd.concat replaces the deprecated/removed DataFrame.append.
df = pd.concat([training, testing])

X = df.filter(items=["DBH", "H", "TX", "d"]).values.reshape(-1, 4)
y = df.filter(items=["V"])
X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                    test_size=0.25,
                                                    random_state=1504,
                                                    stratify=df.Cod_Volume)

# Gradient-boosted regression; hyperparameters presumably come from a prior
# tuning run — confirm before changing.
model = XGBRegressor(
    objective='reg:squarederror',
    n_estimators=2000,
    max_depth=7,
    learning_rate=0.0367,
    n_jobs=10,
    gamma=0.0000806,
    booster="gbtree",
    min_child_weight=20
)
model.fit(X_train, y_train)
predicted_vol = model.predict(X_test)
# BUG FIX: this value is the R^2 score, not the mean absolute error — label it correctly.
print("R2 score:", metrics.r2_score(y_test, predicted_vol))

model_results = pd.DataFrame(
    {"actual": y_test["V"].array, "predicted": predicted_vol}
)
model_results["residual_abs"] = model_results["actual"] - \
    model_results["predicted"]

# %% Observed-vs-predicted scatter plot with a linear trend line.
plt.figure(figsize=(8, 7), dpi=300)
plt.scatter(model_results["actual"],
            model_results["predicted"],
            s=15,
            edgecolors='black',
            linewidth=0.4,
            alpha=0.6)
# FIX: repaired mojibake "mยณ" -> "m³" in the axis labels.
plt.title("Volumetria Eucalipto - Modelo XGBoost")
plt.xlabel("Volume observado (m³)")
plt.ylabel("Volume predito (m³)")
z = np.polyfit(model_results["actual"],
               model_results["predicted"],
               1)
p = np.poly1d(z)
# FIX: the original passed fmt "r--" AND color="black"; the kwarg silently won,
# so the line was always black dashed. "k--" states that directly.
# NOTE(review): the trend line is evaluated over the predicted values although
# the fit maps actual -> predicted; confirm the intended x axis.
plt.plot(model_results["predicted"],
         p(model_results["predicted"]),
         "k--")

# %% Refit on the full dataset before persisting/serving the model.
model.fit(X, y)
# %%
|
#include <iostream>
using namespace std;
// Computes n! iteratively; returns 1 for n <= 1.
// Note: the int result overflows for n > 12 on typical 32-bit int platforms.
int factorial(int n)
{
    int result = 1;
    while (n > 1) {
        result *= n;
        --n;
    }
    return result;
}
// Demo: print the factorial of 5.
int main()
{
    int num = 5;
    cout << "Factorial of "
         << num << " = " << factorial(num);
    return 0;
}
// Output: Factorial of 5 = 120
// (FIX: the trailing note used a Python-style "#" comment, which C++ parses as
// an invalid preprocessor directive and fails to compile; "//" is correct.)
<reponame>hispindia/BIHAR-2.7
package org.hisp.dhis.dd.action.dataelement;
/*
* Copyright (c) 2004-2012, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import org.hisp.dhis.attribute.AttributeService;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryCombo;
import org.hisp.dhis.dataelement.DataElementCategoryService;
import org.hisp.dhis.dataelement.DataElementGroup;
import org.hisp.dhis.dataelement.DataElementService;
import org.hisp.dhis.option.OptionService;
import org.hisp.dhis.option.OptionSet;
import org.hisp.dhis.system.util.AttributeUtils;
import org.hisp.dhis.system.util.ConversionUtils;
import com.opensymphony.xwork2.Action;
/**
* @author <NAME>
*/
/**
 * Struts action that creates a new {@link DataElement} from the submitted form
 * fields, attaches it to the selected data element groups and persists it.
 *
 * @author <NAME>
 */
public class AddDataElementAction
    implements Action
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private DataElementService dataElementService;

    public void setDataElementService( DataElementService dataElementService )
    {
        this.dataElementService = dataElementService;
    }

    private DataElementCategoryService dataElementCategoryService;

    public void setDataElementCategoryService( DataElementCategoryService dataElementCategoryService )
    {
        this.dataElementCategoryService = dataElementCategoryService;
    }

    private AttributeService attributeService;

    public void setAttributeService( AttributeService attributeService )
    {
        this.attributeService = attributeService;
    }

    private OptionService optionService;

    public void setOptionService( OptionService optionService )
    {
        this.optionService = optionService;
    }

    // -------------------------------------------------------------------------
    // Input (bound from the submitted form)
    // -------------------------------------------------------------------------

    private String name;

    public void setName( String name )
    {
        this.name = name;
    }

    private String alternativeName;

    public void setAlternativeName( String alternativeName )
    {
        this.alternativeName = alternativeName;
    }

    private String shortName;

    public void setShortName( String shortName )
    {
        this.shortName = shortName;
    }

    private String code;

    public void setCode( String code )
    {
        this.code = code;
    }

    private String description;

    public void setDescription( String description )
    {
        this.description = description;
    }

    private String formName;

    public void setFormName( String formName )
    {
        this.formName = formName;
    }

    private String domainType;

    public void setDomainType( String domainType )
    {
        this.domainType = domainType;
    }

    private String numberType;

    public void setNumberType( String numberType )
    {
        this.numberType = numberType;
    }

    private String valueType;

    public void setValueType( String valueType )
    {
        this.valueType = valueType;
    }

    private String aggregationOperator;

    public void setAggregationOperator( String aggregationOperator )
    {
        this.aggregationOperator = aggregationOperator;
    }

    private String url;

    public void setUrl( String url )
    {
        this.url = url;
    }

    private Collection<String> aggregationLevels;

    public void setAggregationLevels( Collection<String> aggregationLevels )
    {
        this.aggregationLevels = aggregationLevels;
    }

    private Integer selectedCategoryComboId;

    public void setSelectedCategoryComboId( Integer selectedCategoryComboId )
    {
        this.selectedCategoryComboId = selectedCategoryComboId;
    }

    private boolean zeroIsSignificant;

    public void setZeroIsSignificant( Boolean zeroIsSignificant )
    {
        this.zeroIsSignificant = zeroIsSignificant;
    }

    private List<String> jsonAttributeValues;

    public void setJsonAttributeValues( List<String> jsonAttributeValues )
    {
        this.jsonAttributeValues = jsonAttributeValues;
    }

    private Collection<String> selectedGroups = new HashSet<String>();

    public void setSelectedGroups( Collection<String> selectedGroups )
    {
        this.selectedGroups = selectedGroups;
    }

    private Integer selectedOptionSetId;

    public void setSelectedOptionSetId( Integer selectedOptionSetId )
    {
        this.selectedOptionSetId = selectedOptionSetId;
    }

    // -------------------------------------------------------------------------
    // Action implementation
    // -------------------------------------------------------------------------

    public String execute()
    {
        // ---------------------------------------------------------------------
        // Prepare values: blank optional form fields are stored as null
        // ---------------------------------------------------------------------

        alternativeName = nullIfBlank( alternativeName );
        code = nullIfBlank( code );
        description = nullIfBlank( description );

        // ---------------------------------------------------------------------
        // Create data element
        // ---------------------------------------------------------------------

        DataElement dataElement = new DataElement();

        DataElementCategoryCombo categoryCombo = dataElementCategoryService
            .getDataElementCategoryCombo( selectedCategoryComboId );

        OptionSet optionSet = optionService.getOptionSet( selectedOptionSetId );

        dataElement.setName( name );
        dataElement.setAlternativeName( alternativeName );
        dataElement.setShortName( shortName );
        dataElement.setCode( code );
        dataElement.setDescription( description );
        dataElement.setFormName( formName );
        dataElement.setActive( true );
        dataElement.setDomainType( domainType );
        dataElement.setType( valueType );
        dataElement.setNumberType( numberType );
        dataElement.setAggregationOperator( aggregationOperator );
        dataElement.setUrl( url );
        dataElement.setZeroIsSignificant( zeroIsSignificant );
        dataElement.setCategoryCombo( categoryCombo );
        dataElement.setAggregationLevels( new ArrayList<Integer>( ConversionUtils
            .getIntegerCollection( aggregationLevels ) ) );
        dataElement.setOptionSet( optionSet );

        if ( jsonAttributeValues != null )
        {
            AttributeUtils.updateAttributeValuesFromJson( dataElement.getAttributeValues(), jsonAttributeValues,
                attributeService );
        }

        dataElementService.addDataElement( dataElement );

        // Attach the new element to each selected group and persist the group change.
        for ( String id : selectedGroups )
        {
            DataElementGroup group = dataElementService.getDataElementGroup( Integer.parseInt( id ) );

            if ( group != null )
            {
                group.addDataElement( dataElement );
                dataElementService.updateDataElementGroup( group );
            }
        }

        dataElementService.updateDataElement( dataElement );

        return SUCCESS;
    }

    /**
     * Returns null when the value is null or contains only whitespace, otherwise
     * the value unchanged. Replaces the triplicated trim-and-null blocks; form
     * convention is that blank optional inputs mean "not provided".
     */
    private static String nullIfBlank( String value )
    {
        return ( value == null || value.trim().length() == 0 ) ? null : value;
    }
}
|
#!/bin/bash
# Slurm batch script: runs the "maxout-3" activation-function experiment (run 1)
# via meta.py on one core with 2 GB RAM and a ~24 h wall-clock limit.
#SBATCH -J Act_maxout-3_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1     # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00             # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins

#module load intel python/3.5
# Positional args: activation, run id, optimizer, then presumably tuned
# hyperparameters (dropout, hidden size, lr?, init, embedding) — confirm against meta.py.
python3 /home/se55gyhe/Act_func/progs/meta.py maxout-3 1 Adadelta 1 0.44997208308473535 398 0.4641751714881186 he_uniform PE-infersent
|
# MMDetection config: Faster R-CNN (ResNet-50 + FPN) on the COCO-500 detection
# dataset with augmentation, stretched to a 100-epoch schedule.
_base_ = [
    '../_base_/models/faster_rcnn_r50_fpn.py',
    '../_base_/datasets/coco500_detection_augm.py',
    '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
]
# optimizer
# lr=0.04 is presumably scaled for the effective batch size — confirm against the dataset config.
optimizer = dict(type='SGD', lr=0.04, momentum=0.9, weight_decay=0.0001)
# learning policy
# Step decay at epochs 75 and 95 of the 100-epoch run.
lr_config = dict(step=[75, 95])
total_epochs = 100
|
<reponame>m-nakagawa/sample
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.junit;
import java.util.ArrayList ;
import java.util.List ;
import org.apache.jena.atlas.iterator.Iter ;
import org.apache.jena.query.Dataset ;
import org.apache.jena.query.DatasetFactory ;
import org.apache.jena.query.Syntax ;
import org.apache.jena.rdf.model.Model ;
import org.apache.jena.rdf.model.Property ;
import org.apache.jena.rdf.model.Resource ;
import org.apache.jena.rdf.model.Statement ;
import org.apache.jena.riot.checker.CheckerLiterals ;
import org.apache.jena.sparql.core.DatasetGraph ;
import org.apache.jena.sparql.core.DatasetGraphFactory ;
import org.apache.jena.sparql.vocabulary.TestManifestUpdate_11 ;
import org.apache.jena.update.UpdateAction ;
import org.apache.jena.update.UpdateFactory ;
import org.apache.jena.update.UpdateRequest ;
import org.apache.jena.util.FileManager ;
import org.apache.jena.util.iterator.ClosableIterator ;
import org.apache.jena.util.junit.TestUtils ;
import org.apache.jena.vocabulary.RDFS ;
/**
 * EARL test case that executes a SPARQL 1.1 Update request (from a test
 * manifest) against an input dataset and compares the result against the
 * expected output dataset, graph by graph.
 */
public class UpdateTest extends EarlTestCase
{
    private Resource action ;
    private Resource result ;
    private String updateFile ;
    // Built fresh for every run in setUpTest() and cleared in tearDownTest().
    private Dataset input ;
    private Dataset output ;

    /** Factory: returns null (test skipped) when the manifest action lacks a ut:request. */
    public static UpdateTest create(String testName, EarlReport earl, Resource entry, Resource action, Resource result)
    {
        if ( ! action.hasProperty(TestManifestUpdate_11.request) )
        {
            System.err.println("No request in action: "+testName) ;
            return null ;
        }
        return new UpdateTest(testName, earl, entry, action, result) ;
    }

    private UpdateTest(String testName, EarlReport earl, Resource entry, Resource action, Resource result)
    {
        super(TestUtils.safeName(testName), entry.getURI(), earl) ;
        this.action = action ;
        this.result = result ;
        /* Manifest shape this constructor reads:
         * mf:action [ ut:query <insert-data-spo1.rq> ;
                       ut:data <empty.ttl>
                     ] ;
           mf:result [ ut:result ut:success ;
                       ut:data <spo.ttl>
                     ] .
        */
        updateFile = action.getProperty(TestManifestUpdate_11.request).getResource().getURI() ;
    }

    // Saved parser-warning flag, restored in tearDownTest().
    private boolean oldWarningFlag ;

    @Override
    public void setUpTest()
    {
        super.setUpTest() ;
        // Turn parser warnings off for the test data.
        oldWarningFlag = CheckerLiterals.WarnOnBadLiterals ;
        // NOTE(review): the disabling line is commented out, so only the restore
        // in tearDownTest() has any effect — confirm whether this is intentional.
        //CheckerLiterals.WarnOnBadLiterals = false ;
        input = getDataset(action) ;
        output = getDataset(result) ;
    }

    @Override
    public void tearDownTest()
    {
//        if ( resetNeeded )
//            ARQ.setFalse(ARQ.strictGraph) ;
        CheckerLiterals.WarnOnBadLiterals = oldWarningFlag ;
        input = null ;
        output = null ;
        super.tearDownTest() ;
    }

    /** Runs the update against the input dataset and asserts it matches the expected output. */
    @Override
    protected void runTestForReal()
    {
        try {
            UpdateRequest request = UpdateFactory.read(updateFile, Syntax.syntaxSPARQL_11) ;
            UpdateAction.execute(request, input) ;
            boolean b = datasetSame(input, output, false) ;
            if ( ! b )
            {
                // Dump both datasets, then re-run the comparison verbosely for diagnostics.
                System.out.println("---- "+getName()) ;
                System.out.println("---- Got: ") ;
                System.out.println(input.asDatasetGraph()) ;
                System.out.println("---- Expected") ;
                System.out.println(output.asDatasetGraph()) ;
                datasetSame(input, output, true) ;
                System.out.println("----------------------------------------") ;
            }

            assertTrue("Datasets are different", b) ;
        } catch (RuntimeException ex)
        {
            ex.printStackTrace(System.err) ;
            throw ex ;
        }
    }

    /**
     * Datasets are equal when they hold the same named-graph names and each
     * pair of graphs (default and named) is isomorphic.
     */
    private boolean datasetSame(Dataset ds1, Dataset ds2, boolean verbose)
    {
        List<String> names1 = Iter.toList(ds1.listNames()) ;
        List<String> names2 = Iter.toList(ds2.listNames()) ;

        if ( ! names1.equals(names2) )
        {
            if ( verbose )
            {
                System.out.println("Different named graphs") ;
                System.out.println("  "+names1) ;
                System.out.println("  "+names2) ;
            }
            return false ;
        }
        if ( !ds1.getDefaultModel().isIsomorphicWith(ds2.getDefaultModel()) )
        {
            if ( verbose )
                System.out.println("Default graphs differ") ;
            return false ;
        }

        for ( String gn : names1 )
        {
            Model m1 = ds1.getNamedModel(gn) ;
            Model m2 = ds2.getNamedModel(gn) ;
            if ( ! m1.isIsomorphicWith(m2) )
            {
                if ( verbose )
                    System.out.println("Different on named graph "+gn) ;
                return false ;
            }
        }
        return true ;
    }

    /**
     * Builds a dataset from a manifest resource: ut:data files load into the
     * default graph, ut:graphData entries into named graphs (either a plain URI,
     * or a bnode carrying ut:graph plus an rdfs:label naming the graph).
     */
    static Dataset getDataset(Resource r)
    {
        //DataSource ds = DatasetFactory.create() ;
        DatasetGraph dsg = DatasetGraphFactory.createMem() ;
        // Growing. dataset.
        Dataset ds = DatasetFactory.wrap(dsg) ;

        List<String> dftData = getAll(r, TestManifestUpdate_11.data) ;
        for ( String x : dftData )
            FileManager.get().readModel(ds.getDefaultModel(), x) ;

        ClosableIterator<Statement> cIter =  r.listProperties(TestManifestUpdate_11.graphData) ;
        for ( ; cIter.hasNext() ; )
        {
            // An graphData entry can be a URI or a [ ut ... ; rdfs:label "foo" ] ;
            Statement stmt = cIter.next() ;
            Resource gn = stmt.getResource() ;
            if ( gn.isAnon() )
            {
                if ( ! gn.hasProperty(TestManifestUpdate_11.graph) )
                    System.err.println("No data for graphData") ;

                String fn = gn.getProperty(TestManifestUpdate_11.graph).getResource().getURI() ;
                String name = gn.getProperty(RDFS.label).getString() ;
                Model m = FileManager.get().loadModel(fn) ;
                ds.addNamedModel(name, m) ;
            }
            else
            {
                String x = gn.getURI() ;
                Model m = FileManager.get().loadModel(x) ;
                ds.addNamedModel(x, m) ;
            }
        }
        cIter.close() ;
        return ds ;
    }

    /** Collects the URIs of every value of property p on resource r. */
    static List<String> getAll(Resource r, Property p)
    {
        List<String> l = new ArrayList<>() ;
        ClosableIterator<Statement> cIter = r.listProperties(p) ;
        for ( ; cIter.hasNext() ; )
        {
            Statement stmt = cIter.next() ;
            String df = stmt.getObject().asResource().getURI() ;
            l.add(df) ;
        }
        cIter.close() ;
        return l ;
    }
}
|
<gh_stars>0
import { IConfig, IResult, IVersion, ITipRule } from "./interface";
import { tmpdir } from 'os';
import { join } from 'path';
import { existsSync, readFileSync, writeFileSync, mkdirSync} from 'fs';
import { exec, formatVersion, matchVersion } from "./utils";
// Checks npm for newer versions of `moduleName` relative to `currentVersion`,
// caching the answer on disk (per module+version) for `timeout` ms.
// Also evaluates any "module-info-tips" rules published in the package metadata.
export default async (moduleName: string, currentVersion: string, options?: IConfig): Promise<IResult> => {
  const { level, timeout, ignoreInformalVersion, registry } = {
    level: ['major', 'minor', 'patch'],
    timeout: 24 * 60 * 60 * 1000, // cache results for one day by default
    ignoreInformalVersion: true,
    registry: '',
    ...options,
  }
  const curVersion = formatVersion(currentVersion);

  // On-disk cache: one JSON file per (module, current version) pair.
  const cacheDir = join(tmpdir(), `npmModInfoCache`);
  if (!existsSync(cacheDir)) {
    mkdirSync(cacheDir, 0o777)
  }
  const cacheFile = join(cacheDir, `${(moduleName + '_' + currentVersion).replace(/[^\w]/g, '_')}_cache.json`);
  let cache: { time: number, value: IResult };
  if (existsSync(cacheFile)) {
    cache = JSON.parse(readFileSync(cacheFile, 'utf-8'));
  }
  if (cache?.time && (Date.now() - cache.time < timeout)) {
    return cache.value;
  }

  // Pick a registry: explicit option wins; otherwise use the China npm mirror
  // when the locale suggests a zh_CN environment.
  let npmCmd = `npm`;
  if (registry) {
    npmCmd = `npm --registry=${registry}`;
  } else if (process.env.LANG === 'zh_CN.UTF-8') {
    npmCmd = 'npm --registry=https://registry.npmmirror.com';
  }

  let result: IResult = {
    update: false,
    tips: [],
    version: '',
  }
  try {
    const data = await exec({
      cmd: `${npmCmd} view ${moduleName} --json`,
      baseDir: process.env.HOME,
      timeout: 2000
    });
    const { versions, 'module-info-tips': tipRules = [] } = JSON.parse(data as string);

    // Keep only versions that are upgrades at one of the requested levels.
    // NOTE(review): "pacth" is spelled this way throughout — presumably the
    // IVersion interface uses the same spelling; confirm before renaming.
    let filterVersions: IVersion[] = versions.map(formatVersion).filter((version: IVersion) => {
      if (ignoreInformalVersion && version.tag) {
        return;
      }

      // patch: major and minor is same and minor is diff
      if (level.includes('patch')) {
        if (version.major === curVersion.major && version.minor === curVersion.minor && version.pacth > curVersion.pacth) {
          return true;
        }
      }
      // minor: major is same and minor is diff
      if (level.includes('minor')) {
        if (version.major === curVersion.major && version.minor > curVersion.minor) {
          return true;
        }
      }
      // major: only check major diff
      if (level.includes('major')) {
        if (version.major > curVersion.major) {
          return true;
        }
      }
    });

    // Highest-scoring (newest) candidate first.
    filterVersions = filterVersions.sort((aVer: IVersion, bVer: IVersion) => {
      return bVer.score - aVer.score;
    });

    let update = false;
    let newVersion;
    if (filterVersions.length) {
      update = true;
      newVersion = filterVersions[0].version;
    }

    // A tip rule applies when the current version matches `match` and none of `ignore`.
    const tips = tipRules.filter((rule: ITipRule) => {
      if (!rule?.tip) {
        return false;
      }
      const ignore = [].concat(rule.ignore || []).find(rule => matchVersion(curVersion, rule));
      if (ignore) {
        return false;
      }
      const match = [].concat(rule.match || []).find(rule => matchVersion(curVersion, rule));
      if (match) {
        return true;
      }
      return false;
    });

    result = {
      update,
      version: newVersion,
      tips: tips.map((rule: ITipRule) => rule.tip),
    }
  } catch {
    // Network/parse failures fall through: cache and return the "no update" default.
  }

  writeFileSync(cacheFile, JSON.stringify({ time: Date.now(), value: result }))
  return result;
}
package io.shadowstack.candidates.registrars;
import feign.Feign;
import feign.Headers;
import feign.Logger;
import feign.RequestLine;
import feign.jackson.JacksonDecoder;
import feign.jackson.JacksonEncoder;
import feign.okhttp.OkHttpClient;
import feign.slf4j.Slf4jLogger;
/**
* Registrar which is a REST API client for calling the registration endpoint on an oracle service.
*/
/**
 * Registrar which is a REST API client for calling the registration endpoint on an oracle service.
 */
public interface RestCandidateRegistrar extends CandidateRegistrar {
    /** Feign-mapped call: POST the registration request as JSON to /candidate/register. */
    @RequestLine("POST /candidate/register")
    @Headers("Content-Type: application/json")
    RegistrationResponse register(RegistrationRequest request);

    /**
     * Create a new client for the given oracle host, conforming to the CandidateRegistrar interface, which
     * will forward candidate registration requests to the oracle's "register" endpoint.
     * @param host The host name of the oracle, including the protocol and port, e.g. "http://localhost:8080".
     * @return An instance of CandidateRegistrar that forwards to the oracle's "register" endpoint.
     */
    static CandidateRegistrar createClient(String host) {
        // OkHttp transport, Jackson JSON (de)serialization, basic SLF4J request logging.
        return Feign.builder()
                .client(new OkHttpClient())
                .encoder(new JacksonEncoder())
                .decoder(new JacksonDecoder())
                .logger(new Slf4jLogger(RestCandidateRegistrar.class))
                .logLevel(Logger.Level.BASIC)
                .target(RestCandidateRegistrar.class, host);
    }
}
|
package com.bjdvt.platform.mapper;
import com.bjdvt.platform.model.Company;
import com.bjdvt.platform.model.CompanyExample;
import java.util.List;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper for CRUD operations on the Company table.
 * Implementations are generated at runtime by MyBatis from the
 * corresponding XML mapper file.
 */
public interface CompanyMapper {
    /** Counts rows matching the example criteria. */
    int countByExample(CompanyExample example);

    /** Deletes all rows matching the example criteria; returns rows affected. */
    int deleteByExample(CompanyExample example);

    /** Deletes the row with the given primary key; returns rows affected. */
    int deleteByPrimaryKey(String id);

    /** Inserts a full record (all columns, including nulls). */
    int insert(Company record);

    /** Inserts only the non-null fields of the record. */
    int insertSelective(Company record);

    /** Returns all rows matching the example criteria. */
    List<Company> selectByExample(CompanyExample example);

    /** Returns the row with the given primary key, or null if absent. */
    Company selectByPrimaryKey(String id);

    /** Updates non-null fields of matching rows; returns rows affected. */
    int updateByExampleSelective(@Param("record") Company record, @Param("example") CompanyExample example);

    /** Updates all fields of matching rows; returns rows affected. */
    int updateByExample(@Param("record") Company record, @Param("example") CompanyExample example);

    /** Updates non-null fields of the row with the record's primary key. */
    int updateByPrimaryKeySelective(Company record);

    /** Updates all fields of the row with the record's primary key. */
    int updateByPrimaryKey(Company record);
}
<reponame>natalijamitic/OSKernel
#include "kersem.h"
#include "semaphor.h"
#include "SCHEDULE.H"
#include "system.h"
#include "pcb.h"
#include <iostream.h>
#include "list.h"
#include "semlist.h"
#include "idle.h"
// Global registry of every live kernel semaphore (used by the timer tick).
SemList* KernelSem::semaphoresList = new SemList();
// Monotonic counter handed to each blocking thread so deblock() can
// restore FIFO wake order across the two waiting lists.
int KernelSem::semaphoreId = 0;
// Creates a semaphore with the given initial value, allocates its two
// waiting lists (untimed and time-limited) and registers it globally.
KernelSem::KernelSem(int init):value(init){
	blockedThreadsList = new List();
	timeBlockedThreadsList = new List();
	KernelSem::semaphoresList->add(this);
}
// Destroys the semaphore: deregisters it and releases every thread still
// waiting on it (both untimed and timed waiters) back to the scheduler.
// Runs with interrupts locked so list traversal is atomic.
KernelSem::~KernelSem(){
	System::lock();
	KernelSem::semaphoresList->remove(this);
	// Wake all untimed waiters that are still alive.
	for(List::Element* cur = blockedThreadsList->getFirst(); cur != 0; cur = cur->next){
		if (cur->data->getState() != DONE){
			cur->data->setState(READY);
			Scheduler::put(cur->data);
		}
	}
	// Wake all time-limited waiters that are still alive.
	for(List::Element* cur2 = timeBlockedThreadsList->getFirst(); cur2 != 0; cur2 = cur2->next){
		if (cur2->data->getState() != DONE){
			cur2->data->setState(READY);
			Scheduler::put(cur2->data);
		}
	}
	delete blockedThreadsList;
	delete timeBlockedThreadsList;
	value = 0;
	System::unlock();
}
// Returns the current semaphore value (negative means |value| threads wait).
int KernelSem::val() const{
	return value;
}
// Wakes one blocked thread, if any.
//
// timeDeblock == 1: timer path — wakes the head of the time-limited list
// and flags it as woken by timeout (so wait() can return 0).
// Otherwise: picks, across both waiting lists, the thread that blocked
// earliest (smallest arrival id assigned in block()) to keep FIFO order.
void KernelSem::deblock(int timeDeblock){
	System::lock();
	PCB* deblockedThread;
	if (timeDeblock == 1){
		deblockedThread = timeBlockedThreadsList->removeGetFirst();
		// FIX: guard against an empty list — removeGetFirst() can return 0
		// and the original dereferenced it unconditionally.
		if (deblockedThread != 0)
			deblockedThread->setTimeUnblocked(1);
	}
	else{
		PCB* normal = blockedThreadsList->getFirstPCB();
		PCB* time = timeBlockedThreadsList->getByLowestSemIDPCB();
		if (normal == 0 && time == 0)
			deblockedThread = 0;
		else if (normal == 0 && time != 0)
			deblockedThread = timeBlockedThreadsList->removeGetLowestSemIDAndUpdateTimeSem(time);
		else if (normal != 0 && time == 0)
			deblockedThread = blockedThreadsList->removeGetFirst();
		else
			// Both lists hold candidates: the smaller arrival id blocked first.
			deblockedThread = (normal->getMySemaphoreId() < time->getMySemaphoreId() ? blockedThreadsList->removeGetFirst() : timeBlockedThreadsList->removeGetLowestSemIDAndUpdateTimeSem(time));
	}
	if (deblockedThread != 0){
		if (deblockedThread->getState() != DONE) {
			deblockedThread->setState(READY);
			Scheduler::put(deblockedThread);
		}
	}
	System::unlock();
}
// Blocks the running thread on this semaphore. maxTimeToWait == 0 means
// wait forever (untimed list); otherwise the thread joins the time-limited
// list and will be woken by the timer if the semaphore is not signalled.
void KernelSem::block(Time maxTimeToWait){
	System::lock();
	PCB::running->setState(BLOCKED);
	PCB::running->setMySemaphoreId(KernelSem::semaphoreId++); // record arrival order so deblock() knows which thread to wake first
	if (maxTimeToWait == 0)
		blockedThreadsList->add(PCB::running);
	else
		timeBlockedThreadsList->addByTime(PCB::running, maxTimeToWait);
	System::unlock();
}
// P operation. Decrements the value; if it drops below zero the caller
// blocks (optionally with a timeout) and yields via dispatch().
// Returns 1 on a normal wakeup, 0 when the wait expired by timeout.
int KernelSem::wait(Time maxTimeToWait){
	System::lock();
	int ret = 1;
	if (--value < 0){
		block(maxTimeToWait);
		// Must release the lock before yielding the CPU, and re-acquire
		// it after being rescheduled to inspect the timeout flag safely.
		System::unlock();
		dispatch();
		System::lock();
		if (PCB::running->getTimeUnblocked() == 1){
			ret = 0;
			PCB::running->setTimeUnblocked(0);// reset the timeout flag
		}
	}
	System::unlock();
	return ret;
}
// V operation with three modes:
//  - timeDeblock == 1: timer-driven wakeup; increments value, wakes the
//    head of the timed queue, and returns n unchanged.
//  - n > 0: performs n signals at once; wakes min(n, waiting) threads and
//    returns how many were actually woken.
//  - n == 0: classic single signal; wakes one thread if any are waiting,
//    returns n (0).
int KernelSem::signal(int n, int timeDeblock){
	System::lock();
	if (timeDeblock == 1){
		value++;
		deblock(timeDeblock);
		System::unlock();
		return n;
	}
	else if (n > 0) {
		int deblocked = 0;
		if (value < 0){
			// -value is the number of waiters; never wake more than exist.
			deblocked = ( (n < -1 * value) ? n : -1 * value );
			for (int i = 0; i < deblocked; i++)
				deblock();
		}
		value += n;
		System::unlock();
		return deblocked;
	}
	else if (n == 0){
		if (value++ < 0)
			deblock();
	}
	System::unlock();
	return n;
}
// Accessors for the waiting lists and the global semaphore registry.
List* KernelSem::getBlockedThreadsList(){ return blockedThreadsList; }
List* KernelSem::getTimeBlockedThreadsList(){ return timeBlockedThreadsList; }
SemList* KernelSem::getSemaphoresList(){ return KernelSem::semaphoresList; }
// Frees the global semaphore registry (called at system shutdown) and
// nulls the pointer so repeated calls are harmless.
void KernelSem::deleteSemaphoresList(){
	if (KernelSem::semaphoresList != 0)
		delete KernelSem::semaphoresList;
	KernelSem::semaphoresList = 0;
}
// Timer-tick hook: advances the timeout bookkeeping on every registered
// semaphore's time-limited waiters.
void KernelSem::updateTimeOnSemaphores(){
	if (semaphoresList != 0){
		SemList::updateTime(semaphoresList);
	}
}
|
/*
  ## filtering
*/
define([
  'angular',
  'app',
  'lodash'
],
function (angular, app, _) {
  'use strict';

  // Panel module that lists and manages the filters applied to the dashboard.
  var module = angular.module('kibana.panels.filtering', []);
  app.useModule(module);

  module.controller('filtering', function($scope, filterSrv, $rootScope, dashboard) {

    $scope.panelMeta = {
      status : "Stable",
      description : "A controllable list of all filters currently applied to the dashboard. You "+
        "almost certainly want one of these on your dashboard somewhere."
    };

    // Set and populate defaults
    var panelDefaults = {};
    _.defaults($scope.panel, panelDefaults);

    // Flag the containing row whenever any filter changes.
    $scope.$on('filter', function() {
      $scope.row.notice = true;
    });

    $scope.init = function() {
      $scope.filterSrv = filterSrv;
    };

    $scope.remove = function(id) {
      filterSrv.remove(id);
    };

    // This function should be moved to the service
    $scope.toggle = function(id) {
      var filter = filterSrv.list[id];
      filter.active = !filter.active;
      dashboard.refresh();
    };

    // Add a new querystring filter, defaulting to match-all.
    $scope.add = function(query) {
      filterSrv.set({
        editing : true,
        type : 'querystring',
        query : query || '*',
        mandate : 'must'
      }, undefined, true);
    };

    $scope.refresh = function() {
      dashboard.refresh();
    };

    $scope.render = function() {
      $rootScope.$broadcast('render');
    };

    // Hide bookkeeping keys when listing a filter's properties.
    $scope.show_key = function(key) {
      return !_.contains(['type','id','alias','mandate','active','editing'], key);
    };

    // CSS class per mandate; inactive filters are always muted.
    var mandateClasses = {
      must    : 'text-success',
      mustNot : 'text-error',
      either  : 'text-warning'
    };
    $scope.getFilterClass = function(filter) {
      if (filter.active !== true) {
        return 'muted';
      }
      return mandateClasses[filter.mandate] || 'text-info';
    };

    // Time filters are owned by the dashboard itself and cannot be edited.
    $scope.isEditable = function(filter) {
      return !_.contains(['time'], filter.type);
    };
  });
});
# Resolve the directory containing this script, independent of the caller's cwd.
ROOT="$( cd "$(dirname "$0")" ; pwd -P )"
RES_DIR="$ROOT/res"
# Recreate the resource directory from scratch.
rm -rf "$RES_DIR"
mkdir "$RES_DIR"
# Write default server properties.
# NOTE(review): three keys are spelled "MediaLibrarServer" (missing 'y')
# while the webroot key says "MediaLibraryServer" — confirm which spelling
# the server actually reads before normalizing.
echo -e "MediaLibraryServer.webroot = www_client/www\nMediaLibrarServer.port = 8080\nDatabase.type = sqlite\nDatabase.path = res/MediaLibrarServer.sqlite3" > "$RES_DIR/MediaLibraryServer.properties"
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package helloworld;
import static kiss.API.abs;
import static kiss.API.pause;
/**
*
* @author grace
*/
public class TimezoneClock extends Clock {
    // Offset (in hours) from the base clock's time.
    // NOTE(review): this field is never applied anywhere in this class —
    // looks like an unfinished feature; confirm the intended behavior.
    double timezoneShift = 0.0;

    // NOTE(review): overrides the base clock to return a constant 0,
    // ignoring both super's time and timezoneShift — presumably a stub.
    @Override
    double getHours() {
        return 0;
    }

    // Smoke test: the getters can be called without throwing.
    // NOTE(review): these test* methods are plain methods, not JUnit tests;
    // results are unused and nothing invokes them automatically.
    void testGetTime(){
        Clock clock = new TimezoneClock();
        double hours = clock.getHours();
        double minutes = clock.getMinutes();
        double seconds = clock.getSeconds();
    }

    // Checks that 6.50 hours round-trips as 6h 30m 0s.
    void testGetCorrectTime(){
        Clock clock = new TimezoneClock();
        clock.setHours(6.50);
        assert clock.getHours() == 6.5;
        assert clock.getMinutes() == 30.0;
        assert clock.getSeconds() == 0.0;
    }

    // Checks that a started clock advances roughly one second per second.
    void testGetFlowingTime(){
        Clock clock= new TimezoneClock();
        clock.setHours(1.00);
        clock.start();
        pause(1.0);
        double now = clock.getHours();
        double shouldBe = 1.0 + 1.0/3600.0;
        assert abs (now-shouldBe) < 0.1 / 3600.0;
    }

    // Equality requires matching timezoneShift and base-clock equality.
    // NOTE(review): equals() is overridden without a matching hashCode();
    // hash-based collections will misbehave if instances are used as keys.
    @Override
    public boolean equals(Object object){
        if (object instanceof TimezoneClock){
            return timezoneShift == ((TimezoneClock)object).timezoneShift
                && super.equals(object);
        }else{
            return false;
        }
    }
}
|
package handlers
import (
"bytes"
"encoding/json"
"fmt"
"image/png"
"io/ioutil"
"net/http"
mgc "github.com/mh-cbon/mdl-go-components"
"github.com/mh-cbon/mdl-go-components/components"
"github.com/vincent-petithory/dataurl"
)
// Cropper renders the cropper demo page: two forms posting to
// /cropper_post, one returning the crop as a base64 data-URL and one
// returning it as a JSON geometry description.
func Cropper(w http.ResponseWriter, r *http.Request) {
	data := &viewData{
		Title:      "Cropper",
		Components: make([]mgc.ViewComponentRenderer, 0),
	}

	// buildExample assembles one slice containing a cropper form.
	// mode == "" leaves the cropper in its default (base64) result mode.
	buildExample := func(label, resultType, mode string) mgc.ViewComponentRenderer {
		slice := components.NewSlice()

		cropper := components.NewCropper()
		cropper.SetCurrentImg("/static/avatar-1.png")
		cropper.SetName("result")
		if mode != "" {
			cropper.SetResultMode(mode)
		}
		cropper.SetLabel(label)

		form := components.NewForm()
		form.AddComponent(cropper)
		form.SetMethod("POST")
		form.SetAction("/cropper_post")
		form.AddHidden("result_type").SetValue(resultType)
		form.AddSubmit("vv").SetLabel("Submit")

		slice.AddComponent(form)
		return slice
	}

	data.Components = append(data.Components, buildExample("base64 result", "base64", ""))
	data.Components = append(data.Components, buildExample("geometry result", "geometry", "geometry"))

	//- add an example to show how to manage the response on the server side
	renderComponents(w, data)
}
// CropperPost handles the form posted by the Cropper demo page: it prints
// the submitted crop result (either a base64 data-URL image or a JSON
// geometry description) and redirects back to /cropper.
func CropperPost(w http.ResponseWriter, r *http.Request) {
	// we will just print out the interesting data
	// and demo the interesting code
	// then return to /cropper
	r.ParseForm()
	result := r.Form.Get("result")
	result_type := r.Form.Get("result_type")
	fmt.Println(result_type)
	if result_type == "base64" {
		// FIX: only preview up to 50 bytes — the original indexed
		// result[0:50] unconditionally, which panics on shorter payloads.
		preview := result
		if len(preview) > 50 {
			preview = preview[:50]
		}
		fmt.Println(preview)
		dataURL, err := dataurl.DecodeString(result)
		if err != nil {
			panic(err)
		}
		fmt.Println(dataURL.ContentType())
		// First pass: decode just the header to show width/height/format.
		var b bytes.Buffer
		b.Write(dataURL.Data)
		config, err := png.DecodeConfig(&b)
		if err != nil {
			panic(err)
		}
		fmt.Println(config)
		// Second pass: decode the full image (DecodeConfig consumed bytes).
		b.Truncate(0)
		b.Write(dataURL.Data)
		img, err := png.Decode(&b)
		if err != nil {
			panic(err)
		}
		// write the file (demo only — output is discarded, error ignored)
		png.Encode(ioutil.Discard, img)
	} else if result_type == "geometry" {
		fmt.Println(result)
		geo := &components.GeometryResult{}
		if err := json.Unmarshal([]byte(result), geo); err != nil {
			panic(err)
		}
		fmt.Println(geo)
		// rest is up to you !
	}
	http.Redirect(w, r, "/cropper", http.StatusSeeOther)
}
|
'use strict';
const hello = require('hello-common')
exports.main = async (event) => {
let res = await hello.getVersion();
console.log(res)
return res
};
|
# Sample data to inspect.
nums = [8, 5, 11, 6, 9, 15]

# Order the values from largest to smallest.
nums = sorted(nums, reverse=True)

# With a descending ordering, position 1 holds the runner-up value.
second_highest = nums[1]

# Report the result.
print("The second highest number is", second_highest)
public void removeDuplicates(int[] arr) {
Set<Integer> set = new HashSet<>();
int j = 0;
for(int i = 0 ; i < arr.length ; i++) {
if(!set.contains(arr[i])) {
arr[j++] = arr[i];
set.add(arr[i]);
}
}
int[] output = new int[set.size()];
int k = 0;
for(Integer val : set) {
output[k++] = val;
}
for(int i = 0 ; i < output.length ; i++ ) {
arr[i] = output[i];
}
} |
/* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.scripting;
import games.stendhal.client.ClientSingletonRepository;
import games.stendhal.client.gui.chatlog.HeaderLessEventLine;
import games.stendhal.common.NotificationType;
import marauroa.common.game.RPAction;
/**
* Parses the input in the chat box and invokes the appropriate action.
*/
public abstract class ChatLineParser {

	/**
	 * parses a chat/command line and processes the result.
	 *
	 * @param input
	 *            string to handle
	 *
	 * @return <code>true</code> if command was valid enough to process,
	 *         <code>false</code> otherwise.
	 */
	public static boolean parseAndHandle(final String input) {
		final String text = input.trim();

		// Empty lines carry nothing to do.
		if (text.isEmpty()) {
			return false;
		}

		// Plain chat message — the most frequent case.
		if (!text.startsWith("/")) {
			final RPAction chat = new RPAction("chat");
			chat.put("type", "chat");
			chat.put("text", text);
			ClientSingletonRepository.getClientFramework().send(chat);
			return true;
		}

		// Slash command: parse everything after the leading '/'.
		final SlashActionCommand command = SlashActionParser.parse(text.substring(1));
		final String[] params = command.getParams();

		if (command.hasError()) {
			ClientSingletonRepository.getUserInterface().addEventLine(
					new HeaderLessEventLine(command.getErrorString(),
							NotificationType.ERROR));
			return false;
		}

		// A locally-known action handles the command itself.
		if (command.getAction() != null) {
			return command.getAction().execute(params, command.getRemainder());
		}

		// Otherwise forward the command to the server as an extension action.
		final RPAction extension = new RPAction();
		extension.put("type", command.getName());
		if ((params.length > 0) && (params[0] != null)) {
			extension.put("target", params[0]);
			extension.put("args", command.getRemainder());
		}
		ClientSingletonRepository.getClientFramework().send(extension);
		return true;
	}
}
|
#!/bin/bash
# Generates one Bro/Zeek btest coverage script per packet capture: for every
# *.pcapng trace in the current directory, writes a test that replays the
# trace and diffs the triggered-event coverage against the analyzer's
# events.bif.
protocol=enip
extension=pcapng

# Shared tail of every generated test: counts covered vs. total events.
test="# @TEST-EXEC: btest-diff output\n# @TEST-EXEC: cat output | awk '{print \$1}' | sort | uniq | wc -l > covered\n# @TEST-EXEC: cat \${DIST}/src/analyzer/protocol/$protocol/events.bif | grep \"^event "$protocol"_\" | wc -l > total\n# @TEST-EXEC: echo \`cat covered\` of \`cat total\` events triggered by trace > coverage\n# @TEST-EXEC: btest-diff coverage\n# @TEST-EXEC: btest-diff $protocol.log\n#"

# Glob instead of `ls` so filenames are handled verbatim.
for i in *; do
	if [[ $i = *."$extension" ]]; then
		# Base name: everything before the first dot (was: cut -d'.' -f1).
		file="${i%%.*}"
		test0="#\n# @TEST-EXEC: bro -r \$TRACES/$protocol/$file.pcapng %DIR/events.bro > output\n$test"
		# FIX: quote the payload (unquoted expansion squeezed whitespace)
		# and use $protocol in the output path — it was hardcoded to "enip",
		# which silently broke the script for any other protocol value.
		echo -e "$test0" > "../../scripts/base/protocols/$protocol/$file.bro"
	fi
done
|
def discrete_logarithm(n, base):
    """Return the largest integer exponent p >= 0 such that base**p <= n.

    Despite the name, this is the integer (floor) logarithm, not the
    modular discrete-log problem.

    Args:
        n: value to take the logarithm of.
        base: logarithm base; must be greater than 1.

    Returns:
        The largest p with base**p <= n (0 when n < base).

    Raises:
        ValueError: if base <= 1 (the original looped forever for base 1).
    """
    # FIX: guard against base <= 1 — base**power never grows, so the
    # original while-loop never terminated. Also dropped the unused
    # `result` variable.
    if base <= 1:
        raise ValueError("base must be greater than 1")
    power = 1
    while base ** power <= n:
        power += 1
    return power - 1
/**
* @author <NAME> / https://github.com/Leeft
*/
import SCMAP from '../scmap';
import StarSystem from './star-system';
import { Color } from './three';
const DEFAULTS = {
id: undefined,
name: 'Unclaimed',
isRealFaction: false,
color: new Color( 0xFFFFFF ),
parentFaction: null,
};
/**
 * A faction in the star map: carries display colors, tracks which star
 * systems it has claimed, and knows its hostility toward other factions.
 */
class Faction {
  constructor ( data ) {
    Object.assign( this, DEFAULTS, data );

    // Internals
    this._claimed = {
      systems: {}
    };
  }

  // Dim, saturated variant of the faction color for territory planes.
  get planeColor () {
    return this.color.clone().offsetHSL( 0, 0.5, 0 ).multiplyScalar( 0.20 );
  }

  // Slightly shifted variant of the faction color for border lines.
  get lineColor () {
    return this.color.clone().offsetHSL( 0, 0.05, -0.05 );
  }

  claim ( system ) {
    // FIX: the original wrote `! system instanceof StarSystem`, which parses
    // as `(!system) instanceof StarSystem` and is always false, so this
    // validation never fired.
    if ( !( system instanceof StarSystem ) ) {
      throw new Error( `A faction can only claim ownership over a system` );
    }
    this._claimed.systems[ system.uuid ] = true;
    return this;
  }

  claimed ( system ) {
    // FIX: same precedence bug as claim() — wrap the instanceof test.
    if ( !( system instanceof StarSystem ) ) {
      throw new Error( `A faction can only test ownership over a system` );
    }
    return this._claimed.systems[ system.uuid ];
  }

  isHostileTo ( comparedTo ) {
    if ( !( comparedTo instanceof Faction ) ) {
      throw new Error( `Can only compare to other factions` );
    }

    // FIXME: more data in database, more logic here
    // rather than lots of hardcoding
    if ( comparedTo.name === 'Vanduul' ) {
      return ( this.name !== 'Vanduul' );
    } else {
      return ( this.name === 'Vanduul' );
    }
  }
}
export default Faction;
|
#!/usr/bin/env bash
# Runs cfn-guard against every CloudFormation template under
# $INPUT_CFN_DIRECTORY that has a sibling .ruleset file, failing fast on
# the first rule violation.

# validate inputs
if [ -z "${INPUT_CFN_DIRECTORY}" ] ; then
  echo "Missing 'cfn_directory' parameter."
  echo "Set this to the directory where your CloudFormation Templates are located."
  exit 1
fi

# find templates with 'Resources' (a section every CFN template must have)
POSSIBLE_TEMPLATES=$(grep --with-filename --recursive 'Resources' "${INPUT_CFN_DIRECTORY}"/* | cut -d':' -f1 | sort -u)

for f in $POSSIBLE_TEMPLATES; do
  if grep -q ".yml" <<< "${f}"; then
    echo "Checking for ruleset matching template file: ${f}"
    # Ruleset lives next to the template: template.yml -> template.ruleset
    rules="${f%.*}.ruleset"
    echo "  Found: $rules"
    echo "Running command:"
    echo "$ cfn-guard validate -d ${f} -r ${rules}"
    # FIX: run the command directly with quoted arguments and test its exit
    # status inline — the original built an unquoted command string (broken
    # by paths with spaces) and checked $? afterwards.
    if ! cfn-guard validate -d "${f}" -r "${rules}"; then
      echo "CFN GUARD FAIL!"
      exit 1
    fi
  fi
done
echo "CloudFormation Guard Scan Complete"
|
<gh_stars>1-10
package be.kwakeroni.scratch.tv;
import be.kwakeroni.parameters.backend.es.api.ElasticSearchGroup;
import be.kwakeroni.parameters.backend.inmemory.api.EntryData;
import be.kwakeroni.parameters.backend.inmemory.support.DefaultEntryData;
import be.kwakeroni.parameters.basic.backend.es.ElasticSearchQueryBasedRangedGroup;
import be.kwakeroni.parameters.basic.backend.es.ElasticSearchSimpleGroup;
import be.kwakeroni.parameters.basic.backend.inmemory.InmemoryRangedGroup;
import be.kwakeroni.parameters.basic.backend.inmemory.InmemorySimpleGroup;
import be.kwakeroni.parameters.basic.client.model.Ranged;
import be.kwakeroni.parameters.basic.client.model.Simple;
import be.kwakeroni.parameters.basic.client.query.RangedQuery;
import be.kwakeroni.parameters.basic.client.query.ValueQuery;
import be.kwakeroni.parameters.basic.client.support.Entries;
import be.kwakeroni.parameters.basic.type.Range;
import be.kwakeroni.parameters.basic.type.Ranges;
import be.kwakeroni.parameters.client.api.model.Entry;
import be.kwakeroni.parameters.client.api.query.Query;
import be.kwakeroni.parameters.definition.api.ParameterGroupDefinition;
import be.kwakeroni.scratch.tv.definition.AbstractRangedTV;
import be.kwakeroni.test.factory.TestMap;
import java.util.HashMap;
import java.util.Map;
/**
* (C) 2017 <NAME>
*/
/**
 * Shared test-fixture helpers for "ranged TV" parameter groups: factories
 * for entries, entry data and queries against both the in-memory and the
 * ElasticSearch backends.
 */
public interface AbstractRangedTVGroup extends AbstractRangedTV {

    /** Builds a client-side entry mapping a slot range to a program name. */
    public static Entry entry(Slot from, Slot to, String program) {
        return Entries.entryOf(SLOT, Range.of(from, to), PROGRAM, program);
    }

    // For test purposes
    ParameterGroupDefinition<Ranged<Slot, Simple>> getDefinition();

    /** Builds in-memory entry data with the slot range encoded as a string. */
    public static EntryData entryData(Slot from, Slot to, String program) {
        return DefaultEntryData.of(TestMap.of(
                SLOT.getName(), Ranges.toRangeString(from, to, Slot::toString),
                PROGRAM.getName(), program
        ));
    }

    /**
     * Builds entry data as a raw map. When addRangeLimits is true the map
     * also carries the numeric from/to fields the ElasticSearch
     * query-based ranged group expects.
     */
    public static Map<String, ?> entryData(Slot from, Slot to, String program, boolean addRangeLimits) {
        if (addRangeLimits) {
            Map<String, Object> map = new HashMap<>(4);
            map.put(SLOT.getName(), Ranges.toRangeString(from, to, Slot::toString));
            map.put(ElasticSearchQueryBasedRangedGroup.getFromParameter(SLOT.getName()), from.toInt());
            map.put(ElasticSearchQueryBasedRangedGroup.getToParameter(SLOT.getName()), to.toInt());
            map.put(PROGRAM.getName(), program);
            return map;
        } else {
            return entryData(from, to, program).asMap();
        }
    }

    // For test purposes
    /** Query for the program value at the given slot. */
    public static Query<Ranged<Slot, Simple>, String> programQuery(Slot slot) {
        return new RangedQuery<>(slot, Slot.type,
                new ValueQuery<>(MappedTVGroup.PROGRAM));
    }

    /** In-memory backend group wrapping a simple group in a ranged wrapper. */
    public static InmemoryRangedGroup inmemoryTestGroup(String name, ParameterGroupDefinition definition) {
        return new InmemoryRangedGroup(SLOT.getName(), Ranges.stringRangeTypeOf(Slot.type), definition, new InmemorySimpleGroup(name, definition, SLOT.getName(), PROGRAM.getName()));
    }

    /** ElasticSearch backend sub-group for the same slot/program fields. */
    public static ElasticSearchGroup elasticSearchSubGroup(String name, ParameterGroupDefinition definition) {
        return new ElasticSearchSimpleGroup(name, definition, SLOT.getName(), PROGRAM.getName());
    }
}
|
<gh_stars>1-10
import _ from 'lodash';
import {createSelector} from 'reselect';
import loadingSelector from './loadingSelector';
import sessionsSelector from './sessionsSelector';
import selectedSelector from './selectedSelector';
import {DATA_STATE} from 'constants';
// selector
// Derives the data state for the session-detail view: READY when not
// loading; while loading, distinguishes whether a cached description of
// the selected session already exists.
export default createSelector(
  loadingSelector,
  sessionsSelector,
  selectedSelector,
  (loading, sessions, {sessionId}) => {
    if (!loading.sessionDetail) {
      return DATA_STATE.READY;
    }
    const session = _.find(sessions, s => s.id === sessionId);
    if (session?.description?.length > 0) {
      return DATA_STATE.LOADING_DATA_OLD_CACHED;
    }
    return DATA_STATE.LOADING_DATA_NONE_CACHED;
  },
);
|
// #THIRD_KIND_PLAYFAB_NOTIFICATION_BUS: dbowen (2017/08/11)
#pragma once
#include <AzCore/EBus/EBus.h>
#include <PlayFabServerSdk/PlayFabMatchmakerDataModels.h>
namespace PlayFabServerSdk
{
    // Request based notification bus, use this bus when you want to listen for a callback from a specific instance of an API request.
    // Generated wrapper: one virtual per Matchmaker API call, with empty
    // default bodies so handlers override only what they need.
    class PlayFabServer_MatchmakerNotifications
        : public AZ::EBusTraits
    {
    public:
        // The EBus has multiple addresses. Addresses are not ordered. The address corresponds to the ID of the request.
        static const AZ::EBusAddressPolicy AddressPolicy = AZ::EBusAddressPolicy::ById;
        typedef int BusIdType;

        // ------------ General notifications (not related to a specific API call)
        virtual void OnError(const PlayFabError& error) {};

        // ------------ Generated API call wrappers
        virtual void OnAuthUser(const MatchmakerModels::AuthUserResponse& result) {};
        virtual void OnPlayerJoined(const MatchmakerModels::PlayerJoinedResponse& result) {};
        virtual void OnPlayerLeft(const MatchmakerModels::PlayerLeftResponse& result) {};
        virtual void OnStartGame(const MatchmakerModels::StartGameResponse& result) {};
        virtual void OnUserInfo(const MatchmakerModels::UserInfoResponse& result) {};
    };
    using PlayFabServer_MatchmakerNotificationBus = AZ::EBus<PlayFabServer_MatchmakerNotifications>;

    // Global notification bus - use this bus when you want to listen for all responses to a particular type of request.
    // Unlike the per-request bus above, handlers receive the originating
    // requestId so they can correlate responses themselves.
    class PlayFabServer_MatchmakerGlobalNotifications
        : public AZ::EBusTraits
    {
    public:
        // The EBus has a single address, all notifications go to this address.
        static const AZ::EBusAddressPolicy AddressPolicy = AZ::EBusAddressPolicy::Single;

        // ------------ General notifications (not related to a specific API call)
        virtual void OnError(const PlayFabError& error, int requestId) {};

        // ------------ Generated API call wrappers
        virtual void OnAuthUser(const MatchmakerModels::AuthUserResponse& result, int requestId) {};
        virtual void OnPlayerJoined(const MatchmakerModels::PlayerJoinedResponse& result, int requestId) {};
        virtual void OnPlayerLeft(const MatchmakerModels::PlayerLeftResponse& result, int requestId) {};
        virtual void OnStartGame(const MatchmakerModels::StartGameResponse& result, int requestId) {};
        virtual void OnUserInfo(const MatchmakerModels::UserInfoResponse& result, int requestId) {};
    };
    using PlayFabServer_MatchmakerGlobalNotificationBus = AZ::EBus<PlayFabServer_MatchmakerGlobalNotifications>;

} // namespace PlayFabServerSdk
|
/**
 * Logs each UTF-16 code unit of str as "<char>: <code>", one per line.
 * Note: iterates code units, so astral characters log as surrogate pairs.
 */
function printUnicode(str) {
  for (let i = 0; i < str.length; i += 1) {
    console.log(`${str[i]}: ${str.charCodeAt(i)}`);
  }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.