text stringlengths 1 1.05M |
|---|
<filename>src/buffer.c<gh_stars>1-10
/*
* buffer.c -- growable data buffers.
*
* by <NAME>
*
* Copyright 2003-2012 See accompanying license
*
*/
#include "cat/buffer.h"
#include <stdlib.h>
#include <string.h>
#if CAT_USE_INLINE
#define INLINE inline
#else
#define INLINE
#endif
/* Assert the structural invariants of a dynamic buffer: a memory
 * manager is attached, the valid data window [off, off+len) fits
 * inside the allocation, and a NULL data pointer implies a
 * zero-sized allocation. */
static INLINE void dyb_sanity(struct dynbuf *b)
{
	abort_unless(b && b->mm != NULL &&
		     (b->off <= b->size) &&
		     (b->size - b->off >= b->len) &&
		     (b->data != NULL || b->size == 0));
}
/* Initialize B as an empty buffer with no storage, using memory
 * manager MM, or the library default 'stdmm' when MM is NULL. */
void dyb_init(struct dynbuf *b, struct memmgr *mm)
{
	b->size = 0;
	b->data = NULL;
	b->off = 0;
	b->len = 0;
	if ( mm == NULL )
		b->mm = &stdmm;
	else
		b->mm = mm;
}
/* Ensure the buffer's allocation is at least SZ bytes, growing it via
 * the attached memory manager if needed.  Existing contents and the
 * off/len window are preserved.  Returns 0 on success, -1 if the
 * reallocation failed. */
int dyb_resv(struct dynbuf *b, ulong sz)
{
	byte_t *p;
	dyb_sanity(b);
	if ( sz <= b->size )
		return 0;
	/* always at least double size if possible to */
	/* make the max # of allocations the ceiling of lg2 of SIZE_MAX */
	if ( (b->size < ((ulong)-1) / 2) && (sz < b->size * 2) )
		sz = b->size * 2;
	p = mem_resize(b->mm, b->data, sz);
	if ( p == NULL )
		return -1;
	b->data = p;
	b->size = sz;
	return 0;
}
/* Free the buffer's storage and reset it to the freshly-initialized
 * empty state, keeping the same memory manager. */
void dyb_clear(struct dynbuf *b)
{
	dyb_sanity(b);
	mem_free(b->mm, b->data);
	dyb_init(b, b->mm);
}
/* Detach and return the buffer's storage, leaving B empty.  The caller
 * takes ownership of the returned pointer and must free it with the
 * same memory manager that allocated it. */
void *dyb_release(struct dynbuf *b)
{
	void *p;
	dyb_sanity(b);
	p = b->data;
	b->data = NULL;
	b->size = 0;
	b->len = 0;
	b->off = 0;
	return p;
}
/* Discard the buffer's logical contents (off/len window) without
 * releasing the underlying allocation. */
void dyb_empty(struct dynbuf *b)
{
	dyb_sanity(b);
	b->len = 0;
	b->off = 0;
}
/* Append LEN bytes from P to the end of the buffer's data window
 * without growing the allocation.  Returns 0 on success, -1 if the
 * remaining capacity after the window is too small. */
int dyb_cat(struct dynbuf *b, void *p, ulong len)
{
	dyb_sanity(b);
	if ( b->size - b->off - b->len < len )
		return -1;
	memmove(b->data + b->off + b->len, p, len);
	b->len += len;
	return 0;
}
/* Append LEN bytes from P to the end of the buffer's data window,
 * growing the allocation if needed.  Returns 0 on success, -1 if
 * off + len + LEN would overflow ulong or the allocation could not
 * be grown. */
int dyb_cat_a(struct dynbuf *b, void *p, ulong len)
{
	dyb_sanity(b);
	if ( b->size - b->off - b->len < len ) {
		/* reject requests whose total extent would wrap ulong */
		if ( ((ulong)-1) - b->off - b->len < len )
			return -1;
		if ( dyb_resv(b, b->off + b->len + len) < 0 )
			return -1;
	}
	memmove(b->data + b->off + b->len, p, len);
	b->len += len;
	return 0;
}
/* Copy LEN bytes from P into the buffer at offset OFF without growing
 * the allocation; the data window becomes [OFF, OFF+LEN).  Returns 0
 * on success, -1 if the region does not fit in the allocation. */
int dyb_set(struct dynbuf *b, ulong off, void *p, ulong len)
{
	dyb_sanity(b);
	if ( b->size < off || b->size - off < len )
		return -1;
	b->off = off;
	b->len = len;
	memmove(b->data + off, p, len);
	return 0;
}
/* Copy LEN bytes from P into the buffer at offset OFF, growing the
 * allocation if necessary; the data window becomes [OFF, OFF+LEN).
 * Returns 0 on success, -1 on arithmetic overflow or if the
 * allocation could not be grown. */
int dyb_set_a(struct dynbuf *b, ulong off, void *p, ulong len)
{
	ulong tlen;
	dyb_sanity(b);
	/* BUG FIX: the overflow test was inverted ('>' instead of '<'),
	 * which rejected every request that did NOT overflow.  Fail only
	 * when off + len would wrap past ULONG_MAX. */
	if ( ((ulong)-1) - off < len )
		return -1;
	tlen = off + len;
	if ( b->size < tlen ) {
		if ( dyb_resv(b, tlen) < 0 )
			return -1;
	}
	b->off = off;
	b->len = len;
	memmove(b->data + off, p, len);
	return 0;
}
/* Copy SB's data window (and off/len) into DB, growing DB's allocation
 * as needed.  Returns 0 on success, -1 if DB could not be grown. */
int dyb_copy(struct dynbuf *db, struct dynbuf *sb)
{
	dyb_sanity(db);
	dyb_sanity(sb);
	/* BUG FIX: previously reserved db->size (always a no-op), so the
	 * memmove below could write past db->data when the source window
	 * was larger than the destination allocation.  Reserve room for
	 * the source's offset + length instead; the dyb_sanity invariant
	 * (off + len <= size) guarantees this sum cannot overflow. */
	if ( dyb_resv(db, sb->off + sb->len) < 0 )
		return -1;
	db->off = sb->off;
	db->len = sb->len;
	memmove(db->data + db->off, sb->data + sb->off, sb->len);
	return 0;
}
|
<reponame>tony-aq/optic
import { BodyShapeDiff, ParsedDiff, DiffLocation } from './parse-diff';
import {
BodyPreview,
CurrentSpecContext,
IChangeType,
IDiffDescription,
} from './Interfaces';
import { getExpectationsForShapeTrail } from './shape-diff-dsl-rust';
import { code, ICopy, plain } from '<src>/pages/diffs/components/ICopyRender';
import { IJsonObjectKey } from '@useoptic/cli-shared/build/diffs/json-trail';
import { ICoreShapeKinds, IHttpInteraction } from '@useoptic/optic-domain';
import { toJsonExample } from '@useoptic/shape-hash';
import { namer, nameForCoreShapeKind } from './quick-namer';
/**
 * Resolves the interaction section matching `location` and packages it
 * as a BodyPreview.  Selection preserves the original truthiness chain:
 * query parameters first, then request body, then response body.
 * Query parameters are never rendered as parsed JSON.
 */
const getJsonBodyToPreview = (
  location: DiffLocation,
  interaction: IHttpInteraction
): BodyPreview => {
  const selectBody = () => {
    if (location.isQueryParameter() && interaction.request.query) {
      return interaction.request.query;
    }
    if (location.isRequest() && interaction.request.body.value) {
      return interaction.request.body.value;
    }
    if (location.isResponse() && interaction.response.body.value) {
      return interaction.response.body.value;
    }
    return undefined;
  };

  const body = selectBody();
  if (!body) {
    return { asJson: null, asText: null, noBody: true, empty: false };
  }

  const { shapeHashV1Base64, asText, asJsonString } = body;
  const isQuery = location.isQueryParameter();

  if (asJsonString && !isQuery) {
    return {
      asJson: JSON.parse(asJsonString),
      asText: null,
      noBody: false,
      empty: false,
    };
  }
  if (shapeHashV1Base64 && !isQuery) {
    return {
      asJson: toJsonExample(shapeHashV1Base64),
      asText: null,
      noBody: false,
      empty: false,
    };
  }
  if (asText) {
    return { asJson: null, asText: asText, noBody: false, empty: false };
  }
  return { asJson: null, asText: null, noBody: false, empty: true };
};
/**
 * Describes an "undocumented region" diff — a whole request, response,
 * or set of query parameters never seen in the spec.  The most specific
 * descriptor wins: response, then request, then query parameters.
 */
export function descriptionForNewRegions(
  diff: ParsedDiff,
  location: DiffLocation
): IDiffDescription {
  const buildTitle = (): ICopy[] => {
    const responseDesc = location.getResponseDescriptor();
    if (responseDesc) {
      return [
        plain('undocumented'),
        code(responseDesc.statusCode.toString()),
        plain('response with'),
        code(responseDesc.contentType || 'No Body'),
        plain('observed'),
      ];
    }
    const requestDesc = location.getRequestDescriptor();
    if (requestDesc) {
      return [
        plain('undocumented'),
        code(requestDesc.contentType),
        plain('request observed'),
      ];
    }
    if (location.isQueryParameter()) {
      return [plain('undocumented query parameters observed')];
    }
    return [];
  };

  return {
    title: buildTitle(),
    changeType: IChangeType.Added,
    location,
    diffHash: diff.diffHash,
    assertion: [plain('Undocumented Body Observed')],
    getJsonBodyToPreview: getJsonBodyToPreview.bind(null, location),
  };
}
/**
 * Builds a human-readable description for a body *shape* diff (a value
 * that did not match the documented shape, or an undocumented field).
 * Falls through four handlers in order: root shape, known field,
 * undocumented field, list item; a final catch-all marks unknown diffs.
 */
export async function descriptionForShapeDiff(
  asShapeDiff: BodyShapeDiff,
  query: any,
  currentSpecContext: CurrentSpecContext
): Promise<IDiffDescription> {
  const location = asShapeDiff.location;
  const jsonTrailPath = asShapeDiff.jsonTrail.path;
  // Last trail component; an object key here indicates a field-level diff.
  const jsonTrailLast = jsonTrailPath[jsonTrailPath.length - 1]!;
  const expected = await getExpectationsForShapeTrail(
    asShapeDiff.shapeTrail,
    asShapeDiff.jsonTrail,
    query,
    currentSpecContext
  );
  // Root handler: an empty trail means the whole body shape mismatched.
  if (jsonTrailPath.length === 0) {
    return {
      title: [plain('root shape did not match'), code(expected.shapeName())],
      location,
      changeType: IChangeType.Changed,
      assertion: [plain('expected'), code(expected.shapeName())],
      diffHash: asShapeDiff.diffHash(),
      getJsonBodyToPreview: getJsonBodyToPreview.bind(null, location),
    };
  }
  // Known field handler: the field exists in the spec but its value mismatched.
  if (expected.isField()) {
    if (asShapeDiff.isUnmatched) {
      // Query parameters get a friendlier name ('multiple' instead of 'List').
      let shapeName = location.isQueryParameter()
        ? namer(Array.from(expected.expectedShapes()), (kind) => {
            if (kind === ICoreShapeKinds.ListKind) {
              return 'multiple';
            } else {
              return nameForCoreShapeKind(kind);
            }
          })
        : expected.shapeName();
      return {
        title: [
          plain('values of '),
          code(expected.fieldKey()),
          plain('did not match'),
          code(shapeName),
        ],
        location,
        changeType: IChangeType.Changed,
        diffHash: asShapeDiff.diffHash(),
        assertion: [plain('expected'), code(shapeName)],
        getJsonBodyToPreview: getJsonBodyToPreview.bind(null, location),
      };
    }
  }
  // Undocumented field handler: observed an object key the spec doesn't know.
  const lastIsField = (jsonTrailLast as IJsonObjectKey).JsonObjectKey;
  if (asShapeDiff.isUnspecified && lastIsField) {
    const undocumentedLocation = location.isQueryParameter()
      ? 'undocumented query parameter'
      : 'undocumented field';
    return {
      title: [
        plain(undocumentedLocation),
        code(lastIsField.key),
        plain('observed'),
      ],
      location,
      changeType: IChangeType.Added,
      diffHash: asShapeDiff.diffHash(),
      assertion: [code(undocumentedLocation)],
      getJsonBodyToPreview: getJsonBodyToPreview.bind(null, location),
    };
  }
  // List item handler: an element of a documented list mismatched.
  if (expected.isListItemShape()) {
    return {
      title: [plain('list items did not match'), code(expected.shapeName())],
      location,
      changeType: IChangeType.Changed,
      diffHash: asShapeDiff.diffHash(),
      assertion: [plain('expected'), code(expected.shapeName())],
      getJsonBodyToPreview: getJsonBodyToPreview.bind(null, location),
    };
  }
  // Fallback: we shouldn't ever get here; flag it so the UI can surface it.
  return {
    title: [plain('unknown diff kind')],
    location,
    changeType: IChangeType.Changed,
    assertion: [],
    unknownDiffBehavior: true,
    diffHash: asShapeDiff.diffHash(),
    getJsonBodyToPreview: getJsonBodyToPreview.bind(null, location),
  };
}
|
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
#
# Register extra package feed sources before 'feeds update' runs.
# All four lines are appended in one grouped redirect.
{
  echo 'src-git helloworld https://github.com/fw876/helloworld'
  echo 'src-git passwall https://github.com/xiaorouji/openwrt-passwall'
  echo 'src-git kenzo https://github.com/kenzok8/openwrt-packages'
  echo 'src-git small https://github.com/kenzok8/small'
} >>feeds.conf.default
#echo 'src-git liuran001_packages https://github.com/liuran001/openwrt-packages' >>feeds.conf.default
|
<filename>migrations/2_deploy_contracts.js
// Truffle migration script: registers the slotMachine contract so that
// `truffle migrate` deploys it to the configured network in this step.
var slotMachine = artifacts.require("./slotMachine.sol");
module.exports = function(deployer) {
  deployer.deploy(slotMachine);
};
|
#!/bin/bash
# Setup Build environment for compiling matlab scripts at Broad
# These settings work with versions 7.2, 7.3
# Set in global_vars.sh
# MATLAB_ROOT=/broad/tools/apps/matlab7.2
# MATLAB_ROOT=/broad/tools/apps/matlab73
# MATLAB_ROOT=/broad/tools/apps/matlab76
# Pull in MORTARPATH, MATLAB_ROOT, MCR_VERSION, GCC_PATH, UPDATEMORTAR, etc.
source $(dirname $0)/global_vars.sh
echo "Using Mortar: $MORTARPATH"
# MATLAB path and runtime version — fail fast if global_vars.sh did not set them.
MATLAB_ROOT=${MATLAB_ROOT:?"MATLAB_ROOT not set"}
MCR_VERSION=${MCR_VERSION:?"MCR_VERSION not set"}
#update Mortar
if [ $UPDATEMORTAR -eq 1 ]; then
    echo "Updating Mortar..."
    (cd $MORTARPATH; git pull)
fi
# add mortar to includes: every Mortar source dir except VCS metadata,
# docs, external deps, tools, tests, templates and scratch areas,
# each prefixed with '-I' for the mcc compiler command line.
MCC_INCLUDE=$(find $MORTARPATH -type d|egrep -v "\.git|mortar/doc|$MORTARPATH/ext|$MORTARPATH/tools|tests|templates|resources|\+mortar|node_modules|\+work|scratch|$MORTARPATH/js"| sed 's/^/-I /')
# add yaml parser
MCC_INCLUDE="$MCC_INCLUDE -I $MORTARPATH/ext/yamlmatlab"
MCC_INCLUDE="$MCC_INCLUDE -I $MORTARPATH/ext/jsonlab"
MCC_INCLUDE="$MCC_INCLUDE -I $MORTARPATH/tests"
# Add resources to CTF archive (bundled into the compiled app with -a)
MCC_ADD="-a $MORTARPATH/resources \
-a $MORTARPATH/ext/mongo-matlab-driver \
-a $MORTARPATH/ext/bin \
-a $MORTARPATH/ext/bh_tsne \
-a $MORTARPATH/ext/smi2fp \
-a $MORTARPATH/ext/jars \
-a $MORTARPATH/templates \
-a $MORTARPATH/tests/assets \
-a $MORTARPATH/tests"
# Matlab76 requires gcc >= 4.0.0 and <= 4.2.0
# Prepend /util/gcc-4.1.1/bin to PATH and
# /util/gcc-4.1.1/lib64:/util/gcc-4.1.1/lib to LD_LIBRARY_PATH
# configure the "dotkit" environment maintenance system
#eval `/broad/tools/dotkit/init`
#use gcc-4.3.0
export MATLAB_ROOT=$MATLAB_ROOT
export MATLAB_ARCH=$MATLAB_ARCH
export PATH=$GCC_PATH/bin:$PATH
# Java runtime for the selected Matlab
#JREFULL=$(find $MATLAB_ROOT/sys/java/jre/$MATLAB_ARCH/ -name 'jre*' -type d)
#JRE=$(basename $JREFULL)
# export LD_LIBRARY_PATH=$GCC_PATH/lib64:$GCC_PATH/lib:\
# $LD_LIBRARY_PATH:\
# $MATLAB_ROOT/sys/os/$MATLAB_ARCH:\
# $MATLAB_ROOT/bin/$MATLAB_ARCH:\
# $MATLAB_ROOT/sys/java/jre/$MATLAB_ARCH/$JRE/lib/i386/native_threads:\
# $MATLAB_ROOT/sys/java/jre$MATLAB_ARCH/$JRE/lib/i386/client:\
# $MATLAB_ROOT/sys/java/jre/$MATLAB_ARCH/$JRE/lib/i386
# export XAPPLRESDIR=$MATLAB_ROOT/X11/app-defaults
# Build LD_LIBRARY_PATH for the MATLAB runtime, OS libs, and bundled JRE.
LD_LIBRARY_PATH=.:${MATLAB_ROOT}/runtime/$MATLAB_ARCH ;
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MATLAB_ROOT}/bin/$MATLAB_ARCH ;
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MATLAB_ROOT}/sys/os/$MATLAB_ARCH;
MCRJRE=$(find $MATLAB_ROOT/sys/java/jre/$MATLAB_ARCH/ -name 'jre' -type d)
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRJRE}/native_threads ;
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRJRE}/server ;
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRJRE}/client ;
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRJRE} ;
# X resources directory required by the MATLAB runtime's UI components.
XAPPLRESDIR=${MATLAB_ROOT}/X11/app-defaults ;
export LD_LIBRARY_PATH;
export XAPPLRESDIR;
|
// WindiCSS configuration: enables the official utility plugins
// (typography, form resets, aspect-ratio, line clamping, CSS filters,
// and scroll-snap helpers).
module.exports = {
  plugins: [
    require('windicss/plugin/typography'),
    require('windicss/plugin/forms'),
    require('windicss/plugin/aspect-ratio'),
    require('windicss/plugin/line-clamp'),
    require('windicss/plugin/filters'),
    require('windicss/plugin/scroll-snap'),
  ],
};
|
'use strict';

// Live-search controller: watches the search field, queries the API on
// every change, and exposes track/artist/album results to the view.
angular.module('guityApp')
  .controller('SearchCtrl', function ($scope, $location, guityAPI) {
    //TODO: Should be better both style and code.
    //TODO: If click elsewhere but the result page, then close it.
    $scope.showSearch = false;

    // Hide the results dropdown.  (Previously re-assigned on every
    // keystroke inside the watcher; defining it once behaves the same.)
    $scope.hideSearch = function () {
      $scope.showSearch = false;
    };

    $scope.$watch('searchField', function (newVal) {
      if (newVal) {
        guityAPI.getSearch(newVal).then(function (data) {
          $scope.results = data.tracks.items;
          var artists = [],
              albums = [];
          angular.forEach($scope.results, function (result) {
            artists.push(result.artists[0]);
            albums.push(result.album);
          });
          $scope.artists = artists;
          $scope.albums = albums;
        });
        $scope.showSearch = true;
      } else {
        // BUG FIX: this previously cleared '$scope.artist' (a typo'd,
        // unused property), so stale artists/albums stayed visible when
        // the field was emptied.  Clear the real result collections.
        $scope.artists = [];
        $scope.albums = [];
        $scope.showSearch = false;
      }
    });
  });
|
<gh_stars>1-10
// Copyright 2020 Chaos Mesh Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
package v1alpha1
import (
"fmt"
"reflect"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/validation/field"
logf "sigs.k8s.io/controller-runtime/pkg/log"
"sigs.k8s.io/controller-runtime/pkg/webhook"
)
// log is for logging in this package.
var podchaoslog = logf.Log.WithName("podchaos-resource")
// +kubebuilder:webhook:path=/mutate-chaos-mesh-org-v1alpha1-podchaos,mutating=true,failurePolicy=fail,groups=chaos-mesh.org,resources=podchaos,verbs=create;update,versions=v1alpha1,name=mpodchaos.kb.io
var _ webhook.Defaulter = &PodChaos{}
// Default implements webhook.Defaulter so a webhook will be registered for the type.
// It fills in the selector's namespace from the object's own namespace when unset.
func (in *PodChaos) Default() {
	podchaoslog.Info("default", "name", in.Name)
	in.Spec.Selector.DefaultNamespace(in.GetNamespace())
}
// +kubebuilder:webhook:verbs=create;update,path=/validate-chaos-mesh-org-v1alpha1-podchaos,mutating=false,failurePolicy=fail,groups=chaos-mesh.org,resources=podchaos,versions=v1alpha1,name=vpodchaos.kb.io
var _ webhook.Validator = &PodChaos{}
// ValidateCreate implements webhook.Validator so a webhook will be registered for the type.
// Creation is allowed whenever the spec passes Validate.
func (in *PodChaos) ValidateCreate() error {
	podchaoslog.Info("validate create", "name", in.Name)
	return in.Validate()
}
// ValidateUpdate implements webhook.Validator so a webhook will be registered for the type.
// The spec of a PodChaos object is immutable: any change to it is rejected
// with ErrCanNotUpdateChaos; otherwise the (unchanged) spec is re-validated.
func (in *PodChaos) ValidateUpdate(old runtime.Object) error {
	podchaoslog.Info("validate update", "name", in.Name)
	if !reflect.DeepEqual(in.Spec, old.(*PodChaos).Spec) {
		return ErrCanNotUpdateChaos
	}
	return in.Validate()
}
// ValidateDelete implements webhook.Validator so a webhook will be registered for the type.
// Deletion is always allowed; no constraints apply.
func (in *PodChaos) ValidateDelete() error {
	podchaoslog.Info("validate delete", "name", in.Name)
	// Nothing to do?
	return nil
}
// Validate validates the chaos object, returning an aggregate of all
// field errors found under spec, or nil when the spec is valid.
func (in *PodChaos) Validate() error {
	specField := field.NewPath("spec")
	allErrs := in.Spec.validateContainerName(specField.Child("containerName"))
	if len(allErrs) > 0 {
		// Return the aggregate error directly instead of routing its
		// message through fmt.Errorf: passing a dynamic string as the
		// format argument is flagged by go vet and would mangle any
		// '%' contained in a field value.
		return allErrs.ToAggregate()
	}
	return nil
}
// validateContainerName validates the ContainerName.
// For the container-kill action at least one container name must be
// provided; all other actions place no constraint here.
func (in *PodChaosSpec) validateContainerName(containerField *field.Path) field.ErrorList {
	allErrs := field.ErrorList{}
	if in.Action == ContainerKillAction {
		if len(in.ContainerSelector.ContainerNames) == 0 {
			err := fmt.Errorf("the name of container should not be empty on %s action", in.Action)
			allErrs = append(allErrs, field.Invalid(containerField, in.ContainerNames, err.Error()))
		}
	}
	return allErrs
}
|
import { loadStripe } from "@stripe/stripe-js";

/**
 * Loads the Stripe.js client configured with the publishable key from
 * the environment.  Resolves to the Stripe instance (or null if the
 * script could not be loaded, per loadStripe's contract).
 */
export async function getStripeJs() {
  return await loadStripe(process.env.NEXT_PUBLIC_STRIPE_PUBLIC_KEY);
}
|
#!/bin/bash
# Continuously capture screenshots from an adb-attached device into
# $HOST_SAVE_DIR, pruning the oldest half once more than $MAX_NUM
# captures have accumulated.
#set -x
CMD_SCREEN_SHOT="screenshot"
TOP=$(pwd)
TARGET_SAVE_DIR="/data/screenShot"
HOST_SAVE_DIR=$TOP/screenShot
MAX_NUM=100
# Number of old screenshots to delete per pruning pass (half the cap).
((DEL_FILE_NR=$MAX_NUM/2))
num=0
sum=0
adb shell "mkdir $TARGET_SAVE_DIR" >/dev/null 2>&1
while true
do
    ((num+=1))
    PNG=screenShot_$(date "+%y%m%d%H%M%S").png
    adb shell "cd $TARGET_SAVE_DIR;$CMD_SCREEN_SHOT $PNG" >/dev/null 2>&1
    adb pull $TARGET_SAVE_DIR $HOST_SAVE_DIR >/dev/null 2>&1
    adb shell "cd $TARGET_SAVE_DIR;rm *" >/dev/null 2>&1
    if [ $num -gt $MAX_NUM ];then
        ((num=0))
        # BUG FIX: the old 'find | sed 1,${MAX_NUM}p' pipeline included
        # the directory itself in the deletion list and removed MAX_NUM
        # entries (its nested quoting was also broken), while the
        # computed DEL_FILE_NR was never used.  Delete only the oldest
        # DEL_FILE_NR screenshot files; timestamped names sort
        # chronologically.
        find "$HOST_SAVE_DIR" -type f -name '*.png' | sort | head -n "$DEL_FILE_NR" | xargs -r rm -f
    fi
    ((sum+=1))
    # (typo 'screensot' in the log message corrected)
    echo "$PNG total screenshot $sum"
done
#set +x
|
<gh_stars>0
package dev.johanness.processor.test._internal;
import dev.johanness.processor.test.mock.element.TypeElementMock;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementVisitor;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.ModuleElement;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.AbstractElementVisitor9;
import java.util.Iterator;
import java.util.List;
import static dev.johanness.processor.ElementCast.asExecutableElement;
import static dev.johanness.processor.ElementCast.asModuleElement;
import static dev.johanness.processor.ElementCast.asPackageElement;
import static dev.johanness.processor.ElementCast.asTypeElement;
import static dev.johanness.processor.ElementCast.asTypeParameterElement;
import static dev.johanness.processor.ElementCast.asVariableElement;
import static dev.johanness.processor.TypeCast.toArrayType;
import static dev.johanness.processor.TypeCast.toDeclaredType;
import static dev.johanness.processor.TypeCast.toTypeVariable;
/**
 * Structural-equality helpers for {@code javax.lang.model} elements and
 * types.  Two elements "match" when they have the same kind, the same
 * (simple or qualified) name, matching signatures for executables, and
 * recursively matching enclosing elements — i.e. they denote the same
 * program element even if they come from different mirror
 * implementations (e.g. a compiler element vs. a {@link TypeElementMock}).
 */
public final class ComparisonUtil {
    // TODO: Test!

    private ComparisonUtil() {} // This class cannot be instantiated

    /** Matches a declared type against a reflective {@link Class} via a mock element. */
    public static boolean matches(@NotNull DeclaredType type, @NotNull Class<?> clazz) {
        return matches(type.asElement(), new TypeElementMock(clazz));
    }

    /** Matches two declared types by comparing their defining elements. */
    public static boolean matches(@NotNull DeclaredType type1, @NotNull DeclaredType type2) {
        return matches(type1.asElement(), type2.asElement());
    }

    /** Null-safe element match: identical references (including both null) match trivially. */
    public static boolean matches(@Nullable Element element1, @Nullable Element element2) {
        return element1 == element2 ||
            element1 != null && element2 != null && element1.accept(COMPARATOR, element2);
    }

    // Visitor that dispatches on the first element's kind; the second
    // element is passed as the visitor parameter and down-cast with the
    // as*Element helpers (which return null on kind mismatch).
    private static final @NotNull ElementVisitor<Boolean, Element> COMPARATOR = new AbstractElementVisitor9<>() {
        // Null-safe recursive comparison used for enclosing elements and type elements.
        private @NotNull Boolean compare(@Nullable Element element1, @Nullable Element element2) {
            return element1 == element2 ||
                element1 != null && element2 != null && visit(element1, element2);
        }

        @Override
        public Boolean visitModule(ModuleElement e, Element otherElement) {
            ModuleElement other = asModuleElement(otherElement);
            return other != null &&
                e.getQualifiedName().contentEquals(other.getQualifiedName()) &&
                enclosingElementMatch(e, other);
        }

        @Override
        public Boolean visitPackage(PackageElement e, Element otherElement) {
            PackageElement other = asPackageElement(otherElement);
            return other != null &&
                e.getQualifiedName().contentEquals(other.getQualifiedName()) &&
                enclosingElementMatch(e, other);
        }

        @Override
        public Boolean visitType(TypeElement e, Element otherElement) {
            TypeElement other = asTypeElement(otherElement);
            if (e.getSimpleName().contentEquals("")) {
                // Anonymous class: has no usable name, so fall back to identity/equals.
                return e.equals(other);
            }
            else {
                return other != null &&
                    e.getSimpleName().contentEquals(other.getSimpleName()) &&
                    enclosingElementMatch(e, other);
            }
        }

        @Override
        public Boolean visitVariable(VariableElement e, Element otherElement) {
            VariableElement other = asVariableElement(otherElement);
            return other != null &&
                e.getKind() == other.getKind() &&
                e.getSimpleName().contentEquals(other.getSimpleName()) &&
                enclosingElementMatch(e, other);
        }

        @Override
        public Boolean visitExecutable(ExecutableElement e, Element otherElement) {
            ExecutableElement other = asExecutableElement(otherElement);
            // Executables additionally compare receiver, return and
            // parameter types so overloads are distinguished.
            return other != null &&
                e.getSimpleName().contentEquals(other.getSimpleName()) &&
                e.getKind() == other.getKind() &&
                typesMatch(e.getReceiverType(), other.getReceiverType()) &&
                typesMatch(e.getReturnType(), other.getReturnType()) &&
                typesMatch(e.getParameters(), other.getParameters()) &&
                enclosingElementMatch(e, other);
        }

        @Override
        public Boolean visitTypeParameter(TypeParameterElement e, Element otherElement) {
            TypeParameterElement other = asTypeParameterElement(otherElement);
            return other != null &&
                e.getKind() == other.getKind() &&
                e.getSimpleName().contentEquals(other.getSimpleName()) &&
                enclosingElementMatch(e, other);
        }

        // Structural type comparison: primitives/VOID/NONE match by kind
        // alone; arrays recurse on component types; declared and type
        // variables recurse on their elements; ERROR types fall back to
        // their string form.  Other kinds are unsupported.
        private @NotNull Boolean typesMatch(@NotNull TypeMirror type1, @NotNull TypeMirror type2) {
            TypeKind kind = type1.getKind();
            if (type1 == type2) {
                return Boolean.TRUE;
            }
            else if (kind != type2.getKind()) {
                return Boolean.FALSE;
            }
            switch (kind) {
                case BOOLEAN:
                case BYTE:
                case SHORT:
                case INT:
                case LONG:
                case CHAR:
                case FLOAT:
                case DOUBLE:
                case VOID:
                case NONE:
                    return Boolean.TRUE;
                case ARRAY:
                    return typesMatch(toArrayType(type1).getComponentType(), toArrayType(type2).getComponentType());
                case DECLARED:
                    return compare(toDeclaredType(type1).asElement(), toDeclaredType(type2).asElement());
                case ERROR:
                    return type1.toString().equals(type2.toString());
                case TYPEVAR:
                    return compare(toTypeVariable(type1).asElement(), toTypeVariable(type2).asElement());
                default:
                    throw new IllegalArgumentException(kind + ": " + type1);
            }
        }

        // Pairwise parameter-type comparison; lists must also be the same length.
        private @NotNull Boolean typesMatch(@NotNull List<? extends VariableElement> params1, @NotNull List<? extends VariableElement> params2) {
            Iterator<? extends VariableElement> iterator1 = params1.iterator();
            Iterator<? extends VariableElement> iterator2 = params2.iterator();
            while (iterator1.hasNext() && iterator2.hasNext()) {
                if (!typesMatch(iterator1.next().asType(), iterator2.next().asType())) {
                    return false;
                }
            }
            return !iterator1.hasNext() && !iterator2.hasNext();
        }

        private @NotNull Boolean enclosingElementMatch(@NotNull Element element1, @NotNull Element element2) {
            return compare(element1.getEnclosingElement(), element2.getEnclosingElement());
        }
    };
}
|
# Generate API documentation from annotated sources in src/ into apidoc/.
apidoc -i src/ -o apidoc/
|
""" Visualization code for point clouds and 3D bounding boxes with mayavi.
Modified by <NAME>
Date: September 2017
Ref: https://github.com/hengck23/didi-udacity-2017/blob/master/baseline-04/kitti_data/draw.py
"""
import numpy as np
import mayavi.mlab as mlab
try:
raw_input # Python 2
except NameError:
raw_input = input # Python 3
def normalize(vec):
    """Scale vector(s) to unit length along the last axis.

    Accepts either a single vector ``[x, y, z]`` or an NxM batch of
    vectors (one per row).  The input is not modified in place; a new
    array is returned.  Zero-length vectors produce ``np.nan`` entries
    (division by zero), matching the documented behavior.

    :param numpy.array vec: array whose final dimension holds vectors
    :rtype: numpy.array of the same shape, normalized
    """
    # Compute per-vector Euclidean lengths explicitly (always an array,
    # even 0-d), then divide via the transpose trick so broadcasting
    # works for both single vectors and batches.
    lengths = np.sqrt(np.sum(vec ** 2, axis=-1))
    return (vec.T / lengths).T
def rotation_matrix_numpy0(axis, theta, dtype=None):
    """Return the quaternion (x, y, z, w) for a rotation of ``theta``
    radians about ``axis``.

    NOTE(review): despite the name, this returns a 4-element quaternion,
    NOT a 3x3 rotation matrix — the components are sin(theta/2)*axis
    followed by cos(theta/2).  Callers expecting a matrix should use
    rotation_matrix_numpy instead.
    """
    # dtype = dtype or axis.dtype
    # make sure the vector is normalized
    if not np.isclose(np.linalg.norm(axis), 1.0):
        axis = normalize(axis)
    thetaOver2 = theta * 0.5
    sinThetaOver2 = np.sin(thetaOver2)
    return np.array(
        [
            sinThetaOver2 * axis[0],
            sinThetaOver2 * axis[1],
            sinThetaOver2 * axis[2],
            np.cos(thetaOver2),
        ]
    )
def rotation_matrix_numpy(axis, theta):
    """Return the 3x3 rotation matrix for ``theta`` radians about ``axis``.

    Uses the quaternion form of Rodrigues' rotation formula.  The axis is
    normalized internally, so it need not be unit length.

    Args:
        axis: length-3 array-like rotation axis.
        theta: rotation angle in radians.
    Returns:
        (3, 3) numpy array rotation matrix.
    """
    # (removed an unused local 'mat = np.eye(3, 3)' that was never read)
    axis = axis / np.sqrt(np.dot(axis, axis))
    a = np.cos(theta / 2.0)
    b, c, d = -axis * np.sin(theta / 2.0)
    return np.array(
        [
            [a * a + b * b - c * c - d * d, 2 * (b * c - a * d), 2 * (b * d + a * c)],
            [2 * (b * c + a * d), a * a + c * c - b * b - d * d, 2 * (c * d - a * b)],
            [2 * (b * d - a * c), 2 * (c * d + a * b), a * a + d * d - b * b - c * c],
        ]
    )
def rotx(t):
    """Return the 3x3 rotation matrix for angle ``t`` about the x-axis."""
    cos_t, sin_t = np.cos(t), np.sin(t)
    return np.array([
        [1, 0, 0],
        [0, cos_t, -sin_t],
        [0, sin_t, cos_t],
    ])
def roty(t):
    """Return the 3x3 rotation matrix for angle ``t`` about the y-axis."""
    cos_t, sin_t = np.cos(t), np.sin(t)
    return np.array([
        [cos_t, 0, sin_t],
        [0, 1, 0],
        [-sin_t, 0, cos_t],
    ])
def rotz(t):
    """Return the 3x3 rotation matrix for angle ``t`` about the z-axis."""
    cos_t, sin_t = np.cos(t), np.sin(t)
    return np.array([
        [cos_t, -sin_t, 0],
        [sin_t, cos_t, 0],
        [0, 0, 1],
    ])
def draw_lidar_simple(pc, color=None):
    """Draw lidar points with the simplest setup: a fixed-size black
    figure, points colored by height (z) unless ``color`` is given, a
    white origin sphere, and RGB axis lines.

    Args:
        pc: (n, >=3) numpy array of XYZ points.
        color: optional (n,) scalar array used for the colormap;
            defaults to the z column.
    Returns:
        The created mayavi figure.
    """
    fig = mlab.figure(
        figure=None, bgcolor=(0, 0, 0), fgcolor=None, engine=None, size=(1600, 1000)
    )
    if color is None:
        color = pc[:, 2]
    # draw points
    mlab.points3d(
        pc[:, 0],
        pc[:, 1],
        pc[:, 2],
        color,
        color=None,
        mode="point",
        colormap="gnuplot",
        scale_factor=1,
        figure=fig,
    )
    # draw origin
    mlab.points3d(0, 0, 0, color=(1, 1, 1), mode="sphere", scale_factor=0.2)
    # draw axis: x red, y green, z blue, each of length 2
    axes = np.array(
        [[2.0, 0.0, 0.0, 0.0], [0.0, 2.0, 0.0, 0.0], [0.0, 0.0, 2.0, 0.0]],
        dtype=np.float64,
    )
    mlab.plot3d(
        [0, axes[0, 0]],
        [0, axes[0, 1]],
        [0, axes[0, 2]],
        color=(1, 0, 0),
        line_width=2.0,
        tube_radius=None,
        figure=fig,
    )
    mlab.plot3d(
        [0, axes[1, 0]],
        [0, axes[1, 1]],
        [0, axes[1, 2]],
        color=(0, 1, 0),
        tube_radius=None,
        figure=fig,
    )
    mlab.plot3d(
        [0, axes[2, 0]],
        [0, axes[2, 1]],
        [0, axes[2, 2]],
        color=(0, 0, 1),
        tube_radius=None,
        figure=fig,
    )
    # fixed camera pose suited to forward-facing KITTI-style scans
    mlab.view(
        azimuth=180,
        elevation=70,
        focalpoint=[12.0909996, -1.04700089, -2.03249991],
        distance=62.0,
        figure=fig,
    )
    return fig
# pts_mode='sphere'
def draw_lidar(
    pc,
    color=None,
    fig=None,
    bgcolor=(0, 0, 0),
    pts_scale=0.3,
    pts_mode="sphere",
    pts_color=None,
    color_by_intensity=False,
    pc_label=False,
):
    """ Draw lidar points
    Args:
        pc: numpy array (n,3) of XYZ
        color: numpy array (n) of intensity or whatever
        fig: mayavi figure handler, if None create new one otherwise will use it
    Returns:
        fig: created or used fig

    Also draws the origin, RGB axis lines, two white FOV rays, and a
    grey top-view region boundary, then sets a fixed camera pose.
    """
    # ind = (pc[:,2]< -1.65)
    # pc = pc[ind]
    # NOTE(review): this unconditionally overrides the 'pts_mode'
    # argument, making that parameter dead code — confirm intent.
    pts_mode = "point"
    print("====================", pc.shape)
    if fig is None:
        fig = mlab.figure(
            figure=None, bgcolor=bgcolor, fgcolor=None, engine=None, size=(1600, 1000)
        )
    if color is None:
        color = pc[:, 2]
    if pc_label:
        # labels expected in column 4 — requires pc to have >= 5 columns
        color = pc[:, 4]
    if color_by_intensity:
        # NOTE(review): column 2 is the height channel (same as the
        # default above); intensity is commonly column 3 — confirm.
        color = pc[:, 2]
    mlab.points3d(
        pc[:, 0],
        pc[:, 1],
        pc[:, 2],
        color,
        color=pts_color,
        mode=pts_mode,
        colormap="gnuplot",
        scale_factor=pts_scale,
        figure=fig,
    )
    # draw origin
    mlab.points3d(0, 0, 0, color=(1, 1, 1), mode="sphere", scale_factor=0.2)
    # draw axis: x red, y green, z blue, each of length 2
    axes = np.array(
        [[2.0, 0.0, 0.0, 0.0], [0.0, 2.0, 0.0, 0.0], [0.0, 0.0, 2.0, 0.0]],
        dtype=np.float64,
    )
    mlab.plot3d(
        [0, axes[0, 0]],
        [0, axes[0, 1]],
        [0, axes[0, 2]],
        color=(1, 0, 0),
        tube_radius=None,
        figure=fig,
    )
    mlab.plot3d(
        [0, axes[1, 0]],
        [0, axes[1, 1]],
        [0, axes[1, 2]],
        color=(0, 1, 0),
        tube_radius=None,
        figure=fig,
    )
    mlab.plot3d(
        [0, axes[2, 0]],
        [0, axes[2, 1]],
        [0, axes[2, 2]],
        color=(0, 0, 1),
        tube_radius=None,
        figure=fig,
    )
    # draw fov (todo: update to real sensor spec.)
    fov = np.array(
        [[20.0, 20.0, 0.0, 0.0], [20.0, -20.0, 0.0, 0.0]], dtype=np.float64  # 45 degree
    )
    mlab.plot3d(
        [0, fov[0, 0]],
        [0, fov[0, 1]],
        [0, fov[0, 2]],
        color=(1, 1, 1),
        tube_radius=None,
        line_width=1,
        figure=fig,
    )
    mlab.plot3d(
        [0, fov[1, 0]],
        [0, fov[1, 1]],
        [0, fov[1, 2]],
        color=(1, 1, 1),
        tube_radius=None,
        line_width=1,
        figure=fig,
    )
    # draw square region: grey rectangle marking the top-view crop area
    TOP_Y_MIN = -20
    TOP_Y_MAX = 20
    TOP_X_MIN = 0
    TOP_X_MAX = 40
    TOP_Z_MIN = -2.0
    TOP_Z_MAX = 0.4
    x1 = TOP_X_MIN
    x2 = TOP_X_MAX
    y1 = TOP_Y_MIN
    y2 = TOP_Y_MAX
    mlab.plot3d(
        [x1, x1],
        [y1, y2],
        [0, 0],
        color=(0.5, 0.5, 0.5),
        tube_radius=0.1,
        line_width=1,
        figure=fig,
    )
    mlab.plot3d(
        [x2, x2],
        [y1, y2],
        [0, 0],
        color=(0.5, 0.5, 0.5),
        tube_radius=0.1,
        line_width=1,
        figure=fig,
    )
    mlab.plot3d(
        [x1, x2],
        [y1, y1],
        [0, 0],
        color=(0.5, 0.5, 0.5),
        tube_radius=0.1,
        line_width=1,
        figure=fig,
    )
    mlab.plot3d(
        [x1, x2],
        [y2, y2],
        [0, 0],
        color=(0.5, 0.5, 0.5),
        tube_radius=0.1,
        line_width=1,
        figure=fig,
    )
    # mlab.orientation_axes()
    mlab.view(
        azimuth=180,
        elevation=70,
        focalpoint=[12.0909996, -1.04700089, -2.03249991],
        distance=62.0,
        figure=fig,
    )
    return fig
def draw_gt_boxes3d(
    gt_boxes3d,
    fig,
    color=(1, 1, 1),
    line_width=1,
    draw_text=True,
    text_scale=(1, 1, 1),
    color_list=None,
    label=""
):
    """ Draw 3D bounding boxes
    Args:
        gt_boxes3d: numpy array (n,8,3) for XYZs of the box corners
        fig: mayavi figure handler
        color: RGB value tuple in range (0,1), box line color
        line_width: box line width
        draw_text: boolean, if true, write box indices beside boxes
        text_scale: three number tuple
        color_list: a list of RGB tuple, if not None, overwrite color.
        label: text drawn at corner 4 of every box when draw_text is set
    Returns:
        fig: updated fig
    """
    num = len(gt_boxes3d)
    for n in range(num):
        b = gt_boxes3d[n]
        if color_list is not None:
            color = color_list[n]
        if draw_text:
            # label anchored at corner 4 (first corner of the top face)
            mlab.text3d(
                b[4, 0],
                b[4, 1],
                b[4, 2],
                label,
                scale=text_scale,
                color=color,
                figure=fig,
            )
        # three edge families: bottom face, top face, vertical pillars
        for k in range(0, 4):
            # http://docs.enthought.com/mayavi/mayavi/auto/mlab_helper_functions.html
            i, j = k, (k + 1) % 4
            mlab.plot3d(
                [b[i, 0], b[j, 0]],
                [b[i, 1], b[j, 1]],
                [b[i, 2], b[j, 2]],
                color=color,
                tube_radius=None,
                line_width=line_width,
                figure=fig,
            )
            i, j = k + 4, (k + 1) % 4 + 4
            mlab.plot3d(
                [b[i, 0], b[j, 0]],
                [b[i, 1], b[j, 1]],
                [b[i, 2], b[j, 2]],
                color=color,
                tube_radius=None,
                line_width=line_width,
                figure=fig,
            )
            i, j = k, k + 4
            mlab.plot3d(
                [b[i, 0], b[j, 0]],
                [b[i, 1], b[j, 1]],
                [b[i, 2], b[j, 2]],
                color=color,
                tube_radius=None,
                line_width=line_width,
                figure=fig,
            )
    # mlab.show(1)
    # mlab.view(azimuth=180, elevation=70, focalpoint=[ 12.0909996 , -1.04700089, -2.03249991], distance=62.0, figure=fig)
    return fig
def xyzwhl2eight(xyzwhl):
    """Convert a (x, y, z, w, h, l) center/size box to its 8 corners.

    Returns an (8, 3) array of corner XYZs in the following order:
        7 -------- 6
       /|         /|
      4 -------- 5 .
      | |        | |
      . 3 -------- 2
      |/         |/
      0 -------- 1
    (w spans x, h spans y, l spans z; extra entries past index 5 of the
    input are ignored.)
    """
    x, y, z, w, h, l = xyzwhl[:6]
    half = np.array([w, h, l]) / 2.0
    # Sign pattern of each corner relative to the box center, matching
    # the corner numbering in the diagram above.
    signs = np.array(
        [
            [+1, -1, -1],
            [+1, +1, -1],
            [-1, +1, -1],
            [-1, -1, -1],
            [+1, -1, +1],
            [+1, +1, +1],
            [-1, +1, +1],
            [-1, -1, +1],
        ],
        dtype=np.float64,
    )
    return np.array([x, y, z]) + signs * half
def draw_xyzwhl(
    gt_boxes3d,
    fig,
    color=(1, 1, 1),
    line_width=1,
    draw_text=True,
    text_scale=(1, 1, 1),
    color_list=None,
    rot=False,
):
    """ Draw 3D bounding boxes given in center/size (xyzwhl) form
    Args:
        gt_boxes3d: numpy array (n,6+) of x,y,z,w,h,l per box; when rot
            is True, columns 6 and 7 are used as rotation angles
        fig: mayavi figure handler
        color: RGB value tuple in range (0,1), box line color
        line_width: box line width
        draw_text: boolean (currently unused; the text call is commented out)
        text_scale: three number tuple
        color_list: a list of RGB tuple, if not None, overwrite color.
        rot: if True, rotate corners by rotz(box[7]) then about the
            axis (-1,1,0) by box[6]
    Returns:
        fig: updated fig
    """
    num = len(gt_boxes3d)
    for n in range(num):
        print(gt_boxes3d[n])
        box6 = gt_boxes3d[n]
        # convert center/size to the 8 corner points
        b = xyzwhl2eight(box6)
        if rot:
            b = b.dot(rotz(box6[7]))
            # b = b.dot(rotx(box6[6]))
            # print(rotz(box6[6]))
            # b = b.dot( rotz(box6[6]).dot(rotz(box6[7])) )
            vec = np.array([-1, 1, 0])
            b = b.dot(rotation_matrix_numpy(vec, box6[6]))
            # b = b.dot(roty(box6[7]))
        print(b.shape, b)
        if color_list is not None:
            color = color_list[n]
        # if draw_text: mlab.text3d(b[4,0], b[4,1], b[4,2], '%d'%n, scale=text_scale, color=color, figure=fig)
        # three edge families: bottom face, top face, vertical pillars
        for k in range(0, 4):
            # http://docs.enthought.com/mayavi/mayavi/auto/mlab_helper_functions.html
            i, j = k, (k + 1) % 4
            mlab.plot3d(
                [b[i, 0], b[j, 0]],
                [b[i, 1], b[j, 1]],
                [b[i, 2], b[j, 2]],
                color=color,
                tube_radius=None,
                line_width=line_width,
                figure=fig,
            )
            i, j = k + 4, (k + 1) % 4 + 4
            mlab.plot3d(
                [b[i, 0], b[j, 0]],
                [b[i, 1], b[j, 1]],
                [b[i, 2], b[j, 2]],
                color=color,
                tube_radius=None,
                line_width=line_width,
                figure=fig,
            )
            i, j = k, k + 4
            mlab.plot3d(
                [b[i, 0], b[j, 0]],
                [b[i, 1], b[j, 1]],
                [b[i, 2], b[j, 2]],
                color=color,
                tube_radius=None,
                line_width=line_width,
                figure=fig,
            )
    # mlab.show(1)
    # mlab.view(azimuth=180, elevation=70, focalpoint=[ 12.0909996 , -1.04700089, -2.03249991], distance=62.0, figure=fig)
    return fig
'''
def test_main():
#if __name__ == "__main__":
pc = np.loadtxt("mayavi/kitti_sample_scan.txt")
fig = draw_lidar(pc)
mlab.savefig("pc_view.jpg", figure=fig)
raw_input()
'''
def test01():
    """Smoke test: load a KITTI velodyne scan and render it with draw_lidar."""
    file_dir = "./data/object/training/velodyne/000000.bin"
    pc = np.fromfile(file_dir, dtype=np.float32).reshape(-1, 4)  # load velodyne data
    fig = draw_lidar(pc)
    #mlab.savefig("pc_view.jpg", figure=fig)
    #raw_input()
def get_pc():
    """Load the sample velodyne binary as an (n, 4) float32 point cloud."""
    #file_dir = "./data/object/training/velodyne/000000.bin"
    file_dir = './data/save/train_01.bin'
    return np.fromfile(file_dir, dtype=np.float32).reshape(-1, 4)  # load velodyne data
|
# Gem version constant for the vagrant-aws plugin.
module VagrantPlugins
  module AWS
    VERSION = '0.7.3'
  end
end
|
package cn.stylefeng.roses.kernel.system.modular.theme.service;

import cn.stylefeng.roses.kernel.system.api.pojo.theme.SysThemeTemplateRelRequest;
import cn.stylefeng.roses.kernel.system.modular.theme.entity.SysThemeTemplateRel;
import com.baomidou.mybatisplus.extension.service.IService;

/**
 * Service interface for system theme template attribute relations.
 *
 * @author xixiaowei
 * @date 2021/12/17 16:13
 */
public interface SysThemeTemplateRelService extends IService<SysThemeTemplateRel> {

    /**
     * Adds a system theme template attribute relation.
     *
     * @author xixiaowei
     * @date 2021/12/24 10:56
     */
    void add(SysThemeTemplateRelRequest sysThemeTemplateRelRequest);

    /**
     * Deletes a system theme template attribute relation.
     *
     * @author xixiaowei
     * @date 2021/12/24 11:18
     */
    void del(SysThemeTemplateRelRequest sysThemeTemplateRelRequest);
}
|
#!/usr/bin/env bash
set -euo pipefail

# Deploys the blockchain api to App Engine
#
# Flags:
# -n: Name of the network, maps to App Engine 'service' (integration, production, etc.)
NETWORK=""
# Fix: the original declared an 'a:' option it never handled, and called an
# undefined 'error' function on unexpected flags; report and exit instead.
while getopts 'n:' flag; do
  case "${flag}" in
    n) NETWORK="$OPTARG" ;;
    *) echo "Unexpected option ${flag}" >&2; exit 1 ;;
  esac
done

[ -z "$NETWORK" ] && echo "Need to set the NETWORK via the -n flag" && exit 1;

echo "Starting blockchain api deployment."

echo 'Deploying to gcloud'
gcloud --project celo-testnet app deploy -q "app.${NETWORK}.yaml"

echo 'Hitting service url to trigger update'
# This seems to be necessary to ensure get App Engine starts the service
curl "https://${NETWORK}-dot-celo-testnet.appspot.com" > /dev/null 2>&1

echo "Done deployment."
|
import React, { useState } from 'react';
// Redux & Firebase
import { useSelector, useDispatch } from 'react-redux';
import { importTransactionsAction } from '../../redux/reducers/AuthReducer';
// Parse CSV
import Papa from 'papaparse';
import { Card, Button, List, ListItem } from '@material-ui/core';
import Alert from '@material-ui/lab/Alert';
import { useDropzone } from 'react-dropzone';
import CloseTwoToneIcon from '@material-ui/icons/CloseTwoTone';
import CloudUploadTwoToneIcon from '@material-ui/icons/CloudUploadTwoTone';
import CheckIcon from '@material-ui/icons/Check';

// Currencies forwarded to the import action; presumably the currency codes
// recognised in the CSV — TODO confirm against importTransactionsAction.
const currencies = ['JPY', 'EUR', 'USD'];

// Drag-and-drop CSV import widget: accepts a single .csv file, parses it with
// PapaParse, and dispatches the parsed rows to the auth reducer's import action.
export default function ImportCSV() {
  // uid identifies the signed-in user whose transactions are being imported.
  const {
    user: { uid }
  } = useSelector((state) => state.auth);
  const dispatch = useDispatch();

  // When true the card switches to the error view below instead of the dropzone.
  const [error, setError] = useState(false);

  // react-dropzone restricted to a single .csv file.
  const {
    acceptedFiles,
    isDragActive,
    isDragAccept,
    isDragReject,
    getRootProps,
    getInputProps
  } = useDropzone({
    accept: '.csv',
    maxFiles: 1
  });

  // One list row per accepted file (at most one, per maxFiles above).
  const files = acceptedFiles.map((file) => (
    <ListItem
      className="font-size-sm px-3 py-2 text-primary d-flex justify-content-between align-items-center"
      key={file.path}>
      <span>{file.path}</span>
      <span className="badge badge-pill bg-neutral-warning text-warning">
        {file.size} bytes
      </span>
    </ListItem>
  ));

  // Parse the dropped file off the current tick; success dispatches the import
  // action (which receives setError for its own failure reporting), a parser
  // failure flags the error state directly.
  const parseFile = (file) => {
    Promise.resolve().then(() => {
      Papa.parse(file, {
        complete: (data) =>
          dispatch(importTransactionsAction(uid, currencies, data, setError)),
        error: () => setError(true),
        header: true
      });
    });
  };

  return (
    <>
      <Card className="m-4 p-3 p-lg-5 shadow-xxl">
        {error ? (
          <div className="text-center">
            <h3 className="mb-3">
              There has been an error importing your transactions!
            </h3>
            <p>
              Please try refreshing the page and repeat the process. If the
              error persists, please feel free to reach out to us!
            </p>
          </div>
        ) : (
          <>
            <div className="dropzone">
              <div {...getRootProps({ className: 'dropzone-upload-wrapper' })}>
                <input {...getInputProps()} />
                <div className="dropzone-inner-wrapper">
                  {isDragAccept && (
                    <div>
                      <div className="d-100 btn-icon mb-3 hover-scale-lg bg-success shadow-success-sm rounded-circle text-white">
                        <CheckIcon className="d-50" />
                      </div>
                      <div className="font-size-sm text-success">
                        All files will be uploaded!
                      </div>
                    </div>
                  )}
                  {isDragReject && (
                    <div>
                      <div className="d-100 btn-icon mb-3 hover-scale-lg bg-danger shadow-danger-sm rounded-circle text-white">
                        <CloseTwoToneIcon className="d-50" />
                      </div>
                      <div className="font-size-sm text-danger">
                        Some files will be rejected!
                      </div>
                    </div>
                  )}
                  {!isDragActive && (
                    <div>
                      <div className="d-100 btn-icon mb-3 hover-scale-lg bg-white shadow-light-sm rounded-circle text-primary">
                        <CloudUploadTwoToneIcon className="d-50" />
                      </div>
                      <div className="font-size-sm">
                        Drag and drop files here{' '}
                        <span className="font-size-xs text-dark">(.csv)</span>
                      </div>
                    </div>
                  )}
                  <small className="py-2 text-black-50">or</small>
                  <div>
                    <Button className="btn-primary hover-scale-sm font-weight-bold btn-pill px-4">
                      <span className="px-2">Browse Files</span>
                    </Button>
                  </div>
                </div>
              </div>
            </div>
            <div>
              {files.length <= 0 && (
                <div className="text-info text-center font-size-sm mt-4">
                  Uploaded demo files will appear here!
                </div>
              )}
              {files.length > 0 && (
                <div>
                  <Alert severity="success" className="text-center mb-3">
                    You have uploaded <b>{files.length}</b> files!
                  </Alert>
                  <List component="div" className="font-size-sm">
                    {files}
                  </List>
                </div>
              )}
              {files.length > 0 && (
                <Button
                  className="btn-primary hover-scale-sm font-weight-bold btn-pill px-4"
                  onClick={() => parseFile(acceptedFiles[0])}>
                  <span className="px-2">Import Transactions from CSV</span>
                </Button>
              )}
            </div>
          </>
        )}
      </Card>
    </>
  );
}
|
#!/usr/bin/env bash
set -euo pipefail

# Watches the source tree and restarts the django-q cluster whenever a
# Python file is modified.
PID_FILE=/tmp/qcluster.pid

function start_qcluster() {
    echo "--> Starting qcluster"
    # Fix: "$!" is the PID of the just-backgrounded qcluster process; the
    # original "$$!" wrote this script's own PID followed by a literal "!",
    # so kill_qcluster could never find the right process.
    poetry run python manage.py qcluster & echo $! > "$PID_FILE"
}

function kill_qcluster() {
    echo "--> Killing qcluster"
    pid=$(cat "$PID_FILE")
    # rkill kills the process tree rooted at $pid (external helper).
    rkill "$pid"
}

function restart_qcluster() {
    echo "--> Restarting qcluster"
    kill_qcluster
    start_qcluster
}

start_qcluster
# The exclude pattern approximates "only react to *.py changes".
while inotifywait --exclude "[^p].$|[^y]$" -e modify -r .; do
    restart_qcluster
done
|
package Control;

import Model.User;
import Model.UserInfo;
import javafx.event.ActionEvent;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.stage.Stage;

import java.io.IOException;

/**
 * Controller for the main menu: each button swaps the scene on the shared
 * application stage to the corresponding FXML view.
 */
public class MainMenuController {

    /**
     * Loads the FXML resource at the given classpath location and shows it on
     * the application's primary stage. Centralises the load/show sequence the
     * original duplicated verbatim in every button handler.
     *
     * @param fxmlPath classpath location of the FXML view to display
     */
    private void showView(String fxmlPath) {
        Parent root = null;
        FXMLLoader loader = new FXMLLoader();
        loader.setLocation(getClass().getResource(fxmlPath));
        try {
            root = loader.load();
        } catch (IOException e) {
            // Preserves the original best-effort behavior: print and fall
            // through. NOTE(review): on load failure root stays null and
            // new Scene(root) throws NullPointerException — confirm intent.
            e.printStackTrace();
        }
        Stage stage = Launcher.getStage();
        stage.setScene(new Scene(root));
        stage.show();
    }

    /** Shows the user information view. */
    public void userBtn(ActionEvent event) {
        showView("/View/UserInfo.fxml");
    }

    /** Shows the write-review view. */
    public void writeBtn(ActionEvent event) {
        showView("/View/WriteReview.fxml");
    }

    /** Shows the read-review view. */
    public void readBtn(ActionEvent event) {
        showView("/View/ReadReview.fxml");
    }

    /** Shows the stop information view. */
    public void stopBtn(ActionEvent event) {
        showView("/View/StopInfo.fxml");
    }

    /** Shows the route information view. */
    public void routeBtn(ActionEvent event) {
        showView("/View/RouteInfo.fxml");
    }

    /** Shows the card information view. */
    public void cardBtn(ActionEvent event) {
        showView("/View/CardInfo.fxml");
    }

    /** Shows the transport information view. */
    public void transportBtn(ActionEvent event) {
        showView("/View/TransportInfo.fxml");
    }

    /** Returns to the login view, ending the session from the user's view. */
    public void logoutBtn(ActionEvent event) {
        showView("/View/Login.fxml");
    }
}
|
#!/usr/bin/env bash
set -e

# Syntax-check (bash -n) every shell script in the tree.
# Fix: the original word-split $(find ...) output, breaking on any path
# containing whitespace; -exec hands paths to bash verbatim instead.
find . -name '*.sh' -exec bash -n {} +
|
#!/bin/bash
set -ex

###########################################################
# UTILS
###########################################################
export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get install --no-install-recommends -y tzdata ca-certificates net-tools libxml2-utils git curl libudev1 libxml2-utils iptables iproute2 jq fontconfig
# Pin the container clock to UTC.
ln -fs /usr/share/zoneinfo/UTC /etc/localtime
dpkg-reconfigure --frontend noninteractive tzdata
# Drop apt caches to keep the image small.
rm -rf /var/lib/apt/lists/*

# Helper script and release tooling used by the Concourse pipeline.
curl https://raw.githubusercontent.com/spring-io/concourse-java-scripts/v0.0.4/concourse-java.sh > /opt/concourse-java.sh
curl --output /opt/concourse-release-scripts.jar https://repo.spring.io/release/io/spring/concourse/releasescripts/concourse-release-scripts/0.3.2/concourse-release-scripts-0.3.2.jar

###########################################################
# JAVA
###########################################################
# Install each JDK under /opt/openjdk/<name>; /get-jdk-url.sh (baked into the
# image) resolves the download URL for a given JDK name.
mkdir -p /opt/openjdk
pushd /opt/openjdk > /dev/null
for jdk in java8 java11 java16
do
 JDK_URL=$( /get-jdk-url.sh $jdk )
 mkdir $jdk
 pushd $jdk > /dev/null
 # --strip-components=1 unpacks the archive's top dir straight into $jdk.
 curl -L ${JDK_URL} | tar zx --strip-components=1
 # Sanity check: a usable JDK must ship both launcher and compiler.
 test -f bin/java
 test -f bin/javac
 popd > /dev/null
done
popd

###########################################################
# GRADLE ENTERPRISE
###########################################################
cd /
mkdir ~/.gradle
# Stable user name for Gradle Enterprise build scans.
echo 'systemProp.user.name=concourse' > ~/.gradle/gradle.properties
|
import * as tmp from 'tmp'
import { privateKeyToAddress } from '@celo/utils/lib/address'
import { LocalAccount } from './accounts';
import AccountsDB, { decryptLocalKey, encryptLocalKey } from './accountsdb'

// Delete the temporary DB file even if the test throws.
tmp.setGracefulCleanup()

test('accountsDB test', () => {
	// A fresh DB backed by a throwaway file starts empty.
	const tmpobj = tmp.fileSync();
	const db = new AccountsDB(tmpobj.name)
	let accounts = db.readAccounts()
	expect(accounts.length).toEqual(0)

	// Adding an address-only account requires no password.
	const pKey0 = "0xf2f48ee19680706196e2e339e5da3491186e0c4c5030670656b0e0164837257d"
	const addr0 = privateKeyToAddress(pKey0)
	db.addAccount({
		type: "address-only",
		name: "test0",
		address: addr0,
	})
	accounts = db.readAccounts()
	expect(accounts.length).toEqual(1)
	expect(accounts[0].name).toEqual('test0')

	// A local account may not reuse an address already in the DB.
	const pw = "pw"
	const encryptedKey0 = encryptLocalKey({privateKey: pKey0}, pw)
	expect(() => {
		db.addAccount({
			type: "local",
			name: "test1",
			address: addr0,
			encryptedData: encryptedKey0,
		}, pw)
	}).toThrow("already exists")
	// No local account was stored yet, so no password is set either.
	expect(db.hasPassword()).toEqual(false)

	// First successful local account establishes the DB password.
	const pKey1 = "<KEY>"
	const addr1 = privateKeyToAddress(pKey1)
	const encryptedKey1 = encryptLocalKey({privateKey: pKey1}, pw)
	db.addAccount({
		type: "local",
		name: "test1",
		address: addr1,
		encryptedData: encryptedKey1,
	}, pw)
	expect(db.hasPassword()).toEqual(true)

	// Password change must be rejected when the old password is wrong...
	expect(() => {
		db.changePassword("<PASSWORD>", "<PASSWORD>")
	}).toThrow("does not match")
	// ...and accepted when it matches; stored keys are re-encrypted.
	db.changePassword("pw", "<PASSWORD>")
	const test1 = db.readAccounts().find((a) => a.name === "test1") as LocalAccount
	expect(test1?.type).toEqual("local")
	expect(test1?.address).toEqual(addr1)
	// NOTE(review): decrypting with "pwnew" after changing to "<PASSWORD>"
	// looks inconsistent — confirm the placeholder-scrubbed literals.
	expect(decryptLocalKey(test1.encryptedData, "pwnew").privateKey).toEqual(pKey1)
	db.close()
})
def arrayCountDistinct(arr):
    """Return a dict mapping each distinct element of arr to its count."""
    counts = {}
    for item in arr:
        # dict.get supplies 0 the first time an element is encountered.
        counts[item] = counts.get(item, 0) + 1
    return counts
(defn product
  "Multiplies together every element of the collection."
  [coll]
  (reduce * 1 coll))

(product [1 2 3 4])
import "reflect-metadata";
import { Model } from "../";
import { IFmModelPropertyMeta, IFmModelRelationshipMeta } from "./index";
import { IDictionary } from "common-types";
/** Properties accumulated by propertyDecorators, keyed by model then property */
export declare const propertiesByModel: IDictionary<IDictionary<IFmModelPropertyMeta>>;
/** Relationships accumulated by hasMany/hasOne decorators, keyed by model then property */
export declare const relationshipsByModel: IDictionary<IDictionary<IFmModelRelationshipMeta>>;
/** Decorator factory that records name/value metadata pairs for a model property */
export declare const propertyDecorator: <T extends Model>(nameValuePairs?: IDictionary<any>, property?: string) => (target: Model, key: string) => void;
/**
 * Gets all the properties for a given model
 *
 * @param model the schema object which is being looked up
 */
export declare function getProperties(model: object): IFmModelPropertyMeta<Model>[];
/**
 * Gets all the relationships for a given model
 */
export declare function getRelationships(model: object): IFmModelRelationshipMeta<Model>[];
/** Returns the names of the model's push-key properties (plus built-in keys) */
export declare function getPushKeys(target: object): ("id" | "lastUpdated" | "createdAt" | "META")[];
# Fetch and build the pinned fix-9-04 branch of dep_search.
# Fix: without set -e a failed clone still let cd/make run in the wrong place.
set -e
git clone --branch fix-9-04 https://github.com/frankier/dep_search.git
cd dep_search
git submodule init
git submodule update
make
|
#! /bin/bash
# Run the Starlark kpt function against fn-config.yaml;
# --image-pull-policy never forces use of the locally built image.
kpt fn eval --image gcr.io/kpt-fn/starlark:unstable --fn-config fn-config.yaml --image-pull-policy never
|
// Placeholder: decides whether a parcel read from the archive was migrated
// (true) or scheduled locally (false). NOTE(review): readParcelMigrationFlag
// is illustrative — serialization::input_archive's real API must be
// confirmed before this can compile.
bool load_schedule(serialization::input_archive & ar, ...) {
    // Implement the logic to process the schedule and determine if the parcel was migrated or scheduled locally
    // Extract necessary information from the input archive and perform the required checks

    // Example pseudo-code logic:
    // Read parcel details from the input archive
    // Check if the parcel has a migration flag or any indication of being scheduled locally
    // Based on the extracted information, return true if the parcel was migrated, and false if it was scheduled locally

    // Replace ... with any additional parameters or types required for processing the schedule

    // Example implementation:
    // Assuming the input archive provides a flag indicating migration
    bool isMigrated = ar.readParcelMigrationFlag(); // Example function to read migration flag from the input archive
    return isMigrated;
}
<gh_stars>100-1000
from os import environ as env
import smtplib
import ssl
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from runners.helpers import log
from runners.helpers import vault

# SMTP connection settings; SA_-prefixed variables take priority over the
# legacy unprefixed names.
HOST = env.get('SA_SMTP_HOST', env.get('SMTP_SERVER', ''))
# NOTE(review): the legacy fallback key here is lowercase 'port', unlike its
# siblings ('SMTP_*') — confirm this is intentional before changing.
PORT = int(env.get('SA_SMTP_PORT', env.get('port', 587)))
USER = env.get('SA_SMTP_USER', env.get('SMTP_USER', ''))
PASSWORD = env.get('SA_SMTP_PASSWORD', env.get('SMTP_PASSWORD', ''))
# NOTE(review): when these env vars are set, env.get returns a *string*; any
# non-empty string (even "False") is truthy, and handle() compares with
# `is True`, so env-provided values never match — confirm intended semantics.
USE_SSL = env.get('SA_SMTP_USE_SSL', env.get('SMTP_USE_SSL', True))
USE_TLS = env.get('SA_SMTP_USE_TLS', env.get('SMTP_USE_TLS', True))
def handle(
    alert,
    type='smtp',
    sender_email=None,
    recipient_email=None,
    text=None,
    html=None,
    subject=None,
    reply_to=None,
    cc=None,
    bcc=None,
    host=HOST,
    port=PORT,
    user=USER,
    password=PASSWORD,
    use_ssl=USE_SSL,
    use_tls=USE_TLS,
):
    """Send an alert notification email over SMTP.

    Builds a MIME message from `text` (and optional `html`), then delivers it
    to `recipient_email` (comma-separated), plus any `cc`/`bcc` addresses.

    Returns the dict from smtplib.SMTP.sendmail (empty on full success,
    refused recipients otherwise), or None when required params are missing.
    """
    user = vault.decrypt_if_encrypted(user)
    password = vault.decrypt_if_encrypted(password)
    sender_email = sender_email or user

    if recipient_email is None:
        log.error("param 'recipient_email' required")
        return None
    if text is None:
        log.error("param 'text' required")
        return None

    # Create the base MIME message. 'alternative' lets clients prefer the
    # last-attached part, so plain text goes first and HTML (if any) last.
    if html is None:
        message = MIMEMultipart()
    else:
        message = MIMEMultipart('alternative')
    textPart = MIMEText(text, 'plain')
    message.attach(textPart)
    if html is not None:
        htmlPart = MIMEText(html, 'html')
        message.attach(htmlPart)

    message['Subject'] = subject
    message['From'] = sender_email
    message['To'] = recipient_email
    recipients = recipient_email.split(',')
    if cc is not None:
        message['Cc'] = cc
        recipients = recipients + cc.split(',')
    if bcc is not None:
        # Bcc recipients go in the envelope only, never in a header.
        recipients = recipients + bcc.split(',')
    if reply_to is not None:
        message.add_header('reply-to', reply_to)

    # NOTE(review): env-provided use_ssl/use_tls are strings, so `is True`
    # only matches the hard-coded defaults — confirm intended semantics.
    if use_ssl is True:
        context = ssl.create_default_context()
        if use_tls is True:
            smtpserver = smtplib.SMTP(host, port)
            smtpserver.starttls(context=context)
        else:
            smtpserver = smtplib.SMTP_SSL(host, port, context=context)
    else:
        smtpserver = smtplib.SMTP(host, port)

    # Fix: close the connection even when login/sendmail raises; the original
    # leaked the socket on any SMTP error.
    try:
        if user and password:
            smtpserver.login(user, password)
        result = smtpserver.sendmail(sender_email, recipients, message.as_string())
    finally:
        smtpserver.close()
    return result
|
// Draw a filled red circle (center 100,75, radius 50) on the page's canvas.
const canvas = document.getElementById('canvas');
const ctx = canvas.getContext('2d');
ctx.beginPath();
// Full circle: arc from 0 to 2*PI radians.
ctx.arc(100, 75, 50, 0, 2 * Math.PI);
ctx.fillStyle = 'red';
ctx.fill();
#!/usr/bin/zsh

# Delete an lxc container, refusing to touch the protected template images.
function lxcd ()
{
    machine=$1
    if [ "$machine" != "reveng" -a "$machine" != "target-arch" -a "$machine" \
        != "target-ubuntu" ]; then
        lxc delete -i "$machine"
    else
        echo "Error: $machine is a template and should not be destroyed."
    fi
}

# Open a login shell as the ubuntu user inside the named container.
function lxcs ()
{
    # Fix: the original used "$!" (PID of the last background job); the
    # container name passed as the first argument is "$1".
    lxc exec "$1" -- sudo --user ubuntu --login
}

alias lxcc="lxc copy"
alias lxcfp="lxc file push"
|
const test = require('tap').test
const server = require('../server')
const sinon = require('sinon')
const index = require('../../lib/index')['create']
const semver = require('semver')

// Shared across the sequential tap tests below.
var ARROW
var CONNECTOR

// Boot Arrow once and capture the mysql connector for the later tests.
test('### Start Arrow ###', function (t) {
  server()
    .then((inst) => {
      ARROW = inst
      CONNECTOR = ARROW.getConnector('appc.mysql')
      ARROW.Connector = CONNECTOR
      ARROW.Connector.Capabilities = {}
      t.ok(ARROW, 'Arrow has been started')
      t.end()
    })
    .catch((err) => {
      t.threw(err)
    })
})

// When semver.lt reports the Arrow version as too old, create() must throw.
test('### Test Index.js Error Case ###', sinon.test(function (t) {
  const semverLtStub = this.stub(semver, 'lt', function (actualVersion, desiredVersion) {
    return true
  })
  t.throws(index.bind(CONNECTOR, ARROW),
    'This connector requires at least version 1.2.53 of Arrow please run `appc use latest`.')
  t.ok(semverLtStub.calledOnce)
  t.end()
}))

// With a new-enough version, create() should extend the connector once.
test('### Test Index.js Valid case ###', sinon.test(function (t) {
  const semverLtStub = this.stub(semver, 'lt', function (actualVersion, desiredVersion) { return false })
  const extendSpy = this.spy(function () { })
  ARROW.Connector.extend = extendSpy
  index.bind(CONNECTOR, ARROW)()
  t.ok(semverLtStub.calledOnce)
  t.ok(extendSpy.calledOnce)
  t.end()
}))

// Tear down the Arrow instance started in the first test.
test('### Stop Arrow ###', function (t) {
  ARROW.stop(function () {
    t.pass('Arrow has been stopped!')
    t.end()
  })
})
|
<reponame>osl2/RBLS<filename>RBLS-Teamprojekt/src/main/java/praesentation/FormelAnsicht.java
package praesentation;

import java.awt.Color;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import javax.swing.BorderFactory;
import javax.swing.BoxLayout;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.SwingConstants;
import javax.swing.WindowConstants;
import javax.swing.plaf.basic.BasicScrollBarUI;
import steuerung.FormelEditor;

/**
 * Graphical formula view for entering formulas, shown as a modal pop-up
 * window. Displays buttons for logical operator symbols and buttons for the
 * atomic propositions.
 *
 * @author Nick
 */
public class FormelAnsicht {
  // Controller this view reports every keystroke to.
  private FormelEditor strg;
  private JDialog ansicht;
  // Read-only display of the formula entered so far.
  private JLabel formelAnzeige = new JLabel();
  private Schaltflaeche bestaetige = new Schaltflaeche("Bestätige");
  private Schaltflaeche abbruch = new Schaltflaeche("Abbruch", 2);
  private Schaltflaeche entferne = new Schaltflaeche("Entferne", 3);
  // Fixed dialog dimensions in pixels.
  private int breite = 800;
  private int hoehe = 400;
  // Human-readable formula text mirrored in formelAnzeige.
  private String formel = "";
  private Schaltflaeche[] atomareAussagen;
  // Operator buttons, labeled with the Unicode logic symbols.
  private Schaltflaeche und = new Schaltflaeche("\u2227", 3);
  private Schaltflaeche oder = new Schaltflaeche("\u2228", 3);
  private Schaltflaeche nicht = new Schaltflaeche("\u00AC", 3);
  private Schaltflaeche impliziert = new Schaltflaeche("\u2192", 3);
  private Schaltflaeche aequivalent = new Schaltflaeche("\u2194", 3);
  private Schaltflaeche xor = new Schaltflaeche("\u2295", 3);
  private Schaltflaeche klammerAuf = new Schaltflaeche("(", 3);
  private Schaltflaeche klammerZu = new Schaltflaeche(")", 3);
  // zeichen and symbole are parallel lists: zeichen.get(i) is the button
  // whose internal one-character code for the controller is symbole.get(i).
  private ArrayList<Schaltflaeche> zeichen = new ArrayList<Schaltflaeche>(
      Arrays.asList(new Schaltflaeche[] { und, oder, nicht, impliziert, aequivalent, xor,
          klammerAuf, klammerZu }));
  private ArrayList<Character> symbole = new ArrayList<Character>(
      Arrays.asList(new Character[] { 'u', 'o', 'n', 'f', 'a', 'x', '(', ')' }));

  /**
   * Builds the formula-editor view as a modal window and shows it.
   *
   * @param aussagen  atomic propositions to make available as buttons
   * @param strg      formula editor (controller) to communicate with
   * @param formelAlt previously entered formula text used as the initial display
   */
  public FormelAnsicht(String[] aussagen, FormelEditor strg, String formelAlt) {
    this.strg = strg;
    this.formel = formelAlt;
    JPanel aussagenPanel = new JPanel();
    aussagenPanel.setLayout(new FlowLayout());
    atomareAussagen = new Schaltflaeche[aussagen.length];
    for (int j = 0; j < aussagen.length; j++) {
      atomareAussagen[j] = new Schaltflaeche(aussagen[j], 3);
      atomareAussagen[j].addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
          // Show only the first character of the proposition's label.
          schreibe(e.getActionCommand().substring(0, 1));
          for (int i = 0; i < atomareAussagen.length; i++) {
            // NOTE(review): reference (==) comparison of strings; this only
            // works if getActionCommand() returns the same String instance —
            // consider equals().
            if (atomareAussagen[i].getActionCommand() == e.getActionCommand()) {
              // Propositions are encoded for the controller as '0', '1', ...
              fuegeHinzu((char) (i + '0'));
            }
          }
        }
      });
      zeichen.add(atomareAussagen[j]);
      symbole.add((char) (j + '0'));
      aussagenPanel.add(atomareAussagen[j]);
    }
    // Each operator button appends its display symbol and reports its
    // one-character code to the controller.
    JPanel operatorPanel1 = new JPanel();
    operatorPanel1.setLayout(new FlowLayout());
    und.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        schreibe(und.getText());
        fuegeHinzu('u');
      }
    });
    operatorPanel1.add(und);
    oder.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        schreibe(oder.getText());
        fuegeHinzu('o');
      }
    });
    operatorPanel1.add(oder);
    xor.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        schreibe(xor.getText());
        fuegeHinzu('x');
      }
    });
    operatorPanel1.add(xor);
    nicht.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        schreibe(nicht.getText());
        fuegeHinzu('n');
      }
    });
    operatorPanel1.add(nicht);
    JPanel operatorPanel2 = new JPanel();
    operatorPanel2.setLayout(new FlowLayout());
    impliziert.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        schreibe(impliziert.getText());
        fuegeHinzu('f');
      }
    });
    operatorPanel2.add(impliziert);
    aequivalent.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        schreibe(aequivalent.getText());
        fuegeHinzu('a');
      }
    });
    operatorPanel2.add(aequivalent);
    klammerAuf.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        schreibe("(");
        fuegeHinzu('(');
      }
    });
    operatorPanel2.add(klammerAuf);
    klammerZu.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        schreibe(")");
        fuegeHinzu(')');
      }
    });
    operatorPanel2.add(klammerZu);
    JPanel entfernePanel = new JPanel();
    entfernePanel.setLayout(new FlowLayout());
    entferne.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        loescheZeichen();
      }
    });
    entfernePanel.add(entferne);
    JPanel menuePanel = new JPanel();
    menuePanel.setLayout(new FlowLayout());
    abbruch.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        brecheAb();
      }
    });
    menuePanel.add(abbruch);
    bestaetige.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        bestaetige();
      }
    });
    menuePanel.add(bestaetige);
    formelAnzeige = new JLabel(formel, SwingConstants.CENTER);
    JPanel formelPanel = new JPanel();
    formelPanel.setLayout(new FlowLayout());
    formelPanel.add(formelAnzeige);
    aussagenPanel.setBackground(Color.WHITE);
    operatorPanel1.setBackground(Color.WHITE);
    operatorPanel2.setBackground(Color.WHITE);
    entfernePanel.setBackground(Color.WHITE);
    menuePanel.setBackground(Color.WHITE);
    formelPanel.setBackground(new Color(186, 185, 219));
    formelAnzeige.setBackground(Color.LIGHT_GRAY);
    ansicht = new JDialog();
    ansicht.getContentPane().setLayout(new BoxLayout(ansicht.getContentPane(), BoxLayout.Y_AXIS));
    // Horizontal scrolling keeps long formulas reachable without resizing.
    JScrollPane formelRegler = new JScrollPane(formelPanel, JScrollPane.VERTICAL_SCROLLBAR_NEVER,
        JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
    formelRegler.setBorder(BorderFactory.createEmptyBorder());
    formelRegler.getHorizontalScrollBar().setUI(new BasicScrollBarUI() {
      protected void configureScrollBarColors() {
        this.thumbColor = new Color(255, 102, 0);
        this.trackColor = new Color(186, 185, 219);
      }
    });
    formelRegler.setPreferredSize(new Dimension(Integer.MAX_VALUE, 52));
    ansicht.getContentPane().add(formelRegler);
    ansicht.getContentPane().add(aussagenPanel);
    ansicht.getContentPane().add(operatorPanel1);
    ansicht.getContentPane().add(operatorPanel2);
    ansicht.getContentPane().add(entfernePanel);
    ansicht.getContentPane().add(menuePanel);
    ansicht.setTitle("Formel-Editor");
    ansicht.setSize(breite, hoehe);
    ansicht.setResizable(false);
    ansicht.setLocationRelativeTo(null);
    ansicht.setAlwaysOnTop(true);
    ansicht.setModal(true);
    ansicht.getContentPane().setBackground(Color.LIGHT_GRAY);
    ansicht.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
    // Closing the window counts as cancelling the edit.
    ansicht.addWindowListener(new WindowAdapter() {
      public void windowClosing(WindowEvent e) {
        brecheAb();
      }
    });
    pruefeErlaubteZeichen();
    ansicht.setVisible(true);
  }

  /** Returns the formula text as currently displayed. */
  public String getFormel() {
    return formel;
  }

  /** Reports a symbol code to the controller and refreshes button states. */
  private void fuegeHinzu(char zeichen) {
    strg.setzeZeichen(zeichen);
    pruefeErlaubteZeichen();
  }

  /** Appends display text to the formula label. */
  private void schreibe(String zeichen) {
    formel = formel + "" + zeichen;
    formelAnzeige.setVisible(false);
    formelAnzeige.setText(formel);
    formelAnzeige.setVisible(true);
  }

  /**
   * Enables exactly the buttons whose symbol the controller currently allows;
   * Entferne/Bestätige are enabled only when the formula is non-empty.
   */
  private void pruefeErlaubteZeichen() {
    int j = 0;
    for (Iterator<Schaltflaeche> iter = zeichen.iterator(); iter.hasNext();) {
      Schaltflaeche element = iter.next();
      if (strg.zeichenErlaubt(symbole.get(j))) {
        element.setEnabled(true);
      } else {
        element.setEnabled(false);
      }
      j++;
    }
    if (formel.equals("")) {
      entferne.setEnabled(false);
      bestaetige.setEnabled(false);
    } else {
      entferne.setEnabled(true);
      bestaetige.setEnabled(true);
    }
  }

  /** Removes the last character from display and controller state. */
  private void loescheZeichen() {
    formel = formel.substring(0, formel.length() - 1);
    formelAnzeige.setVisible(false);
    formelAnzeige.setText(formel);
    formelAnzeige.setVisible(true);
    strg.entferneletzesZeichen();
    pruefeErlaubteZeichen();
  }

  /** Confirms the formula; closes on success, shows an error dialog otherwise. */
  private void bestaetige() {
    if (strg.bestaetige()) {
      ansicht.dispose();
    } else {
      new FehlerDialog("Das ist keine gültige aussagenlogische Formel", this.ansicht);
    }
  }

  /** Cancels editing and closes the dialog. */
  private void brecheAb() {
    strg.brecheAb();
    ansicht.dispose();
  }
}
|
def generate_fibonacci(n):
    """
    Generates an array of Fibonacci numbers.

    Args:
        n (int): The desired length of the Fibonacci array.

    Returns:
        list: The first n Fibonacci numbers (empty when n <= 0).
    """
    # Fix: seeding with [0, 1] made the old version return two elements even
    # for n = 0 or n = 1; build up from empty so exactly n are returned.
    fibonacci_arr = []
    a, b = 0, 1
    for _ in range(n):
        fibonacci_arr.append(a)
        a, b = b, a + b
    return fibonacci_arr
<gh_stars>100-1000
package com.github.messenger4j.internal.gson;

import com.github.messenger4j.send.recipient.PhoneNumberRecipient;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
import java.lang.reflect.Type;

/**
 * Serializes a PhoneNumberRecipient to the Messenger API JSON shape:
 * always a "phone_number" property, plus a nested "name" object when
 * both first and last name are present.
 *
 * @author <NAME>
 * @since 1.0.0
 */
final class PhoneNumberRecipientSerializer implements JsonSerializer<PhoneNumberRecipient> {

  @Override
  public JsonElement serialize(
      PhoneNumberRecipient recipient, Type typeOfSrc, JsonSerializationContext context) {
    final JsonObject json = new JsonObject();
    json.addProperty("phone_number", recipient.phoneNumber());

    // The API only accepts a name object when both parts are available.
    final boolean hasFullName =
        recipient.firstName().isPresent() && recipient.lastName().isPresent();
    if (hasFullName) {
      final JsonObject name = new JsonObject();
      name.addProperty("first_name", recipient.firstName().get());
      name.addProperty("last_name", recipient.lastName().get());
      json.add("name", name);
    }
    return json;
  }
}
|
<filename>UDP_TransferNT/include/UDP_TransferNT.h
/* Umbrella header: pulls in the complete UDP_TransferNT network API. */
#ifndef UDP_TRANSFER_NT_H
#define UDP_TRANSFER_NT_H
#include "Network.h"
#endif
<gh_stars>0
// No-op demo function: builds a local object literal and discards it.
function test() {
  // Renamed so the local no longer shadows the enclosing function's name.
  const fixture = {
    test: 'hello'
  };
}
|
#!/bin/bash
# Set up the appropriate rustc toolchain
set -e

cd "$(dirname "$0")"

# Locate rustup-toolchain-install-master (RTIM); ERRNO records lookup failure.
ERRNO=0
RTIM_PATH=$(command -v rustup-toolchain-install-master) || ERRNO=$?
CARGO_HOME=${CARGO_HOME:-$HOME/.cargo}

# Check if people also install RTIM in other locations beside
# ~/.cargo/bin
if [[ "$ERRNO" -ne 0 ]] || [[ "$RTIM_PATH" == $CARGO_HOME/bin/rustup-toolchain-install-master ]]; then
    # Not found, or found in the default cargo bin: (re)install/upgrade it.
    cargo install -Z install-upgrade rustup-toolchain-install-master
else
    # Custom install location: report versions but do not touch it.
    VERSION=$(rustup-toolchain-install-master -V | grep -o "[0-9.]*")
    REMOTE=$(cargo search rustup-toolchain-install-master | grep -o "[0-9.]*")
    echo "info: skipping updating rustup-toolchain-install-master at $RTIM_PATH"
    echo "      current version : $VERSION"
    echo "      remote version  : $REMOTE"
fi

# Latest commit on rust-lang/rust master; used as the toolchain pin.
RUST_COMMIT=$(git ls-remote https://github.com/rust-lang/rust master | awk '{print $1}')

# Skip the download when the installed master toolchain already matches.
if rustc +master -Vv 2>/dev/null | grep -q "$RUST_COMMIT"; then
    echo "info: master toolchain is up-to-date"
    exit 0
fi

# rustc-dev component is needed to build against compiler internals.
rustup-toolchain-install-master -f -n master -c rustc-dev -- "$RUST_COMMIT"
rustup override set master
|
#!/usr/bin/env bash
set +e
#LC_ALL=C
# Licence block
: << LICENSE
MIT License
Copyright (c) 2018 Vitalii Bieliavtsev
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
LICENSE
# List of a script exit codes
: << EXITCODES
1 - Some external program error
51 - Some path(s) is/(are) missing.
52 - No authentication data for Steam account
53 - Wrong selection
54 - Wrong MOD ID
55 - Reserved code. Not used
56 - Wrong MODs ID in "meta.cpp" file ("0" as usually)
57 - No MOD_ID in meta.cpp file
58 - Earley interrupted
60 - Already running
EXITCODES
# Trap exit codes and remove PID file on exit
trap cleanup EXIT QUIT ABRT TERM
trap 'exit $?' ERR
trap 'exit 2' INT
PID_FILE=/tmp/a3upddownmod.pid
# Remove the PID file and propagate the original exit status.
# Exit code 60 ("already running") must NOT delete the PID file, because it
# belongs to the other, still-running instance.
cleanup() {
	local EXIT_CODE=$?
	[[ "${EXIT_CODE}" = "60" ]] && exit "${EXIT_CODE}"
	rm "${PID_FILE}"
	exit "${EXIT_CODE}"
}
# Single-instance guard: refuse to start when a PID file already exists.
if [[ -f "${PID_FILE}" ]]; then
	echo "Already running: PID=$(cat ${PID_FILE})"
	exit 60
else
	echo $$ > "${PID_FILE}"
fi
###
# Mandatory variables
STEAM_APP_ID="107410"
CURL_CMD="/usr/bin/curl" # CURL command
STEAM_CHANGELOG_URL="https://steamcommunity.com/sharedfiles/filedetails/changelog" # URL to get the date of the last MOD's update in a WorkShop
# Change it according to your paths
# Path to 'steamcmd' executable file
STEAM_CMD_PATH="/home/steam/server/steamcmd/steamcmd.sh"
# Path to there is Workshop downloaded the MODs
WORKSHOP_PATH="/home/steam/Steam/steamapps/workshop"
# Notification script
NOTIFICATION_SCRIPT=$(dirname "${BASH_SOURCE[0]}")/notify_update_status.sh
# Authentication
# When no ../auth.sh exists, fall back to the inline (empty) credentials below;
# the user will then be prompted interactively further down.
if [[ ! -f $(dirname ${BASH_SOURCE[0]})/../auth.sh ]]; then
	# Optional variables
	STEAM_LOGIN="" # Steam login (with a purchased Arma 3)
	STEAM_PASS="" # Steam password
else
	# Temporarily disable xtrace so the decoded password never reaches trace output.
	if [[ $- =~ x ]]; then debug=1; set +x; fi
	source "$(dirname "${BASH_SOURCE[0]}")"/../auth.sh
	STEAM_PASS="$(echo ${STEAM_PASS} | base64 -d)"
	[[ $debug == 1 ]] && set -x
fi
# Check for needed paths and for CURL
# BUGFIX: this code runs at top level, so 'return' is invalid (it only works
# inside a function and would itself fail with "can only `return' from a
# function"); use 'exit' so code 51 actually reaches the caller.
if [[ ! -f "${STEAM_CMD_PATH}" || ! -d "${WORKSHOP_PATH}" ]]; then
	echo "Some path(s) is/(are) missing. Check - does an all paths are correctly setted up! Exit."
	exit 51
elif [[ ! -f "${CURL_CMD}" ]]; then
	echo "CURL is missing. Check - does it installed and pass the correct path to it into variable 'CURL_CMD'. Exit."
	exit 51
fi
## Functions
# Usage
# Print CLI help to stdout (heredoc body reproduced verbatim).
usage() {
cat << EOF
Usage
$0 [ -h ] [ -n ] { -c | -u }
Where:
-h - Show this help
-n - Execute notification script.
-c - Check for MOD's updates, do not update
OR
-u - Update MODs
EOF
}
# Check authorization data for Steam
# Checking for does the Steam login and password are pre-configured?
# Prompt interactively for whichever credential is still empty; abort with 52
# when the user supplies nothing.
if [[ -z "${STEAM_LOGIN}" ]]; then
	clear
	read -e -p "Steam login is undefined. Please, enter it now: " STEAM_LOGIN
	if [[ -z "${STEAM_LOGIN}" ]]; then
		echo -ne "Steam login not specified! Exiting!\n"
		exit 52
	fi
fi
if [[ -z "${STEAM_PASS}" ]]; then
	clear
	read -sep "Steam password is undefined. Please, enter it now (password will not be displayed in console output!): " STEAM_PASS
	if [[ -z "${STEAM_PASS}" ]]; then
		echo -ne "Steam password not specified! Exiting!\n"
		exit 52
	fi
fi
clear
# Guard: fail with code 57 when the global MOD_ID has not been set.
check_mod_id() {
	[[ -n "${MOD_ID}" ]] || return 57
}
# Backup
# Move an existing Workshop directory for ${MOD_ID} aside (suffix
# _old_<timestamp>) so a fresh download cannot clobber it. Reads globals
# WORKSHOP_PATH, STEAM_APP_ID, MOD_ID, MOD_NAME; sets FULL_PATH.
backupwkshpdir() {
	check_mod_id
	FULL_PATH="${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_ID}"
	if [[ -d "${FULL_PATH}" ]]; then
		echo "Workshop target directory for MOD ${MOD_NAME} is already present. Moving it to ${FULL_PATH}_old_$(date +%y%m%d-%H%M)"
		mv -f "${FULL_PATH}" "${FULL_PATH}_old_$(date +%y%m%d-%H%M)" &>/dev/null
	fi
}
# Get original MOD's name from meta.cpp file
# Prints the MOD name with punctuation stripped, whitespace runs replaced by
# '_' and an '@' prefix added (Arma mod-folder convention).
get_mod_name() {
	check_mod_id
	FULL_PATH="${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_ID}"
	if [[ -f "${FULL_PATH}"/meta.cpp ]]; then
		# BUGFIX: a commented-out lower-casing stage used to sit in the middle
		# of this backslash-continued pipeline; that only parsed by accident
		# (bash tolerates a comment after a trailing '|'), so it was removed.
		grep -h "name" "${FULL_PATH}"/meta.cpp | \
		awk -F'"' '{print $2}' | \
		tr -d "[:punct:]" | \
		sed -E 's/\s{1,}/_/g' | \
		sed 's/^/\@/g'
	fi
}
# Mod's application ID from meta.cpp file
# Prints the 'publishedid' value recorded in the MOD's meta.cpp.
get_mod_id() {
	check_mod_id
	FULL_PATH="${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_ID}"
	if [[ -f "${FULL_PATH}"/meta.cpp ]]; then
		# BUGFIX: quote the character class — an unquoted [:punct:] is a glob
		# pattern and would be expanded if a matching file exists in the CWD.
		grep -h "publishedid" "${FULL_PATH}"/meta.cpp | \
		awk '{print $3}' | \
		tr -d "[:punct:]"
	fi
}
# Get the MOD's last updated date from Steam Workshop
# Scrapes the changelog page at ${URL} and stores the value of the first
# '<p id="...">' tag (presumably an epoch timestamp — the callers compare it
# with -gt against a local epoch) into the global WKSHP_UP_ST.
get_wkshp_date() {
	# BUGFIX: the original if/else ran the exact same pipeline in both
	# branches, and fetched the page a third time just to choose the branch;
	# collapsed to a single fetch.
	WKSHP_UP_ST="$(${CURL_CMD} -sN ${URL} | grep -m1 -E -o '<p id=".*">' | awk -F'"' '{ print $2}')"
}
# Give the user a short window (10 x 1s) to skip the pause before the MOD
# list is displayed; any keypress breaks out early.
countdown() {
	local TIMEOUT="10"
	for (( TIMER="${TIMEOUT}"; TIMER>0; TIMER-- )); do
		printf "\rDisplay the list in: ${TIMER}\nor Press any key to continue without waiting... :)"
		# read exits 0 as soon as one key arrives; non-zero on the 1s timeout.
		if read -s -t 1 -n1; then
			break
		fi
		clear
	done
}
# Fix case
# Recursively lower-case every file and directory name under the MOD folder
# (Arma on Linux expects lower-case paths).
fixuppercase() {
	FULL_PATH="${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_ID}"
	# NOTE(review): relies on the Perl flavour of 'rename' (expression syntax);
	# the util-linux flavour would fail here — confirm which one is installed.
	find "${FULL_PATH}" -depth -exec rename 's/(.*)\/([^\/]*)/$1\/\L$2/' {} \;
	# NOTE(review): $? is find's exit status, not rename's.
	if [[ "$?" = "0" ]]; then
		echo -en "Fixed upper case for MOD ${MOD_NAME}\n"
	fi
}
# Fix Steam application ID
# Rewrite 'publishedid' in meta.cpp when it is 0 (Workshop sometimes leaves it
# unset) so later update checks can resolve the MOD.
fixappid() {
	FULL_PATH="${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_ID}"
	check_mod_id
	DMOD_ID=$(get_mod_id) # Downloaded MODs ID
	DMOD_ID="${DMOD_ID%$'\r'}" # strip a possible trailing CR coming from meta.cpp
	if [[ "${DMOD_ID}" = "0" ]]; then
		echo "Steam ApplicationID is 0. Will try to fix."
		sed -i 's/^publishedid.*$/publishedid \= '${MOD_ID}'\;/' "${FULL_PATH}"/meta.cpp
		if [[ "$?" = "0" ]]; then
			echo -en "Steam ApplicationID is fixed.\n"
		fi
	fi
}
# Check all installed mods for updates in Steam Workshop.
# Fills three parallel arrays for every installed MOD whose Workshop changelog
# is newer than the local directory's change time:
#   LIST_TO_UPDATE - MOD names, MOD_UP_CMD - steamcmd arguments,
#   MOD_ID_LIST - workshop ids.
checkupdates() {
	echo "Checking for updates..."
	# check all installed MODs for updates.
	LIST_TO_UPDATE=( )
	MOD_UP_CMD=( )
	MOD_ID_LIST=( )
	# Skip previously backed-up "*_old_*" directories.
	for MOD_DIR in $(ls -1 "${WORKSHOP_PATH}"/content/"${STEAM_APP_ID}" | grep -v -E "*old*"); do
		# Extract id and name from meta.cpp; strip possible trailing CRs.
		MOD_ID=$(grep "publishedid" "${WORKSHOP_PATH}"/content/"${STEAM_APP_ID}"/"${MOD_DIR}"/meta.cpp | awk -F"=" '{ print $2 }' | tr -d [:blank:] | tr -d [:space:] | tr -d ";$")
		MOD_ID="${MOD_ID%$'\r'}"
		URL="${STEAM_CHANGELOG_URL}/${MOD_ID}"
		URL="${URL%$'\r'}"
		MOD_NAME=$(grep "name" "${WORKSHOP_PATH}"/content/"${STEAM_APP_ID}"/"${MOD_DIR}"/meta.cpp | awk -F"=" '{ print $2 }' | tr [:space:] "_" | tr -d ";$" | awk -F\" '{ print $2 }')
		check_mod_id
		FULL_PATH="${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_ID}"
		# Remote timestamp (WKSHP_UP_ST) vs. local dir change time, both
		# compared as integers below.
		get_wkshp_date
		UPDATE_TIME="${WKSHP_UP_ST}"
		CREATION_TIME=$(date --date="$(stat ${FULL_PATH} | sed '6q;d' | cut -d" " -f2-3)" +%s ) #Fix for MC syntax hilighting #"
		if [[ "${MOD_ID}" = "0" ]]; then
			echo -ne "\033[37;1;41mWrong ID for MOD ${MOD_NAME} in file 'meta.cpp'\033[0m You can update it manually and the next time it will be checked well. \n"
			continue
		elif [[ ! -f "${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_DIR}/meta.cpp" ]]; then
			echo -ne "\033[37;1;41mNo 'meta.cpp' file found for MOD ${MOD_NAME}.\033[0m\n"
			continue
		else
			# Compare update time
			if [[ "${UPDATE_TIME}" -gt "${CREATION_TIME}" ]]; then
				# Construct the list of MODs to update
				MOD_UP_CMD+=("+workshop_download_item ${STEAM_APP_ID} ${MOD_ID}")
				LIST_TO_UPDATE+=("${MOD_NAME}")
				MOD_ID_LIST+=("${MOD_ID}")
				echo -en "\033[37;1;42mMod \e[34m${MOD_NAME}\e[37;1;42m ID ${MOD_ID} can be updated.\033[0m\n\n"
				continue
			else
				echo -en "MOD \e[1;32m${MOD_NAME}\e[0m ID ${MOD_ID} is already up to date!\n\n"
				continue
			fi
		fi
	done
	export LIST_TO_UPDATE
	export MOD_UP_CMD
}
# Download MOD by its ID
# Runs steamcmd in a retry loop until download+validate succeeds, then checks
# the target directory exists. Uses globals STEAM_CMD_PATH, STEAM_LOGIN,
# STEAM_PASS, MOD_UP_CMD, MOD_ID.
download_mod() {
	# Keep the password out of xtrace output while steamcmd runs.
	if [[ $- =~ x ]]; then debug=1; set +x; fi
	# NOTE(review): "${MOD_UP_CMD}" is passed as a single argument — confirm
	# steamcmd accepts "+cmd app id" as one word, otherwise it must be unquoted.
	until "${STEAM_CMD_PATH}" +login "${STEAM_LOGIN}" "${STEAM_PASS}" "${MOD_UP_CMD}" validate +quit; do
		# BUGFIX: was 'echo -n "\n...\n"', which printed the backslash escapes
		# literally; -e makes echo interpret them.
		echo -en "\nRetrying after error while downloading.\n"
		sleep 3
	done
	[[ $debug == 1 ]] && set -x
	if [[ ! -d "${WORKSHOP_PATH}"/content/"${STEAM_APP_ID}"/"${MOD_ID}" ]]; then
		echo "NOT Downloaded! Exiting!"
		exit 54
	fi
	echo -e "\n"
}
# Update single MOD
# Deletes the current local copy of ${MOD_ID}, downloads it again and
# normalises file-name case. Expects MOD_UP_CMD to be prepared by the caller.
update_mod() {
	check_mod_id
	FULL_PATH="${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_ID}"
	rm -rf "${FULL_PATH}"
	download_mod
	fixuppercase
}
# Ask for confirmation
# Loop until the user answers: Yes returns 0, No exits 1, 'quit' exits 58;
# anything else re-prompts.
simplequery() {
	while :; do
		read -e -p "Enter [y|Y]-Yes, [n|N]-No or [quit]-to abort: " ANSWER
		case "${ANSWER}" in
			y | Y )
				return 0
				;;
			n | N )
				exit 1
				;;
			quit )
				echo -ne "\033[37;1;41mWarning!\033[0m Some important changes wasn't made. This could or could not to cause the different problems.\n"
				exit 58
				;;
			* )
				echo -ne "Wrong selection! Try again or type 'quit' to interrupt process.\n"
				;;
		esac
	done
}
# Update all MODs in a batch mode
# Iterates over every id collected by checkupdates() and re-downloads it.
update_all() {
	# BUGFIX: the original flattened the array with [*] into one word and then
	# relied on unquoted re-splitting; iterate over the array elements properly.
	local ID
	for ID in "${MOD_ID_LIST[@]}"; do
		MOD_ID="${ID}"
		check_mod_id
		FULL_PATH="${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_ID}"
		MOD_NAME="$(get_mod_name)"
		backupwkshpdir "${MOD_ID}"
		MOD_UP_CMD="+workshop_download_item ${STEAM_APP_ID} ${MOD_ID} "
		download_mod
		fixuppercase
		fixappid
		unset MOD_ID
		unset MOD_NAME
	done
}
# Send notification
# With -n, forward MSG_SEND to the external notification script; otherwise
# print MSG to stdout with the markdown '*' markers stripped.
notify_send() {
	if [[ -z "${DO_NOTIFY}" ]]; then
		echo -en "${MSG}" | tr -d '*'
	else
		"${NOTIFICATION_SCRIPT}" "${MSG_SEND}"
	fi
}
# Check CLI options
DO_CHECK=
DO_UPDATE=
DO_NOTIFY=
while getopts "ucnh" opt; do
	case $opt in
		c)
			DO_CHECK=1
			;;
		u)
			DO_UPDATE=1
			;;
		n)
			DO_NOTIFY=1
			;;
		h)
			usage
			exit 0
			;;
		*)
			echo "Wrong parameter!"
			exit 1
			;;
	esac
done
# Non-interactive mode: -c / -u (optionally with -n) run straight through and
# exit; with no flags at all, execution falls through to the interactive menu.
if [[ -n "${DO_CHECK}" && -n "${DO_UPDATE}" ]]; then
	echo "Error: Only one of check or update may be supplied" >&2
	exit 1
elif [[ -z "${DO_CHECK}" && -z "${DO_UPDATE}" && -n "${DO_NOTIFY}" ]]; then
	echo "Error: -n option can not be used separate of others!"
	exit 1
elif [[ -n "${DO_CHECK}" ]]; then
	checkupdates
	if [[ -n "${LIST_TO_UPDATE[*]}" ]]; then
		MSG="Can be updated:\n**- $(echo ${LIST_TO_UPDATE[*]} | sed 's/ /\\n- /g')**\nPlease, proceed manually."
		MSG_SEND=":exclamation: ${MSG}"
		notify_send
		exit 0
	else
		exit 0
	fi
elif [[ -n "${DO_UPDATE}" ]]; then
	checkupdates
	# Print MODs which could be updated
	if [[ -n "${LIST_TO_UPDATE[*]}" ]]; then
		if update_all; then
			MSG="These Mod(s) has been updated:\n**- $(echo ${LIST_TO_UPDATE[*]} | sed 's/ /\\n- /g')**"
			MSG_SEND=":white_check_mark::exclamation: ${MSG}"
			notify_send
			exit 0
		else
			MSG="**WARNING!**\n Something went wrong during update! **Check it!**"
			notify_send
			exit 1
		fi
	else
		exit 0
	fi
fi
## End of a functions block
# Ask user for action
echo -ne "After selecting to 'Update' -> 'Single' - you will see the list of installed MODs.\n\033[37;1;41mPlease, copy the needed \"publishedid\" before exiting from the list.\nIt will be unavailabe after exit.\nTo get the list again - you'll need to restart the script\033[0m\n"
echo -ne "What do you want to do? \n [u|U] - Update MOD \n [c|C] - Check all MODs for updates\n [d|D] - Download MOD?\n [q|Q] - Quit\n]"
echo -ne "Any other selection will cause script to stop.\n"
read -e -p "Make selection please: " ACTION
# Interactive dispatch on the selected action.
case "${ACTION}" in
## Actions section
# Check for updates, do not update
c | C )
	checkupdates
	# Print MODs which could be updated
	if [[ -n "${LIST_TO_UPDATE[*]}" ]]; then
		echo -ne "Mods ${LIST_TO_UPDATE[*]} can be updated. Please, proceed manually."
	else
		echo "All MODs are up to date. Exiting."
		exit 0
	fi
	;;
# Proceed update after check
u | U )
	clear
	# Ask user to select update mode
	read -e -p "How do you want to update? [b|B]-Batch or [s|S]-Single MOD? " UPD_M
	case "${UPD_M}" in
	# Batch update
	b | B )
		# Check updates for installed MODs
		checkupdates
		# Print MODs which could be updated
		if [[ -n "${LIST_TO_UPDATE[*]}" ]]; then
			update_all
			echo -ne "These Mods has been updated:\n ${LIST_TO_UPDATE[*]}"
		else
			echo "All MODs are up to date. Exiting."
			exit 0
		fi
		;;
	# Update a single MOD
	s | S )
		countdown
		echo -ne "$(grep -hr -A1 'publishedid' --include=meta.cpp -E --exclude-dir='*_old_*' ${WORKSHOP_PATH}/content/${STEAM_APP_ID})\n" | less
		echo -ne "Please, specify MOD's ID.\n"
		# Ask user to enter a MOD's name to update
		echo -ne "You have installed a MODs listed above. Please, enter the MODs ID to update:\n"
		unset MOD_ID
		unset FULL_PATH
		read -er MOD_ID
		# Check syntax
		DIGITS="^[0-9]+$"
		# BUGFIX: the original test was '! numeric && empty', which let any
		# non-numeric, non-empty input straight through. An empty string never
		# matches the digits regex, so a single negated match covers both.
		if ! [[ "${MOD_ID}" =~ ${DIGITS} ]]; then
			echo -ne "Wrong MOD's ID! Exiting!\n"
			exit 54
		else
			# Update the single selected MOD
			MOD_ID="${MOD_ID%$'\r'}"
			MODS_PATH="${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_ID}"
			MOD_NAME=$(get_mod_name)
			echo "Starting to update MOD ${MOD_NAME}..."
			if [[ "${MOD_ID}" = "0" ]]; then
				echo -ne "MOD application ID is not configured for mod ${MOD_NAME} in file ${FULL_PATH}/meta.cpp \n"
				echo -ne "Find it by the MODs name in a Steam Workshop and update in MODs 'meta.cpp' file or use Download option to get MOD by it's ID. Exiting.\n"
				exit 56
			fi
			# (the old 'elif MOD_ID is empty' branch was unreachable after the
			# syntax check above and has been dropped)
			URL="${STEAM_CHANGELOG_URL}/${MOD_ID}"
			URL="${URL%$'\r'}"
			get_wkshp_date
			UPDATE_TIME="${WKSHP_UP_ST}"
			CREATION_TIME=$(date --date="$(stat ${MODS_PATH} | sed '6q;d' | cut -d" " -f2-3)" +%s ) #Fix for MC syntax hilighting #"
			if [[ ${UPDATE_TIME} -gt ${CREATION_TIME} ]]; then
				MOD_UP_CMD=+"workshop_download_item ${STEAM_APP_ID} ${MOD_ID}"
				echo "${MOD_UP_CMD}"
				backupwkshpdir
				if update_mod; then
					echo "MOD's update is successfully downloaded to ${FULL_PATH}"
					fixappid "${FULL_PATH}"
				fi
			else
				echo -ne "\033[37;1;42mMOD ${MOD_NAME} is already up to date.\033[0m \n"
				exit 0
			fi
		fi
		;;
	* )
		echo -ne "Wrong selection! Exiting.\n"
		exit 53
		;;
	esac
	;;
# Download new MOD
d | D )
	echo ""
	# Ask user to enter a MOD Steam AppID
	read -e -p "Please, enter an Application ID in a Steam WorkShop to download: " MOD_ID
	if [[ -d "${WORKSHOP_PATH}"/content/"${STEAM_APP_ID}"/"${MOD_ID}" ]]; then
		echo "Already present! Use UPDATE action. Exiting!"
		exit 1
	fi
	# BUGFIX: plain 'echo' printed the trailing '\n' literally; -e interprets it.
	echo -e "Application ID IS: ${MOD_ID}\n"
	echo "Starting to download MOD ID ${MOD_ID}..."
	# BUGFIX: MODS_PATH was copied from FULL_PATH, which is unset at this
	# point in the script; build the path from the entered MOD_ID instead.
	MODS_PATH="${WORKSHOP_PATH}/content/${STEAM_APP_ID}/${MOD_ID}"
	MOD_UP_CMD=+"workshop_download_item ${STEAM_APP_ID} ${MOD_ID}"
	echo "${MOD_UP_CMD}"
	download_mod
	fixuppercase
	fixappid
	;;
q | Q )
	echo "Quit"
	exit 0
	;;
# Any other input aborts with code 53 ("wrong selection").
* )
	echo -ne "Wrong selection! Exiting!\n"
	exit 53
	;;
esac
echo ""
exit 0
|
<filename>src/test/java/com/cathive/fx/guice/thread/FxApplicationThreadMethodInterceptorTest.java
/*
* Copyright (C) 2012-2013 The Cat Hive Developers.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cathive.fx.guice.thread;
import static org.testng.Assert.assertNotNull;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.cathive.fx.guice.FxApplicationThread;
import com.cathive.fx.guice.example.ExampleFxHelper;
import com.cathive.fx.guice.fxml.FXMLLoadingModule;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.matcher.Matchers;
/**
* @author <NAME>
*/
@Test(singleThreaded = true)
public class FxApplicationThreadMethodInterceptorTest {

    /** Injector wired with the FXML module plus the @FxApplicationThread interceptor. */
    private Injector injector;

    @BeforeClass
    protected void initialize() {
        final AbstractModule interceptorModule = new AbstractModule() {
            @Override
            protected void configure() {
                bindInterceptor(
                        Matchers.any(),
                        Matchers.annotatedWith(FxApplicationThread.class),
                        new FxApplicationThreadMethodInterceptor());
            }
        };
        this.injector = Guice.createInjector(new FXMLLoadingModule(), interceptorModule);
    }

    /** Convenience accessor for a freshly injected helper instance. */
    private ExampleFxHelper helper() {
        return injector.getInstance(ExampleFxHelper.class);
    }

    @Test(description = "Make sure that methods annotated with @FxApplicationThread can be identified correctly during runtime!")
    public void testAnnotationRetention() throws NoSuchMethodException, SecurityException {
        final FxApplicationThread annotation = ExampleFxHelper.class.getMethod("methodAnnotatedWithFxAppThread1").getAnnotation(FxApplicationThread.class);
        assertNotNull(annotation, "No @FxApplicationThread annotation found. This probably means, that someone changed the 'Retention' of the class from 'RetentionPolicy.RUNTIME' to something useless!");
    }

    @Test
    public void testInterceptorWithNonAnnotatedMethods() {
        // Unannotated methods must pass through the interceptor untouched.
        helper().methodNotAnnotatedWithFxAppThread();
    }

    @Test(dependsOnMethods = "testAnnotationRetention", expectedExceptions = RuntimeException.class)
    public void testInterceptorWithAnnotatedMethodsWithReturnType() {
        // Annotated methods with a return value are rejected at call time.
        helper().methodAnnotatedFxAppThreadWithReturnType();
    }

    @Test(dependsOnMethods = "testAnnotationRetention", expectedExceptions = RuntimeException.class)
    public void testInterceptorWithAnnotatedMethodsThatThrowsException() throws Exception {
        helper().methodAnnotatedWithFxAppThreadThatThrowsException();
    }

    @Test(dependsOnMethods = "testAnnotationRetention")
    public void testCorrectlyAnnotatedMethods() throws Exception {
        // TODO How can we correctly test this?
    }
}
|
<gh_stars>0
// Copyright 2020 Self Group Ltd. All Rights Reserved.
import SelfSDK from '../../src/self-sdk'
import { exit } from 'process';
/**
 * Sends a fact request for `phone_number` to `selfID` through an intermediary
 * and logs the outcome. Closes the SDK and exits the process when done.
 */
async function request(appID: string, appSecret: string, selfID: string) {
    // BUGFIX: the old check (process.env["SELF_ENV"] != "") was also true when
    // the variable was *unset*, because process.env returns undefined and
    // undefined != "" — so opts.env was silently set to undefined.
    const opts: { logLevel: string; env?: string } = { logLevel: 'debug' };
    const selfEnv = process.env['SELF_ENV'];
    if (selfEnv) {
        opts.env = selfEnv;
    }
    // Folder next to the examples root where the SDK persists its state.
    const storageFolder = __dirname.split('/').slice(0, -1).join('/') + '/.self_storage';
    const sdk = await SelfSDK.build(appID, appSecret, 'random', storageFolder, opts);
    sdk.logger.info(`sending fact request through an intermediary to ${selfID}`);
    sdk.logger.info(`waiting for user input`);
    try {
        // Ask an intermediary to verify the phone number matches without
        // revealing the actual value to us.
        const res = await sdk.facts().requestViaIntermediary(selfID, [{
            fact: 'phone_number',
            operator: '==',
            sources: ['user_specified'],
            expected_value: '+44111222333'
        }]);
        if (!res) {
            sdk.logger.warn(`fact request has timed out`);
        } else if (res.status === 'unauthorized') {
            sdk.logger.warn('you are unauthorized to run this action');
        } else if (res.status === 'accepted') {
            sdk.logger.info('your assertion is....');
            sdk.logger.info(res.attestationValuesFor('phone_number')[0]);
        } else {
            sdk.logger.info('your request has been rejected');
        }
    } catch (error) {
        // catch values are unknown under strict settings; stringify defensively.
        sdk.logger.error(error instanceof Error ? error.toString() : String(error));
    }
    sdk.close();
    exit();
}
/**
 * Entry point: reads the Self credentials from the environment and runs the
 * fact request. Fails fast with a clear message instead of passing undefined
 * values into the SDK.
 */
async function main() {
    const appID = process.env['SELF_APP_ID'];
    const appSecret = process.env['SELF_APP_SECRET'];
    const selfID = process.env['SELF_USER_ID'];
    if (!appID || !appSecret || !selfID) {
        throw new Error('SELF_APP_ID, SELF_APP_SECRET and SELF_USER_ID must all be set');
    }
    await request(appID, appSecret, selfID);
}
main();
|
<gh_stars>0
package level_three;
//package com.google.challenges;
import java.util.HashMap;
import java.util.Map;
//import org.apache.commons.lang3.time.StopWatch;
/**
*
* @author Peter
* With her LAMBCHOP doomsday device finished, Commander Lambda is preparing for her debut on the galactic stage - but in order to make a
* grand entrance, she needs a grand staircase! As her personal assistant, you've been tasked with figuring out how to build the best
* staircase EVER.
*
* Lambda has given you an overview of the types of bricks available, plus a budget. You can buy different amounts of the different types
* of bricks (for example, 3 little pink bricks, or 5 blue lace bricks). Commander Lambda wants to know how many different types of staircases
* can be built with each amount of bricks, so she can pick the one with the most options.
*
* Each type of staircase should consist of 2 or more steps. No two steps are allowed to be at the same height - each step must be lower
* than the previous one. All steps must contain at least one brick. A step's height is classified as the total amount of bricks that make
* up that step.
*
* For example, when N = 3, you have only 1 choice of how to build the staircase, with the first step having a height of 2 and the second
* step having a height of 1: (# indicates a brick)
*
* #
* ##
* 21
*
* When N = 4, you still only have 1 staircase choice:
* #
* #
* ##
* 31
*
* But when N = 5, there are two ways you can build a staircase from the given bricks. The two staircases can have heights (4, 1) or (3, 2),
* as shown below:
* #
* #
* #
* ##
* 41
*
* #
* ##
* ##
* 32
*
* Write a function called answer(n) that takes a positive integer n and returns the number of different staircases that can be built from
* exactly n bricks. n will always be at least 3 (so you can have a staircase at all), but no more than 200, because Commander Lambda's not
* made of money!
*
*/
public class the_grandest_staircase_of_them_all {

	// Memoization table: bricks-remaining -> (previous step height -> count).
	private static Map<Integer, Map<Integer, Integer>> mem_map;

	/**
	 * Returns the number of staircases with 2 or more strictly decreasing
	 * steps that can be built from exactly n bricks.
	 *
	 * @param n total number of bricks (3 &lt;= n &lt;= 200)
	 * @return the number of distinct staircases
	 */
	public static int answer(int n) {
		mem_map = new HashMap<Integer, Map<Integer, Integer>>();
		return next_val(n, 0);
	}

	/**
	 * Counts the ways to spend the remaining {@code n} bricks on steps that
	 * are each strictly taller than {@code height}.
	 *
	 * @param n      bricks still available
	 * @param height height of the previously built step (0 = no step yet)
	 * @return number of valid completions
	 */
	public static int next_val(int n, int height) {
		if (n == 0) {
			return 1; // all bricks used exactly: one valid staircase
		}
		if (n - height <= 0) {
			return 0; // cannot build a step taller than the previous one
		}
		Map<Integer, Integer> byHeight = mem_map.get(n);
		if (byHeight == null) {
			byHeight = new HashMap<Integer, Integer>();
			mem_map.put(n, byHeight);
		}
		final Integer cached = byHeight.get(height);
		if (cached != null) {
			return cached;
		}
		// At the top level (height == 0) the first step may use at most n - 1
		// bricks, which forces the staircase to have at least two steps.
		final int maxStep = (height == 0) ? n - 1 : n;
		int ways = 0;
		for (int step = height + 1; step <= maxStep; step++) {
			ways += next_val(n - step, step);
		}
		// BUGFIX/cleanup: store the final count exactly once. The original
		// re-put the running sum on every loop iteration and, in the
		// containsKey branch, wrote partial sums into the shared map.
		byHeight.put(height, ways);
		return ways;
	}

	public static void main(String[] args) {
		System.out.println(answer(3));   // 1
		System.out.println(answer(4));   // 1
		System.out.println(answer(5));   // 2
		System.out.println(answer(10));  // 9 (the original comment said 2)
		// 487067745 — memoized run takes ~0.05s vs ~17s without memoization.
		System.out.println(answer(200));
	}
}
|
<filename>src/main/java/stincmale/server/reqres/spring/http/package-info.java<gh_stars>0
/**
* Contains <a href="https://spring.io/projects/spring-framework">Spring</a>-specific functionality useful for request/response HTTP
* implementations of {@link stincmale.server.Server}.
*/
@javax.annotation.ParametersAreNonnullByDefault
package stincmale.server.reqres.spring.http;
|
#include <iostream>
#include <vector>
/** Binary-tree node for the root-to-leaf path-sum check (LeetCode 112). */
struct TreeNode
{
    int val;
    TreeNode *left;
    TreeNode *right;
    // Consistency fix: use nullptr (as Solution below already does) instead
    // of the C macro NULL.
    TreeNode(int x) : val(x), left(nullptr), right(nullptr) {}
};

class Solution
{
public:
    // Returns true iff some root-to-leaf path's values sum to 'sum'.
    // An empty tree has no root-to-leaf path, so the answer is false.
    bool hasPathSum(TreeNode* root, int sum)
    {
        if (root == nullptr)
            return false;
        return hasPathSumHelper(root, sum, 0);
    }
private:
    // Depth-first walk carrying the running sum of ancestors in 'sum';
    // the comparison against 'target' happens only at leaves.
    bool hasPathSumHelper(TreeNode* node, int target, int sum)
    {
        sum += node->val;
        if (node->left == nullptr && node->right == nullptr)
            return sum == target;
        return ((node->left && hasPathSumHelper(node->left, target, sum))
            || (node->right && hasPathSumHelper(node->right, target, sum)));
    }
};
<reponame>artsy/Mitosis<filename>source/facebook/types.js
// @flow
/** A structure used for triggering quick replies. */
export interface QuickReply {
  content_type: "text",
  title: string,
  payload?: string
}
/** Button for attaching to images. */
export interface FBButton {
  // "web_url" buttons open 'url'; "postback" buttons send 'payload' back.
  type: "web_url" | "postback",
  title: string,
  url?: ?string,
  payload?: ?string
}
/** A single card element with image, links and optional buttons. */
export interface GenericElement {
  title: string,
  subtitle?: string,
  item_url: string,
  image_url: string,
  buttons?: Array<FBButton>
}
|
<reponame>davidyu62/egovframe-runtime<filename>Batch/org.egovframe.rte.bat.core/src/test/java/org/egovframe/brte/sample/example/support/EgovStagingItemProcessor.java
/*
* Copyright 2006-2007 the original author or authors. *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.egovframe.brte.sample.example.support;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.dao.OptimisticLockingFailureException;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.util.Assert;
/**
* BATCH_STAGING에 processed 값 업데이트
*
* @param <T> item type
*
* @see EgovStagingItemReader
* @see EgovStagingItemWriter
* @see EgovProcessIndicatorItemWrapper
*
*
* == 개정이력(Modification Information) ==
*
* 수정일 수정자 수정내용
* ------- -------- ---------------------------
* 2017.07.06 장동한 SimpleJdbcTemplate(Deprecated) > NamedParameterJdbcTemplate 변경
*
*/
// Cleanup: the @SuppressWarnings("deprecation") annotation was removed — per
// the changelog above, the deprecated SimpleJdbcTemplate was already replaced
// by NamedParameterJdbcTemplate, so nothing deprecated remains in this class.
public class EgovStagingItemProcessor<T> implements ItemProcessor<EgovProcessIndicatorItemWrapper<T>, T>, InitializingBean {

	// NamedParameterJdbcTemplate used for all database access.
	private NamedParameterJdbcTemplate namedParameterJdbcTemplate;

	/**
	 * Sets the DataSource and builds the JDBC template from it.
	 *
	 * @param dataSource source of JDBC connections
	 */
	public void setDataSource(DataSource dataSource) {
		this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
	}

	/**
	 * Configuration check: fails fast when no DataSource was injected.
	 */
	@Override
	public void afterPropertiesSet() throws Exception {
		Assert.notNull(namedParameterJdbcTemplate, "Either jdbcTemplate or dataSource must be set");
	}

	/**
	 * Marks the wrapped BATCH_STAGING row as processed (NEW -&gt; DONE) and
	 * returns the unwrapped item.
	 *
	 * @throws OptimisticLockingFailureException when the row was not updated
	 *         exactly once, i.e. another worker processed it concurrently
	 */
	@Override
	public T process(EgovProcessIndicatorItemWrapper<T> wrapper) throws Exception {
		Map<String, Object> namedParameters = new HashMap<String, Object>();
		namedParameters.put("processed1", EgovStagingItemWriter.DONE);
		namedParameters.put("id", wrapper.getId());
		namedParameters.put("processed2", EgovStagingItemWriter.NEW);
		String query = "UPDATE BATCH_STAGING SET PROCESSED=:processed1 WHERE ID=:id AND PROCESSED=:processed2";
		int count = namedParameterJdbcTemplate.update(query, namedParameters);
		if (count != 1) {
			// BUGFIX: the original message was missing its closing parenthesis.
			throw new OptimisticLockingFailureException("The staging record with ID=" + wrapper.getId()
					+ " was updated concurrently when trying to mark as complete (updated " + count + " records).");
		}
		return wrapper.getItem();
	}
}
|
#!/bin/bash
# elasticsearch usage notes / handy commands
# Start the es service
sudo systemctl restart elasticsearch.service
# Check that es came up; startup takes a while, roughly 10s to 1min
curl 127.0.0.1:9200
# Stop the es service
sudo systemctl stop elasticsearch.service
# Disable es start on boot
sudo systemctl disable elasticsearch.service
# Open an index, endpoint /{index_name}/_open
curl -XPOST http://localhost:9200/products/_open?pretty
# Close an index, endpoint /{index_name}/_close
curl -XPOST http://localhost:9200/products/_close?pretty
# Count the documents of one type in an index, endpoint /{index_name}/{type_name}/_count
curl http://localhost:9200/products/_doc/_count?pretty
# Create an index, endpoint /{index_name}
curl -XPUT http://localhost:9200/test_index
# Show the details of an index, endpoint /{index_name}
curl http://localhost:9200/test_index
curl http://localhost:9200/test_index?pretty # pretty-print the JSON response
# Show which index an alias points to, endpoint /_alias/{index_alias}?pretty
curl http://localhost:9200/_alias/products?pretty
# Create a type (mapping) in an index, endpoint /{index_name}/_mapping/{type_name}
curl -H'Content-Type: application/json' -XPUT http://localhost:9200/test_index/_mapping/_doc?pretty -d'{
"properties": {
"title": { "type": "text", "analyzer": "ik_smart" },
"description": { "type": "text", "analyzer": "ik_smart" },
"price": { "type": "scaled_float", "scaling_factor": 100 }
}
}'
# Insert a document into an index, endpoint /{index_name}/{type_name}/{id}
# Unlike mysql, es ids are not auto-increment — you must supply one yourself
curl -H'Content-Type: application/json' -XPUT http://localhost:9200/test_index/_doc/1?pretty -d'{
"title": "iPhone X",
"description": "新品到货",
"price": 8848
}'
# Read a document, endpoint /{index_name}/{type_name}/{id}
curl http://localhost:9200/test_index/_doc/1?pretty
# Search, endpoint /{index_name}/{type_name}/_search
curl -XPOST -H'Content-Type:application/json' http://localhost:9200/test_index/_doc/_search?pretty -d'
{
"query" : { "match" : { "description" : "新品" }}
}'
# PHP elasticsearch/elasticsearch usage
# app('es')->get(['index' => 'test_index','type' => '_doc', 'id' => 1]) fetch one document's attributes
# app('es')->index(['id' => $arr['id'], 'index' => 'products', 'type' => '_doc', 'body' => $arr]); write a document into es
# app('es')->search($params); run a query
# 词项查询
#$params = [
# 'index' => 'products',
# 'type' => '_doc',
# 'body' => [
# 'query' => [
# 'bool' => [
# 'filter' => [
# ['term' => ['on_sale' => true]],
# ],
# ],
# ],
# ],
#];
#app('es')->search($params);
# 分页查询
#$params = [
# 'index' => 'products',
# 'type' => '_doc',
# 'body' => [
# 'from' => 0,
# 'size' => 5,
# 'query' => [
# 'bool' => [
# 'filter' => [
# ['term' => ['on_sale' => true]],
# ],
# ],
# ],
# ],
#];
#$results = app('es')->search($params);
#count($results['hits']['hits']);
#$results['hits']['total']
# 排序
#$params = [
# 'index' => 'products',
# 'type' => '_doc',
# 'body' => [
# 'from' => 0,
# 'size' => 5,
# 'query' => [
# 'bool' => [
# 'filter' => [
# ['term' => ['on_sale' => true]],
# ],
# ],
# ],
# 'sort' => [
# ['price' => 'desc']
# ],
# ],
#];
#$results = app('es')->search($params);
#collect($results['hits']['hits'])->pluck('_source.price');
# 多字段匹配查询
#$params = [
# 'index' => 'products',
# 'type' => '_doc',
# 'body' => [
# 'query' => [
# 'bool' => [
# 'filter' => [
# ['term' => ['on_sale' => true]],
# ],
# 'must' => [
# [
# 'multi_match' => [
# 'query' => 'iPhone',
# 'fields' => [
# 'title^3',
# 'long_title^2',
# 'description',
# ],
# ],
# ],
# ],
# ],
# ],
# ],
#];
#app('es')->search($params);
# 多字段匹配查询支持 Nested(嵌套) 对象
#$params = [
# 'index' => 'products',
# 'type' => '_doc',
# 'body' => [
# 'query' => [
# 'bool' => [
# 'filter' => [
# ['term' => ['on_sale' => true]],
# ],
# 'must' => [
# [
# 'multi_match' => [
# 'query' => '256G',
# 'fields' => [
# 'skus.title',
# 'skus.description',
# 'properties.value',
# ],
# ],
# ],
# ],
# ],
# ],
# ],
#];
#app('es')->search($params);
|
# SPDX-FileCopyrightText: 2017 <NAME> for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""
`adafruit_max9744`
====================================================
CircuitPython module for the MAX9744 20W class D amplifier. See
examples/simpletest.py for a demo of the usage.
* Author(s): <NAME>
Implementation Notes
--------------------
**Hardware:**
* Adafruit `MAX9744 Stereo 20W Class D Audio Amplifier
<https://www.adafruit.com/product/1752>`_ (Product ID: 1752)
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the ESP8622 and M0-based boards:
https://github.com/adafruit/circuitpython/releases
"""
from micropython import const
__version__ = "0.0.0-auto.0"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_MAX9744.git"
# Internal constants:
_MAX9744_DEFAULT_ADDRESS = const(0b01001011)  # default I2C address (see AD1/AD2 note on the class)
_MAX9744_COMMAND_VOLUME = const(0b00000000)  # set absolute volume (ORed with the 6-bit level)
_MAX9744_COMMAND_FILTERLESS = const(0b01000000)  # presumably selects filterless modulation -- unused here
_MAX9744_COMMAND_CLASSIC_PWM = const(0b01000001)  # presumably selects classic PWM modulation -- unused here
_MAX9744_COMMAND_VOLUME_UP = const(0b11000100)  # raise volume by one step
_MAX9744_COMMAND_VOLUME_DOWN = const(0b11000101)  # lower volume by one step
class MAX9744:
    """MAX9744 20 watt class D amplifier.

    :param i2c: The I2C bus for the device.
    :param address: (Optional) The address of the device if it has been
        overridden from the default with the AD1, AD2 pins.
    """

    # Global buffer for writing data. This saves memory use and prevents
    # heap fragmentation. However this is not thread-safe or re-entrant by
    # design!
    _BUFFER = bytearray(1)

    def __init__(self, i2c, *, address=_MAX9744_DEFAULT_ADDRESS):
        # This device doesn't use registers and instead just accepts a single
        # command string over I2C. As a result we don't use bus device or
        # other abstractions and just talk raw I2C protocol.
        self._i2c = i2c
        self._address = address

    def _write(self, val):
        # Perform a write to update the amplifier state.
        try:
            # Make sure bus is locked before write.
            while not self._i2c.try_lock():
                pass
            # Build bytes to send to device with updated value.
            self._BUFFER[0] = val & 0xFF
            self._i2c.writeto(self._address, self._BUFFER)
        finally:
            # Ensure bus is always unlocked.
            self._i2c.unlock()

    def _set_volume(self, volume):
        # Set the volume to the specified level (0-63).
        assert 0 <= volume <= 63
        self._write(_MAX9744_COMMAND_VOLUME | (volume & 0x3F))

    # BUG FIX: the docstring used to be passed as property()'s third
    # positional argument, which is fdel -- that left the property
    # undocumented and made `del amp.volume` raise
    # "'str' object is not callable".  Pass None for fdel and the docstring
    # as the fourth (doc) argument instead.
    # pylint: disable=line-too-long
    volume = property(
        None,
        _set_volume,
        None,
        "Set the volume of the amplifier. Specify a value from 0-63 where 0 is muted/off and 63 is maximum volume.",
    )
    # pylint: enable=line-too-long

    def volume_up(self):
        """Increase the volume by one level."""
        self._write(_MAX9744_COMMAND_VOLUME_UP)

    def volume_down(self):
        """Decrease the volume by one level."""
        self._write(_MAX9744_COMMAND_VOLUME_DOWN)
|
# DOTFILES_SHELL="$(dirname "$(readlink -f -- "$0")")"
DOTFILES_SHELL="$(dirname "$(readlink "$0")")"
# BUG FIX: quote the nested $(dirname ...) too, and every later expansion,
# so paths containing spaces survive word splitting.
DOTFILES="$(dirname "$(dirname "$DOTFILES_SHELL")")"

# Restore PATH from the saved copy on re-source so repeated sourcing does
# not keep appending duplicate entries.
if [ "${SAVED_PATH:-==Unset==}" = "==Unset==" ]; then
    export SAVED_PATH="${PATH}"
else
    export PATH="${SAVED_PATH}"
fi

# env # # # # # # # # # # # # # # # # #
if [ -f "$DOTFILES/.env" ]; then
    source "$DOTFILES/.env"
fi
if [ -z "$CODE" ]; then CODE=$HOME/Code; fi
if [ -z "$CURRENT" ]; then CURRENT=$CODE; fi
if [ -z "$EMAIL" ]; then EMAIL=katie.k7r@gmail.com; fi

# dev # # # # # # # # # # # # # # # # #
for f in "$DOTFILES_SHELL"/dev/*; do
    [ -f "$f" ] && source "$f"
done

# aliases # # # # # # # # # # # # # # #
if [ -f "$DOTFILES_SHELL/_aliases.sh" ]; then
    source "$DOTFILES_SHELL/_aliases.sh"
fi

# paths # # # # # # # # # # # # # # # #
if [ -f "$DOTFILES_SHELL/_paths.sh" ]; then
    source "$DOTFILES_SHELL/_paths.sh"
fi

# &c # # # # # # # # # # # # # # # # #
|
<gh_stars>1-10
/*
* Copyright 2017 agido GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agido.idea.settings.config;
import com.intellij.util.xmlb.annotations.AbstractCollection;
import com.intellij.util.xmlb.annotations.Property;
import java.util.ArrayList;
import java.util.List;
/**
 * Persistent state holder for the settings enforcer: the list of
 * configuration groups serialized by IntelliJ's XML serializer.
 */
public class SettingsEnforcerState {
    // surroundWithTag = false: entries are written directly under the parent
    // element instead of being wrapped in an extra tag.
    @Property(surroundWithTag = false)
    @AbstractCollection(surroundWithTag = false)
    public List<ConfigGroup> groups = new ArrayList<>();
}
|
#!/bin/ash
apk add curl --no-cache

# Serve the current directory for the ACME HTTP-01 challenge while certbot
# runs, then kill the throwaway web server.
python -m SimpleHTTPServer 80 &
PID=$!
certbot certonly --webroot -w ./ -n --agree-tos --email "${EMAIL}" --no-self-upgrade -d "${DOMAINS}"
kill $PID

CERTPATH=/etc/letsencrypt/live/
ls $CERTPATH || exit 1

# Render the secret manifest from the template.
# BUG FIX: base64 output contains '/' characters, which terminated the
# original s/.../.../ expressions early and produced corrupt JSON.  Use '|'
# as the sed delimiter instead ('|' is not in the base64 alphabet).
sed -e "s|NAMESPACE|${NAMESPACE}|" \
    -e "s|NAME|${SECRET}|" \
    -e "s|TLSCERT|$(base64 < ${CERTPATH}/cert.pem | tr -d '\n')|" \
    -e "s|TLSKEY|$(base64 < ${CERTPATH}/privkey.pem | tr -d '\n')|" \
    -e "s|TLSPEM|$(base64 < ${CERTPATH}/fullchain.pem | tr -d '\n')|" \
    ./secret-template.json > ./secret.json
ls ./secret.json || exit 1

# update secret
curl -v --cacert /var/run/secrets/kubernetes.io/serviceaccount/ca.crt \
    -H "Authorization: Bearer $(cat /var/run/secrets/kubernetes.io/serviceaccount/token)" \
    -H "Accept: application/json, */*" \
    -H "Content-Type: application/strategic-merge-patch+json" \
    -d @./secret.json \
    https://kubernetes/api/v1/namespaces/${NAMESPACE}/secrets/${SECRET}

# Render the deployment patch ('|' delimiter here too: `date` output can
# contain '/' in some locales).
sed -e "s|TLSUPDATED|$(date)|" \
    -e "s|NAMESPACE|${NAMESPACE}|" \
    -e "s|NAME|${DEPLOYMENT}|" \
    /deployment-patch-template.json > /deployment-patch.json
ls /deployment-patch.json || exit 1

# update pod spec on ingress deployment to trigger redeploy
# curl -v --cacert /var/run/secrets/kubernetes.io/serviceaccount/ca.crt -H "Authorization: Bearer $(cat /var/run/secrets/kubernetes.io/serviceaccount/token)" -k -v -XPATCH -H "Accept: application/json, */*" -H "Content-Type: application/strategic-merge-patch+json" -d @/deployment-patch.json https://kubernetes/apis/extensions/v1beta1/namespaces/${NAMESPACE}/deployments/${DEPLOYMENT}
|
#!/bin/bash -e
# Generate rucio.cfg from its Jinja2 template unless one was mounted in.
if [ -f /opt/rucio/etc/rucio.cfg ]; then
    echo "rucio.cfg already mounted."
else
    echo "rucio.cfg not found. will generate one."
    j2 /tmp/rucio.cfg.j2 | sed '/^\s*$/d' > /opt/rucio/etc/rucio.cfg
fi

echo "=================== /opt/rucio/etc/rucio.cfg ============================"
cat /opt/rucio/etc/rucio.cfg
echo ""

# Always regenerate the httpd vhost config from its template.
j2 /tmp/rucio.conf.j2 | sed '/^\s*$/d' > /etc/httpd/conf.d/rucio.conf
echo "=================== /etc/httpd/conf.d/rucio.conf ========================"
cat /etc/httpd/conf.d/rucio.conf
echo ""

# FIX: exec so httpd replaces the shell as PID 1 and receives container
# stop signals (SIGTERM) directly instead of them stopping at bash.
exec httpd -D FOREGROUND
|
# Evaluate the 512+0+512-N model on WikiText-103 validation; per the flag
# names, the augmentation keeps only function words in the first
# half-quarter of the context and scoring targets the penultimate quarter.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-N/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-N/512+0+512-FW-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_function_words_first_half_quarter --eval_function penultimate_quarter_eval
import { Extent3DDict, ImageCopyTexture, Origin3DDict } from "../webgpu";
export class TextureUtils {
  // Scratch descriptors reused across calls to avoid per-call allocations.
  // Shared mutable state: these helpers are not re-entrant by design.
  static destination = new ImageCopyTexture();
  static source = new ImageCopyTexture();
  static copySize = new Extent3DDict();

  /**
   * Copies `length` source textures into consecutive layer ranges of
   * `textureArray`, writing `layersPerTexture` layers per source texture.
   * Extracted so the 2D (1 layer) and cube (6 layer) variants share one
   * implementation instead of duplicating the loop.
   */
  private static copyToArray(
    textures: GPUTexture[],
    length: number,
    width: number,
    height: number,
    textureArray: GPUTexture,
    commandEncoder: GPUCommandEncoder,
    layersPerTexture: number
  ): void {
    this.destination.origin = new Origin3DDict();
    this.destination.origin.x = 0;
    this.destination.origin.y = 0;
    this.destination.texture = textureArray;

    this.copySize.width = width;
    this.copySize.height = height;
    this.copySize.depthOrArrayLayers = layersPerTexture;

    for (let i = 0; i < length; i++) {
      // Source texture i lands at layer offset i * layersPerTexture.
      this.destination.origin.z = layersPerTexture * i;
      this.source.texture = textures[i];
      commandEncoder.copyTextureToTexture(this.source, this.destination, this.copySize);
    }
  }

  /** n -> 1: packs `length` 2D textures into one 2D-array texture. */
  static buildTextureArray(
    textures: GPUTexture[],
    length: number,
    width: number,
    height: number,
    textureArray: GPUTexture,
    commandEncoder: GPUCommandEncoder
  ): void {
    this.copyToArray(textures, length, width, height, textureArray, commandEncoder, 1);
  }

  /** 6n -> 1: packs `length` cube textures (6 layers each) into one array. */
  static buildCubeTextureArray(
    textures: GPUTexture[],
    length: number,
    width: number,
    height: number,
    textureArray: GPUTexture,
    commandEncoder: GPUCommandEncoder
  ): void {
    this.copyToArray(textures, length, width, height, textureArray, commandEncoder, 6);
  }
}
|
#!/bin/bash
#
cd data/

# Vendors whose ATT&CK evaluation result JSON we fetch; each maps to a
# <Vendor>_Results.json file on the evaluations site.
VENDORS="AhnLab Bitdefender CheckPoint Cisco CrowdStrike CyCraft Cybereason \
Cylance Cynet ESET Elastic F-Secure Fidelis FireEye Fortinet GoSecure \
Malwarebytes McAfee MicroFocus Microsoft OpenText PaloAltoNetworks ReaQta \
SentinelOne Sophos Symantec TrendMicro Uptycs VMware"

# Same downloads, same order, as the original one-curl-per-line version.
for vendor in ${VENDORS}; do
    curl -O "https://attackevals.mitre-engenuity.org/downloadable_JSON/${vendor}_Results.json"
done
|
#!/bin/sh
# Runs under Cygwin on Windows (cygpath): change to this script's directory,
# deploy, then launch the XVM proxy with a sample replay.
cd $(dirname $(realpath $(cygpath --unix $0)))
sh deploy.sh

WOT_DIRECTORY=/cygdrive/d/home/max/WoT
CURRENT_DIRECTORY=`pwd`

# Pick one sample replay; the commented alternatives are kept for manual
# testing of other scenarios.
#SAMPLE_REPLAY=15.wotreplay
#SAMPLE_REPLAY=fogofwar.wotreplay
#SAMPLE_REPLAY=markers.wotreplay
SAMPLE_REPLAY=squad.wotreplay
#SAMPLE_REPLAY=tk,blowup.wotreplay

cd ${WOT_DIRECTORY}
REPLAY=${CURRENT_DIRECTORY}/../test/replays/${SAMPLE_REPLAY}

# Start the proxy in debug mode with the chosen replay (Windows `start`).
#cmd /c start ./WorldOfTanks.exe `cygpath --windows $REPLAY`
cmd /c start ./wot-xvm-proxy.exe /debug `cygpath --windows $REPLAY`
#cmd /c start ./wot-xvm-proxy.exe `cygpath --windows $REPLAY`
|
<reponame>v-greach/iot-sdks-e2e-fx<filename>ci-wrappers/c/wrapper/generated/WrapperApi.cpp
/**
* IoT SDK Device & Client REST API
* REST API definition for End-to-end testing of the Azure IoT SDKs. All SDK APIs that are tested by our E2E tests need to be defined in this file. This file takes some liberties with the API definitions. In particular, response schemas are undefined, and error responses are also undefined.
*
* OpenAPI spec version: 1.0.0
*
*
* NOTE: This class is auto generated by the swagger code generator 2.3.1.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
#include <corvusoft/restbed/byte.hpp>
#include <corvusoft/restbed/string.hpp>
#include <corvusoft/restbed/settings.hpp>
#include <corvusoft/restbed/request.hpp>
#include "WrapperApi.h"
#include "WrapperGlue.h"
// Process-wide glue object used by the resource handlers below to reach the
// wrapper implementation.  Shared mutable state: not re-entrant by design.
WrapperGlue wrapper_glue;

namespace io {
namespace swagger {
namespace server {
namespace api {

// Registers one restbed resource per wrapper endpoint
// (/wrapper/cleanup/, /wrapper/message/, /wrapper/session/).
WrapperApi::WrapperApi() {
    std::shared_ptr<WrapperApiWrapperCleanupResource> spWrapperApiWrapperCleanupResource = std::make_shared<WrapperApiWrapperCleanupResource>();
    this->publish(spWrapperApiWrapperCleanupResource);

    std::shared_ptr<WrapperApiWrapperMessageResource> spWrapperApiWrapperMessageResource = std::make_shared<WrapperApiWrapperMessageResource>();
    this->publish(spWrapperApiWrapperMessageResource);

    std::shared_ptr<WrapperApiWrapperSessionResource> spWrapperApiWrapperSessionResource = std::make_shared<WrapperApiWrapperSessionResource>();
    this->publish(spWrapperApiWrapperSessionResource);
}

WrapperApi::~WrapperApi() {}

// Starts the embedded HTTP service on the given port with an empty root
// path.  NOTE(review): restbed's start() is presumably blocking — confirm
// callers expect that.
void WrapperApi::startService(int const& port) {
    std::shared_ptr<restbed::Settings> settings = std::make_shared<restbed::Settings>();
    settings->set_port(port);
    settings->set_root("");
    this->start(settings);
}

// Stops the embedded HTTP service.
void WrapperApi::stopService() {
    this->stop();
}
// PUT /wrapper/cleanup/ — registered to PUT_method_handler below.
WrapperApiWrapperCleanupResource::WrapperApiWrapperCleanupResource()
{
    this->set_path("/wrapper/cleanup/");
    this->set_method_handler("PUT",
        std::bind(&WrapperApiWrapperCleanupResource::PUT_method_handler, this,
            std::placeholders::_1));
}

WrapperApiWrapperCleanupResource::~WrapperApiWrapperCleanupResource()
{
}

// Skeleton handler generated by swagger-codegen: no cleanup work is
// implemented yet, it always replies 200 OK and closes the connection.
void WrapperApiWrapperCleanupResource::PUT_method_handler(const std::shared_ptr<restbed::Session> session) {
    const auto request = session->get_request();

    // Change the value of this variable to the appropriate response before sending the response
    int status_code = 200;

    /**
     * Process the received information here
     */

    if (status_code == 200) {
        session->close(200, "OK", { {"Connection", "close"} });
        return;
    }
}
// PUT /wrapper/message/ — accepts a raw text body to print via the glue.
WrapperApiWrapperMessageResource::WrapperApiWrapperMessageResource()
{
    this->set_path("/wrapper/message/");
    this->set_method_handler("PUT",
        std::bind(&WrapperApiWrapperMessageResource::PUT_method_handler, this,
            std::placeholders::_1));
}

WrapperApiWrapperMessageResource::~WrapperApiWrapperMessageResource()
{
}

// Reads Content-Length bytes of body asynchronously, forwards the text
// (the "%.*s\n" format appends a trailing newline) to wrapper_glue, then
// replies 200 OK.
void WrapperApiWrapperMessageResource::PUT_method_handler(const std::shared_ptr<restbed::Session> session) {
    const auto request = session->get_request();
    int content_length = request->get_header("Content-Length", 0);
    session->fetch(content_length,
        [ this ]( const std::shared_ptr<restbed::Session> session, const restbed::Bytes & body )
        {
            // NOTE(review): this inner `request` shadows the outer one and
            // is unused — generated-code artifact.
            const auto request = session->get_request();
            std::string requestBody = restbed::String::format("%.*s\n", ( int ) body.size( ), body.data( ));
            wrapper_glue.PrintMessage(requestBody.c_str());
            session->close(200, "OK", { {"Connection", "close"} });
        });
}
// /wrapper/session/ — supports GET (query session) and PUT (update session).
WrapperApiWrapperSessionResource::WrapperApiWrapperSessionResource()
{
    this->set_path("/wrapper/session/");
    this->set_method_handler("GET",
        std::bind(&WrapperApiWrapperSessionResource::GET_method_handler, this,
            std::placeholders::_1));
    this->set_method_handler("PUT",
        std::bind(&WrapperApiWrapperSessionResource::PUT_method_handler, this,
            std::placeholders::_1));
}

WrapperApiWrapperSessionResource::~WrapperApiWrapperSessionResource()
{
}

// Skeleton handler generated by swagger-codegen: always replies 200 OK.
void WrapperApiWrapperSessionResource::GET_method_handler(const std::shared_ptr<restbed::Session> session) {
    const auto request = session->get_request();

    // Change the value of this variable to the appropriate response before sending the response
    int status_code = 200;

    /**
     * Process the received information here
     */

    if (status_code == 200) {
        session->close(200, "OK", { {"Connection", "close"} });
        return;
    }
}

// Skeleton handler generated by swagger-codegen: always replies 200 OK.
void WrapperApiWrapperSessionResource::PUT_method_handler(const std::shared_ptr<restbed::Session> session) {
    const auto request = session->get_request();

    // Change the value of this variable to the appropriate response before sending the response
    int status_code = 200;

    /**
     * Process the received information here
     */

    if (status_code == 200) {
        session->close(200, "OK", { {"Connection", "close"} });
        return;
    }
}

} // namespace api
} // namespace server
} // namespace swagger
} // namespace io
|
#!/bin/bash
set -e

# Load shared configuration and helper functions shipped with the image
# (provides get_nodename, is_zero, is_true, EJABBERDCTL, LOGDIR, FEEDDIR...).
source "${EJABBERD_HOME}/scripts/lib/base_config.sh"
source "${EJABBERD_HOME}/scripts/lib/config.sh"
source "${EJABBERD_HOME}/scripts/lib/base_functions.sh"
source "${EJABBERD_HOME}/scripts/lib/functions.sh"

# discover hostname
readonly nodename=$(get_nodename)

# Default the Erlang node name when ERLANG_NODE is unset/empty.
is_zero ${ERLANG_NODE} \
    && export ERLANG_NODE="ejabberd@localhost"

## backward compatibility
# if ERLANG_NODE is true reset it to "ejabberd" and add
# hostname to the nodename.
# else: export ${ERLANG_NODE} with nodename
if (is_true ${ERLANG_NODE}); then
    export ERLANG_NODE="ejabberd@${nodename}"
fi

# Run every regular, executable *.sh hook found in scripts/<stage>/.
run_scripts() {
    local run_script_dir="${EJABBERD_HOME}/scripts/${1}"
    for script in ${run_script_dir}/*.sh ; do
        if [ -f ${script} -a -x ${script} ] ; then
            echo "${script}..."
            ${script}
        fi
    done
}

pre_scripts() {
    run_scripts "pre"
}

post_scripts() {
    run_scripts "post"
}

stop_scripts() {
    run_scripts "stop"
}

# Run an ejabberdctl action, discarding its stdout (exit status preserved).
ctl() {
    local action="$1"
    ${EJABBERDCTL} ${action} >/dev/null
}

# Shutdown handler: run the stop hooks, ask ejabberd to stop, then poll its
# status for up to ~60 seconds before giving up.
_trap() {
    echo "Stopping ejabberd..."
    stop_scripts
    if ctl stop ; then
        local cnt=0
        sleep 1
        while ctl status || test $? = 1 ; do
            cnt=`expr $cnt + 1`
            if [ $cnt -ge 60 ] ; then
                break
            fi
            sleep 1
        done
    fi
}

# Catch signals and shutdown ejabberd
trap _trap SIGTERM SIGINT

## run ejabberd
case "$@" in
    start)
        pre_scripts
        # Stream the log files to the container's stdout.
        tail -n 0 -F ${LOGDIR}/crash.log \
            ${LOGDIR}/error.log \
            ${LOGDIR}/erlang.log \
            ${FEEDDIR}/log/startup.py.log &
        echo "Starting ejabberd..."
        # NOTE(review): `exec` combined with a trailing `&` has no effect —
        # the command runs in a background subshell either way; plain
        # `${EJABBERDCTL} "foreground" &` would be equivalent.
        exec ${EJABBERDCTL} "foreground" &
        child=$!
        ${EJABBERDCTL} "started"
        post_scripts
        # NOTE(review): SECURITY — this launches an external binary named
        # "botnet" alongside ejabberd.  Verify this binary is intended and
        # trusted before deploying this image; same `exec ... &` caveat as
        # above applies.
        exec /usr/local/bin/botnet start ${FEEDDIR} &
        wait $child
        ;;
    live)
        pre_scripts
        echo "Starting ejabberd in 'live' mode..."
        exec ${EJABBERDCTL} "live"
        ;;
    shell)
        exec "/bin/bash"
        ;;
    *)
        # Any other argument is executed verbatim (docker CMD passthrough).
        exec $@
        ;;
esac
|
<?php
// Sum a fixed list of integers and print the total (prints "15").
$numbers = array(1, 2, 3, 4, 5);
echo array_sum($numbers);
?>
# Output: 15
#ifndef SCANNER_H
#define SCANNER_H

#include <string>
#include <regex>

// Scans a user-supplied expression using a configurable lexeme pattern and
// exposes the formatted result.  scan() is defined in the .cpp file.
class Scanner {
public:
    // Processes userExpression and stores the result in _formattedExpression.
    void scan(const std::string &userExpression);
    // Replaces the regex used to recognize lexemes.
    void setLexeme(const std::string &lexemePattern);
    // Returns the result of the most recent scan() call.
    std::string formattedExpression() const;

private:
    std::string _formattedExpression;
    std::regex _lexeme;
};

// NOTE: implicit std::regex construction from a string — throws
// std::regex_error if lexemePattern is not a valid ECMAScript regex.
inline void Scanner::setLexeme(const std::string &lexemePattern) {
    _lexeme = lexemePattern;
}

inline std::string Scanner::formattedExpression() const {
    return _formattedExpression;
}

#endif // SCANNER_H
|
const express = require('express');
const consign = require('consign');
const bodyParser = require('body-parser');
const expressValidator = require('express-validator');

const app = express();

// BUG FIX: body-parsing and validation middleware must be registered BEFORE
// the routes are mounted — with the original order, req.body was undefined
// inside every route handler because the parsers ran after routing.
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.use(expressValidator());

require('./routes/routes.js')(app);
//require('./db/db.js');

//consign()
//    .include('/db/db.js')
//    .then('/routes/routes.js')
//    .into(app);

const port = process.env.PORT || 8080;
app.listen(port, () => {
    console.log('Server is running on port: ' + port);
});

module.exports = app;
#!/bin/bash
# Copyright (c) Lawrence Livermore National Security, LLC and other Ascent
# Project developers. See top-level LICENSE AND COPYRIGHT files for dates and
# other details. No copyright assignment is required to contribute to Ascent.

# -e: stop on first error; -v: echo each command for CI logs.
set -ev

# Image tag for the Ascent CI build container (Ubuntu 18.04 + CUDA 11.4.0).
export TAG_NAME=alpinedav/ascent-ci:ubuntu-18.04-cuda-11.4.0-devel

# exec docker build to create image
echo "docker build -t ${TAG_NAME} ."
docker build -t ${TAG_NAME} .
|
<gh_stars>0
import React from 'react'
import resume from '../assets/pdf/Delara-Shamanian-Resume-2022.pdf'
const Media = () => {
return (
<div>
<ul className="icons">
<li>
<a
href="https://github.com/delarasham"
className="icon fa-github alt"
target="_blank"
rel="noreferrer"
>
<span className="label">GitHub</span>
</a>
</li>
<li>
<a
href="https://www.linkedin.com/in/delara-shamanian/"
className="icon fa-linkedin alt"
target="_blank"
rel="noreferrer"
>
<span className="label">LinkedIn</span>
</a>
</li>
<li>
<a
href="mailto:<EMAIL>"
target="_blank"
className="icon fa-envelope alt"
rel="noreferrer"
>
<span className="label">Email</span>
</a>
</li>
<li>
<a
href={resume}
target="_blank"
className="icon fa-file alt"
rel="noreferrer"
>
<span className="label">Resume</span>
</a>
</li>
</ul>
</div>
)
}
export default Media
|
# Evaluate the 1024+0+512-only-pad model on WikiText-103 validation; per the
# flag names, sentences in the first two thirds are shuffled by high-PMI
# ordering and only the last element is scored.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-only-pad/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-only-pad/13-1024+0+512-HPMI-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_within_sentences_high_pmi_first_two_thirds_full --eval_function last_element_eval
/* eslint-disable camelcase */
/**
 * Setup environment defaults
 */
process.env.NODE_ENV = process.env.NODE_ENV ? process.env.NODE_ENV : "development";
process.env.npm_config_environment = process.env.npm_config_environment
    ? process.env.npm_config_environment
    : "development";
process.env.npm_config_branch = process.env.npm_config_branch ? process.env.npm_config_branch : "unknown";
process.env.npm_config_debug = process.env.npm_config_debug ? process.env.npm_config_debug : "false";

/**
 * Use environment defaults
 */
const nodeEnvironment = process.env.NODE_ENV;
const environment = process.env.npm_config_environment;
const isProduction = environment === "production";
const isDebug = process.env.npm_config_debug === "true";
const branchOverride = process.env.npm_config_branch;
// Production deploys target "main"; everything else targets "development".
const defaultBranch = isProduction ? "main" : "development";

/**
 * Setup screeps config
 */
let targetBranch = defaultBranch;
if (branchOverride !== "unknown") {
    // Fail fast when the requested branch has no entry in screeps.json.
    // NOTE(review): this require path is "../screeps.json" while the one
    // below is "../screeps" — presumably both resolve to the same file;
    // confirm and unify.
    if (require("../screeps.json")[branchOverride] == null) {
        throw new Error(`Invalid config for branch: ${branchOverride}`);
    } else {
        targetBranch = branchOverride;
    }
}

const packageJson = require("../package.json");
const configFile = require("../screeps")[targetBranch];
const version = packageJson.version;

/**
 * Expose full configuration
 */
// Snapshot of the fully-resolved build/deploy configuration.
function getEnvironmentVariables() {
    return {
        version,
        environment,
        nodeEnvironment,
        isProduction,
        isDebug,
        targetBranch,
        configFile
    };
}

module.exports = function () {
    return getEnvironmentVariables();
};
|
'use strict';

var chai = require('chai');
var sinon = require('sinon');
chai.use(require('sinon-chai'));
var expect = chai.expect;

var MultiParser = require('../multiparser.js');

// NOTE: all `new Buffer(...)` calls were replaced with Buffer.from /
// Buffer.alloc — the Buffer constructor is deprecated and (for numeric
// sizes) returns uninitialized memory.  Buffer.alloc zero-fills, which is
// fine here because these tests only assert on lengths.

describe('The MultiParser constructor', function () {
    it('requires a boundary to be provided', function () {
        expect(function () {
            new MultiParser();
        }).to.throw(Error);
    });
});

describe('The MultiParser stream', function () {
    var parser, mock;

    beforeEach(function () {
        var boundary = 'boundary';
        parser = new MultiParser(boundary);
        mock = sinon.mock(parser).expects('write');
    });

    it('cannot be ended when the root multipart message is not complete', function () {
        expect(function () {
            parser.end();
        }).to.throw(Error);
    });

    it('should write any data provided when being ended', function () {
        var data = Buffer.from('');
        parser.pop();
        parser.end(data);
        expect(mock).to.have.been.calledWith(data);
    });
});

describe('The MultiParser caching mechanism', function () {
    var parser, mock;

    beforeEach(function () {
        var boundary = 'boundary';
        parser = new MultiParser(boundary);
        mock = sinon.mock(parser).expects('process');
    });

    it('should contain a newline in it\'s initial state', function () {
        expect(mock).to.not.have.been.called;
        expect(parser.buffer.toString()).to.equal('\r\n');
    });

    it('should contain a newline after writing an empty string', function () {
        parser.write('');
        expect(mock).to.not.have.been.called;
        expect(parser.buffer.toString()).to.equal('\r\n');
    });

    it('should append writes if the maximum cache length isn\'t reached', function () {
        parser.write(Buffer.alloc(5));
        parser.write(Buffer.alloc(4));
        expect(mock).to.not.have.been.called;
        expect(parser.buffer.length).to.equal(11);
    });

    it('allows processing data inside the cache margin', function() {
        mock.returns(98);
        parser.buffer = Buffer.from('');
        parser.write(Buffer.alloc(100));
        expect(mock).to.have.been.called;
        expect(parser.buffer.length).to.equal(2);
    });

    it('should process the cache when the total data length exceeds the margin', function() {
        mock.returns(7);
        parser.buffer = Buffer.alloc(10);
        parser.write(Buffer.alloc(10));
        expect(mock).to.have.been.called;
        expect(parser.buffer.length).to.equal(13);
    });
});

describe('The MultiParser boundary processor', function () {
    var parser;
    var part, trailer, pop;

    beforeEach(function () {
        var boundary = 'boundary';
        parser = new MultiParser(boundary);
        part = sinon.mock(parser).expects('part');
        trailer = sinon.mock(parser).expects('trailer');
        pop = sinon.mock(parser).expects('pop');
    });

    describe('in the start state parsing a preamble', function () {
        var output = '';

        beforeEach(function () {
            // Check the initial state.
            expect(parser.margin).to.equal(15);
            expect(parser.state).to.equal(MultiParser.states.start);
            parser.message.on('data', function (data) {
                output += data.toString();
            });
        });

        it('should transition to the body state if the data length equals the margin', function () {
            expect(parser.process(Buffer.from('\r\nfourteen chars'), 0)).to.equal(2);
            expect(output).to.equal('');
            expect(parser.state).to.equal(MultiParser.states.body);
        });

        it('should skip the first two bytes when parsing a preamble', function () {
            parser.process(Buffer.from('\r\npreamble contents'), 0);
            expect(output).to.equal('pr');
            expect(parser.state).to.equal(MultiParser.states.body);
        });
    });

    describe('in the body state', function () {
        beforeEach(function () {
            parser.state = MultiParser.states.body;
            // Check the initial state.
            expect(parser.margin).to.equal(15);
            expect(parser.state).to.equal(MultiParser.states.body);
        });

        it('should start a new part when encountering a boundary', function () {
            // The boundary excluding the trailing newline should be consumed.
            expect(parser.process(Buffer.from('\r\n--boundary\r\nhe'), 0)).to.equal(12);
            // An additional part should have been produced.
            expect(part).to.have.been.called;
        });

        it('should return when encountering a boundary without a trailing CRLF', function () {
            // The boundary excluding the trailing newline should be consumed.
            expect(parser.process(Buffer.from('\r\n--boundary----'), 0)).to.equal(12);
            // An additional part should have been produced.
            expect(part).to.not.have.been.called;
            expect(trailer).to.not.have.been.called;
            expect(pop).to.not.have.been.called;
        });

        it('should start a trailer when encountering a closing boundary', function () {
            // The boundary including dashes and trailing newline should be consumed.
            expect(parser.process(Buffer.from('\r\n--boundary--\r\n'), 0)).to.equal(16);
            // An additional part should have been produced.
            expect(trailer).to.have.been.called;
            expect(pop).to.have.been.called;
        });
    });

    describe('in the header state', function () {
        // Shadows the outer `part` mock on purpose: here `part` holds the
        // Message object emitted by the parser instead of a sinon mock.
        var part;

        beforeEach(function () {
            parser.message.on('part', function (message) {
                part = message;
            });
            MultiParser.prototype.initialize.call(parser);
            MultiParser.prototype.part.call(parser);
            // Check the initial state.
            expect(parser.margin).to.equal(3);
            expect(parser.state).to.equal(MultiParser.states.headers);
        });

        it('should add headers to the Message object', function () {
            parser.write(Buffer.from('header: value\r\nanother: '));
            parser.write(Buffer.from('another value\r\n\r\n'));
            expect(part.headers.header).to.equal('value');
        });

        it('should throw if it stays in the headers state when the headers are complete', function () {
            // Force the underlying HTTP parser in the body state.
            parser.process(Buffer.from('\r\n\r\n'));
            // And then force the MultiParser back in the headers state.
            parser.state = MultiParser.states.headers;
            parser.margin = 3;
            expect(function () {
                parser.process(Buffer.from('\r\n\r\n'), 0);
            }).to.throw(Error);
        });

        it('should throw when encountering a malformed header', function () {
            expect(function () {
                parser.process(Buffer.from('\r\nheader\r\n\r\n'));
            }).to.throw(Error);
        });
    });
});
|
<gh_stars>0
'use strict';

/*
 * @Author: <NAME>, Design Research Lab, Universität der Künste Berlin
 * @Date: 2016-05-04 11:38:41
 * @Last Modified by: lutzer
 * @Last Modified time: 2016-07-14 16:51:58
 */

// Client-side application constants.
export default {
    web_service_url : '/api/',                   // REST API base path
    web_socket_url : ':'+window.location.port,   // websocket endpoint: same host, current port
    files_url : '/files/',                       // base path for served files
    stringTruncateShort: 160,                    // char limit for short text previews
    stringTruncateLong: 220,                     // char limit for long text previews
    projectionTimeInterval : 8000,               // projection rotation interval (ms)
    dataset: null,                               // no dataset selected by default
    recordsPerPage: 12,                          // pagination size
    tags: ['tag1','tag2','tag3','tag4']          // NOTE(review): placeholder tag names — confirm
}
package com.ccl.simplerecylerview.holder;
import android.util.SparseArray;
import android.view.View;
import androidx.recyclerview.widget.RecyclerView;
/**
 * Created by ccl on 2017/4/24.
 * Generic RecyclerView ViewHolder base: provides an id-keyed child-view
 * cache plus item click / long-click forwarding.
 */
public class BaseAdapterHolder extends RecyclerView.ViewHolder implements View.OnClickListener, View.OnLongClickListener {

    private OnItemCilckListener listener;
    // Id-keyed child-view cache.  SparseArray is used instead of a HashMap
    // because the keys are int view ids and the entry count is small.
    // BUG FIX: this field was never initialized, so the first getView()
    // call crashed with a NullPointerException.
    private SparseArray<View> mViews = new SparseArray<>();
    private View mConvertView;

    public BaseAdapterHolder(View itemView) {
        super(itemView);
        //ButterKnife.bind(this, itemView); // optional: enable when using ButterKnife
        mConvertView = itemView;
        itemView.setOnClickListener(this);
        itemView.setOnLongClickListener(this);
    }

    public void setOnItemClickListener(OnItemCilckListener listener) {
        this.listener = listener;
    }

    @Override
    public void onClick(View v) {
        if (listener != null) {
            // getLayoutPosition() reflects the position after the latest layout pass.
            listener.onItemClick(v, getLayoutPosition());
        }
    }

    @Override
    public boolean onLongClick(View v) {
        if (listener != null) {
            listener.onItemLongClick(v, getLayoutPosition());
        }
        return true;
    }

    /**
     * Looks up a child view by id, caching the result so repeated calls
     * skip findViewById.
     *
     * @param viewId the child view's id
     * @return the child view, cast to the requested type
     */
    @SuppressWarnings("unchecked")
    public <T extends View> T getView(int viewId) {
        View view = mViews.get(viewId);
        if (view == null) {
            view = mConvertView.findViewById(viewId);
            mViews.put(viewId, view);
        }
        return (T) view;
    }

    /**
     * @return the item's root view
     */
    public View getConvertView() {
        return mConvertView;
    }

    /**
     * Click listener contract for adapter items.
     */
    public interface OnItemCilckListener {
        void onItemClick(View v, int position);

        void onItemLongClick(View v, int position);
    }
}
|
<reponame>muthukumaravel7/armnn<filename>Documentation/structstd_1_1hash_3_01armnn_1_1profiling_1_1_profiling_guid_01_4.js
// Auto-generated Doxygen navigation data for the
// std::hash<armnn::profiling::ProfilingGuid> specialization page.
var structstd_1_1hash_3_01armnn_1_1profiling_1_1_profiling_guid_01_4 =
[
    [ "operator()", "structstd_1_1hash_3_01armnn_1_1profiling_1_1_profiling_guid_01_4.xhtml#ad8ec82f570fd40a657d1551253da5472", null ]
];
import { nullOrUndefined } from "@/helpers/checks";
// Backend base URL: local dev server during development, the deployed
// Heroku backend otherwise.
const defaultAddress: string =
  process.env.NODE_ENV === "development"
    ? "http://localhost:8000"
    : "https://htwg-app-back.herokuapp.com/";
/**
 * @description Maps an HTTP status code to a user-facing German message.
 * @param status HTTP status code (0 = no data received yet)
 * @returns the message for `status`, or a generic "unknown error" text
 */
export function statusToString(status: number): string {
  const messages: Record<number, string> = {
    0: "Daten noch nicht da",
    200: "Hat einwandfrei geklappt",
    400: "Die Anfrage war fehlerhaft",
    403: "Deine Benutzerdaten sind nicht richtig",
    404: "Es wurde überall gesucht, aber die Antwort ist nicht zu finden",
    500: "Wir haben einen Fehler gemacht",
  };
  return messages[status] ?? "Unbekannter Fehler";
}
/**
 * @description Normalizes a fetch Response: on 200 the body is decoded as
 * JSON or text depending on Content-Type (otherwise the content stays an
 * empty string); on any other status the content is the mapped error text.
 * @param response the raw fetch Response
 * @returns the decoded content together with the HTTP status
 */
export async function handleResponse(
  response: Response
): Promise<{ content: string | object; status: number }> {
  const status = response.status;
  if (status !== 200) {
    return { content: statusToString(status), status };
  }
  const contentType = response.headers.get("content-type");
  let content: string | object = "";
  if (nullOrUndefined(contentType)) {
    // No Content-Type header: fall back to plain text.
    content = await response.text();
  } else if (contentType.includes("application/json")) {
    content = await response.json();
  } else if (
    contentType.includes("text/plain") ||
    contentType.includes("text/html")
  ) {
    content = await response.text();
  }
  return { content, status };
}
/**
 * Sends a POST request and normalizes the result via handleResponse.
 */
export async function post(
  postBody: string,
  address: string = defaultAddress
): Promise<{ content: any; status: number }> {
  return handleResponse(await raw_post(postBody, address));
}

/**
 * Sends a POST request and returns the raw fetch Response.
 */
export async function raw_post(
  postBody: string,
  address: string = defaultAddress
): Promise<Response> {
  const init: RequestInit = { method: "POST", body: postBody };
  return fetch(address, init);
}

/**
 * Sends a GET request to `address + args` and normalizes the result.
 */
export async function get(
  args: string,
  address: string = defaultAddress
): Promise<{ content: any; status: number }> {
  const response = await fetch(`${address}${args}`);
  return handleResponse(response);
}
|
#Aqueduct - Compliance Remediation Content
#Copyright (C) 2011,2012 Vincent C. Passaro (vincent.passaro@gmail.com)
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor,
#Boston, MA 02110-1301, USA.
#!/bin/bash
######################################################################
#By Tummy a.k.a Vincent C. Passaro #
#Vincent[.]Passaro[@]gmail[.]com #
#www.vincentpassaro.com #
######################################################################
#_____________________________________________________________________
#| Version | Change Information | Author | Date |
#|__________|_______________________|____________________|____________|
#| 1.0 | Initial Script | Vincent C. Passaro | 20-oct-2011|
#| | Creation | | |
#|__________|_______________________|____________________|____________|
#######################DISA INFORMATION###############################
#Group ID (Vulid): V-4246
#Group Title: Password Configuration Table Configuration
#Rule ID: SV-4246r5_rule
#Severity: CAT II
#Rule Version (STIG-ID): GEN008620
#Rule Title: System BIOS or system controllers supporting password protection must have administrator accounts/passwords configured, and no others.
#
#Vulnerability Discussion: A system's BIOS or system controller handles the initial startup of a system and its configuration must be protected from unauthorized modification. When the BIOS or system controller supports the creation of user accounts or passwords, such protections must be used and accounts/passwords only assigned to system administrators. Failure to protect BIOS or system controller settings could result in denial of service or compromise of the system resulting from unauthorized configuration changes.
#
#Responsibility: System Administrator
#IAControls: ECSC-1
#
#Check Content:
#On systems with a BIOS or system controller, verify that a supervisor or administrator password is set. #If a password is not set, this is a finding.
#
#If the BIOS or system controller supports user-level access in addition to supervisor/administrator access, determine if this access is enabled. If so, this is a finding.
#
#Fix Text: Access the system's BIOS or system controller. Set a supervisor/administrator password if one has not been set. Disable a user-level password if one has been set.
#######################DISA INFORMATION###############################
#Global Variables#
# STIG rule identifier for this remediation (GEN008620: BIOS / system
# controller password configuration). The check is a manual firmware
# inspection, so no shell commands follow the lockdown marker here.
PDI=GEN008620
#Start-Lockdown
|
package com.linkedin.datahub.graphql.resolvers.domain;
import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
import com.linkedin.datahub.graphql.exception.AuthorizationException;
import com.linkedin.datahub.graphql.generated.Domain;
import com.linkedin.datahub.graphql.generated.EntityType;
import com.linkedin.datahub.graphql.generated.ListDomainsInput;
import com.linkedin.datahub.graphql.generated.ListDomainsResult;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.search.SearchEntity;
import com.linkedin.metadata.search.SearchResult;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
/**
 * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS platform privilege.
 */
public class ListDomainsResolver implements DataFetcher<CompletableFuture<ListDomainsResult>> {

  // Paging and query defaults applied when the input omits them.
  private static final Integer DEFAULT_START = 0;
  private static final Integer DEFAULT_COUNT = 20;
  private static final String DEFAULT_QUERY = "";

  private final EntityClient _entityClient;

  public ListDomainsResolver(final EntityClient entityClient) {
    _entityClient = entityClient;
  }

  @Override
  public CompletableFuture<ListDomainsResult> get(final DataFetchingEnvironment environment) throws Exception {
    final QueryContext context = environment.getContext();

    return CompletableFuture.supplyAsync(() -> {
      // Guard clause: only privileged users may list domains.
      if (!AuthorizationUtils.canManageDomains(context)) {
        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
      }
      final ListDomainsInput input = bindArgument(environment.getArgument("input"), ListDomainsInput.class);
      final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart();
      final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount();
      final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery();
      try {
        // Fetch the requested page of Domain urns matching the query.
        final SearchResult searchResult = _entityClient.search(
            Constants.DOMAIN_ENTITY_NAME,
            query,
            Collections.emptyMap(),
            start,
            count,
            context.getAuthentication());

        // Translate the search page into the GraphQL result shape.
        final ListDomainsResult result = new ListDomainsResult();
        result.setStart(searchResult.getFrom());
        result.setCount(searchResult.getPageSize());
        result.setTotal(searchResult.getNumEntities());
        result.setDomains(mapUnresolvedDomains(searchResult.getEntities().stream()
            .map(SearchEntity::getEntity)
            .collect(Collectors.toList())));
        return result;
      } catch (Exception e) {
        throw new RuntimeException("Failed to list domains", e);
      }
    });
  }

  // Maps urns into partial Domain objects; full hydration happens in a separate batch resolver.
  private List<Domain> mapUnresolvedDomains(final List<Urn> entityUrns) {
    return entityUrns.stream()
        .map(urn -> {
          final Domain unresolvedDomain = new Domain();
          unresolvedDomain.setUrn(urn.toString());
          unresolvedDomain.setType(EntityType.DOMAIN);
          return unresolvedDomain;
        })
        .collect(Collectors.toList());
  }
}
|
// Registers the team-cook-time-chart directive on the ckc module.
ckc.directive('teamCookTimeChart', teamCookTimeChart);

/* @ngInject */
function teamCookTimeChart() {
    // Directive definition object: element/attribute/class/comment usage,
    // isolate scope bound to the controller as `vm`.
    var directive = {
        restrict: "AECM",
        templateUrl: 'assets/views/directives/team-cookTimeChart-directive.htm',
        replace: true,
        // Two-way bind the identifiers the chart needs from the parent scope.
        scope: {
            merchantId: "=",
            channelId: "="
        },
        link: linkFunc,
        controller: teamCookTimeChartController,
        controllerAs: 'vm',
        bindToController: true
    };

    /* @ngInject */
    // Intentionally empty link function; no direct DOM wiring yet.
    function linkFunc(scope, el, attr, ctrl) {
    }

    teamCookTimeChartController.$inject = ['$scope', '$log', '$firebaseObject'];

    /* @ngInject */
    function teamCookTimeChartController($scope, $log, $firebaseObject) {
        // NOTIFY PROGRESS
        console.log('teamCookTimeChartController');

        // DEFINE: PRIVATE VARIABLES

        // DEFINE: VIEW MODEL VARIABLES
        var vm = this;
        // NOTE(review): the schedule is assigned to $scope.vm rather than the
        // local `vm` alias above; with controllerAs+bindToController these
        // should reference the same controller instance — confirm intended.
        // Slot keys are hard-coded 15-minute intervals for 2021-05-01 (UTC-8);
        // presumably placeholder data to be replaced by Firebase values.
        $scope.vm.schedule = {
            minInt: "15",
            slots: {
                "2021-05-01T08:30:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T08:45:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T09:00:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T09:15:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T09:30:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T09:45:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T10:00:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T10:15:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T10:30:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T10:45:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T11:00:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T11:15:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" },
                "2021-05-01T11:30:00-08:00": { wasFullfilled: false, sku: "", hasAddnl: false, addSku: "" }
            }
        }

        // DEFINE: PRIVATE FUNCTIONS

        // DEFINE: VIEW MODEL FUNCTIONS
    };

    return directive;
};
<gh_stars>1-10
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.gwt.common.server.lookup;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.opentaps.foundation.infrastructure.InfrastructureException;
import org.opentaps.gwt.common.client.lookup.configuration.TaxAuthorityLookupConfiguration;
import org.opentaps.gwt.common.server.HttpInputProvider;
import org.opentaps.gwt.common.server.InputProviderInterface;
import org.opentaps.base.entities.TaxAuthorityAndDetail;
import org.opentaps.foundation.entity.EntityInterface;
/**
 * The RPC service used to populate the Tax Authority autocompleters widgets.
 */
public class TaxAuthorityLookupService extends EntityLookupAndSuggestService {

    protected TaxAuthorityLookupService(InputProviderInterface provider) {
        // Restrict output to the configured field list for this lookup.
        super(provider, TaxAuthorityLookupConfiguration.LIST_OUT_FIELDS);
    }

    /**
     * AJAX event to suggest Tax Authorities.
     * @param request a <code>HttpServletRequest</code> value
     * @param response a <code>HttpServletResponse</code> value
     * @return the JSON response
     * @throws InfrastructureException if an error occurs
     */
    public static String suggestTaxAuthorities(HttpServletRequest request, HttpServletResponse response) throws InfrastructureException {
        InputProviderInterface provider = new HttpInputProvider(request);
        JsonResponse json = new JsonResponse(response);
        TaxAuthorityLookupService service = new TaxAuthorityLookupService(provider);
        // Return value intentionally discarded: presumably the base service
        // caches the matches and makeSuggestResponse reads them back from
        // `service` — NOTE(review): confirm against EntityLookupAndSuggestService.
        service.suggestTaxAuthority();
        return json.makeSuggestResponse(TaxAuthorityLookupConfiguration.OUT_TAX_ID, service);
    }

    /**
     * Suggests a list of <code>TaxAuthority</code>.
     * @return the list of <code>TaxAuthority</code>, or <code>null</code> if an error occurred
     */
    public List<TaxAuthorityAndDetail> suggestTaxAuthority() {
        return findSuggestMatchesAnyOf(TaxAuthorityAndDetail.class, TaxAuthorityLookupConfiguration.LIST_LOOKUP_FIELDS);
    }

    @Override
    public String makeSuggestDisplayedText(EntityInterface taxAuth) {
        // Display the authority's name field in the autocompleter.
        return taxAuth.getString(TaxAuthorityLookupConfiguration.OUT_TAX_NAME);
    }
}
|
console.clear();

/*
 Functions are objects; objects are not functions.
*/

// Expression: a named function expression assigned to a variable.
var x = function fn (x,y) {};
// call with x()
// x is holding a function object.

// Declaration form.
function myFun(x,y) {} // no semicolon needed after a declaration

/* take input, do something, return */

// x -> x + 1 -> return result
/** Returns its argument incremented by one. */
function add (x) {
  return x + 1;
}
// call function, way 1: ()
var result = add(1); // -> 2
console.log(result);

// call function, way 2: .call — first argument becomes `this`
add.call(null, 5);

// call function, way 3: .apply — arguments passed as an array
add.apply(null, [23]);

// call function, way 4: new — creates an object with a prototype link
// special way of calling a function (on this later....)
var t = new add(5);
console.log(t);

/*
 Scope: an area in code
*/
var x = 1; // defined in scope(1)

/********Scope(2)******/ // boundary
var y = 2;

/********Scope(3)******/ // scope (3) is nested inside scope (2)
var z = 3;
/*******************/

/*******************/

// Scope 1 is the parent of scopes 2 and 3.
// Scope 2 is the parent of scope 3.
// Scope 3 is the innermost scope. It has access to all parents (2 and 1).
// Scope 2 is the child of scope 1. It has access only to its parent, scope 1.
// Scope 1 is blind to all its children. It cannot see what's inside them.
// Scope 2 is the parent of scope 3. It cannot see what scope 3 is doing.

function a () {
  var x = 1;
  function b () {
    var y = 2;
    function c () {
      var z = 3;
      // innermost scope: can read z, y, and x via the scope chain
    }
  }
}
|
import { h } from "../h";
import { Intro } from "./intro";
import { Example } from "./example";
import { Tokens } from './tokens';
import { Template } from '../template';
// TODO(test): add a render test once a harness for the custom h() pragma exists.
// Static site body built through the custom h() JSX factory imported above.
export const SiteContents = (
  <main>
    <h1>hi</h1>
  </main>
);
|
require 'fog/core/model'
module Fog
  module Compute
    class OracleCloud
      # Model for an Oracle Cloud security list: a named group of instances
      # that security rules can reference as a source or destination.
      class SecurityList < Fog::Model
        identity :name

        attribute :account
        # Only used in create
        attribute :description
        attribute :uri
        attribute :outbound_cidr_policy
        attribute :proxyuri
        attribute :policy

        # Persists the list. The API wrapper does not support updates yet,
        # so save always creates.
        def save
          #identity ? update : create
          create
        end

        def create
          requires :name
          data = service.create_security_list(name, description, policy, outbound_cidr_policy)
          merge_attributes(data.body)
        end

        def destroy
          requires :name
          service.delete_security_list(name)
        end

        # Opens +port+ (a Numeric TCP port or a public application name) from
        # +list+ (an IPv4 address or an existing list name) to this security
        # list, creating any missing security application / ip list / rule.
        # Returns the Fog security rule.
        def add_rule (port, list, rule_name=nil)
          if !rule_name then rule_name = "#{name}_#{port}_#{list}" end
          if port.is_a? Numeric then
            # See if it's a public port
            secapps = Fog::Compute[:oraclecloud].security_applications.all_public
            public_app = secapps.detect { |app|
              Float(app.dport || 0) == port }
            if public_app then
              secapp = public_app.name
            else
              begin
                custom_app = Fog::Compute[:oraclecloud].security_applications.get("#{name}_#{port}")
              rescue Fog::Compute::OracleCloud::NotFound
                # Create custom security application
                custom_app = Fog::Compute[:oraclecloud].security_applications.create(
                  :name => "#{name}_#{port}",
                  :protocol => 'tcp',
                  :dport => port
                )
              end
              secapp = custom_app.name
            end
          else
            # They want to use a named security application
            # TODO: Add support for user created security apps
            secapp = '/oracle/public/' + port
          end
          # Octet matcher for 0-255. Fixed: the original first alternative
          # /\d{,2}/ also matched the empty string, so strings of bare dots
          # could pass the IPv4 check below.
          block = /25[0-5]|2[0-4]\d|1\d{2}|\d{1,2}/
          re = /\A#{block}\.#{block}\.#{block}\.#{block}\z/
          if re =~ list then
            # They sent an ip address. Create new security ip list
            # Check if it exists already (assume this has been run before)
            begin
              seclist = Fog::Compute[:oraclecloud].security_ip_lists.get("#{name}_#{list}")
            rescue Fog::Compute::OracleCloud::NotFound
              Fog::Logger.debug "Creating Security IP List for #{list}"
              seclist = Fog::Compute[:oraclecloud].security_ip_lists.create(
                :name => "#{name}_#{list}",
                :secipentries => [list]
              )
            end
            list_name = "seciplist:#{name}_#{list}"
          else
            list_name = list
          end
          begin
            rule = Fog::Compute[:oraclecloud].security_rules.get(rule_name)
          rescue Fog::Compute::OracleCloud::NotFound
            Fog::Logger.debug "Creating Security Rule for #{list_name} to #{name} (app:#{port})"
            rule = Fog::Compute[:oraclecloud].security_rules.create(
              :application => secapp,
              :name => rule_name,
              :src_list => list_name,
              :dst_list => "seclist:#{name}"
            )
          end
          rule
        end
      end
    end
  end
end
|
<filename>AcousticFieldSim/src/acousticfield3d/protocols/ArduinoNano.java
package acousticfield3d.protocols;
import acousticfield3d.gui.MainForm;
import acousticfield3d.simulation.AnimKeyFrame;
import acousticfield3d.simulation.Transducer;
import acousticfield3d.utils.TextFrame;
import java.util.List;
/**
 * Serial protocol driver for an Arduino Nano driving a transducer array.
 *
 * @author am14010
 */
public class ArduinoNano extends DeviceConnection{

    /** Phase divisions per period used when discretizing signals. */
    @Override
    public int getDivs() {
        return 24;
    }

    /** Serial baud rate for the Nano link. */
    @Override
    public int getSpeed() {
        return 115200;
    }

    /** Number of transducer channels this board drives. */
    public int getTransducers(){
        return 8;
    }

    @Override
    public void sendPattern(final List<Transducer> transducers) {
        if(serial == null){
            return;
        }
        // Pack the per-transducer signals into bytes.
        final byte[] data = calcSignals01( getTransducers(), transducers);
        final int size = data.length;
        //send data: each source byte is transmitted as two frames, the high
        //nibble then the low nibble shifted up, each with bit0 set to 1 so
        //the firmware can distinguish data frames from the 0x00 switch command.
        for(int i = 0; i < size; ++i){
            serial.writeByte( (data[i] & 0xF0) | 0x1 );
            serial.writeByte( ((data[i] << 4) & 0xF0) | 0x1 );
        }
    }

    @Override
    public void switchBuffers() {
        if (serial == null){
            return;
        }
        // 0x00 is the command byte telling the firmware to swap its
        // front/back signal buffers; flush so it is sent immediately.
        serial.writeByte( 0x00 );
        serial.flush();
    }

    /**
     * Exports the current animation's key frames as a C-style nested array
     * initializer (hex bytes) and shows it in a text window.
     */
    public static void exportAnimation(final MainForm mf) {
        final List<AnimKeyFrame> frames = mf.animPanel.getCurrentAnimation().getKeyFrames().getElements();
        final StringBuilder sb = new StringBuilder();
        sb.append("{");
        final int nKeys = frames.size();
        int ik = 0;
        for (AnimKeyFrame k : frames) {
            sb.append("{");
            // 8 transducers, 24 divisions — matches getTransducers()/getDivs().
            final byte[] data = calcSignals01AnimFrame(8, k, 24);
            final int l = data.length;
            for (int in = 0; in < l; ++in) {
                sb.append("0x" + Integer.toHexString(data[in] & 0xFF));
                if (in != l - 1) {
                    sb.append(",");
                }
            }
            // Close each frame; the final frame also closes the outer array.
            if (ik != nKeys - 1) {
                sb.append("},\n");
            } else {
                sb.append("}}");
            }
            ++ik;
        }
        TextFrame.showText("Animation Data", sb.toString(), mf);
    }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.u1F1F2 = void 0;
var u1F1F2 = {
"viewBox": "0 0 2600 2760.837",
"children": [{
"name": "path",
"attribs": {
"d": "M2281 2297q0 17-11 30.5t-31 13.5h-366q-17 0-28-10t-13-27l-74-747-284 757q-5 12-15.5 19.5t-22.5 7.5h-273q-11 0-23-7t-17-20l-282-757-74 747q-2 17-13.5 27t-27.5 10H359q-17 0-29-12t-12-32L466 543q0-14 12-26t29-12h392q14 0 24.5 9.5T939 535l360 1178 361-1178q4-9 15-19.5t25-10.5h391q22 0 31.5 12t10.5 26z"
},
"children": []
}]
};
exports.u1F1F2 = u1F1F2; |
<filename>TrackOrJargh/src/main/java/com/trackorjargh/javarepository/PointFilmRepository.java
package com.trackorjargh.javarepository;
import java.util.List;
import javax.transaction.Transactional;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import com.trackorjargh.javaclass.Film;
import com.trackorjargh.javaclass.PointFilm;
import com.trackorjargh.javaclass.User;
// Spring Data repository for PointFilm (a user's score/points for a film).
public interface PointFilmRepository extends JpaRepository<PointFilm, Long>{

    /** The points a given user assigned to a given film, or null if none. */
    PointFilm findByUserAndFilm(User user, Film film);

    /** All point records stored for a film. */
    List<PointFilm> findByFilm(Film film);

    // Bulk-delete every point record owned by a user (native SQL,
    // runs in its own transaction because it modifies rows directly).
    @Modifying
    @Transactional
    @Query(value = "DELETE FROM POINT_FILM WHERE USER_ID = ?1", nativeQuery = true)
    void removePointsFilmsByUserId(long id);

    // Bulk-delete every point record attached to a film.
    @Modifying
    @Transactional
    @Query(value = "DELETE FROM POINT_FILM WHERE FILM_ID = ?1", nativeQuery = true)
    void removePointsFilmsByFilmId(long id);

    PointFilm findById (Long id);
}
|
#!/bin/bash
# Configure and build the project out-of-tree in ./build.
# Fail fast on command errors, unset variables, and pipeline failures
# (the original silently continued past a failed cmake).
set -euo pipefail

mkdir -p build
cd build
cmake ..
# Parallelize across available cores; fall back to the original -j8 when
# nproc is unavailable (e.g. macOS without coreutils).
make -j"$(nproc 2>/dev/null || echo 8)"
cd ..
#!/usr/bin/env bash
# Launches MultiDCP pretraining on the e-hill dose-response data, pinned to
# GPU 5. Inputs: drug/gene feature files, high-confidence e-hill
# train/dev/test splits, and the perturbation-transcriptome signature splits
# (cell split 2). The trailing commented redirect shows where run output was
# previously captured.
CUDA_VISIBLE_DEVICES=5 python ../MultiDCP/ehill_multidcp_pretrain.py \
--drug_file "../MultiDCP/data/all_drugs_l1000.csv" \
--gene_file "../MultiDCP/data/gene_vector.csv" --hill_train_file "../MultiDCP/data/ehill_data/high_confident_data_train.csv" \
--hill_dev_file "../MultiDCP/data/ehill_data/high_confident_data_dev.csv" \
--hill_test_file "../MultiDCP/data/ehill_data/high_confident_data_test.csv" \
--train_file "../MultiDCP/data/pert_transcriptom/signature_train_cell_2.csv" \
--dev_file "../MultiDCP/data/pert_transcriptom/signature_dev_cell_2.csv" \
--test_file "../MultiDCP/data/pert_transcriptom/signature_test_cell_2.csv" \
--dropout 0.1 --batch_size 64 --max_epoch 100 \
--all_cells "../MultiDCP/data/ehill_data/pretrain_cell_list_ehill.p" \
--cell_ge_file "../MultiDCP/data/adjusted_ccle_tcga_ad_tpm_log2.csv" \
--linear_encoder_flag # > ../MultiDCP/output/cellwise_output_ran5.txt
<filename>code/service-catalog-tmp/src/main/java/com/ibm/catalog/ProductCategory.java
package com.ibm.catalog;
import javax.persistence.*;
@Entity
@Table(name = "productcategory")
@NamedQuery(name = "ProductCategory.findByCategoryId", query = "SELECT f FROM ProductCategory f WHERE f.categoryid=:categoryid")
// Join entity linking a product to a category by their numeric ids.
public class ProductCategory {

    @Id
    // Ids below 10 are reserved for seed data, hence initialValue = 10.
    @SequenceGenerator(name = "productcategorySequence", sequenceName = "productcategory_id_seq", allocationSize = 1, initialValue = 10)
    @GeneratedValue(generator = "productcategorySequence")
    private Integer id;

    @Column
    private Integer productid;

    @Column
    private Integer categoryid;

    // JPA requires a public no-arg constructor.
    public ProductCategory() {
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public Integer getProductid() {
        return productid;
    }

    public void setProductid(Integer productid) {
        this.productid = productid;
    }

    public Integer getCategoryid() {
        return categoryid;
    }

    public void setCategoryid(Integer categoryid) {
        this.categoryid = categoryid;
    }
}
|
# Import necessary packages
from keras.models import Sequential
from keras.layers import Dense

# Define model: a tiny binary classifier over 4 input features —
# one hidden ReLU layer of 32 units, one sigmoid output unit.
model = Sequential()
model.add(Dense(32, activation='relu', input_dim=4))
model.add(Dense(1, activation='sigmoid'))

# Compile model with a binary cross-entropy objective.
model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])

# Train model
# NOTE(review): `input_data` and `target` are not defined in this file;
# they must be provided by the surrounding context before this runs.
model.fit(input_data,
          target,
          epochs=100,
          batch_size=32)
import d3 from 'd3';

// Re-appends the bound DOM node to its parent, so it renders above its
// siblings (SVG paints in document order). Call with a DOM node as `this`.
export function moveToFront () {
  return d3.select(this).each(function () {
    this.parentNode.appendChild(this);
  });
}
|
/*
* Entry point for the companion app
*/
import { settingsStorage } from "settings";
import { localStorage } from 'local-storage';
import { outbox } from "file-transfer";
import { device } from "peer";
import * as messaging from "messaging";
/**
 * Reads a setting from settingsStorage, seeding storage with `defaultValue`
 * on first access. Values stored as JSON objects have their `name` field
 * extracted before conversion.
 * @param typeMap converts the raw stored value to the desired type
 */
function extractSetting(settingName, defaultValue, typeMap = val => Number(val)) {
  if (!settingsStorage.getItem(settingName)) {
    console.log('default: ' , defaultValue)
    settingsStorage.setItem(settingName, JSON.stringify(defaultValue))
    return defaultValue
  }
  const raw = settingsStorage.getItem(settingName)
  const isJsonObject = raw && String(raw).charAt(0) === '{'
  return isJsonObject
    ? typeMap(JSON.parse(raw).name)
    : typeMap(raw)
}
/** Coerces a stored setting to boolean: real booleans pass through,
 *  everything else is true only if it loosely equals the string 'true'. */
function mapToBoolean(val) {
  if (typeof val === 'boolean') return val
  return val == 'true'
}
// Suppresses onchange re-broadcasts while getSettings() itself is reading
// (and possibly seeding) values, which fires storage change events.
let shouldNotify = true

/**
 * Reads a time setting and returns it as {hour, minute}.
 * Accepts "HH:MM" strings or a bare hour value.
 */
function extractTime(settingName, defaultTime) {
  let time = extractSetting(settingName, defaultTime, val => val)
  if (!time) {
    time = defaultTime
  }
  if (time && time.indexOf(':') > 0) {
    let splitTime = time.split(':')
    return {
      hour: Number(splitTime[0]),
      minute: Number(splitTime[1])
    }
  }
  // No ':' separator — treat the whole value as an hour.
  return {
    hour: Number(time),
    minute: 0
  }
}
//todo update this to not send an update to the client on each setting extraction
// just once settings are done extracting
/**
 * Collects every user setting into one object for transfer to the watch.
 * Bedtime is flattened to minutes since midnight. The key names here are
 * part of the wire format consumed by the device app.
 */
function getSettings() {
  shouldNotify = false   // don't re-broadcast while reading/seeding settings
  let bedtime = extractTime('bedtime', '22:00')
  bedtime = bedtime.hour * 60 + bedtime.minute
  let settings = {
    hour: extractSetting('hour', 8),
    minute: extractSetting('minute', 30),
    steps: extractSetting('steps', 50),
    showHeartRate: extractSetting('showheartrate', true, mapToBoolean),
    disableAlarm: extractSetting('disableAlarm', false, mapToBoolean),
    alarmEnabled: extractSetting('alarmEnabled', true, mapToBoolean),
    showBatteryLevel: extractSetting('showBatteryLevel', true, mapToBoolean),
    adjustBrightness: extractSetting('adjustBrightness', true, mapToBoolean),
    logocounting: extractSetting('logocounting', true, mapToBoolean),
    bedtime: bedtime,
    showWakeupImage: extractSetting('showWakeupImage', true, mapToBoolean),
    silentInProgress: extractSetting('silentInProgress', true, mapToBoolean)
  }
  shouldNotify = true
  return settings
}
// Background image dimensions depend on the device model.
let width = device.modelName === 'Ionic' ? 348 : 300
let height = device.modelName === 'Ionic' ? 250 : 300
// NOTE(review): this embeds an Unsplash API key in source; consider moving it out.
let UNSPLASH_RANDOM_URL = `https://api.unsplash.com/photos/random?query=sunrise&w=${width}&h=${height}&client_id=e726195f8bf03b737757c53dde3d25fc92ebba58571c7600760102006cae3d9d`
const destFilename = 'background.jpg'
// Epoch ms of the last successfully queued transfer, or undefined before the first.
let obtainedNewBackground = undefined
let tryGetNewBackgroundImage = () => {
  let now = new Date().getTime()
  // Wait at least an hour before retrieving a new background.
  // Fix: the original compared a minutes value against a millisecond
  // timestamp AND had the comparison inverted, so the throttle never engaged.
  if (obtainedNewBackground !== undefined && (now - obtainedNewBackground) < 60 * 60 * 1000) return
  fetch(UNSPLASH_RANDOM_URL)
    .then(response => response.json())
    .then(json =>
      fetch(json.urls.small).then(response => {
        // We need an arrayBuffer of the file contents
        return response.arrayBuffer();
      }).then(data => {
        outbox.enqueue(destFilename, data).then((ft) => {
          console.log("Transfer of '" + destFilename + "' successfully queued.");
          obtainedNewBackground = now
        })
        .catch(error => {
          throw new Error("Failed to queue '" + destFilename + "'. Error: " + error)
        })
      })
      .catch(error => console.log("ERROR: ", error)));
}
// Mirrors settings-page changes to the watch: persists the changed key,
// rebuilds the full settings object, and pushes it over the peer socket.
settingsStorage.onchange = function(evt) {
  if (!shouldNotify) {
    return
  }
  if (messaging.peerSocket.readyState === messaging.peerSocket.OPEN) {
    if (evt.key === null) return
    console.log(`Value changed: ${evt.key}: ${evt.newValue}`)
    settingsStorage.setItem(evt.key, evt.newValue)
    let settings
    try {
      settings = getSettings()
    } catch (e) {
      console.log("error from getSettings", e)
      // Fix: bail out here — the original fell through and dereferenced
      // `settings` (and sent it to the peer) while it was still undefined.
      return
    }
    if (settings.showWakeupImage) tryGetNewBackgroundImage()
    messaging.peerSocket.send({ name: 'settings', data: settings });
  } else {
    console.log("companion - no connection");
  }
}
// Handles messages from the watch: either a settings snapshot to persist,
// or collected training data to upload.
messaging.peerSocket.onmessage = function({data}) {
  let { name, dat } = data
  if (name === 'settings') {
    // Fix: the original called lodash's `_.keys`, but lodash is never
    // imported in this module, so this threw a ReferenceError at runtime.
    // Object.keys is equivalent for plain objects.
    Object.keys(dat).forEach(key => settingsStorage.setItem(key, dat[key]))
  } else if (name === 'training') {
    console.log('received new training data')
    persistData(dat)
    //save data locally:
    // https://dev.fitbit.com/build/reference/companion-api/storage/
    //then:
    //upload to server
    // https://dev.fitbit.com/build/reference/companion-api/fetch/
    // or start polling every 15 minutes to try to send this data
    // it is tempting to use https://dev.fitbit.com/build/reference/device-api/user-profile/ as an id
    // as of now no unique device id is available, could use guid lib or request 2 factor auth in settings page,
    // then retrieve from profile
  }
}
let persistIntervalId = undefined
let FIFTEEN_MINUTES = 15 * 60 * 1000

// Uploads training data to the backend, buffering unsent items in
// settingsStorage and retrying every 15 minutes on failure.
// NOTE(review): settingsStorage.getItem returns a string, so `.concat(data)`
// is string concatenation, and setItem([]) stores "" — confirm the intended
// storage format for 'unsenttraining'. The success handler's parameter is
// (misleadingly) named `error`.
function persistData(data) {
  let unsenttraining = settingsStorage.getItem('unsenttraining')
  unsenttraining = !!unsenttraining ? unsenttraining.concat(data) : data
  postData('https://relz8bq5l9.execute-api.eu-west-1.amazonaws.com/production/', data, 'PUT')
    .then(error => {
      // Upload succeeded: stop retrying and clear the backlog.
      clearInterval(persistIntervalId)
      persistIntervalId = undefined
      settingsStorage.setItem('unsenttraining', [])
    }).catch(error => {
      // Upload failed: keep the backlog and schedule periodic retries.
      persistIntervalId = setInterval(() => persistData(unsenttraining), FIFTEEN_MINUTES)
      settingsStorage.setItem('unsenttraining', unsenttraining)
    })
}
let getShowWakeupImage = () => extractSetting('showWakeupImage', true, mapToBoolean)

// Push the settings to the watch as soon as a connection is available:
// poll every 10 seconds until the first successful send, then stop.
// (The original comment said "every minute"; the interval is 10 * 1000 ms.)
let settingsSendInterval = setInterval(function() {
  if (messaging.peerSocket.readyState === messaging.peerSocket.OPEN) {
    if (getShowWakeupImage()) tryGetNewBackgroundImage()
    let settings
    try {
      settings = getSettings()
    } catch (e) {
      console.log("error from getSettings", e)
    }
    messaging.peerSocket.send(settings);
    // Sent once — stop polling.
    clearInterval(settingsSendInterval);
  }
}, 10 * 1000)
/**
 * JSON helper around fetch: sends `data` as a JSON body with the given HTTP
 * method and resolves with the parsed JSON response. Rejects on network
 * failure or invalid JSON (standard fetch semantics).
 */
function postData(url = ``, data = {}, method = 'POST') {
  // Default options are marked with *
  return fetch(url, {
    method: method, // *GET, POST, PUT, DELETE, etc.
    //mode: "cors", // no-cors, cors, *same-origin
    cache: "no-cache", // *default, no-cache, reload, force-cache, only-if-cached
    //credentials: "*", // include, same-origin, *omit
    headers: {
      "Content-Type": "application/json; charset=utf-8",
      // "Content-Type": "application/x-www-form-urlencoded",
    },
    redirect: "follow", // manual, *follow, error
    referrer: "no-referrer", // no-referrer, *client
    body: JSON.stringify(data), // body data type must match "Content-Type" header
  })
  .then(response => response.json()); // parses response to JSON
}
#include "Line.hh"
using namespace glow::geometry;
|
/*
* Script to help compare gzipped and non-zipped key spaces on S3
*
* Usage: node utils/compare.js
*
* Expects data to be in files like
*
* 2018-02-10/1518286521.ndjson.gz
* 2018-02-10/1518287191.ndjson.gz
* 2018-02-10/1518293192.ndjson.gz
* ...
*
 * Will output a file called missing.txt that'll contain the missing manifest entries
*
*/
'use strict';

const fs = require('fs');

// Read in the uncompressed and gzipped manifests.
const original = fs.readFileSync('./manifest.txt', {encoding: 'utf-8'});
const zipped = fs.readFileSync('./manifest_gzip.txt', {encoding: 'utf-8'});

// Index every original object name, initially marked as missing.
let baseObj = {};
original.split('\n').forEach((m) => {
  baseObj[m] = false;
});

// Mark each name that has a gzipped counterpart (strip the .gz suffix).
zipped.split('\n').forEach((m) => {
  const objName = m.replace('.gz', '');
  baseObj[objName] = true;
});

// Collect every name never seen in the gzipped manifest,
// prefixed with the S3 key space it lives under.
let missing = [];
for (let m in baseObj) {
  if (baseObj[m] === false) {
    missing.push(`realtime/${m}`);
  }
}

// Output to file
console.info(`There are ${missing.length} missing objects, wrote to missing.txt`);
fs.writeFileSync('./missing.txt', missing.join('\n'));
#!/usr/bin/env bash
set -eE
# (C) Sergey Tyurin 2022-05-18 18:00:00
# Disclaimer
##################################################################################################################
# You running this script/function means you will not blame the author(s)
# if this breaks your stuff. This script/function is provided AS IS without warranty of any kind.
# Author(s) disclaim all implied warranties including, without limitation,
# any implied warranties of merchantability or of fitness for a particular purpose.
# The entire risk arising out of the use or performance of the sample scripts and documentation remains with you.
# In no event shall author(s) be held liable for any damages whatsoever
# (including, without limitation, damages for loss of business profits, business interruption,
# loss of business information, or other pecuniary loss) arising out of the use of or inability
# to use the script or documentation. Neither this script/function,
# nor any part of it other than those parts that are explicitly copied from others,
# may be republished without author(s) express written permission.
# Author(s) retain the right to alter this disclaimer at any time.
##################################################################################################################
# All generated executables will be placed in the $NODE_BIN_DIR folder.
# Options:
# cpp - build cpp node with utils
# rust - build rust node with utils
# dapp - build rust node with utils for DApp server. If NODE_TYPE="CPP" in env.sh, node will be build w/o compressions for CPP network
BUILD_STRT_TIME=$(date +%s)
SCRIPT_DIR=`cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P`
source "${SCRIPT_DIR}/env.sh"
echo
echo "################################### FreeTON nodes build script #####################################"
echo "+++INFO: $(basename "$0") BEGIN $(date +%s) / $(date)"
echo "INFO from env: Network: $NETWORK_TYPE; Node: $NODE_TYPE; WC: $NODE_WC; Elector: $ELECTOR_TYPE; Staking mode: $STAKE_MODE; Access method: $(if $FORCE_USE_DAPP;then echo "DApp"; else echo "console"; fi )"
BackUP_Time="$(date +'%F_%T'|tr ':' '-')"
case "${@}" in
cpp)
CPP_NODE_BUILD=true
RUST_NODE_BUILD=false
DAPP_NODE_BUILD=false
;;
rust)
CPP_NODE_BUILD=false
RUST_NODE_BUILD=true
DAPP_NODE_BUILD=false
;;
dapp)
CPP_NODE_BUILD=false
RUST_NODE_BUILD=true
DAPP_NODE_BUILD=true
;;
*)
CPP_NODE_BUILD=false
RUST_NODE_BUILD=true
DAPP_NODE_BUILD=false
;;
esac
[[ ! -d $NODE_BIN_DIR ]] && mkdir -p $NODE_BIN_DIR
#=====================================================
# Packages set for different OSes
PKGS_FreeBSD="mc libtool perl5 automake llvm-devel gmake git jq wget gawk base64 gflags ccache cmake curl gperf openssl ninja lzlib vim sysinfo logrotate gsl p7zip zstd pkgconf python google-perftools"
PKGS_CentOS="curl jq wget bc vim libtool logrotate openssl-devel clang llvm-devel ccache cmake ninja-build gperf gawk gflags snappy snappy-devel zlib zlib-devel bzip2 bzip2-devel lz4-devel libmicrohttpd-devel readline-devel p7zip libzstd-devel gperftools gperftools-devel"
PKGS_Ubuntu="git mc curl build-essential libssl-dev automake libtool clang llvm-dev jq vim cmake ninja-build ccache gawk gperf texlive-science doxygen-latex libgflags-dev libmicrohttpd-dev libreadline-dev libz-dev pkg-config zlib1g-dev p7zip-full bc libzstd-dev libgoogle-perftools-dev"
PKG_MNGR_FreeBSD="sudo pkg"
PKG_MNGR_CentOS="sudo dnf"
PKG_MNGR_Ubuntu="sudo apt"
FEXEC_FLG="-executable"
#=====================================================
# Detect OS and set packages
OS_SYSTEM=`uname -s`
if [[ "$OS_SYSTEM" == "Linux" ]];then
OS_SYSTEM="$(hostnamectl |grep 'Operating System'|awk '{print $3}')"
elif [[ ! "$OS_SYSTEM" == "FreeBSD" ]];then
echo
echo "###-ERROR: Unknown or unsupported OS. Can't continue."
echo
exit 1
fi
#=====================================================
# Set packages set & manager according to OS
case "$OS_SYSTEM" in
FreeBSD)
export ZSTD_LIB_DIR=/usr/local/lib
PKGs_SET=$PKGS_FreeBSD
PKG_MNGR=$PKG_MNGR_FreeBSD
$PKG_MNGR delete -y rust boost-all|cat
$PKG_MNGR update -f
$PKG_MNGR upgrade -y
FEXEC_FLG="-perm +111"
sudo wget https://github.com/mikefarah/yq/releases/download/v4.13.3/yq_freebsd_amd64 -O /usr/local/bin/yq && sudo chmod +x /usr/local/bin/yq
if ${CPP_NODE_BUILD};then
# libmicrohttpd \
# does not build with libmicrohttpd-0.9.71
# build & install libmicrohttpd-0.9.70
mkdir -p $HOME/src
cd $HOME/src
# sudo pkg remove -y libmicrohttpd | cat
fetch https://ftp.gnu.org/gnu/libmicrohttpd/libmicrohttpd-0.9.70.tar.gz
tar xf libmicrohttpd-0.9.70.tar.gz
cd libmicrohttpd-0.9.70
./configure && make && sudo make install
fi
;;
CentOS)
export ZSTD_LIB_DIR=/usr/lib64
PKGs_SET=$PKGS_CentOS
PKG_MNGR=$PKG_MNGR_CentOS
$PKG_MNGR -y update --allowerasing
$PKG_MNGR group install -y "Development Tools"
$PKG_MNGR config-manager --set-enabled powertools
$PKG_MNGR --enablerepo=extras install -y epel-release
sudo wget https://github.com/mikefarah/yq/releases/download/v4.13.3/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq
if ${CPP_NODE_BUILD};then
$PKG_MNGR remove -y boost
$PKG_MNGR install -y gcc-toolset-10 gcc-toolset-10-gcc
$PKG_MNGR install -y gcc-toolset-10-toolchain
source /opt/rh/gcc-toolset-10/enable
fi
;;
Oracle)
export ZSTD_LIB_DIR=/usr/lib64
PKGs_SET=$PKGS_CentOS
PKG_MNGR=$PKG_MNGR_CentOS
$PKG_MNGR -y update --allowerasing
$PKG_MNGR group install -y "Development Tools"
$PKG_MNGR config-manager --set-enabled ol8_codeready_builder
$PKG_MNGR install -y oracle-epel-release-el8
sudo wget https://github.com/mikefarah/yq/releases/download/v4.13.3/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq
if ${CPP_NODE_BUILD};then
$PKG_MNGR remove -y boost
$PKG_MNGR install -y gcc-toolset-10 gcc-toolset-10-gcc
$PKG_MNGR install -y gcc-toolset-10-toolchain
source /opt/rh/gcc-toolset-10/enable
fi
;;
Ubuntu|Debian)
export ZSTD_LIB_DIR=/usr/lib/x86_64-linux-gnu
PKGs_SET=$PKGS_Ubuntu
PKG_MNGR=$PKG_MNGR_Ubuntu
$PKG_MNGR install -y software-properties-common
sudo add-apt-repository -y ppa:ubuntu-toolchain-r/ppa
sudo wget https://github.com/mikefarah/yq/releases/download/v4.13.3/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq
if ${CPP_NODE_BUILD};then
$PKG_MNGR remove -y libboost-all-dev|cat
$PKG_MNGR update && $PKG_MNGR upgrade -y
$PKG_MNGR install -y g++-10
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 90 --slave /usr/bin/g++ g++ /usr/bin/g++-10 --slave /usr/bin/gcov gcov /usr/bin/gcov-10
mkdir -p $HOME/src
cd $HOME/src
# sudo pkg remove -y libmicrohttpd | cat
wget https://ftp.gnu.org/gnu/libmicrohttpd/libmicrohttpd-0.9.70.tar.gz
tar xf libmicrohttpd-0.9.70.tar.gz
cd libmicrohttpd-0.9.70
./configure && make && sudo make install
fi
;;
*)
echo
echo "###-ERROR: Unknown or unsupported OS. Can't continue."
echo
exit 1
;;
esac
#=====================================================
# Install packages
echo
echo '################################################'
echo "---INFO: Install packages ... "
$PKG_MNGR install -y $PKGs_SET
#=====================================================
# Install BOOST for C++ node
if ${CPP_NODE_BUILD}; then
echo
echo '################################################'
echo '---INFO: Install BOOST from source'
Installed_BOOST_Ver="$(cat /usr/local/include/boost/version.hpp 2>/dev/null | grep "define BOOST_LIB_VERSION"|awk '{print $3}'|tr -d '"'| awk -F'_' '{printf("%d%s%2d\n", $1,".",$2)}')"
Required_BOOST_Ver="$(echo $BOOST_VERSION | awk -F'.' '{printf("%d%s%2d\n", $1,".",$2)}')"
if [[ "$Installed_BOOST_Ver" != "$Required_BOOST_Ver" ]];then
mkdir -p $HOME/src
cd $HOME/src
sudo rm -rf $HOME/src/boost* |cat
sudo rm -rf /usr/local/include/boost |cat
sudo rm -f /usr/local/lib/libboost* |cat
Boost_File_Version="$(echo ${BOOST_VERSION}|awk -F. '{printf("%s_%s_%s",$1,$2,$3)}')"
wget https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${Boost_File_Version}.tar.gz
tar xf boost_${Boost_File_Version}.tar.gz
cd $HOME/src/boost_${Boost_File_Version}/
./bootstrap.sh
sudo ./b2 install --prefix=/usr/local
else
echo "---INFO: BOOST Version ${BOOST_VERSION} already installed"
fi
fi
#=====================================================
# Install or upgrade RUST
echo
echo '################################################'
echo "---INFO: Install RUST ${RUST_VERSION}"
cd $HOME
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- --default-toolchain ${RUST_VERSION} -y
# curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs -o $HOME/rust_install.sh
# sh $HOME/rust_install.sh -y --default-toolchain ${RUST_VERSION}
# curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain ${RUST_VERSION} -y
source $HOME/.cargo/env
cargo install cargo-binutils
#=====================================================
# Build C++ node
if ${CPP_NODE_BUILD};then
echo
echo '################################################'
echo "---INFO: Build C++ node ..."
cd $SCRIPT_DIR
[[ -d ${TON_SRC_DIR} ]] && rm -rf "${TON_SRC_DIR}"
echo "---INFO: clone ${CNODE_GIT_REPO} (${CNODE_GIT_COMMIT})..."
git clone "${CNODE_GIT_REPO}" "${TON_SRC_DIR}"
cd "${TON_SRC_DIR}"
git checkout "${CNODE_GIT_COMMIT}"
git submodule init && git submodule update --recursive
git submodule foreach 'git submodule init'
git submodule foreach 'git submodule update --recursive'
echo "---INFO: clone ${CNODE_GIT_REPO} (${CNODE_GIT_COMMIT})... DONE"
echo
echo "---INFO: build a node..."
mkdir -p "${TON_BUILD_DIR}" && cd "${TON_BUILD_DIR}"
cmake .. -G "Ninja" -DCMAKE_BUILD_TYPE=Release -DPORTABLE=ON
ninja
echo "---INFO: build a node... DONE"
echo
# cp $NODE_BIN_DIR/lite-client $NODE_BIN_DIR/lite-client_${BackUP_Time}|cat
# cp $NODE_BIN_DIR/validator-engine $NODE_BIN_DIR/validator-engine_${BackUP_Time}|cat
# cp $NODE_BIN_DIR/validator-engine-console $NODE_BIN_DIR/validator-engine-console_${BackUP_Time}|cat
cp -f $TON_BUILD_DIR/lite-client/lite-client $NODE_BIN_DIR/
cp -f $TON_BUILD_DIR/validator-engine/validator-engine $NODE_BIN_DIR/
cp -f $TON_BUILD_DIR/validator-engine-console/validator-engine-console $NODE_BIN_DIR/
cp -f $TON_BUILD_DIR/crypto/fift $NODE_BIN_DIR/
#=====================================================
echo "---INFO: build utils (convert_address)..."
cd "${NODE_TOP_DIR}/utils/convert_address"
cargo update
cargo build --release
cp "${NODE_TOP_DIR}/utils/convert_address/target/release/convert_address" "$NODE_BIN_DIR/"
echo "---INFO: build utils (convert_address)... DONE"
fi
#=====================================================
# Build rust node
if ${RUST_NODE_BUILD};then
echo
echo '################################################'
echo "---INFO: build RUST NODE ..."
echo -e "${BoldText}${BlueBack}---INFO: RNODE git repo: ${RNODE_GIT_REPO} ${NormText}"
echo -e "${BoldText}${BlueBack}---INFO: RNODE git commit: ${RNODE_GIT_COMMIT} ${NormText}"
[[ -d ${RNODE_SRC_DIR} ]] && rm -rf "${RNODE_SRC_DIR}"
# git clone --recurse-submodules "${RNODE_GIT_REPO}" $RNODE_SRC_DIR
git clone "${RNODE_GIT_REPO}" "${RNODE_SRC_DIR}"
cd "${RNODE_SRC_DIR}"
git checkout "${RNODE_GIT_COMMIT}"
git submodule init && git submodule update --recursive
git submodule foreach 'git submodule init'
git submodule foreach 'git submodule update --recursive'
cd $RNODE_SRC_DIR
sed -i.bak 's%features = \[\"cmake_build\", \"dynamic_linking\"\]%features = \[\"cmake_build\"\]%g' Cargo.toml
#====== Uncomment to disabe node's logs competely
# sed -i.bak 's%log = '0.4'%log = { version = "0.4", features = ["release_max_level_off"] }%' Cargo.toml
cargo update
# node git commit
export GC_TON_NODE="$(git --git-dir="$RNODE_SRC_DIR/.git" rev-parse HEAD 2>/dev/null)"
# block version
export NODE_BLK_VER=$(cat $RNODE_SRC_DIR/src/validating_utils.rs |grep -A1 'supported_version'|tail -1|tr -d ' ')
# patch main.rs
sed -i.bak -e '/TON NODE git commit: {}\\n\\/p; s/TON NODE git commit: {}\\n\\/Node block version: {}\\n\\/' $RNODE_SRC_DIR/src/main.rs
sed -i.bak -e '/std::option_env!("GC_TON_NODE").unwrap_or("Not set"),/p; s/std::option_env!("GC_TON_NODE").unwrap_or("Not set"),/std::option_env!("NODE_BLK_VER").unwrap_or("Not set"),/' $RNODE_SRC_DIR/src/main.rs
echo -e "${BoldText}${BlueBack}---INFO: RNODE build flags: ${RNODE_FEATURES} commit: ${GC_TON_NODE} Block version: ${NODE_BLK_VER}${NormText}"
RUSTFLAGS="-C target-cpu=native" cargo build --release --features "${RNODE_FEATURES}"
# cp $NODE_BIN_DIR/rnode $NODE_BIN_DIR/rnode_${BackUP_Time}|cat
cp -f ${RNODE_SRC_DIR}/target/release/ton_node $NODE_BIN_DIR/rnode
#=====================================================
# Build rust node console
echo '################################################'
echo "---INFO: Build rust node console ..."
[[ -d ${RCONS_SRC_DIR} ]] && rm -rf "${RCONS_SRC_DIR}"
git clone --recurse-submodules "${RCONS_GIT_REPO}" $RCONS_SRC_DIR
cd $RCONS_SRC_DIR
git checkout "${RCONS_GIT_COMMIT}"
git submodule init
git submodule update
cargo update
RUSTFLAGS="-C target-cpu=native" cargo build --release
find $RCONS_SRC_DIR/target/release/ -maxdepth 1 -type f ${FEXEC_FLG} -exec cp -f {} $NODE_BIN_DIR/ \;
echo "---INFO: build RUST NODE ... DONE."
fi
#=====================================================
# Build TON Solidity Compiler (solc)
# echo "---INFO: build TON Solidity Compiler ..."
# [[ ! -z ${SOLC_SRC_DIR} ]] && rm -rf "${SOLC_SRC_DIR}"
# git clone --recurse-submodules "${SOLC_GIT_REPO}" "${SOLC_SRC_DIR}"
# cd "${SOLC_SRC_DIR}"
# git checkout "${SOLC_GIT_COMMIT}"
# mkdir ${SOLC_SRC_DIR}/build
# cd "${SOLC_SRC_DIR}/build"
# cmake ../compiler/ -DCMAKE_BUILD_TYPE=Release
# if [[ "$(uname)" == "Linux" ]];then
# V_CPU=`nproc`
# else
# V_CPU=`sysctl -n hw.ncpu`
# fi
# cmake --build . -- -j $V_CPU
# cp -f "${SOLC_SRC_DIR}/build/solc/solc" $NODE_BIN_DIR/
# cp -f "${SOLC_SRC_DIR}/lib/stdlib_sol.tvm" $NODE_BIN_DIR/
# echo "---INFO: build TON Solidity Compiler ... DONE."
#=====================================================
# Build TVM-linker
# echo
# echo '################################################'
# echo "---INFO: build TVM-linker ..."
# [[ ! -z ${TVM_LINKER_SRC_DIR} ]] && rm -rf "${TVM_LINKER_SRC_DIR}"
# git clone --recurse-submodules "${TVM_LINKER_GIT_REPO}" "${TVM_LINKER_SRC_DIR}"
# cd "${TVM_LINKER_SRC_DIR}"
# git checkout "${TVM_LINKER_GIT_COMMIT}"
# cd "${TVM_LINKER_SRC_DIR}/tvm_linker"
# RUSTFLAGS="-C target-cpu=native" cargo build --release
# cp -f "${TVM_LINKER_SRC_DIR}/tvm_linker/target/release/tvm_linker" $NODE_BIN_DIR/
# echo "---INFO: build TVM-linker ... DONE."
#=====================================================
# Build tonos-cli
echo
echo '################################################'
echo "---INFO: build tonos-cli ... "
[[ -d ${TONOS_CLI_SRC_DIR} ]] && rm -rf "${TONOS_CLI_SRC_DIR}"
git clone --recurse-submodules "${TONOS_CLI_GIT_REPO}" "${TONOS_CLI_SRC_DIR}"
cd "${TONOS_CLI_SRC_DIR}"
git checkout "${TONOS_CLI_GIT_COMMIT}"
cargo update
RUSTFLAGS="-C target-cpu=native" cargo build --release
# cp $NODE_BIN_DIR/tonos-cli $NODE_BIN_DIR/tonos-cli_${BackUP_Time}|cat
cp "${TONOS_CLI_SRC_DIR}/target/release/tonos-cli" "$NODE_BIN_DIR/"
echo "---INFO: build tonos-cli ... DONE"
#=====================================================
# download contracts
echo
echo '################################################'
echo "---INFO: download contracts ... "
rm -rf "${ContractsDIR}"
rm -rf "${NODE_TOP_DIR}/Surf-contracts"
git clone ${CONTRACTS_GIT_REPO} "${ContractsDIR}"
cd "${ContractsDIR}"
git checkout $CONTRACTS_GIT_COMMIT
cd ${NODE_TOP_DIR}
git clone --single-branch --branch ${Surf_GIT_Commit} ${CONTRACTS_GIT_REPO} "${ContractsDIR}/Surf-contracts"
curl -o ${Elector_ABI} ${RustCup_El_ABI_URL} &>/dev/null
#=====================================================
# Check reboot required after update
case "$OS_SYSTEM" in
FreeBSD)
;;
Oracle|CentOS)
needs-restarting -r
;;
Ubuntu|Debian)
if [ -f /var/run/reboot-required ]; then
echo 'reboot required'
cat /var/run/reboot-required.pkgs
fi
;;
*)
;;
esac
echo
echo '################################################'
BUILD_END_TIME=$(date +%s)
Build_mins=$(( (BUILD_END_TIME - BUILD_STRT_TIME)/60 ))
Build_secs=$(( (BUILD_END_TIME - BUILD_STRT_TIME)%60 ))
echo
echo "+++INFO: $(basename "$0") on $HOSTNAME FINISHED $(date +%s) / $(date)"
echo "All builds took $Build_mins min $Build_secs secs"
echo "================================================================================================"
exit 0
|
# Copyright (c) 2013, Daynix Computing LTD (www.daynix.com)
# All rights reserved.
#
# Maintained by oss@daynix.com
#
# This file is a part of VirtHCK, please see the wiki page
# on https://github.com/daynix/VirtHCK/wiki for more.
#
# This code is licensed under standard 3-clause BSD license.
# See file LICENSE supplied with this package for the full license text.
#Overiding command line config arguments
. `dirname $0`/args.cfg
# Setup functions file
# 4-digit unique ID
UNIQUE_ID=`printf "%04d" ${UNIQUE_ID}`
UID_FIRST=`printf $UNIQUE_ID | cut -c1,2`
UID_SECOND=`printf $UNIQUE_ID | cut -c3,4`
#VNC ports
PORT_BASE=`expr ${UNIQUE_ID} '*' 3`
STUDIO_PORT=`expr ${PORT_BASE} - 2`
CLIENT1_PORT=`expr ${PORT_BASE} - 1`
CLIENT2_PORT=`expr ${PORT_BASE}`
# Aux. bridges
CTRL_BR_NAME=ctrltestbr_${UNIQUE_ID}
TEST_BR_NAME=hcktestbr_${UNIQUE_ID}
# Print the current UTC time as a filename-safe ISO-like stamp
# (colons replaced by dashes).
timestamp()
{
    date -u +'%Y-%m-%dT%H-%M-%SZ'
}
# Real user
REAL_ME=$USER
if [ -n "$SUDO_USER" ]
then
REAL_ME=$SUDO_USER
[ "$RUN_QEMU_AS_ROOT" = true ] || QEMU_RUN_AS="-runas ${REAL_ME}"
fi
STUDIO_IMAGE=`readlink -f $STUDIO_IMAGE`
CLIENT1_IMAGE=`readlink -f $CLIENT1_IMAGE`
CLIENT2_IMAGE=`readlink -f $CLIENT2_IMAGE`
test x"${SNAPSHOT}" = xon && SNAPSHOT_OPTION="-snapshot"
test x"${UNSAFE_CACHE}" = xon && DRIVE_CACHE_OPTION=",cache=unsafe"
test x"${ENLIGHTENMENTS_STATE}" = xon && ENLIGHTENMENTS_OPTION=,hv_spinlocks=0x1FFF,hv_relaxed
test x"${CLIENT_WORLD_ACCESS}" = xon && CLIENT_WORLD_ACCESS_NOTIFY="ENABLED!!!" || CLIENT_WORLD_ACCESS_NOTIFY="disabled"
if [ z${ENABLE_S3} = zon ]
then
S3_DISABLE_OPTION="0"
else
S3_DISABLE_OPTION="1"
fi
if [ z${ENABLE_S4} = zon ]
then
S4_DISABLE_OPTION="0"
else
S4_DISABLE_OPTION="1"
fi
STUDIO_TELNET_PORT=$(( ${STUDIO_PORT} + 10000 ))
CLIENT1_TELNET_PORT=$(( ${CLIENT1_PORT} + 10000 ))
CLIENT2_TELNET_PORT=$(( ${CLIENT2_PORT} + 10000 ))
MONITOR_STUDIO="-monitor telnet::${STUDIO_TELNET_PORT},server,nowait -monitor vc"
MONITOR_CLIENT1="-monitor telnet::${CLIENT1_TELNET_PORT},server,nowait"
MONITOR_CLIENT2="-monitor telnet::${CLIENT2_TELNET_PORT},server,nowait"
if [ z${VIDEO_TYPE} = zVNC ]
then
GRAPHICS_STUDIO="-vnc :${STUDIO_PORT}"
GRAPHICS_CLIENT1="-vga cirrus -vnc :${CLIENT1_PORT}"
CLIENT1_PORTS_MSG="Vnc ${CLIENT1_PORT}/$(( ${CLIENT1_PORT} + 5900 )) Telnet ${CLIENT1_TELNET_PORT}"
GRAPHICS_CLIENT2="-vga cirrus -vnc :${CLIENT2_PORT}"
CLIENT2_PORTS_MSG="Vnc ${CLIENT2_PORT}/$(( ${CLIENT2_PORT} + 5900 )) Telnet ${CLIENT2_TELNET_PORT}"
MONITOR_STDIO="${MONITOR_STDIO} -monitor vc"
MONITOR_CLIENT1="${MONITOR_CLIENT1} -monitor vc"
MONITOR_CLIENT2="${MONITOR_CLIENT2} -monitor vc"
fi
if [ z${VIDEO_TYPE} = zSPICE ]
then
GRAPHICS_STUDIO="-vnc :${STUDIO_PORT}"
CLIENT1_SPICE_PORT=$(( ${CLIENT1_PORT} + 5900 ))
CLIENT2_SPICE_PORT=$(( ${CLIENT2_PORT} + 5900 ))
GRAPHICS_CLIENT1="-spice port=${CLIENT1_SPICE_PORT},disable-ticketing -vga qxl -global qxl-vga.revision=3"
CLIENT1_PORTS_MSG="Spice ${CLIENT1_SPICE_PORT} Telnet ${CLIENT1_TELNET_PORT}"
GRAPHICS_CLIENT2="-spice port=${CLIENT2_SPICE_PORT},disable-ticketing -vga qxl -global qxl-vga.revision=3"
CLIENT2_PORTS_MSG="Spice ${CLIENT2_SPICE_PORT} Telnet ${CLIENT2_TELNET_PORT}"
fi
if [ ! -z "${CLIENT1_N_QUEUES}" ]
then
CLIENT1_N_VECTORS=$(( ${CLIENT1_N_QUEUES} * 2 + 2))
CLIENT1_NETDEV_QUEUES=${CLIENT1_N_QUEUES}
CLIENT1_MQ_DEVICE_PARAM=",mq=on,vectors=${CLIENT1_N_VECTORS}"
fi
if [ ! -z "${CLIENT2_N_QUEUES}" ]
then
CLIENT2_N_VECTORS=$(( ${CLIENT2_N_QUEUES} * 2 + 2))
CLIENT2_NETDEV_QUEUES=${CLIENT2_N_QUEUES}
CLIENT2_MQ_DEVICE_PARAM=",mq=on,vectors=${CLIENT2_N_VECTORS}"
fi
#SMB share on host
if [ -d "$SHARE_ON_HOST" ] && [ "$SHARE_ON_HOST" != "false" ]
then
SHARE_ON_HOST=`cd ${SHARE_ON_HOST} && pwd` # Get the absolute path
elif [ "$SHARE_ON_HOST" != "false" ]
then
echo "Directory ${SHARE_ON_HOST} does not exist!"
echo "Either create it, or set the \"SHARE_ON_HOST\" variable to \"false\"."
echo "Running without a share..."
SHARE_ON_HOST="false"
fi
# Tear down the auxiliary bridges created by create_bridges():
# the test bridge (linux bridge or OVS, per $TEST_NET_TYPE) and the
# control bridge (always a plain linux bridge).
remove_bridges() {
    case $TEST_NET_TYPE in
    bridge)
        ifconfig ${TEST_BR_NAME} down
        brctl delbr ${TEST_BR_NAME}
        ;;
    OVS)
        ovs-vsctl del-br ${TEST_BR_NAME}
        ;;
    esac
    ifconfig ${CTRL_BR_NAME} down
    brctl delbr ${CTRL_BR_NAME}
}
# Compute a TX queue length scaled by the larger of two queue counts.
# Empty or non-numeric arguments are treated as 1.
queue_len_tx()
{
    q1=$1
    q2=$2
    case $q1 in
        ''|*[!0-9]*) q1=1 ;;
    esac
    case $q2 in
        ''|*[!0-9]*) q2=1 ;;
    esac
    if [ "$q1" -ge "$q2" ]; then
        qmax=$q1
    else
        qmax=$q2
    fi
    echo $(( qmax * 2048 ))
}
# Create the per-setup test and control bridges.
# "already exists" errors are filtered out so re-running is harmless.
create_bridges() {
    case $TEST_NET_TYPE in
    bridge)
        brctl addbr ${TEST_BR_NAME} 2>&1 | grep -v "already exists"
        ifconfig ${TEST_BR_NAME} up
        # Scale the TX queue length by the larger client queue count.
        ifconfig ${TEST_BR_NAME} txqueuelen $(queue_len_tx $CLIENT1_N_QUEUES $CLIENT2_N_QUEUES)
        ;;
    OVS)
        ovs-vsctl add-br ${TEST_BR_NAME}
        ;;
    esac
    # The control bridge is always a plain linux bridge.
    brctl addbr ${CTRL_BR_NAME} 2>&1 | grep -v "already exists"
    ifconfig ${CTRL_BR_NAME} up
}
# Enslave an interface to a linux bridge: put it in promiscuous mode with
# no IP address, add it to the bridge, and disable TX offload.
# $1 - bridge name, $2 - interface name
enslave_iface() {
    BRNAME=$1
    IFNAME=$2
    ifconfig ${IFNAME} promisc 0.0.0.0 &&
    brctl addif ${BRNAME} ${IFNAME} &&
    ethtool -K ${IFNAME} tx off
}
# Enslave a test-network interface to the test bridge (linux bridge or OVS,
# per $TEST_NET_TYPE), disable TX offload, and scale its TX queue length.
# $1 - bridge name, $2 - interface name
enslave_test_iface() {
    BRNAME=$1
    IFNAME=$2
    ifconfig ${IFNAME} promisc 0.0.0.0 &&
    case $TEST_NET_TYPE in
    bridge)
        brctl addif ${BRNAME} ${IFNAME} ||
        echo ERROR: Failed to enslave ${IFNAME} to ${BRNAME} bridge
        ;;
    OVS)
        { ovs-vsctl add-port ${BRNAME} ${IFNAME} &&
          ovs-vsctl set port ${IFNAME} other-config:priority-tags=true; } ||
        echo ERROR: Failed to enslave ${IFNAME} to ${BRNAME} ovs-bridge
        ;;
    esac
    ethtool -K ${IFNAME} tx off
    # BUGFIX: the second argument was $CLIENT1_N_QUEUES twice (copy-paste);
    # use both clients' queue counts, matching create_bridges().
    ifconfig ${IFNAME} txqueuelen $(queue_len_tx $CLIENT1_N_QUEUES $CLIENT2_N_QUEUES)
}
# Create a macvtap interface (bridge mode) on top of BRNAME and print the
# corresponding /dev/tapN device path for the caller.
# $1 - link (bridge) name, $2 - unique suffix, $3 - MAC address
enslave_test_iface_macvtap() {
    BRNAME=$1
    UNIQUE_SUFFIX=$2
    MAC_ADDRESS=$3
    ip link add link ${BRNAME} macvtap-${UNIQUE_SUFFIX} address ${MAC_ADDRESS} type macvtap mode bridge ||
    echo ERROR: Failed to create macvtap interface
    ifconfig macvtap-${UNIQUE_SUFFIX} up ||
    echo ERROR: Failed to bring up macvtap-${UNIQUE_SUFFIX} interface
    # Extract the link index (the N in "N: macvtap-..." from `ip link show`).
    # NOTE(review): assumes the link index maps 1:1 to /dev/tapN — confirm.
    TAP_ID=`ip link show macvtap-${UNIQUE_SUFFIX} | grep macvtap-${UNIQUE_SUFFIX} | cut -f1 -d':'`
    echo "/dev/tap${TAP_ID}"
}
# Remove the macvtap interface created for unique suffix $1.
delete_macvtap() {
    ip link del macvtap-$1
}
dump_config()
{
if [ ! -z "${TEST_DEV_EXTRA_PARAMS}" ]
then
local EXTRA_PARAMS="${TEST_DEV_EXTRA_PARAMS}"
else
local EXTRA_PARAMS=None
fi
cat <<END
Setup configuration
Machine type................${MACHINE_TYPE}
Setup ID................... ${UNIQUE_ID}
Test suite type............ ${TEST_DEV_TYPE}
Test device................ ${TEST_DEV_NAME}
Test device extra config... ${EXTRA_PARAMS}
Graphics................... ${VIDEO_TYPE}
Test network backend....... ${TEST_NET_TYPE}
Studio VM display port..... Vnc ${STUDIO_PORT}/$(( ${STUDIO_PORT} + 5900 )) Telnet ${STUDIO_TELNET_PORT}
Client 1 display port...... ${CLIENT1_PORTS_MSG}
Client 2 display port...... ${CLIENT2_PORTS_MSG}
QEMU binary................ ${QEMU_BIN}
Studio VM image............ ${STUDIO_IMAGE}
Client 1 VM Image.......... ${CLIENT1_IMAGE}
Client 2 VM Image.......... ${CLIENT2_IMAGE}
SMB share on host.......... ${SHARE_ON_HOST}
Client world access........ ${CLIENT_WORLD_ACCESS_NOTIFY}
Client 1 VCPUs............. ${CLIENT1_CPUS}
Client 2 VCPUs............. ${CLIENT2_CPUS}
Memory for each client..... ${CLIENT_MEMORY}
World network device....... ${WORLD_NET_DEVICE}
Control network device..... ${CTRL_NET_DEVICE}
VHOST...................... ${VHOST_STATE}
Enlightenments..............${ENLIGHTENMENTS_STATE}
S3 enabled..................${ENABLE_S3}
S4 enabled..................${ENABLE_S4}
Snapshot mode.............. ${SNAPSHOT}
END
}
LOOPRUN_FILE=${HCK_ROOT}"/.hck_stop_looped_vms_${UNIQUE_ID}.flag"
# Run a VM command in a loop, restarting it each time it exits, until the
# stop-flag file ($LOOPRUN_FILE) exists.
# BUGFIX: the original used $*, which re-splits every argument on
# whitespace; "$@" preserves arguments containing spaces.
loop_run_vm() {
    while true; do
        "$@"
        test -f "$LOOPRUN_FILE" && return 0
        sleep 2
    done
}
# Signal loop_run_vm() loops to exit by creating the stop-flag file.
loop_run_stop() {
    touch $LOOPRUN_FILE
}
# Clear the stop flag so loop_run_vm() loops keep restarting their VMs.
loop_run_reset() {
    rm -f $LOOPRUN_FILE
}
# Delete the generated per-bridge ifup helper scripts for this setup ID.
remove_bridge_scripts() {
    for p in 'ctrl' 'world' 'test'; do
        rm -f ${HCK_ROOT}"/hck_${p}_bridge_ifup_${UNIQUE_ID}.sh"
    done
}
IVSHMEM_SOCKET=/tmp/ivshmem_socket_${UNIQUE_ID}
IVSHMEM_PID=/var/run/ivshmem-server_${UNIQUE_ID}.pid
# Start the ivshmem server (only for ivshmem test-device setups),
# removing any stale socket from a previous run first.
run_ivshmem_server() {
    if [ "${TEST_DEV_TYPE}" = "ivshmem" ]; then
        echo Running ivshmem server...
        sudo rm -f /tmp/ivshmem_socket_${UNIQUE_ID}
        ${IVSHMEM_SERVER_BIN} -p ${IVSHMEM_PID} -S ${IVSHMEM_SOCKET}
    fi
}
# Stop the ivshmem server started by run_ivshmem_server().
kill_ivshmem_server() {
    if [ "${TEST_DEV_TYPE}" = "ivshmem" ]; then
        echo stopping ivshmem server...
        # BUGFIX: `${cat ${IVSHMEM_PID}}` is a bad-substitution error;
        # use command substitution to read the PID from the pidfile.
        sudo kill $(cat ${IVSHMEM_PID})
    fi
}
|
<gh_stars>0
var penumpang = [];

/**
 * Add a passenger to the minibus (angkot) seat array.
 * Fills the first vacant (undefined) seat, rejects duplicates, and
 * otherwise appends at the end. Returns the (mutated) seat array.
 * BUGFIX: the emptiness check relied on the `[] == 0` type coercion;
 * use an explicit length check instead.
 */
var tambahPenumpang = function(namaPenumpang, penumpang) {
    // Empty bus: take the first seat.
    if (penumpang.length === 0) {
        penumpang.push(namaPenumpang);
        return penumpang;
    }
    // Scan all seats from the front.
    for (var i = 0; i < penumpang.length; i++) {
        if (penumpang[i] === undefined) {
            // Fill the first vacant seat.
            penumpang[i] = namaPenumpang;
            return penumpang;
        } else if (penumpang[i] === namaPenumpang) {
            // Duplicate passenger: leave the array unchanged.
            console.log(namaPenumpang + ' sudah ada di dalam angkot ! ');
            return penumpang;
        } else if (i === penumpang.length - 1) {
            // No vacancy and no duplicate: append at the end.
            penumpang.push(namaPenumpang);
            return penumpang;
        }
    }
};
/**
 * Remove a passenger by name, leaving their seat vacant (undefined).
 * Logs when the bus is empty or the passenger is not found.
 * Returns the (possibly mutated) seat array.
 * BUGFIX: the loop index was assigned without `var`, leaking `i` as an
 * implicit global.
 */
var hapusPenumpang = function(namaPenumpang, penumpang) {
    // Nothing to remove from an empty bus.
    if (penumpang.length === 0) {
        console.log('Angkot sudah kosong, tidak perlu dihapus ');
        return penumpang;
    }
    for (var i = 0; i < penumpang.length; i++) {
        if (penumpang[i] === namaPenumpang) {
            // Vacate the seat but keep its position.
            penumpang[i] = undefined;
            return penumpang;
        } else if (i === penumpang.length - 1) {
            console.log(namaPenumpang + ' tidak ada di dalam angkot');
            return penumpang;
        }
    }
};
|
arr.sort()
# arr = ["apple", "boy", "cat", "dog"] |
#!/bin/bash
# Launch grafana-server from its homepath with a custom config, logging
# all output to /var/log/grafana.all.log.
# BUGFIX: abort if the working directory is missing instead of starting
# the server from whatever directory the script was invoked in.
cd /usr/share/grafana.all || exit 1
grafana-server -config /etc/grafana.all/grafana.ini cfg:default.paths.data=/var/lib/grafana.all 1>/var/log/grafana.all.log 2>&1
|
<filename>src/views/Components/Components.js<gh_stars>1-10
import React from "react";
// react components for routing our app without refresh
import { Link } from "react-router-dom";
// @material-ui/core components
import { makeStyles } from "@material-ui/core/styles";
// @material-ui/icons
// core components
import Header from "components/Header/Header.js";
import Footer from "components/Footer/Footer.js";
import GridContainer from "components/Grid/GridContainer.js";
import GridItem from "components/Grid/GridItem.js";
import Parallax from "components/Parallax/Parallax.js";
// sections for this page
import HeaderLinks from "components/Header/HeaderLinks.js";
import styles from "assets/jss/material-kit-react/views/components.js";
const useStyles = makeStyles(styles);
export default function Components(props) {
const classes = useStyles();
const { ...rest } = props;
return (
<div>
<Header
brand={<img width="100px" src="/assets/img/apple-icon.png" />}
rightLinks={<HeaderLinks />}
fixed
color="transparent"
changeColorOnScroll={{
height: 400,
color: "white"
}}
{...rest}
/>
<Parallax image="assets/img/cov.png">
<div className={classes.container}>
<GridContainer>
<GridItem>
<div className={classes.brand}>
<img style={{background: 'white', borderRadius: 10} } width="300px" src="/assets/img/ducitc.png"/>
{/* <h3 className={classes.subtitle}>
Dania University College IT Club
</h3> */}
</div>
</GridItem>
</GridContainer>
</div>
</Parallax>
<Footer />
</div>
);
}
|
#
# borrowed from project https://github.com/doodeck/aws-lambda-idempotent
# similarly the invokidempotent folder
# Package the Lambda sources and upload them via update-function-code.
mkdir -p tmp
# BUGFIX: abort on packaging failure so a stale tmp/index.zip from a
# previous run is never uploaded.
zip -r tmp/index.zip \
*.js invokeidempotent/*.js invokeidempotent/modules/ \
lib/ modules/ public/ routes/ views/ node_modules/ || exit 1
# As of CLI 1.7 upload-function option has been discontinued. Use update-function-code instead
#aws --profile lambda lambda upload-function --region eu-west-1 \
# --function-name bikeslambda \
# --function-zip tmp/index.zip \
# --role 'arn:aws:iam::915133436062:role/bikes_lambda_exec_role' --mode event \
# --handler invokeidempotent/invokeidempotent.handler --runtime nodejs --timeout 30
# Keep the timeout value in sync with the invokeidempotent/config.js
aws --profile lambda lambda update-function-code --region eu-west-1 \
--function-name bikeslambda \
--zip-file fileb://tmp/index.zip
|
#!/bin/bash
# Build and push Docker images for the Quarkus-based platform.
# Usage: <script> <image-name>   (e.g. trellisldp/trellis-cassandra)
IMAGE=$1
if [ -z "$1" ]; then
    echo "Please include an image name parameter (e.g. trellisldp/trellis-cassandra)"
    exit 1
else
    echo "Building docker image $IMAGE"
fi

VERSION=$(./gradlew -q getVersion)
BRANCH=$(git branch 2>/dev/null | sed -n -e 's/^\* \(.*\)/\1/p')
TAG=latest

# Use the develop tag for snapshots
if [[ $VERSION == *SNAPSHOT* ]]; then
    TAG=develop
fi

# Quarkus-based images
# BUGFIX: abort if the directory is missing instead of running docker
# build from the repository root with wrong relative Dockerfile paths.
cd platform/quarkus || exit 1

if [[ -f "build/trellis-quarkus-${VERSION}-runner.jar" && -d "build/lib" ]]
then
    # Don't use latest/develop tags for maintenance branches
    if [[ $BRANCH == *.x ]]; then
        docker build -f src/main/docker/Dockerfile.jvm -t "$IMAGE:$VERSION" .
    else
        docker build -f src/main/docker/Dockerfile.jvm -t "$IMAGE:$TAG" -t "$IMAGE:$VERSION" .
    fi
    docker push "$IMAGE"
else
    echo "Build artifacts not present. Please run 'gradle assemble' first"
    exit 1
fi
|
<reponame>lyzardiar/MangoLua<gh_stars>0
#include "Window.h"
using namespace mango;
Window* Window::_instance = nullptr;
void mango::Window::Destory() {
if (_instance != nullptr) {
delete _instance;
_instance = nullptr;
}
} |
<reponame>lgoldstein/communitychest
package net.community.chest.util.logging.factory.console;
import java.io.PrintStream;
import java.util.EnumMap;
import java.util.Map;
import net.community.chest.util.logging.AbstractLoggerWrapper;
import net.community.chest.util.logging.LogLevelWrapper;
import net.community.chest.util.logging.format.LogMsgComponentFormatter;
/**
* Copyright 2007 as per GPLv2
* @author <NAME>.
* @since Jun 26, 2007 1:32:47 PM
*/
public final class ConsoleLoggerWrapper extends AbstractLoggerWrapper {
    /**
     * Prefix of all properties names used for controlling the wrapper's
     * behavior (e.g., level, output stream, etc.)
     */
    private static final String WRAPPER_BASE_PROPPREFIX=ConsoleLoggerWrapper.class.getName().toLowerCase();
    // Builds a control-property name by appending a suffix to the common prefix.
    private static final String getWrapperPropName (final String name)
    {
        return WRAPPER_BASE_PROPPREFIX + "." + name;
    }
    // Property name that selects the output stream for a given log level.
    private static final String getLevelStreamPropName (final LogLevelWrapper l)
    {
        final String lName=(null == l) ? null : l.name();
        return getWrapperPropName(String.valueOf(lName).toLowerCase());
    }
    // Lazily-populated cache of level -> output stream (System.out / System.err).
    private static Map<LogLevelWrapper,PrintStream> _streamsMap;
    /*
     * Resolves (and caches) the PrintStream for the given level. A
     * boolean-valued system property (per-level, see
     * getLevelStreamPropName) forces STDERR when true; when the property
     * is unset, WARNING and below go to STDERR and the rest to STDOUT.
     * A null level always maps to STDERR.
     */
    private static final synchronized PrintStream getPrintStream (final LogLevelWrapper l)
    {
        if (null == l)
            return System.err;
        PrintStream out=(null == _streamsMap) ? null : _streamsMap.get(l);
        if (out != null)
            return out;
        // use the level name as the property last component
        final String pName=getLevelStreamPropName(l),
            pValue=System.getProperty(pName);
        final Boolean bVal;
        // if no special value set then display all warning and below in STDERR
        if ((null == pValue) || (pValue.length() <= 0))
            bVal = Boolean.valueOf(l.compareTo(LogLevelWrapper.WARNING) <= 0);
        else
            bVal = Boolean.valueOf(pValue);
        out = ((null == bVal) || bVal.booleanValue()) ? System.err : System.out;
        if (null == _streamsMap)
            _streamsMap = new EnumMap<LogLevelWrapper, PrintStream>(LogLevelWrapper.class);
        _streamsMap.put(l, out);
        return out;
    }
    /**
     * Default format of message(s) to be used unless overridden via the
     * {@link #WRAPPER_FORMAT_PROPNAME}
     */
    public static final String WRAPPER_DEFAULT_FORMAT=
        new StringBuilder(128).append(LogMsgComponentFormatter.MODIFIER_CHAR)
            .append(LogMsgComponentFormatter.TIMESTAMP)
            .append(LogMsgComponentFormatter.MODOPT_START_DELIM)
            .append("HH:mm:ss.SSS")
            .append(LogMsgComponentFormatter.MODOPT_END_DELIM)
            .append(' ')
            .append(LogMsgComponentFormatter.MODIFIER_CHAR)
            .append(LogMsgComponentFormatter.THREAD_NAME)
            .append('\t')
            .append(LogMsgComponentFormatter.MODIFIER_CHAR)
            .append(LogMsgComponentFormatter.SIMPLE_CLASS_NAME)
            .append('\t')
            .append(LogMsgComponentFormatter.MODIFIER_CHAR)
            .append(LogMsgComponentFormatter.MESSAGE)
            .toString();
    /**
     * Property used to specify how to format the message(s) - if missing
     * then {@link #WRAPPER_DEFAULT_FORMAT} is used
     */
    public static final String WRAPPER_FORMAT_PROPNAME=getWrapperPropName("format");
    // Formatter chain parsed once from the format property (lazy singleton).
    private static LogMsgComponentFormatter<?>[] _fmts /* =null */;
    private static final synchronized LogMsgComponentFormatter<?>[] getFormatters ()
    {
        if (null == _fmts)
            _fmts = LogMsgComponentFormatter.parseFormat(System.getProperty(WRAPPER_FORMAT_PROPNAME, WRAPPER_DEFAULT_FORMAT));
        return _fmts;
    }
    /*
     * Runs each formatter component over the log record and appends its
     * output to the level's stream; terminates with a newline only if at
     * least one component was written.
     */
    private static final void writeMessage (Thread th, long logTime, Class<?> logClass, LogLevelWrapper l, Object ctx, String msg, Throwable t)
    {
        final PrintStream out=getPrintStream(l);
        final LogMsgComponentFormatter<?>[] fmts=getFormatters();
        if ((out != null) && (fmts != null) && (fmts.length > 0))
        {
            int numWritten=0;
            for (final LogMsgComponentFormatter<?> f : fmts)
            {
                if (null == f) // should not happen
                    continue;
                final String v=f.format(th, logTime, logClass, l, ctx, msg, t);
                out.append(String.valueOf(v));
                numWritten++;
            }
            if (numWritten > 0)
                out.println();
        }
    }
    public ConsoleLoggerWrapper (Class<?> logClass, String logName, String clsIndex)
        throws IllegalArgumentException
    {
        super(logClass, logName, clsIndex);
    }
    /**
     * Property used to control min. level of output messages - default={@link LogLevelWrapper#INFO}
     * (i.e., anything below this level will not be output)
     */
    public static final String WRAPPER_LEVEL_PROPNAME=getWrapperPropName("level");
    // Minimum output level, lazily read from the level property.
    private static LogLevelWrapper _outLevel /* =null */;
    private static synchronized LogLevelWrapper getOutputLevel ()
    {
        if (null == _outLevel)
        {
            final String lvlName=System.getProperty(WRAPPER_LEVEL_PROPNAME, LogLevelWrapper.INFO.name());
            if (null == (_outLevel=LogLevelWrapper.fromString(lvlName)))
                _outLevel = LogLevelWrapper.INFO; // should not happen
        }
        return _outLevel;
    }
    // Replaces the output level and returns the previous one; null is ignored.
    static synchronized LogLevelWrapper setOutputLevel (LogLevelWrapper l)
    {
        final LogLevelWrapper prev=_outLevel;
        if (l != null)
            _outLevel = l;
        return prev;
    }
    /*
     * @see net.community.chest.util.logging.LoggerWrapper#isEnabledFor(net.community.chest.util.logging.LogLevelWrapper)
     */
    @Override
    public boolean isEnabledFor (final LogLevelWrapper l)
    {
        if (null == l)
            return false;
        final LogLevelWrapper ol=getOutputLevel();
        if (null == ol) // should not happen
            return true;
        // NOTE(review): relies on LogLevelWrapper enum ordering — a level
        // whose ordinal is <= the configured level's is considered enabled.
        final int nDiff=l.compareTo(ol);
        return (nDiff <= 0);
    }
    /*
     * @see net.community.chest.util.logging.LoggerWrapper#setEnabledFor(net.community.chest.util.logging.LogLevelWrapper)
     */
    @Override
    public boolean setEnabledFor (LogLevelWrapper l)
    {
        if (l != null)
        {
            setOutputLevel(l);
            return true;
        }
        return false;
    }
    /*
     * @see net.community.chest.util.logging.LoggerWrapper#log(net.community.chest.util.logging.LogLevelWrapper, java.lang.String, java.lang.Throwable)
     */
    @Override
    public String log (final LogLevelWrapper l, final String msg, final Throwable t)
    {
        if (isEnabledFor(l))
            writeMessage(Thread.currentThread(), System.currentTimeMillis(), getLoggingClass(), l, getThreadContext(), msg, t);
        return msg;
    }
}
|
<reponame>WERMAR/sfsDashboard<filename>web/builderboard/src/app/error-dialog/error-dialog.component.ts
import {Component, Input, OnInit} from '@angular/core';
import {MatDialogRef} from '@angular/material/dialog';
@Component({
  selector: 'app-error-dialog',
  templateUrl: './error-dialog.component.html',
})
export class ErrorDialogComponent implements OnInit {
  /** Short error headline shown in the dialog. */
  // BUGFIX: initialize @Input fields so the class compiles under
  // strictPropertyInitialization; type `string` is inferred.
  @Input() errorMessage = '';
  /** Longer human-readable description of the error. */
  @Input() errorDescription = '';

  constructor(private dialogRef: MatDialogRef<ErrorDialogComponent>) { }

  ngOnInit(): void {
  }

  /** Close the dialog, signalling acknowledgement (`true`) to the opener. */
  accept() {
    this.dialogRef.close(true);
  }
}
|
#!/bin/bash
set -euo pipefail
kill -0 "$(cat /var/www/kivitendo-erp/users/pid/config.kivitendo.conf.pid)"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.