text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Usage example: repo_update.sh ~/android/aicp
#
# Syncs the AICP source tree with `repo`, then for each tracked project:
# merges the upstream (Sony / LineageOS) branch into the local integration
# branch and pushes the result to the "github" remote.
CMDNAME=$(basename "$0")   # was referenced but never set in the original

# Validate command-line arguments.
if [ $# -lt 1 ]; then
    echo "指定された引数は$#個です。" 1>&2
    echo "仕様: $CMDNAME [ビルドディレクトリの絶対パス]" 1>&2
    exit 1
fi
builddir=$1
cd "$builddir" || exit 1

# repo sync
echo "* Syncing repo"
repo sync -j8 -c -f --force-sync --no-clone-bundle
echo -e "\n"

# update_repo <subdir> <remote-name> <remote-url> <remote-branch> <local-branch>
#   Checks out <local-branch> (unless already on it), registers <remote-name>
#   on first use, merges <remote-branch> from it, then pushes <local-branch>
#   to the "github" remote. Mirrors the copy-pasted stanzas of the original.
update_repo() {
    local subdir=$1 remote=$2 url=$3 rbranch=$4 lbranch=$5
    echo "******************"
    echo "$subdir"
    echo "******************"
    cd "$builddir/$subdir" || { echo "Missing directory: $builddir/$subdir" 1>&2; return 1; }
    # Switch to the local integration branch if not already on it.
    if ! git rev-parse --abbrev-ref @ | grep -q "$lbranch"; then
        git checkout "$lbranch"
    fi
    # Register the upstream remote on first run; ls-remote prints nothing
    # (or fails) when the remote is unknown.
    if ! git ls-remote --heads "$remote" "$rbranch" | grep -q "$rbranch"; then
        git remote add "$remote" "$url"
    fi
    git pull --no-edit "$remote" "$rbranch"
    git push github "$lbranch"
    echo -e "\n"
}

# Sony open devices repos
echo "* Updating Sony open devices repo"
echo -e "\n"
update_repo device/sony/common   sony https://github.com/sonyxperiadev/device-sony-common.git  n-mr1 n7.1
update_repo device/sony/yoshino  sony https://github.com/sonyxperiadev/device-sony-yoshino.git n-mr1 n7.1
update_repo device/sony/maple    sony https://github.com/sonyxperiadev/device-sony-maple.git   n-mr1 n7.1
update_repo hardware/sony/camera sony https://github.com/sonyxperiadev/camera.git aosp/LA.UM.5.7.r1 n7.1
# Kernel tracks the AOSP branch name locally as well.
update_repo kernel/sony/msm      sony https://github.com/sonyxperiadev/kernel.git aosp/LA.UM.5.7.r1 aosp/LA.UM.5.7.r1

# LineageOS repos
echo "* Updating LineageOS repo"
echo -e "\n"
update_repo external/libnfc-nci   los https://github.com/LineageOS/android_external_libnfc-nci.git   cm-14.1 n7.1
update_repo hardware/qcom/bt      los https://github.com/LineageOS/android_hardware_qcom_bt.git      cm-14.1 n7.1
update_repo hardware/qcom/display los https://github.com/LineageOS/android_hardware_qcom_display.git cm-14.1 n7.1
update_repo hardware/qcom/media   los https://github.com/LineageOS/android_hardware_qcom_media.git   cm-14.1 n7.1

echo "* Updated successfully!"
cd "$builddir"
|
/**
* @namespace altspace
*/
/**
* The altspace component makes A-Frame apps compatible with AltspaceVR.
*
* **Note**: If you use the `embedded` A-Frame component on your scene, you must include it *before* the `altspace` component, or your app will silently fail.
* @mixin altspace
* @memberof altspace
* @property {boolean} usePixelScale=`false` - Allows you to use A-Frame units as CSS pixels.
* This is the default behavior for three.js apps, but not for A-Frame apps.
* @property {string} verticalAlign=`middle` - Puts the origin at the `bottom`, `middle` (default),
* or `top` of the Altspace enclosure.
* @property {boolean} enclosuresOnly=`true` - Prevents the scene from being created if
* enclosure is flat.
* @property {boolean} fullspace=`false` - Puts the app into fullspace mode.
*
* @example
* <head>
* <title>My A-Frame Scene</title>
* <script src="https://aframe.io/releases/0.3.0/aframe.min.js"></script>
* <script src="https://cdn.rawgit.com/AltspaceVR/aframe-altspace-component/vAFRAME_ALTSPACE_VERSION/dist/aframe-altspace-component.min.js"></script>
* </head>
* <body>
* <a-scene altspace>
* <a-entity geometry="primitive: box" material="color: #C03546"></a-entity>
* </a-scene>
* </body>
*/
AFRAME.registerComponent('altspace', {
  version: 'AFRAME_ALTSPACE_VERSION',

  schema: {
    // NOTE: boolean defaults are now real booleans; the original used the
    // strings 'false'/'true', which only worked via A-Frame's string coercion.
    usePixelScale:  { type: 'boolean', default: false },
    verticalAlign:  { type: 'string',  default: 'middle' },
    enclosuresOnly: { type: 'boolean', default: true },
    fullspace:      { type: 'boolean', default: false }
  },

  /*
   * Called once when the component is attached. Only valid on <a-scene>;
   * outside AltspaceVR it logs a warning and does nothing.
   */
  init: function () {
    if (!(this.el.object3D instanceof THREE.Scene)) {
      console.warn('aframe-altspace-component can only be attached to a-scene');
      return;
    }
    if (window.altspace && window.altspace.inClient) {
      // AltspaceVR provides its own VR presentation; disable A-Frame's UI.
      this.el.setAttribute('vr-mode-ui', { enabled: false });
      this.initRenderer();
      this.initCursorEvents();
      this.initCollisionEvents();
    } else {
      console.warn('aframe-altspace-component only works inside of AltspaceVR');
    }
  },

  /*
   * Called on every render tick; drives any altspace behaviors attached
   * to the scene graph.
   */
  tick: function (t, dt) {
    if (this.el.object3D.updateAllBehaviors) {
      this.el.object3D.updateAllBehaviors();
    }
  },

  /*
   * Called when the component is removed. No teardown needed currently.
   */
  remove: function () { },

  /*
   * Called when the entity pauses. No dynamic behavior to stop currently.
   */
  pause: function () { },

  /*
   * Called when the entity resumes. No dynamic behavior to restart currently.
   */
  play: function () { },

  /********** Helper Methods **********/

  /*
   * Swap in the Altspace renderer when running in AltspaceVR.
   */
  initRenderer: function () {
    var scene = this.el.object3D;
    // Keep a reference to the stock renderer so it can be restored if the
    // enclosure turns out to be flat (see enclosuresOnly handling below).
    var oldRenderer = this.el.renderer;
    altspace.getEnclosure().then(function (e) {
      if (this.data.fullspace) {
        e.requestFullspace();
        // Re-apply scale whenever the enclosure toggles fullspace mode.
        e.addEventListener('fullspacechange', function () {
          scene.scale.setScalar(e.pixelsPerMeter);
        });
      }
      if (!this.data.usePixelScale || this.data.fullspace) {
        scene.scale.setScalar(e.pixelsPerMeter);
      }
      switch (this.data.verticalAlign) {
        case 'bottom':
          scene.position.y -= e.innerHeight / 2;
          break;
        case 'top':
          scene.position.y += e.innerHeight / 2;
          break;
        case 'middle':
          break;
        default:
          console.warn('Unexpected value for verticalAlign: ', this.data.verticalAlign);
      }
      if (this.data.enclosuresOnly && e.innerDepth === 1) {
        // Flat enclosure: render one empty frame, then restore the original
        // renderer so nothing more is drawn in-world.
        this.el.renderer.render(new THREE.Scene());
        this.el.renderer = this.el.effect = oldRenderer;
      }
    }.bind(this));
    var renderer = this.el.renderer = this.el.effect = altspace.getThreeJSRenderer({
      aframeComponentVersion: this.version
    });
    // Stub out WebGLRenderer APIs that A-Frame calls but the Altspace
    // renderer does not implement.
    var noop = function () {};
    renderer.setSize = noop;
    renderer.setPixelRatio = noop;
    renderer.setClearColor = noop;
    renderer.clear = noop;
    renderer.enableScissorTest = noop;
    renderer.setScissor = noop;
    renderer.setViewport = noop;
    renderer.getPixelRatio = noop;
    renderer.getMaxAnisotropy = noop;
    renderer.setFaceCulling = noop;
    renderer.context = { canvas: {} };
    renderer.shadowMap = {};
  },

  /*
   * Emulate A-Frame cursor events when running in AltspaceVR.
   */
  initCursorEvents: function () {
    var scene = this.el.object3D;
    var cursorEl = document.querySelector('a-cursor') || document.querySelector('a-entity[cursor]');
    if (cursorEl) {
      // Hide the A-Frame cursor mesh; Altspace supplies its own cursor.
      cursorEl.setAttribute('material', 'transparent', true);
      cursorEl.setAttribute('material', 'opacity', 0.0);
    }
    var emit = function (eventName, event) {
      // Fire events on the intersected object and the A-Frame cursor.
      var targetEl = event.target.el;
      if (cursorEl) cursorEl.emit(eventName, { target: targetEl, ray: event.ray, point: event.point });
      if (targetEl) targetEl.emit(eventName, { target: targetEl, ray: event.ray, point: event.point });
    };
    var cursordownObj = null;
    scene.addEventListener('cursordown', function (event) {
      cursordownObj = event.target;
      emit('mousedown', event);
    });
    scene.addEventListener('cursorup', function (event) {
      emit('mouseup', event);
      // FIX: guard against cursorup arriving without a preceding cursordown
      // (original dereferenced cursordownObj.uuid unconditionally).
      if (cursordownObj && event.target.uuid === cursordownObj.uuid) {
        emit('click', event);
      }
      cursordownObj = null;
    });
    scene.addEventListener('cursorenter', function (event) {
      if (!event.target.el) { return; }
      event.target.el.addState('hovered');
      if (cursorEl) cursorEl.addState('hovering');
      emit('mouseenter', event);
    });
    scene.addEventListener('cursorleave', function (event) {
      if (!event.target.el) { return; }
      event.target.el.removeState('hovered');
      if (cursorEl) cursorEl.removeState('hovering');
      emit('mouseleave', event);
    });
  },

  /*
   * Re-emit Altspace collision/trigger events on the corresponding
   * A-Frame entities.
   */
  initCollisionEvents: function () {
    var scene = this.el.object3D;
    var emit = function (eventName, event) {
      var targetEl = event.target.el;
      if (!targetEl) return;
      // Remap target and other from object3Ds to their A-Frame elements.
      event.target = targetEl;
      if (event.other && event.other.el) {
        event.other = event.other.el;
      }
      targetEl.emit(eventName, event);
    };
    scene.addEventListener('collisionenter', function (event) {
      emit('collisionenter', event);
    });
    scene.addEventListener('collisionexit', function (event) {
      emit('collisionexit', event);
    });
    scene.addEventListener('triggerenter', function (event) {
      emit('triggerenter', event);
    });
    scene.addEventListener('triggerexit', function (event) {
      emit('triggerexit', event);
    });
  }
});
|
-- Lookup table of state codes (one row per code).
CREATE TABLE [dbo].[StateCode] (
[ID] INT IDENTITY (1, 1) NOT NULL, -- auto-incrementing surrogate key
[Value] VARCHAR (20) NULL, -- the state code text; NOTE(review): nullable — confirm that is intended
CONSTRAINT [PK_StateCode] PRIMARY KEY CLUSTERED ([ID] ASC)
);
|
#!/bin/bash
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Imports the pinned set of Envoy .proto files into this repository.
# Update VERSION then in this directory run ./import.sh
set -e
BRANCH=master
# import VERSION from one of the google internal CLs
VERSION=440899714143b6a143917cbd8e3f0ccba0847cd4
GIT_REPO="https://github.com/envoyproxy/envoy.git"
GIT_BASE_DIR=envoy
SOURCE_PROTO_BASE_DIR=envoy/api
TARGET_PROTO_BASE_DIR=src/main/proto
FILES=(
envoy/annotations/deprecation.proto
envoy/annotations/resource.proto
envoy/api/v2/auth/cert.proto
envoy/api/v2/auth/common.proto
envoy/api/v2/auth/secret.proto
envoy/api/v2/auth/tls.proto
envoy/api/v2/cds.proto
envoy/api/v2/cluster.proto
envoy/api/v2/cluster/circuit_breaker.proto
envoy/api/v2/cluster/filter.proto
envoy/api/v2/cluster/outlier_detection.proto
envoy/api/v2/core/address.proto
envoy/api/v2/core/backoff.proto
envoy/api/v2/core/base.proto
envoy/api/v2/core/config_source.proto
envoy/api/v2/core/event_service_config.proto
envoy/api/v2/core/grpc_service.proto
envoy/api/v2/core/health_check.proto
envoy/api/v2/core/http_uri.proto
envoy/api/v2/core/protocol.proto
envoy/api/v2/core/socket_option.proto
envoy/api/v2/discovery.proto
envoy/api/v2/eds.proto
envoy/api/v2/endpoint.proto
envoy/api/v2/endpoint/endpoint.proto
envoy/api/v2/endpoint/endpoint_components.proto
envoy/api/v2/endpoint/load_report.proto
envoy/api/v2/lds.proto
envoy/api/v2/listener.proto
envoy/api/v2/listener/listener.proto
envoy/api/v2/listener/listener_components.proto
envoy/api/v2/listener/udp_listener_config.proto
envoy/api/v2/rds.proto
envoy/api/v2/route.proto
envoy/api/v2/route/route.proto
envoy/api/v2/route/route_components.proto
envoy/api/v2/scoped_route.proto
envoy/api/v2/srds.proto
envoy/config/accesslog/v3/accesslog.proto
envoy/config/cluster/v3/circuit_breaker.proto
envoy/config/cluster/v3/cluster.proto
envoy/config/cluster/v3/filter.proto
envoy/config/cluster/v3/outlier_detection.proto
envoy/config/core/v3/address.proto
envoy/config/core/v3/backoff.proto
envoy/config/core/v3/base.proto
envoy/config/core/v3/config_source.proto
envoy/config/core/v3/event_service_config.proto
envoy/config/core/v3/extension.proto
envoy/config/core/v3/grpc_service.proto
envoy/config/core/v3/health_check.proto
envoy/config/core/v3/http_uri.proto
envoy/config/core/v3/protocol.proto
envoy/config/core/v3/proxy_protocol.proto
envoy/config/core/v3/socket_option.proto
envoy/config/endpoint/v3/endpoint.proto
envoy/config/endpoint/v3/endpoint_components.proto
envoy/config/filter/accesslog/v2/accesslog.proto
envoy/config/filter/fault/v2/fault.proto
envoy/config/filter/http/fault/v2/fault.proto
envoy/config/filter/network/http_connection_manager/v2/http_connection_manager.proto
envoy/config/listener/v2/api_listener.proto
envoy/config/listener/v3/api_listener.proto
envoy/config/listener/v3/listener.proto
envoy/config/listener/v3/listener_components.proto
envoy/config/listener/v3/udp_listener_config.proto
envoy/config/route/v3/route_components.proto
envoy/config/trace/v2/http_tracer.proto
envoy/config/trace/v2/trace.proto
envoy/config/trace/v2/datadog.proto
envoy/config/trace/v2/dynamic_ot.proto
envoy/config/trace/v2/lightstep.proto
envoy/config/trace/v2/opencensus.proto
envoy/config/trace/v2/service.proto
envoy/config/trace/v2/zipkin.proto
envoy/config/rbac/v2/rbac.proto
envoy/extensions/transport_sockets/tls/v3/cert.proto
envoy/extensions/transport_sockets/tls/v3/common.proto
envoy/extensions/transport_sockets/tls/v3/secret.proto
envoy/extensions/transport_sockets/tls/v3/tls.proto
envoy/service/discovery/v2/ads.proto
envoy/service/discovery/v2/sds.proto
envoy/service/load_stats/v2/lrs.proto
envoy/type/http.proto
envoy/type/matcher/regex.proto
envoy/type/matcher/string.proto
envoy/type/matcher/metadata.proto
envoy/type/matcher/path.proto
envoy/type/matcher/value.proto
envoy/type/matcher/number.proto
envoy/type/matcher/v3/regex.proto
envoy/type/matcher/v3/string.proto
envoy/type/metadata/v2/metadata.proto
envoy/type/metadata/v3/metadata.proto
envoy/type/percent.proto
envoy/type/range.proto
envoy/type/semantic_version.proto
envoy/type/tracing/v2/custom_tag.proto
envoy/type/tracing/v3/custom_tag.proto
envoy/type/v3/http.proto
envoy/type/v3/percent.proto
envoy/type/v3/range.proto
envoy/type/v3/semantic_version.proto
)
pushd "$(git rev-parse --show-toplevel)/xds/third_party/envoy"
# clone the envoy github repo in a tmp directory
tmpdir="$(mktemp -d)"
# FIX: a single EXIT trap covers everything — the clone lives inside
# ${tmpdir}. The original armed a second 'trap ... EXIT', which REPLACED
# this one, leaking the temp directory and running a relative
# "rm -rf envoy" from whatever the CWD was at exit.
trap 'rm -rf "${tmpdir}"' EXIT
pushd "${tmpdir}"
git clone -b "$BRANCH" "$GIT_REPO"
cd "$GIT_BASE_DIR"
git checkout "$VERSION"
popd
cp -p "${tmpdir}/${GIT_BASE_DIR}/LICENSE" LICENSE
cp -p "${tmpdir}/${GIT_BASE_DIR}/NOTICE" NOTICE
rm -rf "${TARGET_PROTO_BASE_DIR}"
mkdir -p "${TARGET_PROTO_BASE_DIR}"
pushd "${TARGET_PROTO_BASE_DIR}"
# copy proto files to project directory
for file in "${FILES[@]}"
do
  mkdir -p "$(dirname "${file}")"
  cp -p "${tmpdir}/${SOURCE_PROTO_BASE_DIR}/${file}" "${file}"
done
popd
popd
|
import Router from '@koa/router';
// import service from './service';

// Router for endpoints mounted under /realtime-data.
const realtimeDataRouter = new Router({ prefix: '/realtime-data' });

// POST /realtime-data — placeholder handler that responds with an empty object.
realtimeDataRouter.post('/', async (ctx) => {
  ctx.body = {};
});

export default realtimeDataRouter;
|
#!/bin/sh
# Cross-builds OpenCV, OpenVINO (+ ARM CPU plugin), the OpenVINO Python
# bindings, and optionally the Open Model Zoo demos, then packs everything
# from $STAGING_DIR into OV_ARM_package.tar.gz.
#
# Tunables (environment): BUILD_JOBS, BUILD_TYPE, UPDATE_SOURCES
# (clean|reload|anything-else), WITH_OMZ_DEMO (ON|OFF).
# NOTE(review): TOOLCHAIN_DEFS and ARCH_NAME are referenced below but never
# set here — presumably provided by the surrounding cross-compile
# environment/container; confirm before running standalone.
set -x
BUILD_JOBS=${BUILD_JOBS:-$(nproc)}
BUILD_TYPE=${BUILD_TYPE:-Release}
UPDATE_SOURCES=${UPDATE_SOURCES:-clean}
WITH_OMZ_DEMO=${WITH_OMZ_DEMO:-ON}
DEV_HOME=`pwd`
OPENCV_HOME=$DEV_HOME/opencv
OPENVINO_HOME=$DEV_HOME/openvino
OPENVINO_CONTRIB=$DEV_HOME/openvino_contrib
ARM_PLUGIN_HOME=$OPENVINO_CONTRIB/modules/arm_plugin
OMZ_HOME=$DEV_HOME/open_model_zoo
STAGING_DIR=$DEV_HOME/armcpu_package
# fail <exit-code> <message...> — print message to stdout and exit with
# the given code. Exits 31 if called with fewer than two arguments.
fail()
{
if [ $# -lt 2 ]; then
echo "Script internal error"
exit 31
fi
retval=$1
shift
echo $@
exit $retval
}
# cloneSrcTree <destdir> <url> <branch>... — shallow-clone (with shallow
# submodules) trying each branch in order; returns 0 on first success.
cloneSrcTree()
{
DESTDIR=$1
shift
SRCURL=$1
shift
while [ $# -gt 0 ]; do
git clone --recurse-submodules --shallow-submodules --depth 1 --branch=$1 $SRCURL $DESTDIR && return 0
shift
done
return 1
}
# checkSrcTree <dir> <url> <branch>... — ensure sources exist at <dir>:
# clone if missing; re-clone if UPDATE_SOURCES=reload; wipe <dir>/build if
# UPDATE_SOURCES=clean.
checkSrcTree()
{
[ $# -lt 3 ] && fail
if ! [ -d $1 ]; then
echo "Unable to detect $1"
echo "Cloning $2..."
cloneSrcTree $@ || fail 3 "Failed to clone $2. Stopping"
else
echo "Detected $1"
echo "Considering it as source directory"
if [ "$UPDATE_SOURCES" = "reload" ]; then
echo "Source reloading requested"
echo "Removing existing sources..."
rm -rf $1 || fail 1 "Failed to remove. Stopping"
echo "Cloning $2..."
cloneSrcTree $@ || fail 3 "Failed to clone $2. Stopping"
elif [ -d $1/build ]; then
echo "Build directory detected at $1"
if [ "$UPDATE_SOURCES" = "clean" ]; then
echo "Cleanup of previous build requested"
echo "Removing previous build results..."
rm -rf $1/build || fail 2 "Failed to cleanup. Stopping"
fi
fi
fi
return 0
}
#Prepare sources
checkSrcTree $OPENCV_HOME https://github.com/opencv/opencv.git 4.x
checkSrcTree $OPENVINO_HOME https://github.com/openvinotoolkit/openvino.git master
checkSrcTree $OPENVINO_CONTRIB https://github.com/openvinotoolkit/openvino_contrib.git master
if [ "$WITH_OMZ_DEMO" = "ON" ]; then
checkSrcTree $OMZ_HOME https://github.com/openvinotoolkit/open_model_zoo.git master
fi
#cleanup package destination folder
[ -e $STAGING_DIR ] && rm -rf $STAGING_DIR
mkdir -p $STAGING_DIR
#Build OpenCV
# Single && chain: any failing step short-circuits to the trailing
# `|| fail`, so ordering here is load-bearing.
mkdir -p $OPENCV_HOME/build && \
cd $OPENCV_HOME/build && \
PYTHONVER=`ls /usr/include | grep "python3[^m]*$"` && \
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DBUILD_LIST=imgcodecs,videoio,highgui,gapi,python3 \
-DBUILD_opencv_python2=OFF -DBUILD_opencv_python3=ON -DOPENCV_SKIP_PYTHON_LOADER=OFF \
-DPYTHON3_LIMITED_API=ON \
-DPYTHON3_INCLUDE_PATH=/opt/python3.7_arm/include/python3.7m \
-DPYTHON3_LIBRARIES=/opt/python3.7_arm/lib \
-DPYTHON3_NUMPY_INCLUDE_DIRS=/usr/local/lib/python3.7/site-packages/numpy/core/include \
-D CMAKE_USE_RELATIVE_PATHS=ON \
-D CMAKE_SKIP_INSTALL_RPATH=ON \
-D OPENCV_SKIP_PKGCONFIG_GENERATION=ON \
-D OPENCV_BIN_INSTALL_PATH=bin \
-D OPENCV_PYTHON3_INSTALL_PATH=python \
-D OPENCV_INCLUDE_INSTALL_PATH=include \
-D OPENCV_LIB_INSTALL_PATH=lib \
-D OPENCV_CONFIG_INSTALL_PATH=cmake \
-D OPENCV_3P_LIB_INSTALL_PATH=3rdparty \
-D OPENCV_SAMPLES_SRC_INSTALL_PATH=samples \
-D OPENCV_DOC_INSTALL_PATH=doc \
-D OPENCV_OTHER_INSTALL_PATH=etc \
-D OPENCV_LICENSES_INSTALL_PATH=etc/licenses \
-DCMAKE_TOOLCHAIN_FILE="$OPENVINO_HOME/cmake/$TOOLCHAIN_DEFS" \
-DWITH_GTK_2_X=OFF \
-DOPENCV_ENABLE_PKG_CONFIG=ON \
-DPKG_CONFIG_EXECUTABLE=/usr/bin/${ARCH_NAME}-pkg-config \
$OPENCV_HOME && \
make -j$BUILD_JOBS && \
cmake -DCMAKE_INSTALL_PREFIX=$STAGING_DIR/extras/opencv -P cmake_install.cmake && \
echo export OpenCV_DIR=\$INSTALLDIR/extras/opencv/cmake > $STAGING_DIR/extras/opencv/setupvars.sh && \
echo export LD_LIBRARY_PATH=\$INSTALLDIR/extras/opencv/lib:\$LD_LIBRARY_PATH >> $STAGING_DIR/extras/opencv/setupvars.sh && \
mkdir -p $STAGING_DIR/python/python3 && cp -r $STAGING_DIR/extras/opencv/python/cv2 $STAGING_DIR/python/python3 && \
cd $DEV_HOME || fail 11 "OpenCV build failed. Stopping"
#Build OpenVINO
mkdir -p $OPENVINO_HOME/build && \
cd $OPENVINO_HOME/build && \
cmake -DOpenCV_DIR=$STAGING_DIR/extras/opencv/cmake -DENABLE_OPENCV=OFF \
-DPYTHON_INCLUDE_DIRS="/opt/python3.7_arm/include/python3.7m" \
-DPYTHON_LIBRARY="/opt/python3.7_arm/lib/libpython3.7m.so" \
-DENABLE_PYTHON=ON \
-DNGRAPH_PYTHON_BUILD_ENABLE=ON \
-DNGRAPH_ONNX_IMPORT_ENABLE=ON \
-DPYTHON_MODULE_EXTENSION=".so" \
-DENABLE_TESTS=ON -DENABLE_FUNCTIONAL_TESTS=ON -DENABLE_GAPI_TESTS=OFF \
-DENABLE_DATA=OFF \
-DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath-link,$STAGING_DIR/opencv/lib \
-DENABLE_MYRIAD=ON -DCMAKE_BUILD_TYPE=$BUILD_TYPE \
-DTHREADING=SEQ -DENABLE_LTO=ON \
-DCMAKE_TOOLCHAIN_FILE="$OPENVINO_HOME/cmake/$TOOLCHAIN_DEFS" \
-DARM_COMPUTE_SCONS_JOBS=$BUILD_JOBS \
-DIE_EXTRA_MODULES=$ARM_PLUGIN_HOME \
$OPENVINO_HOME && \
make -j$BUILD_JOBS && \
cmake -DCMAKE_INSTALL_PREFIX=$STAGING_DIR -P cmake_install.cmake && \
ARCHDIR=`ls $OPENVINO_HOME/bin` && \
cd $DEV_HOME || fail 12 "OpenVINO build failed. Stopping"
#OpenVINO python
# NOTE(review): [ A -a B ] is an obsolescent test form; consider
# [ A ] && [ B ] in a future behavior-changing pass.
[ "$UPDATE_SOURCES" = "clean" -a -e $OPENVINO_HOME/pbuild ] && rm -rf $OPENVINO_HOME/pbuild
mkdir -p $OPENVINO_HOME/pbuild && \
cd $OPENVINO_HOME/pbuild && \
cmake -DInferenceEngineDeveloperPackage_DIR=$OPENVINO_HOME/build \
-DENABLE_PYTHON=ON -DPYTHON_EXECUTABLE="/opt/python3.7_arm/bin/python3.7m" \
-DPYTHON_INCLUDE_DIRS=/opt/python3.7_arm/include/python3.7m \
-DPYTHON_LIBRARIES=/opt/python3.7_arm/lib \
-DPYTHON_MODULE_EXTENSION=".so" \
-DPYBIND11_FINDPYTHON=OFF \
-DPYBIND11_NOPYTHON=OFF \
-DPYTHONLIBS_FOUND=TRUE \
-DCMAKE_BUILD_TYPE=$BUILD_TYPE -DENABLE_DATA=OFF \
-DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath-link,$STAGING_DIR/opencv/lib \
-DCMAKE_TOOLCHAIN_FILE="$OPENVINO_HOME/cmake/$TOOLCHAIN_DEFS" \
$OPENVINO_HOME/src/bindings/python && \
make -j$BUILD_JOBS && \
cmake -DCMAKE_INSTALL_PREFIX=$STAGING_DIR -P cmake_install.cmake && \
cd $DEV_HOME || fail 13 "OpenVINO python bindings build failed. Stopping"
#Open Model Zoo
if [ "$WITH_OMZ_DEMO" = "ON" ]; then
OMZ_DEMOS_BUILD=$OMZ_HOME/build && \
mkdir -p $OMZ_DEMOS_BUILD && \
cd $OMZ_DEMOS_BUILD && \
cmake -DCMAKE_BUILD_TYPE=Release \
-DENABLE_PYTHON=ON \
-DPYTHON_EXECUTABLE=/usr/local/bin/python3.7m \
-DPYTHON_INCLUDE_DIR="/opt/python3.7_arm/include/python3.7m" \
-DPYTHON_LIBRARY="/opt/python3.7_arm/lib" \
-DCMAKE_TOOLCHAIN_FILE="$OPENVINO_HOME/cmake/$TOOLCHAIN_DEFS" \
-DOpenVINO_DIR=$OPENVINO_HOME/build \
-DOpenCV_DIR=$OPENCV_HOME/build \
$OMZ_HOME/demos && \
cmake --build $OMZ_DEMOS_BUILD -- -j$BUILD_JOBS && \
cd $DEV_HOME || fail 16 "Open Model Zoo build failed. Stopping"
# Stage OMZ demo binaries and support files into the package layout.
python3 $OMZ_HOME/ci/prepare-openvino-content.py l $OMZ_DEMOS_BUILD && \
cp -vr $OMZ_DEMOS_BUILD/dev/. $STAGING_DIR && \
find $OMZ_DEMOS_BUILD -type d -name "Release" -exec cp -vr {} $STAGING_DIR/extras/open_model_zoo/demos \; || \
fail 21 "Open Model Zoo package preparation failed. Stopping"
fi
#Package creation
cd $STAGING_DIR && \
tar -czvf ../OV_ARM_package.tar.gz ./* || \
fail 23 "Package creation failed. Nothing more to do"
exit 0
|
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# A record represents an entry in a dataset that needs labeling.
class DataLabelingServiceDataplane::Models::Record
# Allowed lifecycle states; values the service returns that are not in this
# list are coerced to UNKNOWN_ENUM_VALUE by the lifecycle_state= writer.
LIFECYCLE_STATE_ENUM = [
LIFECYCLE_STATE_ACTIVE = 'ACTIVE'.freeze,
LIFECYCLE_STATE_INACTIVE = 'INACTIVE'.freeze,
LIFECYCLE_STATE_DELETED = 'DELETED'.freeze,
LIFECYCLE_STATE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
# **[Required]** The OCID of the record.
# @return [String]
attr_accessor :id
# **[Required]** The name is created by the user. It is unique and immutable.
# @return [String]
attr_accessor :name
# **[Required]** The date and time the resource was created, in the timestamp format defined by RFC3339.
# @return [DateTime]
attr_accessor :time_created
# **[Required]** The date and time the resource was updated, in the timestamp format defined by RFC3339.
# @return [DateTime]
attr_accessor :time_updated
# **[Required]** The OCID of the dataset to associate the record with.
# @return [String]
attr_accessor :dataset_id
# **[Required]** The OCID of the compartment for the task.
# @return [String]
attr_accessor :compartment_id
# This attribute is required.
# @return [OCI::DataLabelingServiceDataplane::Models::SourceDetails]
attr_accessor :source_details
# **[Required]** Whether or not the record has been labeled and has associated annotations.
# @return [BOOLEAN]
attr_accessor :is_labeled
# **[Required]** The lifecycle state of the record.
# ACTIVE - The record is active and ready for labeling.
# INACTIVE - The record has been marked as inactive and should not be used for labeling.
# DELETED - The record has been deleted and is no longer available for labeling.
#
# Reader only; the custom writer below validates against LIFECYCLE_STATE_ENUM.
# @return [String]
attr_reader :lifecycle_state
# @return [OCI::DataLabelingServiceDataplane::Models::RecordMetadata]
attr_accessor :record_metadata
# A simple key-value pair that is applied without any predefined name, type, or scope. It exists for cross-compatibility only.
# For example: `{\"bar-key\": \"value\"}`
#
# @return [Hash<String, String>]
attr_accessor :freeform_tags
# The defined tags for this resource. Each key is predefined and scoped to a namespace.
# For example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`
#
# @return [Hash<String, Hash<String, Object>>]
attr_accessor :defined_tags
# Attribute mapping from ruby-style variable name to JSON key.
# Maps each ruby-style attribute name to the camelCase JSON key used on
# the wire.
def self.attribute_map
  {
    id: :id,
    name: :name,
    time_created: :timeCreated,
    time_updated: :timeUpdated,
    dataset_id: :datasetId,
    compartment_id: :compartmentId,
    source_details: :sourceDetails,
    is_labeled: :isLabeled,
    lifecycle_state: :lifecycleState,
    record_metadata: :recordMetadata,
    freeform_tags: :freeformTags,
    defined_tags: :definedTags
  }
end
# Attribute type mapping.
# Maps each attribute name to the type name used for (de)serialization.
def self.swagger_types
  {
    id: :String,
    name: :String,
    time_created: :DateTime,
    time_updated: :DateTime,
    dataset_id: :String,
    compartment_id: :String,
    source_details: :'OCI::DataLabelingServiceDataplane::Models::SourceDetails',
    is_labeled: :BOOLEAN,
    lifecycle_state: :String,
    record_metadata: :'OCI::DataLabelingServiceDataplane::Models::RecordMetadata',
    freeform_tags: :'Hash<String, String>',
    defined_tags: :'Hash<String, Hash<String, Object>>'
  }
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :id The value to assign to the {#id} property
# @option attributes [String] :name The value to assign to the {#name} property
# @option attributes [DateTime] :time_created The value to assign to the {#time_created} property
# @option attributes [DateTime] :time_updated The value to assign to the {#time_updated} property
# @option attributes [String] :dataset_id The value to assign to the {#dataset_id} property
# @option attributes [String] :compartment_id The value to assign to the {#compartment_id} property
# @option attributes [OCI::DataLabelingServiceDataplane::Models::SourceDetails] :source_details The value to assign to the {#source_details} property
# @option attributes [BOOLEAN] :is_labeled The value to assign to the {#is_labeled} property
# @option attributes [String] :lifecycle_state The value to assign to the {#lifecycle_state} property
# @option attributes [OCI::DataLabelingServiceDataplane::Models::RecordMetadata] :record_metadata The value to assign to the {#record_metadata} property
# @option attributes [Hash<String, String>] :freeform_tags The value to assign to the {#freeform_tags} property
# @option attributes [Hash<String, Hash<String, Object>>] :defined_tags The value to assign to the {#defined_tags} property
def initialize(attributes = {})
# Non-Hash input is silently ignored (generated-model convention).
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
# For each attribute: accept the camelCase wire key first, raise if BOTH
# spellings were supplied, then let the snake_case key win if present.
# NOTE(review): the raise happens after the camelCase assignment, so the
# object is partially mutated before the error — existing behavior, kept.
self.id = attributes[:'id'] if attributes[:'id']
self.name = attributes[:'name'] if attributes[:'name']
self.time_created = attributes[:'timeCreated'] if attributes[:'timeCreated']
raise 'You cannot provide both :timeCreated and :time_created' if attributes.key?(:'timeCreated') && attributes.key?(:'time_created')
self.time_created = attributes[:'time_created'] if attributes[:'time_created']
self.time_updated = attributes[:'timeUpdated'] if attributes[:'timeUpdated']
raise 'You cannot provide both :timeUpdated and :time_updated' if attributes.key?(:'timeUpdated') && attributes.key?(:'time_updated')
self.time_updated = attributes[:'time_updated'] if attributes[:'time_updated']
self.dataset_id = attributes[:'datasetId'] if attributes[:'datasetId']
raise 'You cannot provide both :datasetId and :dataset_id' if attributes.key?(:'datasetId') && attributes.key?(:'dataset_id')
self.dataset_id = attributes[:'dataset_id'] if attributes[:'dataset_id']
self.compartment_id = attributes[:'compartmentId'] if attributes[:'compartmentId']
raise 'You cannot provide both :compartmentId and :compartment_id' if attributes.key?(:'compartmentId') && attributes.key?(:'compartment_id')
self.compartment_id = attributes[:'compartment_id'] if attributes[:'compartment_id']
self.source_details = attributes[:'sourceDetails'] if attributes[:'sourceDetails']
raise 'You cannot provide both :sourceDetails and :source_details' if attributes.key?(:'sourceDetails') && attributes.key?(:'source_details')
self.source_details = attributes[:'source_details'] if attributes[:'source_details']
# Booleans use `unless ...nil?` rather than truthiness so `false` is kept.
self.is_labeled = attributes[:'isLabeled'] unless attributes[:'isLabeled'].nil?
raise 'You cannot provide both :isLabeled and :is_labeled' if attributes.key?(:'isLabeled') && attributes.key?(:'is_labeled')
self.is_labeled = attributes[:'is_labeled'] unless attributes[:'is_labeled'].nil?
self.lifecycle_state = attributes[:'lifecycleState'] if attributes[:'lifecycleState']
raise 'You cannot provide both :lifecycleState and :lifecycle_state' if attributes.key?(:'lifecycleState') && attributes.key?(:'lifecycle_state')
self.lifecycle_state = attributes[:'lifecycle_state'] if attributes[:'lifecycle_state']
self.record_metadata = attributes[:'recordMetadata'] if attributes[:'recordMetadata']
raise 'You cannot provide both :recordMetadata and :record_metadata' if attributes.key?(:'recordMetadata') && attributes.key?(:'record_metadata')
self.record_metadata = attributes[:'record_metadata'] if attributes[:'record_metadata']
self.freeform_tags = attributes[:'freeformTags'] if attributes[:'freeformTags']
raise 'You cannot provide both :freeformTags and :freeform_tags' if attributes.key?(:'freeformTags') && attributes.key?(:'freeform_tags')
self.freeform_tags = attributes[:'freeform_tags'] if attributes[:'freeform_tags']
self.defined_tags = attributes[:'definedTags'] if attributes[:'definedTags']
raise 'You cannot provide both :definedTags and :defined_tags' if attributes.key?(:'definedTags') && attributes.key?(:'defined_tags')
self.defined_tags = attributes[:'defined_tags'] if attributes[:'defined_tags']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Custom attribute writer method checking allowed values (enum).
# @param [Object] lifecycle_state Object to be assigned
def lifecycle_state=(lifecycle_state)
  # rubocop:disable Style/ConditionalAssignment
  # Values the SDK does not know about are mapped to a sentinel instead of
  # raising, so responses from newer service versions still deserialize.
  unrecognized = lifecycle_state && !LIFECYCLE_STATE_ENUM.include?(lifecycle_state)
  if unrecognized
    OCI.logger.debug("Unknown value for 'lifecycle_state' [" + lifecycle_state + "]. Mapping to 'LIFECYCLE_STATE_UNKNOWN_ENUM_VALUE'") if OCI.logger
    @lifecycle_state = LIFECYCLE_STATE_UNKNOWN_ENUM_VALUE
  else
    @lifecycle_state = lifecycle_state
  end
  # rubocop:enable Style/ConditionalAssignment
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
  # Fast path: same object identity.
  return true if equal?(other)

  # Value equality: same class and every model attribute equal.
  self.class == other.class &&
    id == other.id &&
    name == other.name &&
    time_created == other.time_created &&
    time_updated == other.time_updated &&
    dataset_id == other.dataset_id &&
    compartment_id == other.compartment_id &&
    source_details == other.source_details &&
    is_labeled == other.is_labeled &&
    lifecycle_state == other.lifecycle_state &&
    record_metadata == other.record_metadata &&
    freeform_tags == other.freeform_tags &&
    defined_tags == other.defined_tags
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
  # Delegate to == so hash-key equality matches value equality.
  self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
  # Derived from the same attribute list as #== so equal objects share a hash.
  [id, name, time_created, time_updated, dataset_id, compartment_id, source_details, is_labeled, lifecycle_state, record_metadata, freeform_tags, defined_tags].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
# Populates this model's attributes from a wire-format hash, converting each
# value to the type declared in swagger_types. Keys are looked up through
# attribute_map (wire name -> attribute name). Missing keys are skipped.
def build_from_hash(attributes)
  return nil unless attributes.is_a?(Hash)

  self.class.swagger_types.each_pair do |key, type|
    if type =~ /^Array<(.*)>/i
      # check to ensure the input is an array given that the attribute
      # is documented as an array but the input is not
      if attributes[self.class.attribute_map[key]].is_a?(Array)
        # Convert each element to the array's declared element type.
        public_method("#{key}=").call(
          attributes[self.class.attribute_map[key]]
            .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
        )
      end
    elsif !attributes[self.class.attribute_map[key]].nil?
      # Scalar/object attribute: convert to its declared swagger type.
      public_method("#{key}=").call(
        OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
      )
    end
    # or else data not found in attributes(hash), not an issue as the data can be optional
  end

  self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
  # Human-readable form: the string of the hash representation.
  to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
  # Build the wire-format hash: attribute_map maps attribute name -> wire
  # key; attributes whose value is nil and whose ivar was never set are
  # omitted entirely.
  self.class.attribute_map.each_with_object({}) do |(attr, param), result|
    value = public_method(attr).call
    next if value.nil? && !instance_variable_defined?("@#{attr}")

    result[param] = _to_hash(value)
  end
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
# Recursively converts a value to its hash form: arrays are mapped
# element-wise (nils dropped), hashes are rebuilt with converted values,
# model objects are asked for #to_hash, and plain values pass through.
def _to_hash(value)
  case value
  when Array
    value.compact.map { |element| _to_hash(element) }
  when Hash
    value.each_with_object({}) { |(key, val), acc| acc[key] = _to_hash(val) }
  else
    value.respond_to?(:to_hash) ? value.to_hash : value
  end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
|
<reponame>wxsdl123/BatKeeping
# CRUD and bulk-management actions for animal-care Protocols, plus AJAX
# helpers that maintain the many-to-many links between protocols and
# bats / users / data items / surgery types.
#
# NOTE(review): session[:person] appears to hold the current user's id
# (set elsewhere — not visible in this file); confirm against the auth code.
class ProtocolsController < ApplicationController
  # GET /protocols
  # GET /protocols.xml
  # Lists either the explicitly requested protocols (params[:ids]) or the
  # currently active ones.
  def index
    if params[:ids]
      @protocols = Protocol.find(params[:ids],:order => 'number')
    else
      @protocols = Protocol.current
    end

    respond_to do |format|
      format.html # index.html.erb
      format.xml { render :xml => @protocols }
    end
  end

  # GET /protocols/1
  # GET /protocols/1.xml
  # Shows a protocol; @past_bats is every bat ever on it minus the current ones.
  def show
    @protocol = Protocol.find(params[:id])
    @past_bats = @protocol.all_past_bats - @protocol.bats

    respond_to do |format|
      format.html # show.html.erb
      format.xml { render :xml => @protocol }
    end
  end

  # GET /protocols/new
  # GET /protocols/new.xml
  def new
    @protocol = Protocol.new
    # Pre-build the nested allowed-bats records for the form.
    @protocol.build_allowed_bats

    respond_to do |format|
      format.html # new.html.erb
      format.xml { render :xml => @protocol }
    end
  end

  # GET /protocols/1/edit
  def edit
    @protocol = Protocol.find(params[:id])
  end

  # POST /protocols
  # POST /protocols.xml
  # end_date is derived from the submitted start_date plus three years.
  def create
    @protocol = Protocol.new(params[:protocol])
    @protocol.end_date = Date.civil(params[:protocol]["start_date(1i)"].to_i, params[:protocol]["start_date(2i)"].to_i, params[:protocol]["start_date(3i)"].to_i) + 3.years

    respond_to do |format|
      if @protocol.save
        set_many_to_many_relationships(@protocol)
        format.html { redirect_to(@protocol, :notice => 'Protocol was successfully created.') }
        format.xml { render :xml => @protocol, :status => :created, :location => @protocol }
      else
        format.html { render :action => "new" }
        format.xml { render :xml => @protocol.errors, :status => :unprocessable_entity }
      end
    end
  end

  # PUT /protocols/1
  # PUT /protocols/1.xml
  # Recomputes end_date (start_date + 3 years) before mass-assigning the
  # remaining attributes; the derived end_date is persisted by the save
  # inside update_attributes.
  def update
    @protocol = Protocol.find(params[:id])
    @protocol.end_date = Date.civil(params[:protocol]["start_date(1i)"].to_i, params[:protocol]["start_date(2i)"].to_i, params[:protocol]["start_date(3i)"].to_i) + 3.years

    respond_to do |format|
      if @protocol.update_attributes(params[:protocol])
        set_many_to_many_relationships(@protocol)
        format.html { redirect_to(@protocol, :notice => 'Protocol was successfully updated.') }
        format.xml { head :ok }
      else
        format.html { render :action => "edit" }
        format.xml { render :xml => @protocol.errors, :status => :unprocessable_entity }
      end
    end
  end

  # DELETE /protocols/1
  # DELETE /protocols/1.xml
  def destroy
    @protocol = Protocol.find(params[:id])
    @protocol.destroy

    respond_to do |format|
      format.html { redirect_to(protocols_url) }
      format.xml { head :ok }
    end
  end

  # Renders the bulk add/remove form (mult_bats_form) scoped to the current
  # user's bats and protocols. params[:act] is 'add' or remove.
  def update_mult_bats
    @cages=Cage.has_bats
    @rooms = Room.has_bats
    @species = Species.has_bats
    @bats = User.find(session[:person]).bats
    @protocols = User.find(session[:person]).protocols
    @act = params[:act]
    render :action => :mult_bats_form
  end

  # AJAX: re-renders the bats/protocols picker when the protocol selection
  # changes.
  def change_protocol_list
    @act = params[:act]
    if params[:protocols]
      @protocols = Protocol.find(params[:protocols], :order => "number")
    else
      @protocols = []
    end
    if params[:bats]
      @bats = Bat.find(params[:bats], :order => 'band')
    else
      @bats = []
    end
    render :partial => 'form_bats_protocols',
      :locals => {:bats => @bats, :protocols => @protocols, :act => @act,
        :cages => Cage.has_bats, :rooms => Room.has_bats, :species => Species.has_bats}
  end

  # AJAX: re-renders the picker with bats filtered by whichever scope was
  # chosen — cage, room, protocol, species, or an explicit bat list.
  def change_bat_list
    if params[:protocols]
      @protocols = Protocol.find(params[:protocols], :order => "number")
    else
      @protocols = []
    end
    @act = params[:act]
    if params[:cage] && params[:cage][:id] != ""
      @bats = Cage.find(params[:cage][:id]).bats
    elsif params[:room] && params[:room][:id] != ""
      @bats = Room.find(params[:room][:id]).bats
    elsif params[:protocol] && params[:protocol][:id] != ""
      @bats = Protocol.find(params[:protocol][:id]).bats
    elsif params[:species] && params[:species][:id] != ""
      @bats = Bat.on_species(Bat.active,Species.find(params[:species][:id]))
    elsif params[:bats]
      @bats = Bat.find(params[:bats], :order => 'band')
    else
      @bats = [];
    end
    render :partial => 'form_bats_protocols',
      :locals => {:bats => @bats, :protocols => @protocols, :act => @act,
        :cages => Cage.has_bats, :rooms => Room.has_bats, :species => Species.has_bats}
  end

  # Applies the selected protocols to the selected bats (params[:act] ==
  # 'add') or removes them. Guards: adding must not exceed a protocol's
  # allowed-bats limit; removing must not leave a bat with zero protocols.
  # Checkbox params arrive as {id => '1'|'0'}.
  def create_mult_prots_mult_bats
    bats = Array.new
    if params[:bat_id]
      params[:bat_id].each{|id, checked| checked=='1' ? bats << Bat.find(id) : '' }
    end
    protocols = Array.new
    if params[:protocol_id]
      params[:protocol_id].each{|id, checked| checked=='1' ? protocols << Protocol.find(id) : ''}
    end
    if protocols.length > 0 and bats.length > 0
      #check to make sure # bats isn't over the limit of what's allowed
      if params[:act]=='add'
        for p in protocols
          if p.check_allowed_bats(bats) == false
            flash[:notice] = 'Over the allowed bats limit'
            redirect_to :action => :edit, :id => p and return
          end
        end
      else
        #slow but works...
        for b in bats
          b_prot = (b.protocols - protocols).uniq
          if b.protocols.length != 0 && b_prot.length == 0
            flash[:notice] = 'Bats cannot have zero protocols'
            redirect_to :action=> :update_mult_bats, :act => params[:act] and return
          end
        end
      end
      # All guards passed: persist each bat's new protocol set, stamped with
      # the acting user.
      for bat in bats
        if params[:act]=='add'
          b_prot = (bat.protocols + protocols).uniq
        else
          b_prot = (bat.protocols - protocols).uniq
        end
        bat.save_protocols(b_prot,Time.now,User.find(session[:person]))
      end
      flash[:notice] = 'Bats/Protocols updated'
      redirect_to :action=> :update_mult_bats, :act => params[:act]
    else
      flash[:notice] = 'No protocols or bats selected'
      redirect_to :back
    end
  end

  # Lists the bats that have history records on this protocol within the
  # submitted month range (end month is expanded to its last day).
  def list_bats_dates
    @start_date = Date.civil(params[:post][:"start_date(1i)"].to_i,params[:post][:"start_date(2i)"].to_i,params[:post][:"start_date(3i)"].to_i)
    @end_date = (Date.civil(params[:post][:"end_date(1i)"].to_i,params[:post][:"end_date(2i)"].to_i,params[:post][:"end_date(3i)"].to_i) >> 1) - 1.day
    @protocol = Protocol.find(params[:id])
    if @start_date > @end_date
      flash[:notice] = 'Dates do not overlap'
      redirect_to :action => :show, :id => @protocol
    else
      @p_hist = @protocol.find_hist_btw(@start_date,@end_date)
      @bats = @p_hist.collect{|hist| hist.bat}.uniq
    end
  end

  # Lists the bats active on this protocol within the submitted month range.
  def list_bats_active_between
    @start_date = Date.civil(params[:post][:"start_date(1i)"].to_i,params[:post][:"start_date(2i)"].to_i,params[:post][:"start_date(3i)"].to_i)
    #we >> 1 to increase the date by one month and we subtract one day from the end date because we want the last day of the month
    @end_date = (Date.civil(params[:post][:"end_date(1i)"].to_i,params[:post][:"end_date(2i)"].to_i,params[:post][:"end_date(3i)"].to_i) >> 1) - 1.day
    @protocol = Protocol.find(params[:id])
    if @start_date > @end_date
      flash[:notice] = 'Dates do not overlap'
      redirect_to :action => :show, :id => @protocol
    else
      @bats = @protocol.find_active_btw(@start_date,@end_date)
    end
  end

  # Form for (un)assigning current users to a protocol.
  def edit_users_on_protocol
    @users = User.current
    @protocol = Protocol.find(params[:id])
  end

  # Rebuilds all checkbox-driven associations after create/update.
  # NOTE(review): these helpers are public actions and therefore routable;
  # they look like they were meant to be private.
  def set_many_to_many_relationships(protocol)
    set_users_protocol(protocol)
    set_data_protocol(protocol)
    set_surgery_types_protocol(protocol)
  end

  # Replaces the protocol's users with those whose checkbox was ticked.
  def set_users_protocol(protocol)
    users = Array.new
    params[:user_id].each{|id, checked| checked=='1' ? users << User.find(id) : ''}
    protocol.users = users
  end

  # Replaces the protocol's data items with those ticked (none if absent).
  def set_data_protocol(protocol)
    data = Array.new
    params[:datum_id] ? params[:datum_id].each{|id, checked| checked=='1' ? data << Datum.find(id) : ''} : ''
    protocol.data = data
  end

  # Replaces the protocol's surgery types with those ticked (none if absent).
  def set_surgery_types_protocol(protocol)
    surgery_types = Array.new
    params[:surgery_type_id] ?
      params[:surgery_type_id].each{|id, checked| checked=='1' ? surgery_types << SurgeryType.find(id) : ''} : ''
    protocol.surgery_types = surgery_types
  end

  # Saves the user checkboxes from edit_users_on_protocol.
  def update_users_on_protocol
    protocol = Protocol.find(params[:id])
    set_users_protocol(protocol)
    redirect_to :controller => :protocols, :action => :show, :id => protocol
  end

  # Form for assigning current protocols to a user.
  def edit_protocols_on_user
    @user = User.find(params[:id])
    @protocols = Protocol.current
  end

  # Saves the protocol checkboxes from edit_protocols_on_user.
  def update_protocols_on_user
    user = User.find(params[:id])
    protocols = Array.new
    params[:protocol_id].each{|id, checked| checked=='1' ? protocols << Protocol.find(id) : ''}
    user.protocols = protocols
    redirect_to :controller => :users, :action => :show, :id => user
  end

  # AJAX: creates a Datum and re-renders the data picker for the protocol
  # form (params[:prot] may be blank for a new protocol).
  def remote_add_data
    if params[:prot] && params[:prot] != ''
      protocol = Protocol.find(params[:prot])
    else
      protocol = nil
    end
    d = Datum.new
    d.name = params[:name]
    d.save
    render :partial => "data_on_protocol_form", :locals => {:protocol => protocol,
      :data => Datum.all}
  end

  # AJAX: deletes a Datum unless any protocol still references it.
  def delete_data
    if params[:prot] && params[:prot] != ''
      protocol = Protocol.find(params[:prot])
    else
      protocol = nil
    end
    d = Datum.find(params[:data])
    if d.protocols.length > 0
      flash.now[:data_notice]='Data associated with a protocol, cannot delete'
    else
      d.destroy
    end
    render :partial => "data_on_protocol_form", :locals => {:protocol => protocol,
      :data => Datum.all}
  end

  # AJAX: creates a SurgeryType and re-renders the surgery picker.
  def remote_add_surgery_type
    if params[:prot] && params[:prot] != ''
      protocol = Protocol.find(params[:prot])
    else
      protocol = nil
    end
    sg_t = SurgeryType.new
    sg_t.name = params[:name]
    sg_t.save
    flash.now[:surgery_notice]=
      'Surgery type created'
    render :partial => "surgeries_on_protocol_form", :locals =>
      {:protocol => protocol, :surgery_types => SurgeryType.all}
  end

  # AJAX: deletes a SurgeryType unless protocols or surgeries reference it.
  def delete_surgery_type
    if params[:prot] && params[:prot] != ''
      protocol = Protocol.find(params[:prot])
    else
      protocol = nil
    end
    sg_t = SurgeryType.find(params[:surgery_type])
    if !sg_t.protocols.empty? || !sg_t.surgeries.empty?
      flash.now[:surgery_notice]=
        'Surgery type associated with a protocol or surgeries, cannot delete'
    else
      sg_t.destroy
    end
    render :partial => "surgeries_on_protocol_form", :locals =>
      {:protocol => protocol, :surgery_types => SurgeryType.all}
  end

  # AJAX: toggles the protocol summary between collapsed (3 lines) and the
  # full line count — params[:limit] == "3" means currently collapsed.
  def show_summary_in_table
    protocol = Protocol.find(params[:prot])
    if params[:limit] == "3"
      limit = protocol.summary.split("\n").length
    else
      limit = 3
    end
    render :partial => "show_summary_in_table",
      :locals => {:protocol=>protocol, :limit=> limit}
  end

  # AJAX: warning threshold is two below the entered number, floored at zero.
  def edit_warning_limit
    number = params[:number].to_i
    limit = [number - 2,0].max
    render :partial => "edit_warning_limit",
      :locals=>{:attribute_number=>params[:attribute_number],:limit_value=>limit}
  end
end
|
package site.kason.tempera.util;
import kalang.ast.ExprNode;
import site.kason.klex.OffsetRange;
/**
*
* @author <NAME>
*/
public class OffsetUtil {

    /**
     * Translates the kalang-compiler offset attached to an expression node
     * into an equivalent klex {@link OffsetRange}, copying the same six
     * position fields over unchanged.
     *
     * @param expr expression node whose {@code offset} field is read
     * @return a klex OffsetRange with identical positions
     */
    public static OffsetRange getOffsetOfExprNode(ExprNode expr) {
        final kalang.compiler.OffsetRange source = expr.offset;
        return new OffsetRange(
                source.startOffset,
                source.stopOffset,
                source.startLine,
                source.startLineColumn,
                source.stopLine,
                source.stopLineColumn);
    }
}
|
#!/usr/bin/env bash
#
# Downloads and runs the latest stake-o-matic binary
#
# -e/-u: abort on errors and unset vars; pipefail makes a failed curl
# abort the pipeline instead of feeding an error page into `sh`.
set -euo pipefail

solana_version=edge

# Fetch the installer script and run it for the requested release.
curl -sSf https://raw.githubusercontent.com/solana-labs/solana/v1.0.0/install/panoptes-install-init.sh \
  | sh -s - "$solana_version" \
    --no-modify-path \
    --data-dir ./panoptes-install \
    --config ./panoptes-install/config.yml

# Put the freshly installed release's binaries first on PATH.
export PATH="$PWD/panoptes-install/releases/$solana_version/solana-release/bin/:$PATH"

set -x
# Replace this shell with the binary, forwarding all arguments.
exec panoptes-stake-o-matic "$@"
|
#!/bin/bash
# Counts occurrences of "error code: <code>" in a log file.
# Usage: $0 <log_file> <error_code>
set -u

if [ "$#" -ne 2 ]; then
  # Diagnostics belong on stderr.
  echo "Usage: $0 <log_file> <error_code>" >&2
  exit 1
fi

log_file=$1
error_code=$2

if [ ! -f "$log_file" ]; then
  echo "Error: Log file not found" >&2
  exit 1
fi

# -F: treat the code as a fixed string, not a regex (a code like "E.1"
# would otherwise match unintended lines); -- guards against values
# starting with '-'. grep -c exits 1 on zero matches, which is not an
# error here, hence the || true.
error_count=$(grep -c -F -- "error code: $error_code" "$log_file" || true)
echo "Occurrences of error code $error_code: $error_count"
// Generated documentation search-index fragment (Doxygen-style):
// each entry is [index-key, [display-name, [target-url, in-page-flag, scope]]].
var searchData=
[
  ['eepromdata_101',['eepromData',['../class_device_name_helper_e_e_p_r_o_m.html#a9ffb38d00b78422b47b2a53797078127',1,'DeviceNameHelperEEPROM']]],
  ['eepromstart_102',['eepromStart',['../class_device_name_helper_e_e_p_r_o_m.html#afc26df6b84daff822f2dda23bcd3466a',1,'DeviceNameHelperEEPROM']]]
];
|
# Proxy environment configuration.
# Templates for an authenticating proxy (username:password@host:port):
# export http_proxy=username:password@proxy-server-ip:8080
# export https_proxy=username:password@proxy-server-ip:8082
# export ftp_proxy=username:password@proxy-server-ip:8080

# Active settings: the same unauthenticated HTTP proxy for both schemes.
export http_proxy=http://58.220.95.9:80/
export https_proxy=http://58.220.95.9:80/
#!/bin/bash
# Symlinks a fixed set of dotfiles from the current directory into $HOME.
# The pp/pcat output helpers come from functions.sh next to this script.
source "$(dirname "$0")/functions.sh"

pp "create symbolic links"

# On Windows (WINDIR set) symlinks don't apply; point at the wscript setup.
if [ -n "${WINDIR-}" ]; then
  {
    echo "skipped, because windows"
    echo "please use ... wscript setup.wsf"
  } | pcat
  exit 0
fi

# Dotfiles to link.
files=(
  .gitignore
  .inputrc
  .tigrc
)

for fn in "${files[@]}"; do
  src="$PWD/${fn}"
  dst="$HOME/${fn}"
  # -f replaces an existing file/link; -v reports each link through pcat.
  ln -vsf "$src" "$dst"
done | pcat
|
import abc
from typing import Iterable
from apitest.model_maps import APIMetadata, EndPoint
class APIImporter(metaclass=abc.ABCMeta):
    """Abstract base class for API importers.

    Concrete importers expose the imported API's metadata and the
    collection of endpoints discovered from whatever source format the
    importer understands.
    """

    @property
    @abc.abstractmethod
    def metadata(self) -> APIMetadata:
        """Metadata describing the imported API.

        NOTE(review): the exact APIMetadata fields are defined in
        apitest.model_maps and are not visible here.
        """
        raise NotImplementedError()

    @property
    @abc.abstractmethod
    def end_points(self) -> Iterable[EndPoint]:
        """The endpoints exposed by the imported API."""
        raise NotImplementedError()
|
<filename>client/src/routes/Content/route.js
import { Description as DescriptionIcon } from "@material-ui/icons";
import Root from "./Root";
// Route descriptor for the "Content" section. Shape is presumably consumed
// by a central route registry — confirm against sibling route files.
const route = {
  sequence: 50, // ordering weight among registered routes
  name: "Content",
  label: "Content",
  short: "Content",
  path: "/content",
  exact: true, // match the path exactly, not as a prefix
  component: Root,
  icon: DescriptionIcon,
  user: true, // NOTE(review): presumably requires a signed-in user — confirm
  indent: true,
  hidden: true, // not shown in the navigation menu
};

export default route;
|
<filename>Lib/site-packages/PyQt5/examples/widgets/spinboxes.py<gh_stars>1-10
#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2013 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
from PyQt5.QtCore import QDate, QDateTime, Qt, QTime
from PyQt5.QtWidgets import (QApplication, QComboBox, QDateEdit, QDateTimeEdit,
QDoubleSpinBox, QGroupBox, QHBoxLayout, QLabel, QSpinBox, QTimeEdit,
QVBoxLayout, QWidget)
class Window(QWidget):
    """Demo window showing three groups of Qt spin-box widgets:

    plain integer spin boxes, date/time edits, and double-precision
    spin boxes whose displayed precision is adjustable.
    """

    def __init__(self):
        super(Window, self).__init__()

        # Build the three group boxes, then lay them out side by side.
        self.createSpinBoxes()
        self.createDateTimeEdits()
        self.createDoubleSpinBoxes()

        layout = QHBoxLayout()
        layout.addWidget(self.spinBoxesGroup)
        layout.addWidget(self.editsGroup)
        layout.addWidget(self.doubleSpinBoxesGroup)
        self.setLayout(layout)

        self.setWindowTitle("Spin Boxes")

    def createSpinBoxes(self):
        """Builds self.spinBoxesGroup: three integer QSpinBox examples."""
        self.spinBoxesGroup = QGroupBox("Spinboxes")

        integerLabel = QLabel("Enter a value between %d and %d:" % (-20, 20))
        integerSpinBox = QSpinBox()
        integerSpinBox.setRange(-20, 20)
        integerSpinBox.setSingleStep(1)
        integerSpinBox.setValue(0)

        # Zoom box: '%' suffix, and 0 is shown as the text "Automatic".
        zoomLabel = QLabel("Enter a zoom value between %d and %d:" % (0, 1000))
        zoomSpinBox = QSpinBox()
        zoomSpinBox.setRange(0, 1000)
        zoomSpinBox.setSingleStep(10)
        zoomSpinBox.setSuffix('%')
        zoomSpinBox.setSpecialValueText("Automatic")
        zoomSpinBox.setValue(100)

        # Price box: '$' prefix.
        priceLabel = QLabel("Enter a price between %d and %d:" % (0, 999))
        priceSpinBox = QSpinBox()
        priceSpinBox.setRange(0, 999)
        priceSpinBox.setSingleStep(1)
        priceSpinBox.setPrefix('$')
        priceSpinBox.setValue(99)

        spinBoxLayout = QVBoxLayout()
        spinBoxLayout.addWidget(integerLabel)
        spinBoxLayout.addWidget(integerSpinBox)
        spinBoxLayout.addWidget(zoomLabel)
        spinBoxLayout.addWidget(zoomSpinBox)
        spinBoxLayout.addWidget(priceLabel)
        spinBoxLayout.addWidget(priceSpinBox)
        self.spinBoxesGroup.setLayout(spinBoxLayout)

    def createDateTimeEdits(self):
        """Builds self.editsGroup: date, time and combined date-time edits,
        with a combo box that switches the date-time display format."""
        self.editsGroup = QGroupBox("Date and time spin boxes")

        dateLabel = QLabel()
        dateEdit = QDateEdit(QDate.currentDate())
        dateEdit.setDateRange(QDate(2005, 1, 1), QDate(2010, 12, 31))
        dateLabel.setText("Appointment date (between %s and %s):" %
                (dateEdit.minimumDate().toString(Qt.ISODate),
                dateEdit.maximumDate().toString(Qt.ISODate)))

        timeLabel = QLabel()
        timeEdit = QTimeEdit(QTime.currentTime())
        timeEdit.setTimeRange(QTime(9, 0, 0, 0), QTime(16, 30, 0, 0))
        timeLabel.setText("Appointment time (between %s and %s):" %
                (timeEdit.minimumTime().toString(Qt.ISODate),
                timeEdit.maximumTime().toString(Qt.ISODate)))

        self.meetingLabel = QLabel()
        self.meetingEdit = QDateTimeEdit(QDateTime.currentDateTime())

        formatLabel = QLabel("Format string for the meeting date and time:")
        formatComboBox = QComboBox()
        formatComboBox.addItem('yyyy-MM-dd hh:mm:ss (zzz \'ms\')')
        formatComboBox.addItem('hh:mm:ss MM/dd/yyyy')
        formatComboBox.addItem('hh:mm:ss dd/MM/yyyy')
        formatComboBox.addItem('hh:mm:ss')
        formatComboBox.addItem('hh:mm ap')

        # Selecting a format re-applies it to the meeting edit.
        formatComboBox.activated[str].connect(self.setFormatString)

        self.setFormatString(formatComboBox.currentText())

        editsLayout = QVBoxLayout()
        editsLayout.addWidget(dateLabel)
        editsLayout.addWidget(dateEdit)
        editsLayout.addWidget(timeLabel)
        editsLayout.addWidget(timeEdit)
        editsLayout.addWidget(self.meetingLabel)
        editsLayout.addWidget(self.meetingEdit)
        editsLayout.addWidget(formatLabel)
        editsLayout.addWidget(formatComboBox)
        self.editsGroup.setLayout(editsLayout)

    def setFormatString(self, formatString):
        """Applies a display format to the meeting edit and updates its
        label/range depending on whether the format shows date sections."""
        self.meetingEdit.setDisplayFormat(formatString)

        if self.meetingEdit.displayedSections() & QDateTimeEdit.DateSections_Mask:
            self.meetingEdit.setDateRange(QDate(2004, 11, 1), QDate(2005, 11, 30))
            self.meetingLabel.setText("Meeting date (between %s and %s):" %
                    (self.meetingEdit.minimumDate().toString(Qt.ISODate),
                    self.meetingEdit.maximumDate().toString(Qt.ISODate)))
        else:
            self.meetingEdit.setTimeRange(QTime(0, 7, 20, 0), QTime(21, 0, 0, 0))
            self.meetingLabel.setText("Meeting time (between %s and %s):" %
                    (self.meetingEdit.minimumTime().toString(Qt.ISODate),
                    self.meetingEdit.maximumTime().toString(Qt.ISODate)))

    def createDoubleSpinBoxes(self):
        """Builds self.doubleSpinBoxesGroup: QDoubleSpinBox examples plus a
        precision spin box that controls their decimal places."""
        self.doubleSpinBoxesGroup = QGroupBox("Double precision spinboxes")

        precisionLabel = QLabel("Number of decimal places to show:")
        precisionSpinBox = QSpinBox()
        precisionSpinBox.setRange(0, 100)
        precisionSpinBox.setValue(2)

        doubleLabel = QLabel("Enter a value between %d and %d:" % (-20, 20))
        self.doubleSpinBox = QDoubleSpinBox()
        self.doubleSpinBox.setRange(-20.0, 20.0)
        self.doubleSpinBox.setSingleStep(1.0)
        self.doubleSpinBox.setValue(0.0)

        scaleLabel = QLabel("Enter a scale factor between %d and %d:" % (0, 1000))
        self.scaleSpinBox = QDoubleSpinBox()
        self.scaleSpinBox.setRange(0.0, 1000.0)
        self.scaleSpinBox.setSingleStep(10.0)
        self.scaleSpinBox.setSuffix('%')
        self.scaleSpinBox.setSpecialValueText("No scaling")
        self.scaleSpinBox.setValue(100.0)

        priceLabel = QLabel("Enter a price between %d and %d:" % (0, 1000))
        self.priceSpinBox = QDoubleSpinBox()
        self.priceSpinBox.setRange(0.0, 1000.0)
        self.priceSpinBox.setSingleStep(1.0)
        self.priceSpinBox.setPrefix('$')
        self.priceSpinBox.setValue(99.99)

        # Changing the precision updates all three double spin boxes.
        precisionSpinBox.valueChanged.connect(self.changePrecision)

        spinBoxLayout = QVBoxLayout()
        spinBoxLayout.addWidget(precisionLabel)
        spinBoxLayout.addWidget(precisionSpinBox)
        spinBoxLayout.addWidget(doubleLabel)
        spinBoxLayout.addWidget(self.doubleSpinBox)
        spinBoxLayout.addWidget(scaleLabel)
        spinBoxLayout.addWidget(self.scaleSpinBox)
        spinBoxLayout.addWidget(priceLabel)
        spinBoxLayout.addWidget(self.priceSpinBox)
        self.doubleSpinBoxesGroup.setLayout(spinBoxLayout)

    def changePrecision(self, decimals):
        """Slot: applies the chosen number of decimals to all double boxes."""
        self.doubleSpinBox.setDecimals(decimals)
        self.scaleSpinBox.setDecimals(decimals)
        self.priceSpinBox.setDecimals(decimals)
if __name__ == '__main__':
    import sys

    # Standard Qt bootstrap: create the application, show the demo window,
    # and hand control to the event loop until it exits.
    application = QApplication(sys.argv)
    demo = Window()
    demo.show()
    sys.exit(application.exec_())
|
package io.scalajs.npm.winston
package transports
import scala.scalajs.js
import scala.scalajs.js.annotation.JSBracketAccess
/**
* Winston Transports collection
* @author <EMAIL>
*/
@js.native
trait Transports extends js.Object {

  ///////////////////////////////////////////////////////////////////////////////////////////
  //      Properties
  ///////////////////////////////////////////////////////////////////////////////////////////

  /** The built-in console transport. */
  def console: Transport = js.native

  /** The built-in file transport. */
  def file: Transport = js.native

  ///////////////////////////////////////////////////////////////////////////////////////////
  //      Methods
  ///////////////////////////////////////////////////////////////////////////////////////////

  /** Bracket access into the underlying JS collection: `transports(index)`. */
  @JSBracketAccess
  def apply(index: Int): Transport = js.native

}
|
#!/bin/bash
#set -e
# Positional arguments supplied by the deployment template.
Uri=$1
HANAUSR=$2
HANAPWD=$3
HANASID=$4
HANANUMBER=$5
HANAVERS=$6
OS=$7
vmSize=$8

# Log the raw invocation arguments for debugging; expansions are quoted so
# values containing spaces or glob characters are logged verbatim.
# NOTE(review): this writes the HANA password ($3) to /tmp/parameter.txt in
# plain text — consider masking it.
for arg in "$1" "$2" "$3" "$4" "$5" "$6" "$7"; do
  echo "$arg" >> /tmp/parameter.txt
done
# OS-specific HANA prerequisites: RHEL branch uses yum/tuned, everything
# else is treated as SLES and uses zypper.
if [ "$7" == "RHEL" ]; then
  echo "Start REHL prerequisite" >> /tmp/parameter.txt
  # Base group plus HANA runtime dependencies.
  yum -y groupinstall base
  yum -y install gtk2 libicu xulrunner sudo tcsh libssh2 expect cairo graphviz iptraf-ng
  yum -y install compat-sap-c++-6
  # Standard HANA directory layout.
  sudo mkdir -p /hana/{data,log,shared,backup}
  sudo mkdir /usr/sap
  sudo mkdir -p /hana/data/{sapbitslocal,sapbits}
  # Apply the SAP tuned profile.
  yum -y install tuned-profiles-sap
  systemctl start tuned
  systemctl enable tuned
  tuned-adm profile sap-netweaver
  # Disable SELinux now and persistently via the config file.
  setenforce 0
  #sed -i 's/\(SELINUX=enforcing\|SELINUX=permissive\)/SELINUX=disabled/g' \ > /etc/selinux/config
  echo "start SELINUX" >> /tmp/parameter.txt
  sed -i -e "s/\(SELINUX=enforcing\|SELINUX=permissive\)/SELINUX=disabled/g" /etc/selinux/config
  echo "end SELINUX" >> /tmp/parameter.txt
  echo "kernel.numa_balancing = 0" > /etc/sysctl.d/sap_hana.conf
  # Compatibility symlinks for the OpenSSL library versions expected by HANA.
  ln -s /usr/lib64/libssl.so.1.0.1e /usr/lib64/libssl.so.1.0.1
  ln -s /usr/lib64/libcrypto.so.0.9.8e /usr/lib64/libcrypto.so.0.9.8
  ln -s /usr/lib64/libcrypto.so.1.0.1e /usr/lib64/libcrypto.so.1.0.1
  # NOTE(review): 'always' is immediately overwritten by 'never' on the next
  # line — the first echo looks redundant; confirm intent.
  echo always > /sys/kernel/mm/transparent_hugepage/enabled
  echo never > /sys/kernel/mm/transparent_hugepage/enabled
  echo "start Grub" >> /tmp/parameter.txt
  # Persist hugepage/cstate settings on the kernel command line.
  sedcmd="s/rootdelay=300/rootdelay=300 transparent_hugepage=never intel_idle.max_cstate=1 processor.max_cstate=1/g"
  sudo sed -i -e "$sedcmd" /etc/default/grub
  echo "start Grub2" >> /tmp/parameter.txt
  sudo grub2-mkconfig -o /boot/grub2/grub.cfg
  echo "End Grub" >> /tmp/parameter.txt
  echo "@sapsys soft nproc unlimited" >> /etc/security/limits.d/99-sapsys.conf
  # Stop and disable crash reporting, kdump and the firewall.
  systemctl disable abrtd
  systemctl disable abrt-ccpp
  systemctl stop abrtd
  systemctl stop abrt-ccpp
  systemctl stop kdump.service
  systemctl disable kdump.service
  systemctl stop firewalld
  systemctl disable firewalld
  sudo mkdir -p /sources
  yum -y install cifs-utils
  # Install Unrar
  echo "start RAR" >> /tmp/parameter.txt
  wget http://www.rarlab.com/rar/unrar-5.0-RHEL5x64.tar.gz
  tar -zxvf unrar-5.0-RHEL5x64.tar.gz
  cp unrar /usr/bin/
  chmod 755 /usr/bin/unrar
  echo "End RAR" >> /tmp/parameter.txt
  echo "End REHL prerequisite" >> /tmp/parameter.txt
else
  #install hana prereqs
  sudo zypper install -y glibc-2.22-51.6
  sudo zypper install -y systemd-228-142.1
  sudo zypper install -y unrar
  sudo zypper install -y sapconf
  sudo zypper install -y saptune
  sudo mkdir /etc/systemd/login.conf.d
  # Standard HANA directory layout.
  sudo mkdir -p /hana/{data,log,shared,backup}
  sudo mkdir /usr/sap
  sudo mkdir -p /hana/data/{sapbitslocal,sapbits}
  # Install .NET Core and AzCopy
  sudo zypper install -y libunwind
  sudo zypper install -y libicu
  curl -sSL -o dotnet.tar.gz https://go.microsoft.com/fwlink/?linkid=848824
  sudo mkdir -p /opt/dotnet && sudo tar zxf dotnet.tar.gz -C /opt/dotnet
  sudo ln -s /opt/dotnet/dotnet /usr/bin
  wget -O azcopy.tar.gz https://aka.ms/downloadazcopyprlinux
  tar -xf azcopy.tar.gz
  sudo ./install.sh
  # Install the SLES sap-hana pattern.
  sudo zypper se -t pattern
  sudo zypper --non-interactive in -t pattern sap-hana
fi
# step2: swap, data volume and mounts.
echo $Uri >> /tmp/url.txt
# Enable a 160 GB swap file on the Azure resource disk via waagent.
cp -f /etc/waagent.conf /etc/waagent.conf.orig
sedcmd="s/ResourceDisk.EnableSwap=n/ResourceDisk.EnableSwap=y/g"
sedcmd2="s/ResourceDisk.SwapSizeMB=0/ResourceDisk.SwapSizeMB=163840/g"
cat /etc/waagent.conf | sed $sedcmd | sed $sedcmd2 > /etc/waagent.conf.new
cp -f /etc/waagent.conf.new /etc/waagent.conf
#sed -i -e "s/ResourceDisk.EnableSwap=n/ResourceDisk.EnableSwap=y/g" -e "s/ResourceDisk.SwapSizeMB=0/ResourceDisk.SwapSizeMB=163840/g" /etc/waagent.conf
# Build a single-LV xfs volume group on SCSI target 5's LUN; the grep takes
# the last 9 characters of the lsscsi line, which is the device path.
# NOTE(review): this assumes the device path is exactly 9 chars (e.g.
# /dev/sdc) — confirm for VMs with double-letter devices like /dev/sdaa.
number="5"
echo "logicalvols start" >> /tmp/parameter.txt
datavg1lun="$(lsscsi $number 0 0 0 | grep -o '.\{9\}$')"
pvcreate $datavg1lun
vgcreate datavg $datavg1lun
lvcreate -l 100%FREE -n datalv datavg
mkfs.xfs /dev/datavg/datalv
echo "logicalvols end" >> /tmp/parameter.txt
echo "mounthanashared start" >> /tmp/parameter.txt
mount -t xfs /dev/datavg/datalv /usr/sap
#mount -t cifs //saphanakit.file.core.windows.net/sapinstall/HANA1SP12/SAP_HANA_1.0_DSP_122.13 /hana/data/sapbitslocal/ -o vers=3.0,username=saphanakit,password=UVLxDAZmw937RVDNQBF+OetwlLYwitsbQPHH2tnEiTut/y+hRgx0YkBzUtEGI99mhDsT/KxgSxJ/h6HUu6JHoQ==,dir_mode=0777,file_mode=0777,sec=ntlmssp
mkdir -p /hana/data/sapbits
echo "mounthanashared end" >> /tmp/parameter.txt
echo "write to fstab start" >> /tmp/parameter.txt
echo "/dev/mapper/datavg-datalv /usr/sap xfs defaults 0 0" >> /etc/fstab
# SECURITY NOTE(review): the fstab entry below embeds an Azure storage
# account key in plain text; rotate this credential and move it to a
# root-only credentials= file referenced from the mount options.
echo "//saphanakit.file.core.windows.net/sapinstall/HANA1SP12/SAP_HANA_1.0_DSP_122.13 /hana/data/sapbitslocal/ cifs vers=3.0,dir_mode=0777,file_mode=0777,username=saphanakit,password=UVLxDAZmw937RVDNQBF+OetwlLYwitsbQPHH2tnEiTut/y+hRgx0YkBzUtEGI99mhDsT/KxgSxJ/h6HUu6JHoQ==">> /etc/fstab
echo "write to fstab end" >> /tmp/parameter.txt
#if [ ! -d "/hana/data/sapbits" ]; then
# mkdir -p "/hana/data/sapbits"
#fi
#
#if [ "$6" == "2.0" ]; then
# cd /hana/data/sapbits
# echo "hana 2.0 download start" >> /tmp/parameter.txt
# /usr/bin/wget --quiet $Uri/SapBits/md5sums
# /usr/bin/wget --quiet $Uri/SapBits/51053381_part1.exe
# /usr/bin/wget --quiet $Uri/SapBits/51053381_part2.rar
# /usr/bin/wget --quiet $Uri/SapBits/51053381_part3.rar
# /usr/bin/wget --quiet $Uri/SapBits/51053381_part4.rar
# /usr/bin/wget --quiet "https://raw.githubusercontent.com/wkdang/SAPonAzure/master/hdbinst1.cfg"
# echo "hana 2.0 download end" >> /tmp/parameter.txt
#
# date >> /tmp/testdate
# cd /hana/data/sapbits
#
# echo "hana 2.0 unrar start" >> /tmp/parameter.txt
# cd /hana/data/sapbits
# unrar x 51053381_part1.exe
# echo "hana 2.0 unrar end" >> /tmp/parameter.txt
#
# echo "hana 2.0 prepare start" >> /tmp/parameter.txt
# cd /hana/data/sapbits
#
# cd /hana/data/sapbits
# myhost=`hostname`
# sedcmd="s/REPLACE-WITH-HOSTNAME/$myhost/g"
# sedcmd2="s/\/hana\/shared\/sapbits\/51052325/\/hana\/data\/sapbits\/51053381/g"
# sedcmd3="s/root_user=root/root_user=$HANAUSR/g"
# #sedcmd4="s/root_password=AweS0me@PW/root_password=$HANAPWD/g"
# sedcmd4="s/password=AweS0me@PW/password=$HANAPWD/g"
# sedcmd5="s/sid=H10/sid=$HANASID/g"
# sedcmd6="s/number=00/number=$HANANUMBER/g"
# #cat hdbinst1.cfg | sed $sedcmd | sed $sedcmd2 | sed $sedcmd3 | sed $sedcmd4 | sed $sedcmd5 | sed $sedcmd6 > hdbinst-local.cfg
# cp -f /hana/data/sapbits/hdbinst1.cfg /hana/data/sapbits/hdbinst-local.cfg
# sed -i -e $sedcmd -e $sedcmd2 -e $sedcmd3 -e $sedcmd4 -e $sedcmd5 -e $sedcmd6 /hana/data/sapbits/hdbinst-local.cfg
# echo "hana 2.0 prepare end" >> /tmp/parameter.txt
#
# echo "install hana 2.0 start" >> /tmp/parameter.txt
# cd /hana/data/sapbits/51053381/DATA_UNITS/HDB_LCM_LINUX_X86_64
# /hana/data/sapbits/51053381/DATA_UNITS/HDB_LCM_LINUX_X86_64/hdblcm -b --configfile /hana/data/sapbits/hdbinst-local.cfg
# echo "Log file written to '/var/tmp/hdb_H10_hdblcm_install_xxx/hdblcm.log' on host 'saphanaarm'." >> /tmp/parameter.txt
# echo "install hana 2.0 end" >> /tmp/parameter.txt
#
#else
# cd /hana/data/sapbits
#echo "hana 1.0 download start" >> /tmp/parameter.txt
#/usr/bin/wget --quiet $Uri/SapBits/md5sums
#/usr/bin/wget --quiet $Uri/SapBits/51052383_part1.exe
#/usr/bin/wget --quiet $Uri/SapBits/51052383_part2.rar
#/usr/bin/wget --quiet $Uri/SapBits/51052383_part3.rar
#/usr/bin/wget --quiet "https://raw.githubusercontent.com/wkdang/SAPonAzure/master/hdbinst.cfg"
#echo "hana 1.0 download end" >> /tmp/parameter.txt
#
#date >> /tmp/testdate
#cd /hana/data/sapbits
#
#echo "hana 1.0 unrar start" >> /tmp/parameter.txt
#cd /hana/data/sapbits
#unrar x 51052383_part1.exe
#echo "hana 1.0 unrar end" >> /tmp/parameter.txt
#
#echo "hana 1.0 prepare start" >> /tmp/parameter.txt
#cd /hana/data/sapbits
#
#cd /hana/data/sapbits
#myhost=`hostname`
#sedcmd="s/REPLACE-WITH-HOSTNAME/$myhost/g"
#sedcmd2="s/\/hana\/shared\/sapbits\/51052325/\/hana\/data\/sapbits\/51052383/g"
#sedcmd3="s/root_user=root/root_user=$HANAUSR/g"
#sedcmd4="s/password=AweS0me@PW/password=$HANAPWD/g"
#sedcmd5="s/sid=H10/sid=$HANASID/g"
#sedcmd6="s/number=00/number=$HANANUMBER/g"
##cat hdbinst.cfg | sed $sedcmd | sed $sedcmd2 | sed $sedcmd3 | sed $sedcmd4 | sed $sedcmd5 | sed $sedcmd6 > hdbinst-local.cfg
#cp -f /hana/data/sapbits/hdbinst.cfg /hana/data/sapbits/hdbinst-local.cfg
#sed -i -e $sedcmd -e $sedcmd2 -e $sedcmd3 -e $sedcmd4 -e $sedcmd5 -e $sedcmd6 /hana/data/sapbits/hdbinst-local.cfg
#echo "hana 1.0 prepare end" >> /tmp/parameter.txt
#
#echo "install hana 1.0 start" >> /tmp/parameter.txt
#cd /hana/data/sapbits/51052383/DATA_UNITS/HDB_LCM_LINUX_X86_64
#/hana/data/sapbits/51052383/DATA_UNITS/HDB_LCM_LINUX_X86_64/hdblcm -b --configfile /hana/data/sapbits/hdbinst-local.cfg
#echo "Log file written to '/var/tmp/hdb_H10_hdblcm_install_xxx/hdblcm.log' on host 'saphanaarm'." >> /tmp/parameter.txt
#echo "install hana 1.0 end" >> /tmp/parameter.txt
#
#
#fi
# Reboot so the freshly installed software is picked up on boot.
# NOTE(review): 'shutdown -r 1' waits 1 minute on Linux — confirm the grace
# period is intended rather than an immediate reboot ('shutdown -r now').
shutdown -r 1
#!/bin/sh
# Print every line of a file followed by its character count (newline excluded).
# Usage: script.sh <file>

# Read $1 line by line and emit "<line> <count>" for each.
count_line_chars() {
    # BUG FIX: original used bare 'read line' (mangles backslashes, strips
    # surrounding whitespace), non-portable 'echo -n', and an unquoted
    # redirection '< $name' that breaks on paths containing spaces.
    while IFS= read -r line
    do
        printf '%s ' "$line"
        printf '%s' "$line" | wc -m
    done < "$1"
}

# Guard so the script does nothing (instead of a redirect error) with no args.
if [ $# -ge 1 ]; then
    count_line_chars "$1"
fi
|
def createArray(n):
    """Return a list of the integers 0 .. n-1 (empty when n <= 0)."""
    return list(range(n))
package org.zalando.intellij.swagger.validator.field;
import com.google.common.collect.ImmutableSet;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.psi.PsiElement;
import java.util.List;
import java.util.Set;
import org.zalando.intellij.swagger.completion.field.model.common.Field;
import org.zalando.intellij.swagger.traversal.YamlTraversal;
/**
 * Validates YAML mapping keys against the set of known fields, ignoring
 * vendor extensions ("x-…"), YAML merge keys ("<<") and anchor constructs.
 */
public class UnknownYamlKeyValidator extends UnknownKeyValidator {

    private static final String VENDOR_EXTENSION_PREFIX = "x-";
    private static final String MERGE_KEY = "<<";

    private static final Set<String> IGNORED_KEY_PREFIXES = ImmutableSet.of(VENDOR_EXTENSION_PREFIX);
    private static final Set<String> IGNORED_KEYS = ImmutableSet.of(MERGE_KEY);

    private final YamlTraversal yamlTraversal;

    public UnknownYamlKeyValidator(
        final IntentionAction intentionAction, final YamlTraversal yamlTraversal) {
        super(intentionAction);
        this.yamlTraversal = yamlTraversal;
    }

    /** A key is invalid when none of the available fields carries its name. */
    @Override
    protected boolean isInvalid(final String key, final List<Field> availableKeys) {
        for (final Field candidate : availableKeys) {
            if (candidate.getName().equals(key)) {
                return false;
            }
        }
        return true;
    }

    /** Skip vendor extensions, merge keys, and YAML anchor keys/children. */
    @Override
    protected boolean shouldIgnore(final String key, final PsiElement element) {
        for (final String prefix : IGNORED_KEY_PREFIXES) {
            if (key.startsWith(prefix)) {
                return true;
            }
        }
        if (IGNORED_KEYS.contains(key)) {
            return true;
        }
        return yamlTraversal.isAnchorKey(element) || yamlTraversal.isChildOfAnchorKey(element);
    }
}
|
<reponame>intelrug/nestjs-bunnycdn
import { DynamicModule, Global, Module, Provider, Type } from '@nestjs/common';
import { BunnyCDNOptions } from '@intelrug/bunnycdn';
import {
createBunnyCDNConnection,
getBunnyCDNConnectionToken,
getBunnyCDNOptionsToken,
} from './bunnycdn.utils';
import { BunnyCDNAsyncOptions, BunnyCDNOptionsFactory } from './bunnycdn.interfaces';
@Global()
@Module({})
export class BunnyCDNCoreModule {
  /** Synchronous registration: options value plus an eagerly created connection. */
  static forRoot(options: BunnyCDNOptions, connection?: string): DynamicModule {
    const optionsProvider: Provider = {
      provide: getBunnyCDNOptionsToken(connection),
      useValue: options,
    };
    const connectionProvider: Provider = {
      provide: getBunnyCDNConnectionToken(connection),
      useValue: createBunnyCDNConnection(options),
    };
    return {
      module: BunnyCDNCoreModule,
      providers: [optionsProvider, connectionProvider],
      exports: [optionsProvider, connectionProvider],
    };
  }

  /** Async registration: the connection is built from the resolved options token. */
  public static forRootAsync(options: BunnyCDNAsyncOptions, connection?: string): DynamicModule {
    const connectionProvider: Provider = {
      provide: getBunnyCDNConnectionToken(connection),
      useFactory: (resolved: BunnyCDNOptions) => createBunnyCDNConnection(resolved),
      inject: [getBunnyCDNOptionsToken(connection)],
    };
    return {
      module: BunnyCDNCoreModule,
      imports: options.imports,
      providers: [...this.createAsyncProviders(options, connection), connectionProvider],
      exports: [connectionProvider],
    };
  }

  /** Providers needed to resolve the async options (factory / class / existing). */
  public static createAsyncProviders(options: BunnyCDNAsyncOptions, connection?: string): Provider[] {
    if (!(options.useExisting || options.useFactory || options.useClass)) {
      throw new Error('Invalid configuration. Must provide useFactory, useClass or useExisting');
    }
    if (options.useExisting || options.useFactory) {
      return [this.createAsyncOptionsProvider(options, connection)];
    }
    // useClass: also register the class itself so it can be injected below.
    const useClass = options.useClass as Type<BunnyCDNOptionsFactory>;
    return [this.createAsyncOptionsProvider(options, connection), { provide: useClass, useClass: useClass }];
  }

  /** The single provider bound to the options token. */
  public static createAsyncOptionsProvider(options: BunnyCDNAsyncOptions, connection?: string): Provider {
    if (!(options.useExisting || options.useFactory || options.useClass)) {
      throw new Error('Invalid configuration. Must provide useFactory, useClass or useExisting');
    }
    if (options.useFactory) {
      return {
        provide: getBunnyCDNOptionsToken(connection),
        useFactory: options.useFactory,
        inject: options.inject || [],
      };
    }
    // Class-based: inject the factory instance and ask it for the options.
    const factoryType = (options.useClass || options.useExisting) as Type<BunnyCDNOptionsFactory>;
    return {
      provide: getBunnyCDNOptionsToken(connection),
      useFactory: (optionsFactory: BunnyCDNOptionsFactory): Promise<BunnyCDNOptions> =>
        Promise.resolve(optionsFactory.createBunnyCDNModuleOptions()),
      inject: [factoryType],
    };
  }
}
|
import http from 'http';
import cors from 'cors'
import express, {Request, Response, Router} from 'express';
import {Config} from "./config";
import {Routes} from "../../infraestructure/handler/router/init";
import {Signature} from "./signature";
// Welcome/health handler for the API root.
function index(req: Request, res: Response) {
    const payload = { message: 'welcome api mail!' };
    res.json(payload);
}
// Bootstrap the HTTP server: wire middleware, routes and start listening.
function main() {
    const app = express()
    const server = http.createServer(app)
    const config = Config.getConfiguration()
    const signatures = new Signature().getCertificates()
    const router = Router()

    app.use(express.json())
    // BUG FIX: cors({origin: ['*']}) compares each array entry literally
    // against the request's Origin header, and no browser ever sends
    // Origin: '*' — so every cross-origin request was rejected. The default
    // configuration genuinely allows all origins.
    app.use(cors())

    new Routes(router, config, signatures)
    app.use(router)
    app.get('/', index)

    server.listen(config.port, () => {
        console.log(`server online in: localhost:${config.port}`)
    })
}
main()
|
# Upgrade every outdated pip package, then regenerate requirements.txt with
# floor (>=) constraints instead of exact pins.

# Upgrade all outdated, non-editable packages in one pass.
pip3 list --outdated --format=freeze | grep -v '^\-e' | cut -d = -f 1 | xargs -n1 pip3 install -U

# Snapshot the new versions.
pip3 freeze > requirements.txt

# Relax exact pins (==) to minimum-version constraints (>=).
# NOTE(review): 'sed -i ""' is the BSD/macOS form; GNU sed wants plain 'sed -i'.
find requirements.txt -type f -exec sed -i "" "s/==/>=/g" {} \;

# Re-resolve with the relaxed constraints.
# BUG FIX: was plain 'pip', which may belong to a different interpreter than
# the 'pip3' used above; keep the same tool throughout.
pip3 install -r requirements.txt --upgrade
|
package cn.alumik.parsetree.parser;
import cn.alumik.parsetree.symbol.AbstractSymbol;
import cn.alumik.parsetree.symbol.AbstractTerminalSymbol;
/**
 * An LR parser item: a production with a dot position and a look-ahead
 * terminal. Immutable except for the dot, which only advances via
 * {@link #getNextItem()}.
 */
public class Item {

    /** Position of the parsing dot within the production's right-hand side. */
    private int mDot = 0;

    private final Production mProduction;

    private final AbstractTerminalSymbol mLookAhead;

    public Item(Production production, AbstractTerminalSymbol lookAhead) {
        mProduction = production;
        mLookAhead = lookAhead;
    }

    /** The symbol immediately after the dot. */
    public AbstractSymbol getNextSymbol() {
        return mProduction.to().get(mDot);
    }

    public int getDot() {
        return mDot;
    }

    public Production getProduction() {
        return mProduction;
    }

    public AbstractTerminalSymbol getLookAhead() {
        return mLookAhead;
    }

    /**
     * True while the dot has not reached the end of the right-hand side and
     * the production's first symbol is not the NULL (epsilon) terminal.
     */
    public boolean isNotEnded() {
        if (mDot >= mProduction.to().size()) {
            return false;
        }
        return !mProduction.to().get(0).getName().equals(AbstractTerminalSymbol.NULL);
    }

    /** A copy of this item with the dot advanced one position. */
    public Item getNextItem() {
        final Item advanced = new Item(mProduction, mLookAhead);
        advanced.mDot = mDot + 1;
        return advanced;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Item)) {
            return false;
        }
        final Item other = (Item) obj;
        return other.mDot == mDot
                && other.mProduction.equals(mProduction)
                && other.mLookAhead.equals(mLookAhead);
    }

    @Override
    public int hashCode() {
        return mProduction.hashCode() ^ mDot;
    }

    /** Renders e.g. "A -> b c · d, x" with the dot at position {@code mDot}. */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append(mProduction.from()).append(" ->");
        int i = 0;
        for (; i < mDot; i++) {
            sb.append(" ").append(mProduction.to().get(i));
        }
        sb.append(" ·");
        for (; i < mProduction.to().size(); i++) {
            sb.append(" ").append(mProduction.to().get(i));
        }
        sb.append(", ").append(mLookAhead);
        return sb.toString();
    }
}
|
<filename>src/icons/svg/order.js
import React from 'react';
// Renders the "Order" SVG icon (a clipboard with a check mark).
// NOTE(review): file is icons/svg/order.js but the class is named "Warning";
// presumably copy-pasted from another icon — confirm and rename if so.
export default class Warning extends React.Component {
  render(){
    // width/height/color are supplied by the caller via props.
    const { width, height, color } = this.props;
    return (
      <svg width={width} height={height} viewBox="0 0 140 140" version="1.1" >
        <g id="Page-1" stroke="none" strokeWidth="1" fill="none" fillRule="evenodd">
          <g id="Desktop-HD" transform="translate(-221.000000, -1105.000000)" fill={color} fillRule="nonzero">
            <g id="icon_Order" transform="translate(230.000000, 1105.000000)">
              {/* check mark */}
              <path d="M71.4411765,36.0294118 C69.5882353,34.1764706 66.7058824,34.1764706 64.8529412,36.0294118 L41.5882353,59.0882353 L33.3529412,50.8529412 C31.5,49 28.6176471,49 26.7647059,50.8529412 C24.9117647,52.7058824 24.9117647,55.5882353 26.7647059,57.4411765 L38.2941176,68.9705882 C40.1470588,70.8235294 43.0294118,70.8235294 44.8823529,68.9705882 L71.2352941,42.6176471 C73.0882353,40.7647059 73.0882353,37.6764706 71.4411765,36.0294118 Z" id="Shape"></path>
              {/* rounded-rectangle outline */}
              <path d="M105,11.5294118 C107.676471,11.5294118 109.941176,13.7941176 109.941176,16.4705882 L109.941176,123.529412 C109.941176,126.205882 107.676471,128.470588 105,128.470588 L16.4705882,128.470588 C13.7941176,128.470588 11.5294118,126.205882 11.5294118,123.529412 L11.5294118,16.4705882 C11.5294118,13.7941176 13.7941176,11.5294118 16.4705882,11.5294118 L105,11.5294118 Z M105,0 L16.4705882,0 C7.41176471,0 0,7.41176471 0,16.4705882 L0,123.529412 C0,132.588235 7.41176471,140 16.4705882,140 L105,140 C114.058824,140 121.470588,132.588235 121.470588,123.529412 L121.470588,16.4705882 C121.470588,7.41176471 114.058824,0 105,0 Z" id="Shape"></path>
            </g>
          </g>
        </g>
      </svg>
    )
  }
}
#!/usr/bin/env bash
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2019-10-01 17:18:03 +0100 (Tue, 01 Oct 2019)
#
# https://github.com/harisekhon/bash-tools
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn and optionally send me feedback to help improve or steer this or other code I publish
#
# http://www.linkedin.com/in/harisekhon
#
set -euo pipefail
[ -n "${DEBUG:-}" ] && set -x
srcdir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# shellcheck source=lib/utils.sh
. "$srcdir/lib/utils.sh"

filelist="$(find "${1:-.}" -type f -name '*.json' | sort)"

if [ -z "$filelist" ]; then
    # 'return' succeeds when this script is sourced; fall back to 'exit'
    # when executed directly
    return 0 &>/dev/null ||
    exit 0
fi

section "JSON Syntax Checks"

start_time="$(start_timer)"

if [ -n "${NOSYNTAXCHECK:-}" ]; then
    echo "\$NOSYNTAXCHECK environment variable set, skipping JSON syntax checks"
    echo
elif [ -n "${QUICK:-}" ]; then
    echo "\$QUICK environment variable set, skipping JSON syntax checks"
    echo
else
    if ! command -v jsonlint &>/dev/null; then
        echo "jsonlint not found in \$PATH, not running JSON syntax checks"
        return 0 &>/dev/null || exit 0
    fi
    type -P jsonlint
    printf "version: "
    jsonlint --version || :
    echo

    # find the longest filename so results print in aligned columns
    max_len=0
    for x in $filelist; do
        if [ ${#x} -gt $max_len ]; then
            max_len=${#x}
        fi
    done
    # to account for the semi colon
    # BUG FIX: was '((max_len + 1))', which evaluates the sum but never
    # assigns it back to max_len
    max_len=$((max_len + 1))

    for x in $filelist; do
        isExcluded "$x" && continue
        [[ "$x" =~ multirecord ]] && continue
        # BUG FIX: the width specifier was corrupted ('%-$71,610s'); pad each
        # "<file>:" to the longest filename computed above
        printf "%-${max_len}s " "$x:"
        # temporarily allow failure so we can report per-file status ourselves
        set +eo pipefail
        output="$(jsonlint "$x")"
        # shellcheck disable=SC2181
        if [ $? -eq 0 ]; then
            echo "OK"
        else
            echo "FAILED"
            if [ -z "${QUIET:-}" ]; then
                echo
                echo "$output"
                echo
            fi
            if [ -z "${NOEXIT:-}" ]; then
                return 1 &>/dev/null || exit 1
            fi
        fi
        set -eo pipefail
    done
    time_taken "$start_time"
    section2 "All JSON files passed syntax check"
fi

echo
|
package com.desafio.surittec.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.oauth2.config.annotation.configurers.ClientDetailsServiceConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configuration.AuthorizationServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableAuthorizationServer;
import org.springframework.security.oauth2.config.annotation.web.configurers.AuthorizationServerEndpointsConfigurer;
import org.springframework.security.oauth2.provider.token.TokenStore;
import org.springframework.security.oauth2.provider.token.store.JwtAccessTokenConverter;
import org.springframework.security.oauth2.provider.token.store.JwtTokenStore;
@Configuration
@EnableAuthorizationServer
public class AuthorizationServerConfig extends AuthorizationServerConfigurerAdapter {

    @Autowired
    private AuthenticationManager authenticationManager;

    /**
     * Registers the single in-memory OAuth2 client used by the Angular front
     * end. Access tokens live 30 minutes, refresh tokens 24 hours.
     */
    @Override
    public void configure(ClientDetailsServiceConfigurer clients) throws Exception {
        clients.inMemory()
                .withClient("angular")
                .secret("{noop}angular") // {noop}: plain-text secret, no password encoder applied
                .scopes("read", "write")
                .authorizedGrantTypes("password", "refresh_token")
                .accessTokenValiditySeconds(1800)
                .refreshTokenValiditySeconds(3600 * 24);
    }

    /** Wires the JWT token store/converter and disables refresh-token reuse. */
    @Override
    public void configure(AuthorizationServerEndpointsConfigurer endpoints) throws Exception {
        endpoints
                .tokenStore(tokenStore())
                .accessTokenConverter(accessTokenConverter())
                .reuseRefreshTokens(false)
                .authenticationManager(authenticationManager);
    }

    /** JWT converter signing tokens with a symmetric key. */
    @Bean
    public JwtAccessTokenConverter accessTokenConverter() {
        JwtAccessTokenConverter accessTokenConverter = new JwtAccessTokenConverter();
        // SECURITY NOTE(review): symmetric signing key hard-coded in source;
        // move it to external configuration / secret storage.
        accessTokenConverter.setSigningKey("suritec");
        return accessTokenConverter;
    }

    /** Token store backed entirely by the JWT itself (stateless). */
    @Bean
    public TokenStore tokenStore() {
        return new JwtTokenStore(accessTokenConverter());
    }
}
|
# Print the system identification banner (distribution name/version on most Linuxes).
cat /etc/issue
|
import random
import string
def main(size):
    """Print a random password of ``size`` characters to stdout.

    Uses the OS-backed CSPRNG (``random.SystemRandom``) over an alphabet of
    ASCII letters, digits and a handful of punctuation characters.
    """
    alphabet = string.ascii_letters + string.digits + '!@#$ç%&*-+'
    secure = random.SystemRandom()
    password = ''.join(secure.choice(alphabet) for _ in range(size))
    print(password)
if __name__ == '__main__':
    # Ask once for the password length, then keep generating passwords until
    # the user answers anything other than 'Y'.
    size = input('Write size for password: ')  # fixed typo: was "White size"
    main(int(size))
    while True:
        response = input('Want another password? (Y/N) ')
        if response.upper() == 'Y':
            main(int(size))
        else:
            break
|
package mybatis.test;
import mybatis.bean.RewardOrder;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
/**
* @Author: wangcf
* @Date: 2019/4/8 22:36
*/
public class MyBatisTest {

    // Built once per test in init(); used by every test method.
    private SqlSessionFactory sqlSessionFactory = null;

    @Before
    public void init() throws IOException {
        // 1. Create the SqlSessionFactoryBuilder
        SqlSessionFactoryBuilder sqlSessionFactoryBuilder = new SqlSessionFactoryBuilder();
        // 2. Load the MyBatis SqlMapConfig.xml configuration file
        InputStream inputStream = Resources.getResourceAsStream("mybatis/SqlMapConfig.xml");
        // 3. Build the SqlSessionFactory from the configuration
        sqlSessionFactory = sqlSessionFactoryBuilder.build(inputStream);
    }

    @Test
    public void testQuerRewardOrderList() {
        // 4. Open a SqlSession
        SqlSession sqlSession = sqlSessionFactory.openSession();
        // 5. Run the mapped statement and print every row
        List<Object> rewardOrderList = sqlSession.selectList("queryRewardOrderList");
        for (Object rewardOrder : rewardOrderList) {
            System.out.println(rewardOrder);
        }
        sqlSession.close();
    }

    @Test
    public void testQueryRewardOrderById() {
        SqlSession sqlSession = sqlSessionFactory.openSession();
        int rewardId = 1;
        Object object = sqlSession.selectOne("queryRewardOrderById", rewardId);
        System.out.println(object.toString());
        sqlSession.close();
    }

    @Test
    public void testSaveRewardOrder() {
        SqlSession sqlSession = sqlSessionFactory.openSession();
        RewardOrder rewardOrder = new RewardOrder();
        //rewardOrder.setRewardId("XXX123");
        rewardOrder.setRewardName("打饭");
        rewardOrder.setRewardContent("快去给我打饭吧,饿了!!!");
        // writes are not auto-committed; commit explicitly
        sqlSession.insert("saveRewardOrder", rewardOrder);
        sqlSession.commit();
        sqlSession.close();
    }

    @Test
    public void testUpdateRewardOrder() {
        SqlSession sqlSession = sqlSessionFactory.openSession();
        RewardOrder rewardOrder = new RewardOrder();
        rewardOrder.setRewardId("0");
        rewardOrder.setRewardName("修改好了!!!");
        sqlSession.update("updateRewardOrderById", rewardOrder);
        sqlSession.commit();
        sqlSession.close();
    }

    @Test
    public void testDeleteRewardOrderById() {
        SqlSession sqlSession = sqlSessionFactory.openSession();
        String rewardId = "0";
        sqlSession.delete("deleteRewardOrderById", rewardId);
        sqlSession.commit();
        sqlSession.close();
    }
}
|
package fr.syncrase.ecosyst.aop.crawlers.service.aujardin;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import fr.syncrase.ecosyst.domain.Plante;
/**
 * Crawls aujardin.info's plant catalogue: walks each main section, follows
 * every plant link and collects the extracted plants.
 */
public class AuJardinCrawler {

    // Accumulates every plant extracted during the crawl.
    List<Plante> plantesCrawlees;

    public AuJardinCrawler() {
        plantesCrawlees = new ArrayList<>();
        try {
            Elements sectionsPrincipales = getAllMainSections();
            for (Element section : sectionsPrincipales) {
                Elements listePlantes = getAllPlants(section);
                for (Element lienPlante : listePlantes) {
                    // NOTE(review): extractPlante currently returns null (its
                    // body is commented out), so the list fills with nulls.
                    Plante plante = extractPlante(lienPlante);
                    plantesCrawlees.add(plante);
                }
                break;// TODO remove once extraction of a single plant works
            }
        } catch (IOException e) {
            System.out.println("Erreur");
        }
    }

    /** Fetch a section page and return the links of all plants listed on it. */
    private Elements getAllPlants(Element section) throws IOException {
        Document page;
        page = Jsoup.connect(getValidUrl(section.attr("href"))).get();
        Elements listePlantes = page.select("ul.rubrique li a[href]");
        return listePlantes;
    }

    /** Fetch the catalogue root and return the links of its main sections. */
    private Elements getAllMainSections() throws IOException {
        Document page = Jsoup.connect("https://www.aujardin.info/plantes/").get();
        Elements sectionsPrincipales = page.select("div.items a[href]");
        return sectionsPrincipales;
    }

    /** Extract a plant from its link. Not implemented yet — always returns null. */
    private Plante extractPlante(Element lienPlante) throws IOException {
        // FichePlante fp = new FichePlante(Jsoup.connect(getValidUrl(lienPlante.attr("href"))).get());
        // Document page = ;
        // return fp.getPlante();
        return null;
    }

    /**
     * Rebuild an absolute https URL from a scraped href.
     * NOTE(review): assumes the href contains "www"; throws
     * ArrayIndexOutOfBoundsException otherwise — confirm upstream hrefs.
     */
    private static String getValidUrl(String scrappedUrl) {
        return "https://www" + scrappedUrl.split("www")[1];
    }

    public List<Plante> getPlantesCrawlees() {
        return plantesCrawlees;
    }
}
|
package com.telenav.osv.manager.network;
import android.content.Context;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Process;
import com.android.volley.ExecutorDelivery;
import com.android.volley.Network;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.toolbox.BasicNetwork;
import com.android.volley.toolbox.DiskBasedCache;
import com.android.volley.toolbox.HttpStack;
import com.android.volley.toolbox.HurlStack;
import com.telenav.osv.application.ApplicationPreferences;
import com.telenav.osv.application.KVApplication;
import com.telenav.osv.application.PreferenceTypes;
import com.telenav.osv.common.Injection;
import com.telenav.osv.network.endpoint.FactoryServerEndpointUrl;
import com.telenav.osv.utils.BackgroundThreadPool;
import java.io.File;
/**
 * abstract networking class
 * Created by Kalman on 02/05/2017.
 */
public abstract class NetworkManager {

    /** Timeout, in milliseconds, applied to upload requests by subclasses. */
    static final int UPLOAD_REQUEST_TIMEOUT = 30000;

    private static final String TAG = "NetworkManager";

    /**
     * context used for operations, should use application context
     */
    final Context mContext;

    final ApplicationPreferences appPrefs;

    protected FactoryServerEndpointUrl factoryServerEndpointUrl;

    /**
     * request queue for operations
     * adding a request here will be automatically run in the next available time
     */
    RequestQueue mQueue;

    String mAccessToken;

    /** Background thread whose looper delivers Volley responses. */
    private HandlerThread mQueueThread;

    private Handler backgroundHandler;

    NetworkManager(Context context) {
        this.mContext = context;
        mQueueThread = new HandlerThread("QueueThread", Process.THREAD_PRIORITY_BACKGROUND);
        mQueueThread.start();
        appPrefs = ((KVApplication) mContext.getApplicationContext()).getAppPrefs();
        //ToDo: remove the injection from inside the constructor to a parameter
        factoryServerEndpointUrl = Injection.provideNetworkFactoryUrl(appPrefs);
        mQueue = newRequestQueue(mContext, 4);
    }

    /**
     * Creates a default instance of the worker pool and calls {@link RequestQueue#start()} on it.
     * @param context A {@link Context} to use for creating the cache dir.
     * @param nrOfThreads number of dispatcher threads for the queue
     * @return A started {@link RequestQueue} instance.
     */
    RequestQueue newRequestQueue(Context context, int nrOfThreads) {
        File cacheDir = new File(context.getCacheDir(), "volley");
        HttpStack stack = new HurlStack();
        Network network = new BasicNetwork(stack);
        if (mQueueThread == null) {
            // destroy() quits and nulls the thread; recreate it on demand.
            mQueueThread = new HandlerThread("QueueThread", Process.THREAD_PRIORITY_BACKGROUND);
            // BUG FIX: the recreated thread was never started, so
            // getLooper() below returned null and the Handler constructor
            // would throw.
            mQueueThread.start();
        }
        backgroundHandler = new Handler(mQueueThread.getLooper());
        RequestQueue queue = new RequestQueue(new DiskBasedCache(cacheDir), network, nrOfThreads, new ExecutorDelivery(backgroundHandler));
        queue.start();
        return queue;
    }

    /** Run work on the shared background thread pool. */
    void runInBackground(Runnable runnable) {
        BackgroundThreadPool.post(runnable);
    }

    /** Lazily load the cached access token from preferences. */
    String getAccessToken() {
        if (mAccessToken == null) {
            mAccessToken = appPrefs.getStringPreference(PreferenceTypes.K_ACCESS_TOKEN);
        }
        return mAccessToken;
    }

    /** Cancel all pending requests and, shortly after, tear down the queue thread. */
    void destroy() {
        mQueue.cancelAll(new RequestQueue.RequestFilter() {
            @Override
            public boolean apply(Request<?> request) {
                return true;
            }
        });
        // Delay shutdown slightly so in-flight cancellations can be delivered.
        backgroundHandler.postDelayed(new Runnable() {
            @Override
            public void run() {
                try {
                    HandlerThread thread = mQueueThread;
                    mQueueThread = null;
                    thread.quit();
                } catch (Exception ignored) {
                }
            }
        }, 300);
    }
}
|
<reponame>vanhullc/onAir
import { ActionReducerMap } from '@ngrx/store';
import { UserState, userInitialState } from './user/user.model';
import { UsersState, usersInitialState } from './users/users.model';
import { RadioState, radioInitialState } from './radio/radio.model';
import { RadiosState, radiosInitialState } from './radios/radios.model';
import { userReducer } from './user/user.reducer';
import { usersReducer } from './users/users.reducer';
import { radioReducer } from './radio/radio.reducer';
import { radiosReducer } from './radios/radios.reducer';
// Root store shape: one slice per feature area.
export interface State {
  user: UserState,
  users: UsersState,
  radio: RadioState,
  radios: RadiosState
}

// Maps each state slice to the reducer that owns it.
export const reducers: ActionReducerMap<State> = {
  user: userReducer,
  users: usersReducer,
  radio: radioReducer,
  radios: radiosReducer
}

// Aggregated initial value for the whole store, built from each slice's
// initial state.
export const reducerInitialState: State = {
  user: userInitialState,
  users: usersInitialState,
  radio: radioInitialState,
  radios: radiosInitialState
}
package com.my.blog.website.service.impl;
import com.github.pagehelper.PageHelper;
import com.my.blog.website.dao.MetaVoMapper;
import com.my.blog.website.modal.Vo.CategoryVo;
import com.my.blog.website.service.CategoryService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
@Service
public class CategoryServiceImp implements CategoryService {

    @Autowired
    private MetaVoMapper metaVoMapper;

    /**
     * Home page: list article categories, 9 per page.
     *
     * @param desc        "0" to list every category; any other value selects a
     *                    single category by id (not implemented yet)
     * @param currentPage page index handed to PageHelper
     * @return the requested page of categories, or null for the unimplemented branch
     */
    @Transactional
    @Override
    public List<CategoryVo> showCategory(String desc, int currentPage) {
        // BUG FIX: was desc.equals("0"), which throws NullPointerException
        // when desc is null; the constant-first form is null-safe.
        if ("0".equals(desc)) {
            // list all categories, 9 per page
            PageHelper.startPage(currentPage, 9);
            return metaVoMapper.selectAll();
        } else {
            // TODO look up the single category matching the given id
            return null;
        }
    }
}
|
<filename>src/example-components/MarketingPricingTables/MarketingPricingTables5/index.js
import React from 'react';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { Grid, Container, Card, Button } from '@material-ui/core';
// Marketing pricing section #5: a static three-tier pricing table
// (Developer $99 / Designer $199 / Enterprise $599). Purely presentational —
// no state and no event handlers; the buttons are placeholders.
export default function LivePreviewExample() {
  return (
    <>
      <div className="py-4">
        <Container>
          {/* Section heading */}
          <div className="pb-5 text-center">
            <h1 className="display-4 text-black mb-2 font-weight-bold">
              Plans & pricing
            </h1>
            <p className="font-size-lg text-black-50">
              View any of the 5+ live previews we've set up to learn why
              this dashboard template is the last one you'll ever need!
            </p>
          </div>
          <Grid container spacing={6}>
            {/* Developer tier — outline button, premium support excluded */}
            <Grid item lg={4}>
              <Card className="shadow-xxl mb-5 mb-lg-0">
                <div className="card-header text-center d-block py-4 bg-secondary">
                  <div className="my-4 bg-first text-white d-inline-block shadow-xxl text-uppercase font-weight-bold d-40 w-auto px-4 font-size-xs rounded-pill">
                    Developer
                  </div>
                  <div className="font-weight-bold line-height-1 text-second text-uppercase display-2">
                    <small>$</small>99
                  </div>
                  <div className="font-size-md text-black-50">
                    monthly fee, for a single user
                  </div>
                  <div className="mt-4 pb-4">
                    <Button
                      className="rounded-sm font-weight-bold px-4 btn-outline-second"
                      variant="text">
                      Purchase now
                    </Button>
                  </div>
                </div>
                <div className="divider" />
                <div className="px-5 py-4">
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-success text-success mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'check']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-second opacity-7">Unlimited Tasks</div>
                  </div>
                  <div className="divider opacity-8 my-1 mx-2" />
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-success text-success mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'check']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-second opacity-7">Unlimited Teams</div>
                  </div>
                  <div className="divider opacity-8 my-1 mx-2" />
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-success text-success mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'check']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-second opacity-7">
                      All Integrations
                    </div>
                  </div>
                  <div className="divider opacity-8 my-1 mx-2" />
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-danger text-danger mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'times']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-danger">Premium support</div>
                  </div>
                </div>
              </Card>
            </Grid>
            {/* Designer tier — solid button, premium support excluded */}
            <Grid item lg={4}>
              <Card className="shadow-xxl mb-5 mb-lg-0">
                <div className="card-header text-center d-block py-4 bg-secondary">
                  <div className="my-4 bg-success text-white d-inline-block shadow-xxl text-uppercase font-weight-bold d-40 w-auto px-4 font-size-xs rounded-pill">
                    Designer
                  </div>
                  <div className="font-weight-bold line-height-1 text-second text-uppercase display-2">
                    <small>$</small>199
                  </div>
                  <div className="font-size-md text-black-50">
                    monthly fee, for a single user
                  </div>
                  <div className="mt-4 pb-4">
                    <Button className="rounded-sm font-weight-bold py-3 px-5 btn-second">
                      Purchase now
                    </Button>
                  </div>
                </div>
                <div className="divider" />
                <div className="px-5 py-4">
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-success text-success mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'check']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-second opacity-7">Unlimited Tasks</div>
                  </div>
                  <div className="divider opacity-8 my-1 mx-2" />
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-success text-success mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'check']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-second opacity-7">Unlimited Teams</div>
                  </div>
                  <div className="divider opacity-8 my-1 mx-2" />
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-success text-success mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'check']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-second opacity-7">
                      All Integrations
                    </div>
                  </div>
                  <div className="divider opacity-8 my-1 mx-2" />
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-danger text-danger mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'times']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-danger">Premium support</div>
                  </div>
                </div>
              </Card>
            </Grid>
            {/* Enterprise tier — outline button, premium support included */}
            <Grid item lg={4}>
              <Card className="shadow-xxl mb-5 mb-lg-0">
                <div className="card-header text-center d-block py-4 bg-secondary">
                  <div className="my-4 bg-danger text-white d-inline-block shadow-xxl text-uppercase font-weight-bold d-40 w-auto px-4 font-size-xs rounded-pill">
                    Enterprise
                  </div>
                  <div className="font-weight-bold line-height-1 text-second text-uppercase display-2">
                    <small>$</small>599
                  </div>
                  <div className="font-size-md text-black-50">
                    monthly fee, for a single user
                  </div>
                  <div className="mt-4 pb-4">
                    <Button
                      className="rounded-sm font-weight-bold px-4 btn-outline-second"
                      variant="text">
                      Purchase now
                    </Button>
                  </div>
                </div>
                <div className="divider" />
                <div className="px-5 py-4">
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-success text-success mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'check']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-second opacity-7">Unlimited Tasks</div>
                  </div>
                  <div className="divider opacity-8 my-1 mx-2" />
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-success text-success mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'check']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-second opacity-7">Unlimited Teams</div>
                  </div>
                  <div className="divider opacity-8 my-1 mx-2" />
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-success text-success mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'check']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-second opacity-7">
                      All Integrations
                    </div>
                  </div>
                  <div className="divider opacity-8 my-1 mx-2" />
                  <div className="d-flex align-items-center py-2">
                    <div className="d-30 rounded-circle btn-icon bg-neutral-success text-success mr-3">
                      <FontAwesomeIcon
                        icon={['fas', 'check']}
                        className="font-size-xs"
                      />
                    </div>
                    <div className="text-second opacity-7">Premium support</div>
                  </div>
                </div>
              </Card>
            </Grid>
          </Grid>
        </Container>
      </div>
    </>
  );
}
|
<reponame>marionFlx/community
module.exports = {
chainWebpack: config => {
// fork-ts-checker is sadly ignoring the Vue shim
// and throws incorrect errors
// we disable it as it is just a nice to have to speed up the build
config.plugins.delete('fork-ts-checker');
config.module
.rule('ts')
.use('ts-loader')
.tap(options => {
return { ...options, transpileOnly: false };
});
}
};
|
#! /usr/bin/env python3
# -*-coding:UTF-8 -*-
# @Time : 2019/01/04 16:19:54
# @Author : che
# @Email : <EMAIL>
import argparse
# Build the CLI: zero or more positional filenames plus search options.
parser = argparse.ArgumentParser(description='Search some files')

# Positional: the files to search (may be empty).
parser.add_argument(dest='filenames', metavar='filename', nargs='*')

# -p/--pat is required and repeatable; each use appends to args.patterns.
parser.add_argument('-p', '--pat', metavar='pattern', required=True,
dest='patterns', action='append', help='text pattern to \
search for')

# Boolean flag: True when -v is present.
parser.add_argument('-v', dest='verbose', action='store_true', help='verbose \
mode')

# -o takes a value: the output file path.
parser.add_argument('-o', dest='outfile', action='store', help='output file')

# --speed is restricted to the listed choices, defaulting to 'slow'.
parser.add_argument('--speed', dest='speed', action='store', choices={'slow', \
'fast'}, default='slow', help='search speed')

args = parser.parse_args()

# Echo every parsed value for demonstration.
print(args.filenames)
print(args.patterns)
print(args.verbose)
print(args.outfile)
print(args.speed)
|
<gh_stars>0
package proptics.internal
/** @tparam S the source of a [[proptics.Prism_]]
  * @tparam T the modified source of a [[proptics.Prism_]]
  * @tparam A the focus of a [[proptics.Prism_]]
  */
private[proptics] trait PrismFunctions[S, T, A] {
  /** Match the focus `A` inside `s`, or return the structure as `T` when the focus is absent. */
  def viewOrModify(s: S): Either[T, A]
}
|
"""Tests for predictions
To run tests:
pytest .
These tests depend on a postgres db. Either you can specify the URL of an
empty db with TEST_DATABASE_URL, or you can let this script delete (if exists)
and create a db called postgres:///predictionstest.
"""
import os
import pytest
import datetime
import subprocess
# One hour, used by tests that create contracts closing "1 hour" from now.
HOUR = datetime.timedelta(seconds=3600)

# Safety check: refuse to run the test suite against a pre-configured
# (possibly production) database.
assert 'DATABASE_URL' not in os.environ

if 'TEST_DATABASE_URL' in os.environ:
    os.environ['DATABASE_URL'] = os.environ['TEST_DATABASE_URL']
else:
    # No db supplied, make our own. Drop and recreate.
    _localdb = 'predictionstest'
    command = ['dropdb', _localdb]
    pipes = subprocess.Popen(command, stderr=subprocess.PIPE)
    stdout, stderr = pipes.communicate()
    # dropdb failing because the db doesn't exist yet is fine; anything else
    # is a real error.
    if pipes.returncode != 0 and b'does not exist' not in stderr:
        raise Exception('"%s" failed with stderr=%s' % (
            ' '.join(command), stderr.strip()))
    subprocess.check_call(['createdb', _localdb])
    os.environ['DATABASE_URL'] = 'postgres:///%s' % _localdb

# Imported only after DATABASE_URL is set, since app reads it at import time.
import app

db = app.db
db.create_all()
@pytest.fixture
def s():
    """Yield a db session; roll back and close it after each test so tests
    never leak state into one another."""
    yield db.session
    db.session.rollback()
    db.session.close()
def run(s, command, *args):
    """Execute app command `command(s, user, *args)` as the default 'test'
    user and return its output string."""
    try:
        user = app.lookup_or_create_user(s, 'test')
        out = command(s, user, *args)
    except Exception:
        # Sessions are also rolled back after each test, but we don't expect
        # failed commands to have any effect on the db (normally handle_request
        # deals with this).
        s.rollback()
        raise
    return out
def run_error(s, error, command, *args):
    """Assert that running `command` raises PredictionsError whose message
    contains the substring `error`."""
    with pytest.raises(app.PredictionsError) as e:
        run(s, command, *args)
    assert error in str(e.value)
def test_create(s):
    # Happy path, then each validation failure: duplicate name, percentage
    # out of (0, 100), and a close time in the past.
    out = run(s, app.create, 'test-contract1', 'terms', '1 hour', '.5')
    assert 'Created contract' in out
    run_error(s, 'already exists',
              app.create, 'test-contract1', 'terms', '1 hour', '.5')
    run_error(s, 'percentage >= 100%: 50',
              app.create, 'test-contract2', 'terms', '1 hour', '50')
    run_error(s, 'percentage <= 0%: -1',
              app.create, 'test-contract2', 'terms', '1 hour', '-1')
    run_error(s, 'closed at',
              app.create, 'test-contract2', 'terms', '-1s', '.5')
def test_help(s):
    # Help output must point users at the more_help command.
    out = run(s, app.help)
    assert '/predict more_help' in out
def test_more_help(s):
    # Extended help must mention the list_resolved command.
    out = run(s, app.more_help)
    assert '/predict list_resolved' in out
def test_list(s):
    """Active, cancelled, and resolved contracts each appear only in their
    corresponding list command."""
    out = run(s, app.list)
    assert 'no active contracts' in out
    run(s, app.create, 'test-contract1', 'terms', '1 hour', '.5')
    run(s, app.create, 'test-contract2', 'terms', '1 hour', '.5')
    run(s, app.create, 'test-contract3', 'terms', '1 hour', '.5')
    run(s, app.create, 'test-contract4', 'terms', '1 hour', '.5')
    run(s, app.create, 'test-contract5', 'terms', '1 hour', '.5')
    run(s, app.create, 'test-contract6', 'terms', '1 hour', '.5')
    run(s, app.cancel, 'test-contract3')
    run(s, app.cancel, 'test-contract4')
    run(s, app.resolve, 'test-contract5', 'true')
    run(s, app.resolve, 'test-contract6', 'false')
    # Exact-match: only the still-active contracts, one per line.
    assert '''\
test-contract1
test-contract2''' == run(s, app.list)
    assert '''\
test-contract3
test-contract4''' == run(s, app.list_cancelled)
    assert '''\
test-contract5
test-contract6''' == run(s, app.list_resolved)
def test_list_active(s):
    """All active contracts show up in /predict list.

    Renamed from a second ``def test_list``: the duplicate name shadowed the
    earlier, more thorough test_list above, so only one of the two ever ran
    under pytest.
    """
    out = run(s, app.list)
    assert 'no active contracts' in out
    run(s, app.create, 'test-contract1', 'terms', '1 hour', '.5')
    run(s, app.create, 'test-contract2', 'terms', '1 hour', '.5')
    run(s, app.create, 'test-contract3', 'terms', '1 hour', '.5')
    out = run(s, app.list)
    assert '''\
test-contract1
test-contract2
test-contract3''' == out
def test_predict(s):
    # Predicting requires an existing, unresolved contract; both decimal and
    # percent forms are accepted, and re-predicting is allowed.
    run_error(s, 'unknown contract',
              app.predict, 'test-contract1', '.6')
    run(s, app.create, 'test-contract1', 'terms', '1 hour', '.5')
    run(s, app.create, 'test-contract2', 'terms', '1 hour', '.5')
    run(s, app.predict, 'test-contract1', '.6')
    run(s, app.predict, 'test-contract1', '.7')
    run(s, app.predict, 'test-contract2', '60%')
    run(s, app.resolve, 'test-contract1', 'false')
    run_error(s, 'already resolved',
              app.predict, 'test-contract1', '1%')
    # skipping parse-float tests and closed-contract tests, because test_create
    # already covers these.
def test_cancel(s):
    """Cancelling: unknown contract, double-cancel, and permissions.

    Renamed from ``test_resolve``: this function exercises app.cancel, and
    its old name collided with the real test_resolve below, so one of the two
    was silently skipped by pytest.
    """
    run_error(s, 'unknown contract',
              app.cancel, 'test-contract1')
    run(s, app.create, 'test-contract1', 'terms', '1 hour', '.5')
    run(s, app.cancel, 'test-contract1')
    run_error(s, 'already cancelled',
              app.cancel, 'test-contract1')
    run(s, app.create, 'test-contract2', 'terms', '1 hour', '.5')
    # Only the creating user may resolve a contract.
    with pytest.raises(app.PredictionsError) as e:
        app.resolve(s, app.lookup_or_create_user(s, 'test2'),
                    'test-contract2')
    assert 'Only test can resolve test-contract2' in str(e.value)
def test_resolve(s):
    # Resolving: unknown contract, double-resolve, invalid outcome string,
    # and the only-creator-may-resolve rule.
    run_error(s, 'unknown contract',
              app.resolve, 'test-contract1', 'true')
    run(s, app.create, 'test-contract1', 'terms', '1 hour', '.5')
    run(s, app.resolve, 'test-contract1', 'true')
    run_error(s, 'already resolved',
              app.resolve, 'test-contract1', 'true')
    run(s, app.create, 'test-contract2', 'terms', '1 hour', '.5')
    run_error(s, 'Predictions must be resolved to "true" or "false"',
              app.resolve, 'test-contract2', 'cabbage')
    run(s, app.create, 'test-contract3', 'terms', '1 hour', '.5')
    with pytest.raises(app.PredictionsError) as e:
        app.resolve(s, app.lookup_or_create_user(s, 'test2'),
                    'test-contract3', 'true')
    assert 'Only test can resolve test-contract3' in str(e.value)
def test_show(s):
    """show displays terms, status, every prediction, and (once resolved)
    per-user scores; cancelled contracts have no scores."""
    run_error(s, 'unknown contract',
              app.show, 'test-contract1')
    run(s, app.create, 'test-contract1', 'terms', '1 hour', '.5')
    out = run(s, app.show, 'test-contract1')
    assert 'terms (Unresolved)' in out
    assert 'Closes' in out
    user1 = app.lookup_or_create_user(s, 'user1')
    user2 = app.lookup_or_create_user(s, 'user2')
    app.predict(s, user1, 'test-contract1', '.6')
    app.predict(s, user2, 'test-contract1', '.8')
    app.predict(s, user1, 'test-contract1', '.4')
    app.predict(s, user1, 'test-contract1', '.2')
    app.predict(s, user2, 'test-contract1', '.1')
    app.predict(s, user1, 'test-contract1', '.0001')
    run(s, app.resolve, 'test-contract1', 'false')
    out = run(s, app.show, 'test-contract1')
    assert 'terms (Resolved False)' in out
    assert 'Was to close' in out
    # Every prediction appears, including the creator's initial 50%.
    assert '50.00% test (' in out
    assert '60.00% user1 (' in out
    assert '80.00% user2 (' in out
    assert '40.00% user1 (' in out
    assert '20.00% user1 (' in out
    assert '10.00% user2 (' in out
    assert '0.01% user1 (' in out
    assert 'scores:' in out
    assert 'user1: 126.84' in out
    assert 'user2: -57.54' in out
    run(s, app.cancel, 'test-contract1')
    out = run(s, app.show, 'test-contract1')
    assert 'scores' not in out
    assert 'Was to close' in out
    assert 'Cancelled' in out
    # User 'test' doesn't get lots of points for setting the house odds at 1%
    # and then immediately predicting 99%.
    run(s, app.create, 'test-contract2', 'terms', '1 hour', '.01')
    run(s, app.predict, 'test-contract2', '.99')
    app.predict(s, user1, 'test-contract2', '.9')
    run(s, app.predict, 'test-contract2', '.99')
    run(s, app.resolve, 'test-contract2', 'true')
    out = run(s, app.show, 'test-contract2')
    assert 'test: 9.53' in out
    assert 'user1: -9.53' in out
def test_dt_to_string():
    # Human-readable relative timestamps round to the largest unit; the +5s
    # offsets guard against the clock ticking during the test.
    assert app.dt_to_string(
        app.now() + datetime.timedelta(seconds=60*60*24+5)) == '1d from now'
    assert app.dt_to_string(
        app.now() - datetime.timedelta(seconds=60*60*24+5)) == '1d ago'
    assert app.dt_to_string(
        app.now() + datetime.timedelta(seconds=60*60+5)) == '1hr from now'
    assert app.dt_to_string(
        app.now() - datetime.timedelta(seconds=60*60+5)) == '1hr ago'
    assert app.dt_to_string(
        app.now() + datetime.timedelta(seconds=60*34+5)) == '34min from now'
    assert app.dt_to_string(
        app.now() - datetime.timedelta(seconds=60*34+5)) == '34min ago'
|
<gh_stars>1-10
package br.com.swconsultoria.nfe.util;
import br.com.swconsultoria.nfe.Assinar;
import br.com.swconsultoria.nfe.dom.ConfiguracoesNfe;
import br.com.swconsultoria.nfe.dom.Evento;
import br.com.swconsultoria.nfe.dom.enuns.AssinaturaEnum;
import br.com.swconsultoria.nfe.dom.enuns.EventosEnum;
import br.com.swconsultoria.nfe.exception.NfeException;
import br.com.swconsultoria.nfe.schema.envEpec.*;
import javax.xml.bind.JAXBException;
import java.time.ZoneId;
import java.util.Collections;
import java.util.List;
/**
 * Helpers for building and signing EPEC (contingency issuance) NF-e event batches.
 *
 * @author <NAME> - <EMAIL>
 * Data: 02/03/2019 - 22:51
 */
public class EpecUtil {

    /**
     * Builds the EPEC event batch for a single event, with an explicit time zone.
     *
     * @param epec         the EPEC event to send
     * @param configuracao NF-e configuration
     * @param zoneId       time zone used to format the event timestamps
     * @return the event batch envelope
     * @throws NfeException
     */
    public static TEnvEvento montaEpec(Evento epec, ConfiguracoesNfe configuracao, ZoneId zoneId) throws NfeException {
        return montaEpec(Collections.singletonList(epec),configuracao,zoneId);
    }

    /**
     * Builds the EPEC event batch for a single event, using the default time zone.
     *
     * @param epec         the EPEC event to send
     * @param configuracao NF-e configuration
     * @return the event batch envelope
     * @throws NfeException
     */
    public static TEnvEvento montaEpec(Evento epec, ConfiguracoesNfe configuracao) throws NfeException {
        return montaEpec(Collections.singletonList(epec),configuracao);
    }

    /**
     * Builds the EPEC event batch for a list of events, using the default time zone.
     *
     * @param listaEpec    the EPEC events to send (max 20)
     * @param configuracao NF-e configuration
     * @return the event batch envelope
     * @throws NfeException
     */
    public static TEnvEvento montaEpec(List<Evento> listaEpec, ConfiguracoesNfe configuracao) throws NfeException {
        return montaEpec(listaEpec,configuracao,null);
    }

    /**
     * Builds the EPEC event batch.
     *
     * @param listaEpec    the EPEC events to send (max 20 per batch)
     * @param configuracao NF-e configuration
     * @param zoneId       time zone used to format the event timestamps (may be null)
     * @return the event batch envelope
     * @throws NfeException if more than 20 events are supplied
     */
    public static TEnvEvento montaEpec(List<Evento> listaEpec, ConfiguracoesNfe configuracao, ZoneId zoneId) throws NfeException {

        // SEFAZ limits a batch to 20 events.
        if (listaEpec.size() > 20) {
            throw new NfeException("Podem ser enviados no máximo 20 eventos no Lote.");
        }

        TEnvEvento enviEvento = new TEnvEvento();
        enviEvento.setVersao(ConstantesUtil.VERSAO.EVENTO_EPEC);
        enviEvento.setIdLote("1");

        listaEpec.forEach(epec -> {
            // Event id: "ID" + event type code + access key + 2-digit sequence.
            String id = "ID" + EventosEnum.EPEC.getCodigo() + epec.getChave() + "01";

            TEvento eventoEpec = new TEvento();
            eventoEpec.setVersao(ConstantesUtil.VERSAO.EVENTO_EPEC);

            TEvento.InfEvento infoEvento = new TEvento.InfEvento();
            infoEvento.setId(id);
            infoEvento.setCOrgao(String.valueOf(configuracao.getEstado().getCodigoUF()));
            infoEvento.setTpAmb(configuracao.getAmbiente().getCodigo());
            infoEvento.setCPF(epec.getCpf());
            infoEvento.setCNPJ(epec.getCnpj());
            infoEvento.setChNFe(epec.getChave());
            infoEvento.setDhEvento(XmlNfeUtil.dataNfe(epec.getDataEvento(),zoneId));
            infoEvento.setTpEvento(EventosEnum.EPEC.getCodigo());
            infoEvento.setNSeqEvento("1");
            infoEvento.setVerEvento(ConstantesUtil.VERSAO.EVENTO_EPEC);

            // Event payload: issuer data plus a summary of the destination.
            TEvento.InfEvento.DetEvento detEvento = new TEvento.InfEvento.DetEvento();
            detEvento.setVersao(ConstantesUtil.VERSAO.EVENTO_EPEC);
            detEvento.setDescEvento("EPEC");
            detEvento.setCOrgaoAutor(configuracao.getEstado().getCodigoUF());
            detEvento.setTpAutor("1");
            detEvento.setVerAplic("1.0.0");
            detEvento.setDhEmi(XmlNfeUtil.dataNfe(epec.getDataEvento(),zoneId));
            detEvento.setTpNF(epec.getEventoEpec().getTipoNF());
            detEvento.setIE(epec.getEventoEpec().getIeEmitente());

            TEvento.InfEvento.DetEvento.Dest dest = new TEvento.InfEvento.DetEvento.Dest();
            dest.setUF(TUf.valueOf(epec.getEventoEpec().getEstadoDestinatario().toString()));
            dest.setCNPJ(epec.getEventoEpec().getCnpjDestinatario());
            dest.setCPF(epec.getEventoEpec().getCpfDestinatario());
            dest.setIE(epec.getEventoEpec().getIeDestinatario());
            dest.setVNF(epec.getEventoEpec().getvNF());
            dest.setVICMS(epec.getEventoEpec().getvICMS());
            dest.setVST(epec.getEventoEpec().getvST());

            detEvento.setDest(dest);
            infoEvento.setDetEvento(detEvento);
            eventoEpec.setInfEvento(infoEvento);
            enviEvento.getEvento().add(eventoEpec);
        });
        return enviEvento;
    }

    /**
     * Creates the ProcEvento XML: signs the sent envelope and merges it with
     * the SEFAZ response. (Original comment said "CCe"; this utility handles
     * EPEC events.)
     *
     * @param config     NF-e configuration
     * @param enviEvento the event envelope that was sent
     * @param retorno    the SEFAZ return for that envelope
     * @return the ProcEvento XML string
     * @throws JAXBException
     * @throws NfeException
     */
    public static String criaProcEventoEpec(ConfiguracoesNfe config, TEnvEvento enviEvento, TRetEnvEvento retorno) throws JAXBException, NfeException {
        String xml = XmlNfeUtil.objectToXml(enviEvento);
        // Strip the JAXB-injected signature namespace and re-add the NF-e
        // default namespace expected by SEFAZ before signing.
        xml = xml.replaceAll(" xmlns:ns2=\"http://www.w3.org/2000/09/xmldsig#\"", "");
        xml = xml.replaceAll("<evento v", "<evento xmlns=\"http://www.portalfiscal.inf.br/nfe\" v");

        String assinado = Assinar.assinaNfe(ConfiguracoesUtil.iniciaConfiguracoes(config), xml, AssinaturaEnum.EVENTO);

        TProcEvento procEvento = new TProcEvento();
        procEvento.setEvento(XmlNfeUtil.xmlToObject(assinado, TEnvEvento.class).getEvento().get(0));
        procEvento.setRetEvento(retorno.getRetEvento().get(0));
        procEvento.setVersao(ConstantesUtil.VERSAO.EVENTO_EPEC);

        return XmlNfeUtil.objectToXml(procEvento);
    }
}
|
<filename>lang/py/cookbook/v2/source/cb2_5_4_sol_1.py
class hist(dict):
    """Histogram: a dict mapping each item to its occurrence count."""

    def add(self, item, increment=1):
        """Add `increment` to the count stored for `item` (default 1)."""
        self[item] = self.get(item, 0) + increment

    def counts(self, reverse=False):
        """Return the keys ordered by their counts, ascending by default.

        Ties are broken by comparing the keys themselves.
        """
        ordered = sorted((count, key) for key, count in self.items())
        if reverse:
            ordered.reverse()
        return [key for _, key in ordered]
|
import { gql } from '@apollo/client';
import { RATE_LIMIT } from './fragment';
// GraphQL query for the current API rate-limit status, built on the shared
// RateLimit fragment imported from ./fragment.
export const GET_RATE_LIMIT = gql`
  ${RATE_LIMIT}
  query {
    ...RateLimit
  }
`;
|
def convertToCapitalize(sentence):
    """Capitalize every whitespace-separated word in `sentence`.

    Fix: the original `def` line was missing the trailing colon, which is a
    SyntaxError in Python.

    Words are re-joined with single spaces, so runs of whitespace collapse
    (a consequence of str.split() with no arguments).
    """
    words = sentence.split()
    capitalized = [word.capitalize() for word in words]
    return " ".join(capitalized)
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2019.2 (64-bit)
#
# Filename : mb_design.sh
# Simulator : Xilinx Vivado Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Wed May 27 17:33:57 +0100 2020
# SW Build 2708876 on Wed Nov 6 21:40:23 MST 2019
#
# Copyright 1986-2019 Xilinx, Inc. All Rights Reserved.
#
# usage: mb_design.sh [-help]
# usage: mb_design.sh [-lib_map_path]
# usage: mb_design.sh [-noclean_files]
# usage: mb_design.sh [-reset_run]
#
#*********************************************************************************************************
# Command line options
# Boost headers bundled with the Vivado install (used by xsim's compiled code).
xv_boost_lib_path=C:/Xilinx/Vivado/2019.2/tps/boost_1_64_0
# --relax loosens strict LRM checking during HDL compilation.
xvlog_opts="--relax"
xvhdl_opts="--relax"

# Script info
echo -e "mb_design.sh - Script generated by export_simulation (Vivado v2019.2 (64-bit)-id)\n"
# Main steps
# Top-level driver: validate CLI args, prepare xsim.ini, then run the
# compile -> elaborate -> simulate flow.
run()
{
  check_args $# $1
  setup $1 $2
  compile
  elaborate
  simulate
}
# RUN_STEP: <compile>
# Compile the Verilog and VHDL project files, mirroring output to compile.log.
compile()
{
  # Compile design files
  xvlog $xvlog_opts -prj vlog.prj 2>&1 | tee compile.log
  # Fix: append (-a) so the VHDL stage does not clobber the Verilog log
  # written just above; Vivado-exported scripts use tee -a here.
  xvhdl $xvhdl_opts -prj vhdl.prj 2>&1 | tee -a compile.log
}
# RUN_STEP: <elaborate>
# Link the design against the listed precompiled IP libraries and produce the
# 'mb_design' simulation snapshot (options on one line as generated by Vivado).
elaborate()
{
  xelab --relax --debug typical --mt auto -L generic_baseblocks_v2_1_0 -L axi_infrastructure_v1_1_0 -L axi_register_slice_v2_1_20 -L fifo_generator_v13_2_5 -L axi_data_fifo_v2_1_19 -L axi_crossbar_v2_1_21 -L xil_defaultlib -L axi_lite_ipif_v3_0_4 -L axi_intc_v4_1_14 -L xlconcat_v2_1_3 -L mdm_v3_2_17 -L lib_cdc_v1_0_2 -L proc_sys_reset_v5_0_13 -L interrupt_control_v3_1_4 -L axi_gpio_v2_0_22 -L lib_pkg_v1_0_2 -L lib_srl_fifo_v1_0_2 -L axi_uartlite_v2_0_24 -L axi_timer_v2_0_22 -L fit_timer_v2_0_10 -L emc_common_v3_0_5 -L axi_emc_v3_0_20 -L microblaze_v11_0_2 -L lmb_v10_v3_0_10 -L lmb_bram_if_cntlr_v4_0_17 -L blk_mem_gen_v8_4_4 -L lib_fifo_v1_0_14 -L axi_datamover_v5_1_22 -L axi_sg_v4_1_13 -L axi_dma_v7_1_21 -L axis_infrastructure_v1_1_0 -L axis_data_fifo_v2_0_2 -L axi_protocol_converter_v2_1_20 -L unisims_ver -L unimacro_ver -L secureip -L xpm --snapshot mb_design xil_defaultlib.mb_design xil_defaultlib.glbl -log elaborate.log
}
# RUN_STEP: <simulate>
# Run the elaborated snapshot in batch mode, driven by cmd.tcl.
simulate()
{
  xsim mb_design -key {Behavioral:sim_1:Functional:mb_design} -tclbatch cmd.tcl -protoinst "protoinst_files/mb_design.protoinst" -log simulate.log
}
# STEP: setup
# Dispatch on the first CLI option ($1, value in $2): copy the xsim.ini
# mapping file, reset the run area, or leave previous data in place.
setup()
{
  case $1 in
    "-lib_map_path" )
      if [[ ($2 == "") ]]; then
        echo -e "ERROR: Simulation library directory path not specified (type \"./mb_design.sh -help\" for more information)\n"
        exit 1
      fi
      copy_setup_file $2
    ;;
    "-reset_run" )
      reset_run
      echo -e "INFO: Simulation run files deleted.\n"
      exit 0
    ;;
    "-noclean_files" )
      # do not remove previous data
    ;;
    * )
      # Default: use the built-in library mapping path.
      copy_setup_file $2
  esac

  # Add any setup/initialization commands here:-
  # <user specific commands>
}
# Copy xsim.ini file
# Copy the precompiled-library mapping file (xsim.ini) into the run directory
# — from $1 if given, else from the Vivado install — then overlay the local
# design-library mappings.
copy_setup_file()
{
  file="xsim.ini"
  lib_map_path="C:/Xilinx/Vivado/2019.2/data/xsim"
  if [[ ($1 != "") ]]; then
    lib_map_path="$1"
  fi
  if [[ ($lib_map_path != "") ]]; then
    src_file="$lib_map_path/$file"
    if [[ -e $src_file ]]; then
      # Fix: quote the source path — a library directory containing spaces
      # would otherwise word-split and break the copy.
      cp "$src_file" .
    fi

    # Map local design libraries to xsim.ini
    map_local_libs
  fi
}
# Map local design libraries
# Rewrite xsim.ini so that every library in $local_libs points at its local
# xsim.dir/<lib> build output, preserving all other mappings.
map_local_libs()
{
  updated_mappings=()
  local_mappings=()

  # Local design libraries
  local_libs=(xil_defaultlib)

  if [[ 0 == ${#local_libs[@]} ]]; then
    return
  fi

  file="xsim.ini"
  file_backup="xsim.ini.bak"

  if [[ -e $file ]]; then
    rm -f $file_backup

    # Create a backup copy of the xsim.ini file
    cp $file $file_backup

    # Read libraries from backup file and search in local library collection
    while read -r line
    do
      IN=$line
      # Split mapping entry with '=' delimiter to fetch library name and mapping
      read lib_name mapping <<<$(IFS="="; echo $IN)
      # If local library found, then construct the local mapping and add to local mapping collection
      if `echo ${local_libs[@]} | grep -wq $lib_name` ; then
        line="$lib_name=xsim.dir/$lib_name"
        local_mappings+=("$lib_name")
      fi
      # Add to updated library mapping collection
      updated_mappings+=("$line")
    done < "$file_backup"

    # Append local libraries not found originally from xsim.ini
    # NOTE(review): grep -wvq on the single joined line acts as "name not
    # present in the already-mapped set" — confirm against multi-lib setups.
    for (( i=0; i<${#local_libs[*]}; i++ )); do
      lib_name="${local_libs[i]}"
      if `echo ${local_mappings[@]} | grep -wvq $lib_name` ; then
        line="$lib_name=xsim.dir/$lib_name"
        updated_mappings+=("$line")
      fi
    done

    # Write updated mappings in xsim.ini
    rm -f $file
    for (( i=0; i<${#updated_mappings[*]}; i++ )); do
      lib_name="${updated_mappings[i]}"
      echo $lib_name >> $file
    done
  else
    # No xsim.ini yet: create one containing only the local mappings.
    for (( i=0; i<${#local_libs[*]}; i++ )); do
      lib_name="${local_libs[i]}"
      mapping="$lib_name=xsim.dir/$lib_name"
      echo $mapping >> $file
    done
  fi
}
# Delete generated data from the previous run
# Removes every simulator-generated log/journal/database file and the
# xsim.dir build directory from the current directory, if present.
reset_run()
{
  files_to_remove=(xelab.pb xsim.jou xvhdl.log xvlog.log compile.log elaborate.log simulate.log xelab.log xsim.log run.log xvhdl.pb xvlog.pb mb_design.wdb xsim.dir)
  for file in "${files_to_remove[@]}"; do
    if [[ -e $file ]]; then
      rm -rf "$file"
    fi
  done
}
# Check command line arguments
# $1 = argument count passed to run(), $2 = first CLI option.
# Rejects unknown single options; prints usage for -help/-h.
check_args()
{
  if [[ $1 == 1 ]]; then
    case $2 in
      "-lib_map_path" | "-noclean_files" | "-reset_run" | "-help" | "-h")
        ;;
      *)
        echo -e "ERROR: Unknown option specified '$2' (type \"./mb_design.sh -help\" for more information)\n"
        exit 1
        ;;
    esac
  fi
  if [[ $2 == "-help" || $2 == "-h" ]]; then
    usage
  fi
}
# Script usage
# Print the help text and exit with status 1.
usage()
{
  # NOTE(review): $msg is expanded unquoted in the echo below, so runs of
  # whitespace collapse; quoting it would change current output, so it is
  # left exactly as generated.
  msg="Usage: mb_design.sh [-help]\n\
Usage: mb_design.sh [-lib_map_path]\n\
Usage: mb_design.sh [-reset_run]\n\
Usage: mb_design.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
  echo -e $msg
  exit 1
}
# Launch script
# NOTE: $1/$2 are intentionally unquoted — run() inspects its argument count
# ($#), and quoting empty parameters would change that count.
run $1 $2
|
package stores
import (
"context"
)
// Reader is a source of Records that streams them over a channel.
type Reader interface {
	// Read sends records on the provided send-only channel and returns an
	// error on failure. NOTE(review): whether Read closes the channel when
	// done, and how it reacts to ctx cancellation, is not visible from this
	// file — confirm with the implementations.
	Read(ctx context.Context, records chan<- Record) error
}
|
#!/bin/bash
# Archived program command-line for experiment
# Copyright 2016 Xiang Zhang
#
# Usage: bash {this_file} [additional_options]
# Trace each command as it runs, and abort on the first failure.
set -x;
set -e;

# Train/evaluate the char-bag model on the ifeng topic dataset; any extra
# command-line arguments are forwarded verbatim to main.lua.
th main.lua -driver_location models/ifeng/charbag -train_data_file data/ifeng/topic/train_charbag.t7b -test_data_file data/ifeng/topic/test_charbag.t7b "$@";
|
#
# Initializes Oh My Zsh.
#
# Authors:
# Robby Russell <robby@planetargon.com>
# Sorin Ionescu <sorin.ionescu@gmail.com>
#
# Check for the minimum supported version.
min_zsh_version='4.3.10'
if ! autoload -Uz is-at-least || ! is-at-least "$min_zsh_version"; then
  print "omz: old shell detected, minimum required: $min_zsh_version" >&2
fi
unset min_zsh_version

# Disable color and theme in dumb terminals.
if [[ "$TERM" == 'dumb' ]]; then
  zstyle ':omz:*:*' color 'no'
  zstyle ':omz:prompt' theme 'off'
fi

# Get enabled plugins.
zstyle -a ':omz:load' plugin 'plugins'

# Add functions to fpath.
# ${0:h} is the directory of this file; (/FN) keeps only existing directories.
fpath=(
  ${0:h}/themes/*(/FN)
  ${plugins:+${0:h}/plugins/${^plugins}/{functions,completions}(/FN)}
  ${0:h}/{functions,completions}(/FN)
  $fpath
)

# Load and initialize the completion system ignoring insecure directories.
autoload -Uz compinit && compinit -i

# Source files (the order matters).
source "${0:h}/helper.zsh"
source "${0:h}/environment.zsh"
source "${0:h}/terminal.zsh"
source "${0:h}/keyboard.zsh"
source "${0:h}/completion.zsh"
source "${0:h}/history.zsh"
source "${0:h}/directory.zsh"
source "${0:h}/alias.zsh"
source "${0:h}/spectrum.zsh"
source "${0:h}/utility.zsh"

# Autoload Zsh functions.
autoload -Uz age
autoload -Uz zargs
autoload -Uz zcalc
autoload -Uz zmv

# Source plugins defined in ~/.zshrc.
for plugin in "$plugins[@]"; do
  zstyle ":omz:plugin:$plugin" enable 'yes'
  if [[ ! -d "${0:h}/plugins/$plugin" ]]; then
    print "omz: no such plugin: $plugin" >&2
  fi
  if [[ -f "${0:h}/plugins/$plugin/init.zsh" ]]; then
    source "${0:h}/plugins/$plugin/init.zsh"
  fi
done
unset plugin plugins

# Autoload Oh My Zsh functions.
# [^_.]*(N.:t) = plain files not starting with '_' or '.', basenames only.
for fdir in "$fpath[@]"; do
  if [[ "$fdir" == ${0:h}/(|*/)functions ]]; then
    for func in $fdir/[^_.]*(N.:t); do
      autoload -Uz $func
    done
  fi
done
unset fdir func

# Set environment variables for launchd processes.
# '&!' backgrounds and disowns launchctl so the shell does not track it.
if [[ "$OSTYPE" == darwin* ]]; then
  for env_var in PATH MANPATH; do
    launchctl setenv "$env_var" "${(P)env_var}" &!
  done
  unset env_var
fi

# Load and run the prompt theming system.
autoload -Uz promptinit && promptinit

# Load the prompt theme.
zstyle -a ':omz:prompt' theme 'prompt_argv'
if (( $#prompt_argv > 0 )); then
  prompt "$prompt_argv[@]"
else
  prompt 'off'
fi
unset prompt_argv

# Compile the completion dump, to increase startup speed.
dump_file="$HOME/.zcompdump"
if [[ "$dump_file" -nt "${dump_file}.zwc" || ! -f "${dump_file}.zwc" ]]; then
  zcompile "$dump_file"
fi
unset dump_file
|
<filename>src/main/java/com/alipay/api/domain/PaytoolRefundResultDetail.java
package com.alipay.api.domain;
import java.util.Date;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
 * Refund result detail for a single payment tool.
 * (Original Javadoc: 支付工具退款结果明细)
 *
 * @author auto create
 * @since 1.0, 2019-08-17 17:44:09
 */
public class PaytoolRefundResultDetail extends AlipayObject {

    private static final long serialVersionUID = 2246389958266638595L;

    /** Time the payment-tool refund completed, formatted yyyy-MM-dd HH:mm:ss. */
    @ApiField("gmt_refund")
    private Date gmtRefund;

    /** Alipay payment-tool bill number. */
    @ApiField("paytool_bill_no")
    private String paytoolBillNo;

    /** Merchant-side payment-tool request number. */
    @ApiField("paytool_request_no")
    private String paytoolRequestNo;

    /** Refunded amount in yuan, accurate to two decimal places. */
    @ApiField("refund_amount")
    private String refundAmount;

    /**
     * Fund composition detail of this tool's refund. Only returned when the
     * tool caused an actual fund movement through Alipay.
     */
    @ApiListField("refund_fund_bill_list")
    @ApiField("trade_fund_bill")
    private List<TradeFundBill> refundFundBillList;

    /**
     * Payment-tool refund status:
     * REFUND_SUCCESS (refunded), REFUND_INPROCESS (processing),
     * REFUND_FAIL (failed).
     */
    @ApiField("status")
    private String status;

    /**
     * Payment-tool code assigned by Alipay:
     * CASH, ALIPAY, TMARKETING (marketing), POS, MERCHANT_MCARD (merchant
     * prepaid card), OTHER.
     */
    @ApiField("tool_code")
    private String toolCode;

    public Date getGmtRefund() {
        return this.gmtRefund;
    }
    public void setGmtRefund(Date gmtRefund) {
        this.gmtRefund = gmtRefund;
    }

    public String getPaytoolBillNo() {
        return this.paytoolBillNo;
    }
    public void setPaytoolBillNo(String paytoolBillNo) {
        this.paytoolBillNo = paytoolBillNo;
    }

    public String getPaytoolRequestNo() {
        return this.paytoolRequestNo;
    }
    public void setPaytoolRequestNo(String paytoolRequestNo) {
        this.paytoolRequestNo = paytoolRequestNo;
    }

    public String getRefundAmount() {
        return this.refundAmount;
    }
    public void setRefundAmount(String refundAmount) {
        this.refundAmount = refundAmount;
    }

    public List<TradeFundBill> getRefundFundBillList() {
        return this.refundFundBillList;
    }
    public void setRefundFundBillList(List<TradeFundBill> refundFundBillList) {
        this.refundFundBillList = refundFundBillList;
    }

    public String getStatus() {
        return this.status;
    }
    public void setStatus(String status) {
        this.status = status;
    }

    public String getToolCode() {
        return this.toolCode;
    }
    public void setToolCode(String toolCode) {
        this.toolCode = toolCode;
    }
}
|
package io.renrenapi.entity;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
/**
 * House listing entity mapped to table tb_House.
 *
 * @author wcf
 * @email <EMAIL>
 * @date 2019-08-01 16:57:05
 */
@Data
@TableName("tb_House")
public class HouseEntity implements Serializable {
    private static final long serialVersionUID = 1L;

    /** Primary key. */
    @TableId
    private Long id;
    /** Rent type id — meaning defined elsewhere (dictionary table?); confirm. */
    private Long renttype;
    /** Listing name/title. */
    private String name;
    /** Unit price (presumably per square metre — confirm). */
    private Float unitprice;
    /** Total price. */
    private Float totalprice;
    /** Id of the building this house belongs to. */
    private Long buildingid;
    /** Work number (semantics unclear from this file — confirm). */
    private Long worknum;
    /** Floor/height category description. */
    private String heightcatedesc;
    /** Number of visits/views. */
    private Long visitnum;
    /** Floor area. */
    private Float area;
    /** Decoration / fit-out type description (original: 装修). */
    private String decoratetypedesc;
    /** Rent experience text. */
    private String rentexperience;
    /** Evaluation degree/score. */
    private Long evalutedegree;
    /** Free-text description. */
    private String description;
    /** Picture URL(s). */
    private String pic;
    /** Traffic / transport information. */
    private String traffic;
    /** Whether the listing is in favorites (stored as string — confirm values). */
    private String isfavorites;
    /** Latitude (original comment "维度" is a typo for 纬度/latitude). */
    private Float lat;
    /** Longitude (original: 经度). */
    private Float lng;
    /** Orientation — which way the house faces (original: 朝向). */
    private String orientationdesc;
    /** Room/house status (original: 房间状态). */
    private String housestatus;
    /** Rent type name, e.g. priced by area (original: 租金类型(按面积)). */
    private String renttypename;
    /** Share link for this listing (original: 分享链接). */
    private String houseshareurl;
    /** Share link including labels. */
    private String houseshareurlwithlabels;
    /** Floor-plan picture (original: 房型图). */
    private String housetypepicture;
    /** House feature tags (original: 房间特性). */
    private String housefeature;
    /** Commission labels (original: 房屋佣金). */
    private String houselabels;
    /** Commission description (original: 佣金介绍). */
    private String brokeragedesc;
    /** Decoration delivery standard (original: 装修交付类型). */
    private String deliverystandard;
    /** Decoration delivery description (original: 装修描述). */
    private String deliverydesc;
    /** Special services note (original: 特殊服务). */
    private String specialexplain;
    /** Minimum rent period (original: 最短租期). */
    private String minrentperiod;
    /** Rent-free period (original: 免租期). */
    private String remitrentperiod;
    /** Full address (original: 详细地址). */
    private String address;
    /** Related office info (semantics unclear from this file — confirm). */
    private String supperoffice;
    /** Accompanied-viewing count (original: 带看). */
    private Long housewithseecount;
    /** Distance metric for signed houses (semantics unclear — confirm). */
    private Long housewithsigndistance;
    /** Building type description (original: 建筑类型). */
    private String buildingtype;
    /** Building type id. */
    private Long buildingtypeid;
}
|
// Enables Scala.js compilation for this build (legacy pre-1.x plugin
// coordinates; newer builds use "org.scala-js" % "sbt-scalajs" — confirm
// before upgrading).
addSbtPlugin("org.scala-lang.modules.scalajs" % "scalajs-sbt-plugin" % "0.4.0")
|
<filename>sources/Engine/Modules/Audio/AudioSystem.cpp
#include "precompiled.h"
#pragma hdrstop
#include "AudioSystem.h"
#include <utility>
#include <spdlog/spdlog.h>
#include "Modules/Graphics/GraphicsSystem/TransformComponent.h"
#include "Exceptions/exceptions.h"
#include "ALDebug.h"
// Stores the graphics scene; its active camera later drives the audio listener.
AudioSystem::AudioSystem(std::shared_ptr<GraphicsScene> environmentState)
  : m_environmentState(std::move(environmentState))
{
}
AudioSystem::~AudioSystem()
{
  // unconfigure() must have run before destruction: it nulls all three
  // members, so these asserts catch a missing OpenAL teardown.
  SW_ASSERT(m_audioListener == nullptr);
  SW_ASSERT(m_audioContext == nullptr);
  SW_ASSERT(m_audioDevice == nullptr);
}
// Opens the default OpenAL device, creates and activates a context, creates
// the listener, and subscribes to AudioSourceComponent attachment events.
// Throws EngineRuntimeException on any OpenAL setup failure.
void AudioSystem::configure()
{
  spdlog::info("Configure audio system");

  AL_CALL_BLOCK_BEGIN();
  // nullptr selects the system default audio device.
  m_audioDevice = alcOpenDevice(nullptr);
  AL_CALL_BLOCK_END();

  if (m_audioDevice == nullptr) {
    THROW_EXCEPTION(EngineRuntimeException, "Audio device can not be opened");
  }

  AL_CALL_BLOCK_BEGIN();
  m_audioContext = alcCreateContext(m_audioDevice, nullptr);
  AL_CALL_BLOCK_END();

  if (m_audioContext == nullptr) {
    THROW_EXCEPTION(EngineRuntimeException, "Audio context can not be created");
  }

  AL_CALL_BLOCK_BEGIN();
  bool contextMakingCurrentStatus = alcMakeContextCurrent(m_audioContext);
  AL_CALL_BLOCK_END();

  if (!contextMakingCurrentStatus) {
    THROW_EXCEPTION(EngineRuntimeException, "Audio context can not be set as current");
  }

  // Listener must be created after the context is current.
  m_audioListener = std::make_unique<AudioListener>();

  getGameWorld()->subscribeEventsListener<GameObjectAddComponentEvent<AudioSourceComponent>>(this);
}
// Tears down in reverse order of configure(): unsubscribe, destroy the
// listener, detach and destroy the context, then close the device. Members
// are nulled so the destructor's asserts pass.
void AudioSystem::unconfigure()
{
  spdlog::info("Unconfigure audio system");

  getGameWorld()->unsubscribeEventsListener<GameObjectAddComponentEvent<AudioSourceComponent>>(this);

  m_audioListener.reset();

  alcMakeContextCurrent(nullptr);
  alcDestroyContext(m_audioContext);
  m_audioContext = nullptr;

  alcCloseDevice(m_audioDevice);
  m_audioDevice = nullptr;
}
// Per-frame sync: the listener follows the active camera, and each audio
// source follows (or stops with) its owning game object.
void AudioSystem::update(float delta)
{
  ARG_UNUSED(delta);

  std::shared_ptr<Camera> activeCamera = m_environmentState->getActiveCamera();

  if (activeCamera) {
    const Transform& currentCameraTransform = *m_environmentState->getActiveCamera()->getTransform();

    m_audioListener->setPosition(currentCameraTransform.getPosition());
    m_audioListener->setOrientation(currentCameraTransform.getOrientation());
  }

  for (auto object : getGameWorld()->allWith<AudioSourceComponent>()) {
    auto transformComponent = object.getComponent<TransformComponent>();
    auto& audioSource = object.getComponent<AudioSourceComponent>()->getSource();

    if (transformComponent->isOnline()) {
      audioSource.updateInternalState();

      // Static objects never move, so skip re-sending their position.
      if (!transformComponent->isStatic()) {
        audioSource.setPosition(transformComponent->getTransform().getPosition());
      }
    }
    else {
      // Offline objects must not keep playing.
      audioSource.stop();
    }
  }
}
// Read-only access to the listener (valid between configure()/unconfigure()).
const AudioListener& AudioSystem::getListener() const
{
  return *m_audioListener;
}
// Mutable access to the listener (valid between configure()/unconfigure()).
AudioListener& AudioSystem::getListener()
{
  return *m_audioListener;
}
// When an AudioSourceComponent is attached to a game object, initialize the
// source's position from that object's current transform.
EventProcessStatus AudioSystem::receiveEvent(const GameObjectAddComponentEvent<AudioSourceComponent>& event)
{
  auto& transform = event.gameObject.getComponent<TransformComponent>()->getTransform();
  auto source = event.component->getSourcePtr();

  source->setPosition(transform.getPosition());

  return EventProcessStatus::Processed;
}
|
#include <iostream>
#include <cmath>
// Abstract interface for 2-D shapes measured by area and perimeter.
class Shape {
public:
    // Fix: virtual destructor so deleting a derived object through a Shape*
    // is well-defined (the original had none, which is undefined behavior in
    // that case).
    virtual ~Shape() = default;

    // Area of the shape.
    virtual double area() const = 0;

    // Perimeter (circumference) of the shape.
    virtual double perimeter() const = 0;
};
// Axis-aligned rectangle defined by its two side lengths.
class Rectangle : public Shape {
private:
    double w_;
    double h_;

public:
    Rectangle(double w, double h) : w_(w), h_(h) {}

    // width * height
    double area() const override { return w_ * h_; }

    // Twice the sum of the side lengths.
    double perimeter() const override { return (w_ + h_) * 2.0; }
};
// Circle defined by its radius.
class Circle : public Shape {
private:
    double r_;

public:
    Circle(double r) : r_(r) {}

    // pi * r^2 (M_PI comes from <cmath>, as in the rest of this file).
    double area() const override { return M_PI * r_ * r_; }

    // Circumference: 2 * pi * r.
    double perimeter() const override { return M_PI * r_ * 2.0; }
};
// Demo driver: print the measurements of one rectangle and one circle.
int main() {
    Rectangle rect(3.0, 4.0);
    Circle circle(5.0);
    std::cout << "Rectangle area: " << rect.area() << ", perimeter: " << rect.perimeter() << std::endl;
    std::cout << "Circle area: " << circle.area() << ", circumference: " << circle.perimeter() << std::endl;
    return 0;
}
class HTMLParser:
    """Tracks tag nesting depth and reports the maximum depth reached."""

    def __init__(self):
        # Current nesting level and the deepest level observed so far.
        self.depth = 0
        self.maxdepth = 0

    def start(self, tag):
        """Record an opening tag: descend one level, updating the maximum.

        Args:
            tag (str): The name of the opening tag (unused).

        Returns:
            None
        """
        self.depth += 1
        self.maxdepth = max(self.maxdepth, self.depth)

    def end(self, tag):
        """Record a closing tag: ascend one level.

        Args:
            tag (str): The name of the closing tag (unused).

        Returns:
            None
        """
        self.depth -= 1

    def close(self):
        """Finish parsing.

        Returns:
            int: The maximum nesting depth reached.
        """
        return self.maxdepth
<reponame>ReCursia/Sonic<filename>core/src/com/studentsteam/sonic/screens/SaveScreen.java
package com.studentsteam.sonic.screens;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.audio.Music;
import com.badlogic.gdx.audio.Sound;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.viewport.FitViewport;
import com.badlogic.gdx.utils.viewport.Viewport;
import com.studentsteam.sonic.Main;
import com.studentsteam.sonic.screens.assets.BackgroundImage;
import com.studentsteam.sonic.screens.assets.SaveSlot;
import com.studentsteam.sonic.threads.ReportThread;
import com.studentsteam.sonic.tools.database.DatabaseSaves;
import com.studentsteam.sonic.tools.database.DataItem;
import org.apache.log4j.Logger;
public class SaveScreen implements Screen {
    // Logger for this screen.
    private final Logger log = Logger.getLogger(this.getClass());
    private Main game;
    private BackgroundImage background;
    // Background music for the save screen.
    private Music music;
    // UI sounds for moving between / pressing slots.
    private Sound switchSound;
    private Sound switchError;
    private Sound switchPress;
    // Sound played when a save is written (original section label: Reports).
    private Sound saveSound;
    // The four save slots rendered on screen.
    private Array<SaveSlot> slots;
    // Save data loaded from the database (original comment: "Даннные о сохранении").
    private Array<DataItem> items;
    // Index of the currently selected slot.
    private int currentPos;
    // Stage and viewport.
    private Stage stage;
    private Viewport viewport;
public SaveScreen(Main game){
log.info("Запуск экрана сохранения");
this.game = game;
this.background = new BackgroundImage("save/background.png");
//Music
this.music = Gdx.audio.newMusic(Gdx.files.internal("save/background.mp3"));
music.setLooping(true);
music.setVolume(0.1f);
music.play();
//Audio
this.switchSound = Gdx.audio.newSound(Gdx.files.internal("save/S3K_4A.wav"));
this.switchPress = Gdx.audio.newSound(Gdx.files.internal("save/S3K_9F.wav"));
this.switchError = Gdx.audio.newSound(Gdx.files.internal("save/S3K_40.wav"));
this.saveSound = Gdx.audio.newSound(Gdx.files.internal("save/save.mp3"));
//Setting stage and viewport
viewport = new FitViewport(Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), new OrthographicCamera());
stage = new Stage(viewport, game.batch);
//Getting save items from database
items = DatabaseSaves.getData();
//Creating slots
currentPos = 0;
slots = new Array<SaveSlot>();
for(int i = 0; i < 4;i++){
SaveSlot slot = new SaveSlot();
slots.add(slot);
for(DataItem item: items){
if(item.index == i) slot.setData(item);
}
//Set position of slot on screen
slot.setPosition(50+i*150,180);
}
//The first slot is selected
slots.get(currentPos).setSelected(true);
//Adding actors
stage.addActor(background);
for(SaveSlot slot:slots){
stage.addActor(slot);
}
}
@Override
public void show() {
}
/**
* Switch current position to the next slot
*/
private void selectNextSlot(){
log.info("Выбор следующего слота");
slots.get(currentPos+1).setSelected(true);
slots.get(currentPos).setSelected(false);
currentPos += 1;
}
/**
* Switch current position to the previous slot
*/
private void selectPreviousSlot(){
log.info("Выбор предыдущего слота");
slots.get(currentPos-1).setSelected(true);
slots.get(currentPos).setSelected(false);
currentPos -= 1;
}
private void handleInput(float dt){
//Slot is chosen
if(Gdx.input.isKeyJustPressed(Input.Keys.SPACE)){
Main.SAVE_INDEX = currentPos;
game.setScreen(new PlayScreen(game,items));
switchPress.play(0.3f);
dispose();
}
//Html report
else if(Gdx.input.isKeyJustPressed(Input.Keys.F5)){
ReportThread thread = new ReportThread("ReportHTMl", ReportThread.State.HTML,saveSound);
thread.start();
}
//Pdf report
else if(Gdx.input.isKeyJustPressed(Input.Keys.F6)){
ReportThread thread = new ReportThread("ReportPDF", ReportThread.State.PDF,saveSound);
thread.start();
}
//Cursor position
try{
if(Gdx.input.isKeyJustPressed(Input.Keys.RIGHT)){
selectNextSlot();
switchSound.play(0.3f);
}
else if(Gdx.input.isKeyJustPressed(Input.Keys.LEFT)){
selectPreviousSlot();
switchSound.play(0.3f);
}
}catch (IndexOutOfBoundsException err) {
//Play error sound
switchError.play(0.3f);
}
}
private void update(float delta){
handleInput(delta);
stage.act(delta);
}
@Override
public void render(float delta) {
//Update
update(delta);
//Clears the screen
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
//Draw stage
stage.draw();
}
@Override
public void resize(int width, int height) {
}
@Override
public void pause() {
}
@Override
public void resume() {
}
@Override
public void hide() {
}
@Override
public void dispose() {
slots.clear();
background.dispose();
stage.dispose();
music.dispose();
}
}
|
import React, { useState } from 'react';
import { Flair } from '@clowdr-app/clowdr-db-schema/build/DataLayer';
import "./FlairInput.scss";
import FlairChip from '../../Profile/FlairChip/FlairChip';
import useSafeAsync from '../../../hooks/useSafeAsync';
import useConference from '../../../hooks/useConference';
import useUserRoles from '../../../hooks/useUserRoles';
interface Props {
name: string;
flairs: Flair[];
setFlairs: (flairs: Flair[]) => void;
disabled?: boolean;
}
export default function FlairInput(props: Props) {
const conference = useConference();
const [allFlairs, setAllFlairs] = useState<Flair[]>([]);
const { isAdmin, isManager } = useUserRoles();
useSafeAsync(async () => {
let results = await Flair.getAll(conference.id);
if (!isAdmin) {
results = results.filter(x =>
x.label.toLowerCase() !== "admin"
);
}
if (!isManager) {
results = results.filter(x =>
x.label.toLowerCase() !== "manager"
&& x.label.toLowerCase() !== "moderator"
&& x.label.toLowerCase() !== "mod"
);
}
return results;
}, setAllFlairs, []);
const isSelected = (flair: Flair) => props.flairs.find(x => x.id === flair.id) !== undefined;
return <div className="flair-input">
{allFlairs
.sort((x, y) => x.label.localeCompare(y.label))
.map((flair, i) =>
<div className="chip-container" key={i}>
<FlairChip
flair={flair}
unselected={!isSelected(flair)}
onClick={() => {
if (props.disabled) {
return;
}
if (isSelected(flair)) {
props.setFlairs(props.flairs.filter(x => x.id !== flair.id));
} else {
props.setFlairs([...props.flairs, flair])
}
}}
/>
</div>
)}
</div>;
}
|
import React from 'react';
import s from './index.scss';
interface Props {
msgCount?: number;
isMsg?: boolean;
}
const Placehold: React.FC<Props> = ({ msgCount, isMsg }) => {
return (
<>
{msgCount ? (
<div className={s.hasMag}>
{msgCount}条{isMsg ? '留言' : '评论'}
</div>
) : (
<div className={s.noMag}>暂时没有{isMsg ? '留言' : '评论'} ~</div>
)}
</>
);
};
export default Placehold;
|
#!/bin/sh
# Builds the cocos2d-x Linux libraries (cocos2dx, CocosDenshion, Box2D,
# chipmunk) into lib/linux/Debug, or cleans all of them when invoked
# with the single argument "clean".
#
# Usage:
#   ./build.sh          # build everything
#   ./build.sh clean    # 'make clean' in every sub-project, remove the .so files

# Root of the source tree; the script must be run from it.
p=.

# mkdir -p replaces the original chain of [ -d ] checks: it creates the
# whole hierarchy and is a no-op when it already exists.
mkdir -p "lib/linux/Debug" "lib/linux/Release"

# copy cocosdenshino depended libs into lib/linux/Debug/
cp CocosDenshion/third_party/fmod/api/lib/*.so lib/linux/Debug

if [ $# -ne 0 ]; then
  # "$1" is quoted so an empty or whitespace argument cannot break the test.
  if [ "$1" = "clean" ]; then
    # Each sub-project is cleaned in a subshell so a failed cd cannot
    # leave us running 'make clean' in the wrong directory (the original
    # mixed 'cd ../..' and 'cd ../../' hops were easy to get wrong).
    (cd "$p/cocos2dx/proj.linux" && make clean)
    (cd "$p/CocosDenshion/proj.linux" && make clean)
    (cd "$p/Box2D/proj.linux" && make clean)
    (cd "$p/chipmunk/proj.linux" && make clean)
    rm -r lib/linux/Debug/*.so
  fi
else
  echo "**********************building cocos2dx**************************"
  (cd "$p/cocos2dx/proj.linux" && make && cp -f libcocos2d.so ../../lib/linux/Debug) || exit 1
  echo "**********************building cocosdenshion*********************"
  (cd "$p/CocosDenshion/proj.linux" && make && cp -f libcocosdenshion.so ../../lib/linux/Debug) || exit 1
  echo "**********************building Box2D******************************"
  (cd "$p/Box2D/proj.linux" && make && cp -f libbox2d.a ../../lib/linux/Debug) || exit 1
  echo "**********************building chipmunk***************************"
  (cd "$p/chipmunk/proj.linux" && make && cp -f libchipmunk.a ../../lib/linux/Debug) || exit 1
fi
|
package iptree32
// !!!DON'T EDIT!!! Generated by infobloxopen/go-trees/etc from <name>tree{{.bits}} with etc -s uint32 -d uintX.yaml -t ./<name>tree\{\{.bits\}\}
import (
"fmt"
"net"
"strings"
"testing"
)
// TestInsertNet checks that InsertNet is copy-on-write: nil or invalid
// networks leave the tree untouched, while every successful insertion
// returns a fresh root holding the value at the expected key/bits.
func TestInsertNet(t *testing.T) {
    r := NewTree()

    // A nil network must not modify the tree.
    newR := r.InsertNet(nil, 1)
    if newR != r {
        t.Errorf("Expected no changes inserting nil network but got:\n%s\n", newR.root32.Dot())
    }

    // A network with nil IP/mask is invalid and must not modify the tree.
    newR = r.InsertNet(&net.IPNet{IP: nil, Mask: nil}, 1)
    if newR != r {
        t.Errorf("Expected no changes inserting invalid network but got:\n%s\n", newR.root32.Dot())
    }

    // IPv4 insertion lands in the 32-bit subtree.
    _, n, _ := net.ParseCIDR("192.0.2.0/24")
    newR = r.InsertNet(n, 1)
    if newR == r {
        t.Errorf("Expected new root after insertion of new IPv4 address but got previous")
    } else {
        assertTree32Node(newR, 0xc0000200, 24, 1, "tree with single IPv4 address inserted", t)
    }

    // IPv6 insertions land in the two-level 64-bit subtree.
    _, n, _ = net.ParseCIDR("2001:db8::/32")
    r1 := r.InsertNet(n, 1)
    if r1 == r {
        t.Errorf("Expected new root after insertion of new IPv6 address but got previous")
    } else {
        assertTree64Node(r1, 0x20010db800000000, 32, 0x0, 0, 1,
            "tree with single IPv6 address inserted", t)
    }

    _, n, _ = net.ParseCIDR("2001:db8:0:0:0:ff::/96")
    r2 := r1.InsertNet(n, 2)
    if r2 == r1 {
        t.Errorf("Expected new root after insertion of second IPv6 address but got previous")
    } else {
        assertTree64Node(r2, 0x20010db800000000, 64, 0x000000ff00000000, 32, 2,
            "tree with second IPv6 address inserted", t)
    }

    _, n, _ = net.ParseCIDR("2001:db8:0:0:0:fe::/96")
    r3 := r2.InsertNet(n, 3)
    // Fixed: r3 must be compared against r2 (the tree it was derived
    // from), not r1 — the old comparison could never fail and therefore
    // could not detect a missed copy-on-write.
    if r3 == r2 {
        t.Errorf("Expected new root after insertion of third IPv6 address but got previous")
    } else {
        assertTree64Node(r3, 0x20010db800000000, 64, 0x000000fe00000000, 32, 3,
            "tree with third IPv6 address inserted", t)
    }
}
// TestInplaceInsertNet exercises the mutating insert: invalid input must
// leave the tree empty, and valid IPv4/IPv6 networks must populate the
// 32-bit and 64-bit subtrees respectively.
func TestInplaceInsertNet(t *testing.T) {
    r := NewTree()

    // A nil network must not create any nodes.
    r.InplaceInsertNet(nil, 1)
    if r.root32 != nil || r.root64 != nil {
        t.Error("Expected empty tree after inserting nil network")
    }

    // A network with nil IP/mask is invalid and must be ignored as well.
    r.InplaceInsertNet(&net.IPNet{IP: nil, Mask: nil}, 1)
    if r.root32 != nil || r.root64 != nil {
        t.Error("Expected empty tree after inserting invalid network")
    }

    // IPv4 network goes to the 32-bit subtree.
    _, n, _ := net.ParseCIDR("192.0.2.0/24")
    r.InplaceInsertNet(n, 1)
    if r.root32 == nil {
        t.Error("Expected some data in 32-bit tree")
    } else {
        assertTree32Node(r, 0xc0000200, 24, 1, "tree with single IPv4 address inserted", t)
    }

    // IPv6 networks go to the two-level 64-bit subtree.
    _, n, _ = net.ParseCIDR("2001:db8::/32")
    r.InplaceInsertNet(n, 1)
    if r.root64 == nil {
        t.Error("Expected some data in 64-bit tree")
    } else {
        assertTree64Node(r, 0x20010db800000000, 32, 0x0, 0, 1,
            "tree with single IPv6 address inserted", t)
    }

    _, n, _ = net.ParseCIDR("2001:db8:0:0:0:ff::/96")
    r.InplaceInsertNet(n, 2)
    if r.root64 == nil {
        t.Error("Expected some data in 64-bit tree")
    } else {
        assertTree64Node(r, 0x20010db800000000, 64, 0x000000ff00000000, 32, 2,
            "tree with second IPv6 address inserted", t)
    }

    _, n, _ = net.ParseCIDR("2001:db8:0:0:0:fe::/96")
    r.InplaceInsertNet(n, 3)
    if r.root64 == nil {
        t.Error("Expected some data in 64-bit tree")
    } else {
        assertTree64Node(r, 0x20010db800000000, 64, 0x000000fe00000000, 32, 3,
            "tree with third IPv6 address inserted", t)
    }
}

// String renders a key/value pair as "<network>: <value>" for use in
// enumeration output and failure messages.
func (p Pair) String() string {
    return fmt.Sprintf("%s: %d", p.Key, p.Value)
}

// TestEnumerate checks that Enumerate yields nothing for a nil tree and
// yields every inserted network; the expected string pins the exact
// traversal order (IPv4 first, then IPv6 subtree order).
func TestEnumerate(t *testing.T) {
    var r *Tree

    // Enumerating a nil tree must produce no pairs.
    for p := range r.Enumerate() {
        t.Errorf("Expected no nodes in empty tree but got at least one: %s", p)
        break
    }

    r = NewTree()
    _, n, _ := net.ParseCIDR("192.0.2.0/24")
    r = r.InsertNet(n, 20)
    _, n, _ = net.ParseCIDR("2001:db8::/32")
    r = r.InsertNet(n, 21)
    _, n, _ = net.ParseCIDR("2001:db8:1::/48")
    r = r.InsertNet(n, 22)
    _, n, _ = net.ParseCIDR("2001:db8:0:0:0:ff::/96")
    r = r.InsertNet(n, 30)

    items := []string{}
    for p := range r.Enumerate() {
        items = append(items, p.String())
    }
    s := strings.Join(items, ",\n\t")
    e := "192.0.2.0/24: 20,\n\t" +
        "2001:db8::/32: 21,\n\t" +
        "2001:db8::ff:0:0/96: 30,\n\t" +
        "2001:db8:1::/48: 22"
    if s != e {
        t.Errorf("Expected following nodes\n\t%s\nbut got\n\t%s", e, s)
    }
}
// TestGetByNet checks longest-prefix lookups: exact and more-specific
// networks resolve to the closest inserted prefix, while networks not
// covered by any inserted prefix return no result.
func TestGetByNet(t *testing.T) {
    r := NewTree()
    _, n4, _ := net.ParseCIDR("192.0.2.0/24")
    r = r.InsertNet(n4, 20)
    _, n6Short1, _ := net.ParseCIDR("2001:db8::/33")
    r = r.InsertNet(n6Short1, 21)
    _, n6Short2, _ := net.ParseCIDR("2001:db8:1::/48")
    r = r.InsertNet(n6Short2, 22)
    _, n6Long, _ := net.ParseCIDR("2001:db8:0:0:0:ff::/96")
    r = r.InsertNet(n6Long, 30)
    _, n6Fit, _ := net.ParseCIDR("2001:db8:8000:0:0:fe::/96")
    r = r.InsertNet(n6Fit, 40)

    // nil or invalid networks must not match anything.
    v, ok := r.GetByNet(nil)
    if ok {
        t.Errorf("Expected no result for nil network but got %d", v)
    }
    v, ok = r.GetByNet(&net.IPNet{IP: nil, Mask: nil})
    if ok {
        t.Errorf("Expected no result for invalid network but got %d", v)
    }

    // Exact matches return their own values.
    v, ok = r.GetByNet(n4)
    assertResult(v, ok, 20, fmt.Sprintf("%s", n4), t)
    v, ok = r.GetByNet(n6Short1)
    assertResult(v, ok, 21, fmt.Sprintf("%s", n6Short1), t)
    v, ok = r.GetByNet(n6Long)
    assertResult(v, ok, 30, fmt.Sprintf("%s", n6Long), t)

    // A more specific /64 falls back to its covering /48.
    _, n6, _ := net.ParseCIDR("2001:db8:1::/64")
    v, ok = r.GetByNet(n6)
    assertResult(v, ok, 22, fmt.Sprintf("%s", n6), t)

    // A /96 sibling of an inserted /96 matches only the covering /33.
    _, n6, _ = net.ParseCIDR("2001:db8:0:0:0:fe::/96")
    v, ok = r.GetByNet(n6)
    assertResult(v, ok, 21, fmt.Sprintf("%s", n6), t)

    // Networks in the upper half of 2001:db8::/32 have no covering prefix.
    _, n6, _ = net.ParseCIDR("2001:db8:8000::/33")
    v, ok = r.GetByNet(n6)
    if ok {
        t.Errorf("Expected no result for %s but got %d", n6, v)
    }
    _, n6, _ = net.ParseCIDR("2001:db8:8000:0:0:ff::/96")
    v, ok = r.GetByNet(n6)
    if ok {
        t.Errorf("Expected no result for %s but got %d", n6, v)
    }
}
// TestDeleteByNet checks deletions: deleting from a nil tree or with
// nil/invalid networks removes nothing, while removing every inserted
// network eventually leaves both subtrees empty.
func TestDeleteByNet(t *testing.T) {
    var r *Tree
    _, n4, _ := net.ParseCIDR("192.0.2.0/24")

    // Deleting from a nil tree is a no-op.
    r, ok := r.DeleteByNet(n4)
    if ok {
        t.Errorf("Expected no deletion in empty tree but got one")
    }

    r = r.InsertNet(n4, 20)
    _, n6Short1, _ := net.ParseCIDR("2001:db8::/32")
    r = r.InsertNet(n6Short1, 21)
    _, n6Short2, _ := net.ParseCIDR("2001:db8:1::/48")
    r = r.InsertNet(n6Short2, 22)
    _, n6Long1, _ := net.ParseCIDR("2001:db8:0:0:0:ff::/96")
    r = r.InsertNet(n6Long1, 31)
    _, n6Long2, _ := net.ParseCIDR("2001:db8:0:0:0:fe::/96")
    r = r.InsertNet(n6Long2, 32)

    // nil and invalid networks must not delete anything.
    r, ok = r.DeleteByNet(nil)
    if ok {
        t.Errorf("Expected no deletion by nil network but got one")
    }
    r, ok = r.DeleteByNet(&net.IPNet{IP: nil, Mask: nil})
    if ok {
        t.Errorf("Expected no deletion by invalid network but got one")
    }

    r, ok = r.DeleteByNet(n6Long2)
    if !ok {
        t.Errorf("Expected deletion by %s but got nothing", n6Long2)
    }
    r, ok = r.DeleteByNet(n6Long1)
    if !ok {
        t.Errorf("Expected deletion by %s but got nothing", n6Long1)
    }

    // After removing both /96 networks their intermediate 64-bit subtree
    // node must be gone as well.
    v, ok := r.root64.ExactMatch(0x20010db800000000, 64)
    if ok {
        t.Errorf("Expected no subtree node at 0x%016x, %d after deleting all long mask addresses but got %#v",
            0x20010db800000000, 64, v)
    }

    r, ok = r.DeleteByNet(n6Short2)
    if !ok {
        t.Errorf("Expected deletion by %s but got nothing", n6Short2)
    }
    r, ok = r.DeleteByNet(n6Short1)
    if !ok {
        t.Errorf("Expected deletion by %s but got nothing", n6Short1)
    }
    r, ok = r.DeleteByNet(n4)
    if !ok {
        t.Errorf("Expected deletion by %s but got nothing", n4)
    }

    // Fixed message: previously read "Expected expected empty tree".
    if r.root32 != nil || r.root64 != nil {
        t.Errorf("Expected empty tree at the end but have root32: %#v and root64: %#v", r.root32, r.root64)
    }
}
// TestTreeByIP covers the single-address convenience wrappers
// (InsertIP, GetByIP, DeleteByIP, InplaceInsertIP) on a /128 host route.
func TestTreeByIP(t *testing.T) {
    ip := net.ParseIP("2001:db8::1")
    var r *Tree

    // Inserting into a nil tree must allocate one.
    r = r.InsertIP(ip, 1)
    if r == nil {
        t.Errorf("Expected some tree after insert %s but got %#v", ip, r)
    }
    v, ok := r.GetByIP(ip)
    assertResult(v, ok, 1, fmt.Sprintf("address %s", ip), t)
    r, ok = r.DeleteByIP(ip)
    if !ok {
        t.Errorf("Expected deletion by address %s but got nothing", ip)
    }

    // Mutating insert must populate the 64-bit subtree for an IPv6 address.
    r.InplaceInsertIP(ip, 1)
    if r.root64 == nil {
        t.Errorf("Expected some tree after inplace insert %s", ip)
    }
}

// TestIPv4NetToUint32 checks the IPv4 network → (uint32 key, bit count)
// conversion, including rejection of short IPs and non-contiguous masks
// via a negative bit count.
func TestIPv4NetToUint32(t *testing.T) {
    _, n, _ := net.ParseCIDR("192.0.2.0/24")
    key, bits := iPv4NetToUint32(n)
    if key != 0xc0000200 || bits != 24 {
        t.Errorf("Expected 0xc0000200, 24 pair but got 0x%08x, %d", key, bits)
    }

    // A 2-byte IP is not a valid IPv4 address.
    n = &net.IPNet{
        IP:   net.IP{0xc, 0x00},
        Mask: net.IPMask{0xff, 0xff, 0xff, 0x00}}
    key, bits = iPv4NetToUint32(n)
    if bits >= 0 {
        t.Errorf("Expected negative number of bits for invalid IPv4 address but got 0x%08x, %d", key, bits)
    }

    // A non-contiguous mask must be rejected as well.
    n = &net.IPNet{
        IP:   net.IP{0xc, 0x00, 0x02, 0x00},
        Mask: net.IPMask{0xff, 0x00, 0xff, 0x00}}
    key, bits = iPv4NetToUint32(n)
    if bits >= 0 {
        t.Errorf("Expected negative number of bits for invalid IPv4 mask but got 0x%08x, %d", key, bits)
    }
}
// TestIPv6NetToUint64Pair checks the IPv6 network → two (uint64 key,
// bit count) pairs conversion, including rejection of short IPs and
// non-contiguous masks via a negative MS bit count.
// Fixed in both messages below: "pairs bit got" → "pairs but got".
func TestIPv6NetToUint64Pair(t *testing.T) {
    _, n, _ := net.ParseCIDR("2001:db8::/32")
    MSKey, MSBits, LSKey, LSBits := iPv6NetToUint64Pair(n)
    if MSKey != 0x20010db800000000 || MSBits != 32 || LSKey != 0x0 || LSBits != 0 {
        t.Errorf("Expected 0x20010db800000000, 32 and 0x0000000000000000, 0 pairs but got 0x%016x, %d and 0x%016x, %d",
            MSKey, MSBits, LSKey, LSBits)
    }

    _, n, _ = net.ParseCIDR("2001:db8:0:0:0:ff::/96")
    MSKey, MSBits, LSKey, LSBits = iPv6NetToUint64Pair(n)
    if MSKey != 0x20010db800000000 || MSBits != 64 || LSKey != 0x000000ff00000000 || LSBits != 32 {
        // Fixed message: it previously repeated the expectation of the
        // first case instead of describing this one.
        t.Errorf("Expected 0x20010db800000000, 64 and 0x000000ff00000000, 32 pairs but got 0x%016x, %d and 0x%016x, %d",
            MSKey, MSBits, LSKey, LSBits)
    }

    // An 8-byte IP is not a valid IPv6 address.
    n = &net.IPNet{
        IP: net.IP{
            0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00},
        Mask: net.IPMask{
            0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}}
    MSKey, MSBits, LSKey, LSBits = iPv6NetToUint64Pair(n)
    if MSBits >= 0 {
        t.Errorf("Expected negative number of bits for invalid IPv6 address but got 0x%016x, %d and 0x%016x, %d",
            MSKey, MSBits, LSKey, LSBits)
    }

    // A non-contiguous mask must be rejected as well.
    n = &net.IPNet{
        IP: net.IP{
            0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
        Mask: net.IPMask{
            0x00, 0xff, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}}
    MSKey, MSBits, LSKey, LSBits = iPv6NetToUint64Pair(n)
    if MSBits >= 0 {
        t.Errorf("Expected negative number of bits for invalid IPv6 mask but got 0x%016x, %d and 0x%016x, %d",
            MSKey, MSBits, LSKey, LSBits)
    }
}
// TestNewIPNetFromIP checks that a single IP is converted to a host
// route (/32 for IPv4, /128 for IPv6) and that malformed IPs yield nil.
func TestNewIPNetFromIP(t *testing.T) {
    n := newIPNetFromIP(net.ParseIP("192.0.2.1"))
    if n.String() != "192.0.2.1/32" {
        t.Errorf("Expected %s for IPv4 conversion but got %s", "192.0.2.1/32", n)
    }

    n = newIPNetFromIP(net.ParseIP("2001:db8::1"))
    if n.String() != "2001:db8::1/128" {
        t.Errorf("Expected %s for IPv6 conversion but got %s", "2001:db8::1/128", n)
    }

    // A 2-byte IP is neither IPv4 nor IPv6.
    n = newIPNetFromIP(net.IP{0xc, 0x00})
    if n != nil {
        t.Errorf("Expected %#v for invalid IP address but got %s", nil, n)
    }
}

// assertTree32Node verifies that the 32-bit subtree holds value e at
// exactly the given key/bits position.
func assertTree32Node(r *Tree, key uint32, bits int, e uint32, desc string, t *testing.T) {
    v, ok := r.root32.ExactMatch(key, bits)
    assertResult(v, ok, e, fmt.Sprintf("0x%08x, %d for %s", key, bits, desc), t)
}
// assertTree64Node verifies that the two-level 64-bit tree holds value e
// at the given most/least significant key pair. The first hop matches
// MSKey/MSBits in root64; the second hop matches LSKey/LSBits in the
// returned subtree.
// Fixed messages: e is a uint32 value, so it is reported with
// "Expected value %d" instead of the misleading "Expected string %q".
func assertTree64Node(r *Tree, MSKey uint64, MSBits int, LSKey uint64, LSBits int, e uint32, desc string, t *testing.T) {
    desc = fmt.Sprintf("0x%016x, %d and 0x%016x, %d for %s", MSKey, MSBits, LSKey, LSBits, desc)
    s, ok := r.root64.ExactMatch(MSKey, MSBits)
    if ok {
        v, ok := s.ExactMatch(LSKey, LSBits)
        if ok {
            assertResult(v, ok, e, desc, t)
        } else {
            t.Errorf("Expected value %d at %s but got nothing at second hop", e, desc)
        }
    } else {
        if MSBits < 64 {
            t.Errorf("Expected value %d at %s but got nothing", e, desc)
        } else {
            t.Errorf("Expected value %d at %s but got nothing even at first hop", e, desc)
        }
    }
}
// assertResult verifies a single lookup outcome: the value must be
// present (ok == true) and equal to the expected one; otherwise the
// test is failed with a descriptive message.
func assertResult(v uint32, ok bool, e uint32, desc string, t *testing.T) {
    if !ok {
        t.Errorf("Expected value %d at %s but got nothing", e, desc)
        return
    }
    if v != e {
        t.Errorf("Expected value %d at %s but got %d", e, desc, v)
    }
}
|
#!/usr/bin/env bash
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
set -o pipefail

# Abort early when the mandatory FLINK_DIR environment variable is missing.
if [[ -z "$FLINK_DIR" ]]; then
  echo "FLINK_DIR needs to point to a Flink distribution directory"
  exit 1
fi

# PASS is cleared by any failing check; check_all_pass inspects it at the end.
export PASS=1

echo "Flink dist directory: $FLINK_DIR"

# Resolve the absolute path of this script's directory ($() replaces the
# legacy backticks; expansions are quoted so paths with spaces work).
TEST_ROOT=$(pwd)
TEST_INFRA_DIR="$0"
TEST_INFRA_DIR=$(dirname "$TEST_INFRA_DIR")
cd "$TEST_INFRA_DIR"
TEST_INFRA_DIR=$(pwd)
cd "$TEST_ROOT"

# used to randomize created directories
export TEST_DATA_DIR=$TEST_INFRA_DIR/temp-test-directory-$(date +%S%N)
echo "TEST_DATA_DIR: $TEST_DATA_DIR"
# Backs up the masters file and flink-conf.yaml so tests may modify them;
# revert_default_config restores the saved copies afterwards.
# All $FLINK_DIR expansions are quoted so paths with spaces work.
function backup_config() {
  # back up the masters and flink-conf.yaml
  cp "$FLINK_DIR/conf/masters" "$FLINK_DIR/conf/masters.bak"
  cp "$FLINK_DIR/conf/flink-conf.yaml" "$FLINK_DIR/conf/flink-conf.yaml.bak"
}

# Restores the configuration files saved by backup_config (if present),
# consuming the .bak copies in the process.
function revert_default_config() {
  # revert our modifications to the masters file
  if [ -f "$FLINK_DIR/conf/masters.bak" ]; then
    mv -f "$FLINK_DIR/conf/masters.bak" "$FLINK_DIR/conf/masters"
  fi

  # revert our modifications to the Flink conf yaml
  if [ -f "$FLINK_DIR/conf/flink-conf.yaml.bak" ]; then
    mv -f "$FLINK_DIR/conf/flink-conf.yaml.bak" "$FLINK_DIR/conf/flink-conf.yaml"
  fi
}
# Appends a "key: value" entry to flink-conf.yaml.
#   $1 - configuration key
#   $2 - value
function set_conf() {
  local conf_name=$1
  local val=$2
  echo "$conf_name: $val" >> "$FLINK_DIR/conf/flink-conf.yaml"
}

# Rewrites an existing "key: old" entry in flink-conf.yaml to "key: new".
#   $1 - configuration key
#   $2 - old value
#   $3 - new value
# NOTE(review): the arguments are interpolated into the sed program, so
# regex metacharacters (and '/') in them are interpreted by sed.
function change_conf() {
  local conf_name=$1
  local old_val=$2
  local new_val=$3
  sed -i -e "s/${conf_name}: ${old_val}/${conf_name}: ${new_val}/" "${FLINK_DIR}/conf/flink-conf.yaml"
}
# Replaces the current Flink configuration with a ZooKeeper-based HA
# setup using a single local master. backup_config saves the original
# files first so revert_default_config can restore them later.
function create_ha_config() {
  backup_config

  # clean up the dir that will be used for zookeeper storage
  # (see high-availability.zookeeper.storageDir below)
  if [ -e $TEST_DATA_DIR/recovery ]; then
    echo "File ${TEST_DATA_DIR}/recovery exists. Deleting it..."
    rm -rf $TEST_DATA_DIR/recovery
  fi

  # create the masters file (only one currently).
  # This must have all the masters to be used in HA.
  echo "localhost:8081" > ${FLINK_DIR}/conf/masters

  # then move on to create the flink-conf.yaml
  # (the sed strips a single leading space from each heredoc line)
  sed 's/^ //g' > ${FLINK_DIR}/conf/flink-conf.yaml << EOL
#==============================================================================
# Common
#==============================================================================
jobmanager.rpc.address: localhost
jobmanager.rpc.port: 6123
jobmanager.heap.mb: 1024
taskmanager.heap.mb: 1024
taskmanager.numberOfTaskSlots: 4
#==============================================================================
# High Availability
#==============================================================================
high-availability: zookeeper
high-availability.zookeeper.storageDir: file://${TEST_DATA_DIR}/recovery/
high-availability.zookeeper.quorum: localhost:2181
high-availability.zookeeper.path.root: /flink
high-availability.cluster-id: /test_cluster_one
#==============================================================================
# Web Frontend
#==============================================================================
rest.port: 8081
EOL
}
# Writes the HA configuration, then starts a local ZooKeeper and the cluster.
function start_ha_cluster {
  create_ha_config
  start_local_zk
  start_cluster
}

# Parses the zoo.cfg and starts locally zk.
# This is almost the same code as the
# /bin/start-zookeeper-quorum.sh without the SSH part and only running for localhost.
function start_local_zk {
  while read server ; do
    server=$(echo -e "${server}" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//') # trim

    # match server.id=address[:port[:port]]
    if [[ $server =~ ^server\.([0-9]+)[[:space:]]*\=[[:space:]]*([^: \#]+) ]]; then
      id=${BASH_REMATCH[1]}
      address=${BASH_REMATCH[2]}

      # Only a localhost quorum can be started without SSH.
      if [ "${address}" != "localhost" ]; then
        echo "[ERROR] Parse error. Only available for localhost."
        PASS=""
        exit 1
      fi
      ${FLINK_DIR}/bin/zookeeper.sh start $id
    else
      echo "[WARN] Parse error. Skipping config entry '$server'."
    fi
  done < <(grep "^server\." "${FLINK_DIR}/conf/zoo.cfg")
}

# Starts the cluster and polls (up to 10s) until the dispatcher's REST
# endpoint reports a non-empty task manager list.
function start_cluster {
  "$FLINK_DIR"/bin/start-cluster.sh

  # wait at most 10 seconds until the dispatcher is up
  for i in {1..10}; do
    # without the || true this would exit our script if the JobManager is not yet up
    QUERY_RESULT=$(curl "http://localhost:8081/taskmanagers" 2> /dev/null || true)

    # ensure the taskmanagers field is there at all and is not empty
    if [[ ${QUERY_RESULT} =~ \{\"taskmanagers\":\[.+\]\} ]]; then
      echo "Dispatcher REST endpoint is up."
      break
    fi

    echo "Waiting for dispatcher REST endpoint to come up..."
    sleep 1
  done
}
# Stops the Flink cluster (and ZooKeeper, if its processes are running),
# then scans the log directory for unexpected errors/exceptions and for
# non-empty .out files. Any finding clears PASS to fail the suite.
function stop_cluster {
  "$FLINK_DIR"/bin/stop-cluster.sh

  # stop zookeeper only if there are processes running
  if ! [ "`jps | grep 'FlinkZooKeeperQuorumPeer' | wc -l`" = "0" ]; then
    "$FLINK_DIR"/bin/zookeeper.sh stop
  fi

  # Look for "error" in the logs, ignoring a whitelist of known-benign messages.
  if grep -rv "GroupCoordinatorNotAvailableException" $FLINK_DIR/log \
      | grep -v "RetriableCommitFailedException" \
      | grep -v "NoAvailableBrokersException" \
      | grep -v "Async Kafka commit failed" \
      | grep -v "DisconnectException" \
      | grep -v "AskTimeoutException" \
      | grep -v "WARN akka.remote.transport.netty.NettyTransport" \
      | grep -v "WARN org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline" \
      | grep -v "jvm-exit-on-fatal-error" \
      | grep -v '^INFO:.*AWSErrorCode=\[400 Bad Request\].*ServiceEndpoint=\[https://.*\.s3\.amazonaws\.com\].*RequestType=\[HeadBucketRequest\]' \
      | grep -v "RejectedExecutionException" \
      | grep -v "An exception was thrown by an exception handler" \
      | grep -v "java.lang.NoClassDefFoundError: org/apache/hadoop/yarn/exceptions/YarnException" \
      | grep -v "java.lang.NoClassDefFoundError: org/apache/hadoop/conf/Configuration" \
      | grep -iq "error"; then
    echo "Found error in log files:"
    cat $FLINK_DIR/log/*
    PASS=""
  fi

  # Same scan for "exception", with a slightly different whitelist.
  if grep -rv "GroupCoordinatorNotAvailableException" $FLINK_DIR/log \
      | grep -v "RetriableCommitFailedException" \
      | grep -v "NoAvailableBrokersException" \
      | grep -v "Async Kafka commit failed" \
      | grep -v "DisconnectException" \
      | grep -v "AskTimeoutException" \
      | grep -v "WARN akka.remote.transport.netty.NettyTransport" \
      | grep -v "WARN org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline" \
      | grep -v '^INFO:.*AWSErrorCode=\[400 Bad Request\].*ServiceEndpoint=\[https://.*\.s3\.amazonaws\.com\].*RequestType=\[HeadBucketRequest\]' \
      | grep -v "RejectedExecutionException" \
      | grep -v "An exception was thrown by an exception handler" \
      | grep -v "Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.yarn.exceptions.YarnException" \
      | grep -v "Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.conf.Configuration" \
      | grep -v "java.lang.NoClassDefFoundError: org/apache/hadoop/yarn/exceptions/YarnException" \
      | grep -v "java.lang.NoClassDefFoundError: org/apache/hadoop/conf/Configuration" \
      | grep -v "java.lang.Exception: Execution was suspended" \
      | grep -v "Caused by: java.lang.Exception: JobManager is shutting down" \
      | grep -iq "exception"; then
    echo "Found exception in log files:"
    cat $FLINK_DIR/log/*
    PASS=""
  fi

  # Anything printed to stdout/stderr of the daemons is unexpected.
  if grep -ri "." $FLINK_DIR/log/*.out > /dev/null; then
    echo "Found non-empty .out files:"
    cat $FLINK_DIR/log/*.out
    PASS=""
  fi
}

# Polls 'flink list' (up to 10 times, 1s apart) until the given job
# name/id appears; gives up silently after the last attempt.
#   $1 - job name or id to grep for
function wait_job_running {
  for i in {1..10}; do
    JOB_LIST_RESULT=$("$FLINK_DIR"/bin/flink list | grep "$1")
    if [[ "$JOB_LIST_RESULT" == "" ]]; then
      echo "Job ($1) is not yet running."
    else
      echo "Job ($1) is running."
      break
    fi
    sleep 1
  done
}
# Blocks until the JobManager log reports that the job reached the given
# globally terminal state (e.g. FINISHED, CANCELED). Loops forever if
# the state never appears.
#   $1 - job ID
#   $2 - expected terminal state
function wait_job_terminal_state {
  local job=$1
  local terminal_state=$2

  echo "Waiting for job ($job) to reach terminal state $terminal_state ..."

  while : ; do
    N=$(grep -o "Job $job reached globally terminal state $terminal_state" $FLINK_DIR/log/*standalonesession*.log | tail -1)

    if [[ -z $N ]]; then
      sleep 1
    else
      break
    fi
  done
}
# Triggers a savepoint for a running job.
#   $1 - job ID
#   $2 - savepoint target directory
# Fix: positional parameters are quoted so IDs/paths containing spaces
# or glob characters are passed through intact.
function take_savepoint {
  "$FLINK_DIR"/bin/flink savepoint "$1" "$2"
}

# Cancels the given job.
#   $1 - job ID
function cancel_job {
  "$FLINK_DIR"/bin/flink cancel "$1"
}
# Compares the md5 hash of the byte-wise sorted concatenation of all
# files matching the given prefix against an expected digest; clears
# PASS and dumps a hexdump on mismatch.
#   $1 - test name (for the report line)
#   $2 - output file prefix (globbed with '*')
#   $3 - expected md5 hex digest
function check_result_hash {
  local name=$1
  local outfile_prefix=$2
  local expected=$3

  local actual
  # Prefer BSD 'md5', fall back to GNU 'md5sum'. LC_ALL=C makes the sort
  # byte-wise and therefore platform-independent.
  if [ "`command -v md5`" != "" ]; then
    actual=$(LC_ALL=C sort $outfile_prefix* | md5 -q)
  elif [ "`command -v md5sum`" != "" ]; then
    actual=$(LC_ALL=C sort $outfile_prefix* | md5sum | awk '{print $1}')
  else
    echo "Neither 'md5' nor 'md5sum' binary available."
    exit 2
  fi
  if [[ "$actual" != "$expected" ]]
  then
    echo "FAIL $name: Output hash mismatch.  Got $actual, expected $expected."
    PASS=""
    echo "head hexdump of actual:"
    head $outfile_prefix* | hexdump -c
  else
    echo "pass $name"
    # Output files are left behind in /tmp
  fi
}
# Reports overall success when PASS is still set; otherwise prints a
# failure line and exits the whole script with status 1.
function check_all_pass {
  if [[ "$PASS" ]]; then
    echo "All tests PASS"
    return
  fi
  echo "One or more tests FAILED."
  exit 1
}
# Uploads a local file to S3 with an AWS Signature v2 signed PUT.
# Requires ARTIFACTS_AWS_ACCESS_KEY / ARTIFACTS_AWS_SECRET_KEY.
#   $1 - local file path
#   $2 - bucket name
#   $3 - destination object key
function s3_put {
  local_file=$1
  bucket=$2
  s3_file=$3
  resource="/${bucket}/${s3_file}"
  contentType="application/octet-stream"
  dateValue=`date -R`
  # Canonical string-to-sign for sig v2: VERB\n\ncontent-type\ndate\nresource
  stringToSign="PUT\n\n${contentType}\n${dateValue}\n${resource}"
  s3Key=$ARTIFACTS_AWS_ACCESS_KEY
  s3Secret=$ARTIFACTS_AWS_SECRET_KEY
  # HMAC-SHA1 over the canonical string, base64-encoded.
  signature=`echo -en ${stringToSign} | openssl sha1 -hmac ${s3Secret} -binary | base64`
  curl -X PUT -T "${local_file}" \
    -H "Host: ${bucket}.s3.amazonaws.com" \
    -H "Date: ${dateValue}" \
    -H "Content-Type: ${contentType}" \
    -H "Authorization: AWS ${s3Key}:${signature}" \
    https://${bucket}.s3.amazonaws.com/${s3_file}
}

# Deletes an object from S3 with an AWS Signature v2 signed DELETE.
#   $1 - bucket name
#   $2 - object key
function s3_delete {
  bucket=$1
  s3_file=$2
  resource="/${bucket}/${s3_file}"
  contentType="application/octet-stream"
  dateValue=`date -R`
  stringToSign="DELETE\n\n${contentType}\n${dateValue}\n${resource}"
  s3Key=$ARTIFACTS_AWS_ACCESS_KEY
  s3Secret=$ARTIFACTS_AWS_SECRET_KEY
  signature=`echo -en ${stringToSign} | openssl sha1 -hmac ${s3Secret} -binary | base64`
  curl -X DELETE \
    -H "Host: ${bucket}.s3.amazonaws.com" \
    -H "Date: ${dateValue}" \
    -H "Content-Type: ${contentType}" \
    -H "Authorization: AWS ${s3Key}:${signature}" \
    https://${bucket}.s3.amazonaws.com/${s3_file}
}
# This function starts the given number of task managers and monitors their processes. If a task manager process goes
# away a replacement is started.
#   $1 - number of task managers to keep alive (runs forever; kill to stop)
function tm_watchdog {
  local expectedTm=$1
  while true;
  do
    runningTm=`jps | grep -Eo 'TaskManagerRunner|TaskManager' | wc -l`;
    count=$((expectedTm-runningTm))
    # Start one replacement per missing task manager.
    for (( c=0; c<count; c++ ))
    do
      $FLINK_DIR/bin/taskmanager.sh start > /dev/null
    done
    sleep 5;
  done
}

# Kills all job manager.
function jm_kill_all {
  kill_all 'StandaloneSessionClusterEntrypoint'
}

# Kills all task manager.
function tm_kill_all {
  kill_all 'TaskManagerRunner|TaskManager'
}

# Kills all processes that match the given name.
#   $1 - extended regex matched against the 'jps' process list
# kill/wait errors are silenced: matched processes may already be gone,
# and wait only succeeds for children of this shell.
function kill_all {
  local pid=`jps | grep -E "${1}" | cut -d " " -f 1`
  kill ${pid} 2> /dev/null
  wait ${pid} 2> /dev/null
}

# Force-kills (SIGKILL) one randomly chosen TaskManager process.
# NOTE(review): 'sort -R' is a GNU extension and unavailable on BSD/macOS.
function kill_random_taskmanager {
  KILL_TM=$(jps | grep "TaskManager" | sort -R | head -n 1 | awk '{print $1}')
  kill -9 "$KILL_TM"
  echo "TaskManager $KILL_TM killed."
}

# Copies the slf4j metric reporter jar into lib/ and enables it with the
# given report interval.
#   $1 - interval (optional, default "1 SECONDS")
function setup_flink_slf4j_metric_reporter() {
  INTERVAL="${1:-1 SECONDS}"
  cp $FLINK_DIR/opt/flink-metrics-slf4j-*.jar $FLINK_DIR/lib/
  set_conf "metrics.reporter.slf4j.class" "org.apache.flink.metrics.slf4j.Slf4jReporter"
  set_conf "metrics.reporter.slf4j.interval" "${INTERVAL}"
}

# Removes the slf4j metric reporter jar from lib/ again.
function rollback_flink_slf4j_metric_reporter() {
  rm $FLINK_DIR/lib/flink-metrics-slf4j-*.jar
}
# Prints the most recent numRecordsIn value logged by the slf4j reporter
# for the given operator of the "General purpose test job", or 0 when no
# sample was logged yet.
#   $1 - operator name
# Fixes: "$n" is quoted in the -z test (the unquoted $N broke when the
# value contained whitespace) and the variables are now function-local.
function get_metric_processed_records {
  local operator=$1
  local n
  n=$(grep ".General purpose test job.$operator.numRecordsIn:" "$FLINK_DIR"/log/*taskexecutor*.log | sed 's/.* //g' | tail -1)
  if [ -z "$n" ]; then
    n=0
  fi
  echo "$n"
}

# Prints how many numRecordsIn samples were logged for the operator.
#   $1 - operator name
function get_num_metric_samples {
  local operator=$1
  local n
  n=$(grep ".General purpose test job.$operator.numRecordsIn:" "$FLINK_DIR"/log/*taskexecutor*.log | wc -l)
  if [ -z "$n" ]; then
    n=0
  fi
  echo "$n"
}
# Blocks until the given operator has processed at least $2 records
# (default 200) in the current execution, as observed via the slf4j
# numRecordsIn metric samples.
#   $1 - operator name
#   $2 - record threshold (optional, default 200)
#   $3 - metric sample count at the start of this execution (optional;
#        defaults to the current count, so only new samples are counted)
function wait_oper_metric_num_in_records {
  OPERATOR=$1
  MAX_NUM_METRICS="${2:-200}"
  NUM_METRICS=$(get_num_metric_samples ${OPERATOR})
  OLD_NUM_METRICS=${3:-${NUM_METRICS}}
  # monitor the numRecordsIn metric of the state machine operator in the second execution
  # we let the test finish once the second restore execution has processed 200 records
  while : ; do
    NUM_METRICS=$(get_num_metric_samples ${OPERATOR})
    NUM_RECORDS=$(get_metric_processed_records ${OPERATOR})

    # only account for metrics that appeared in the second execution
    if (( $OLD_NUM_METRICS >= $NUM_METRICS )) ; then
      NUM_RECORDS=0
    fi

    if (( $NUM_RECORDS < $MAX_NUM_METRICS )); then
      echo "Waiting for job to process up to 200 records, current progress: $NUM_RECORDS records ..."
      sleep 1
    else
      break
    fi
  done
}
# Blocks until the JobManager log shows at least the requested number of
# completed checkpoints for the given job.
#   $1 - job ID
#   $2 - minimum number of completed checkpoints
# Fixes: "$n" is quoted in the -z test and the variables are now
# function-local instead of clobbering globals JOB/NUM_CHECKPOINTS/N.
function wait_num_checkpoints {
  local job=$1
  local num_checkpoints=$2
  local n

  echo "Waiting for job ($job) to have at least $num_checkpoints completed checkpoints ..."

  while : ; do
    n=$(grep -o "Completed checkpoint [1-9]* for job $job" "$FLINK_DIR"/log/*standalonesession*.log | awk '{print $3}' | tail -1)

    if [ -z "$n" ]; then
      n=0
    fi

    if (( n < num_checkpoints )); then
      sleep 1
    else
      break
    fi
  done
}
# Resets bash's SECONDS counter; pair with end_timer. Nested timers are
# not supported.
function start_timer {
  SECONDS=0
}

# Prints the minutes and seconds elapsed since the last start_timer call.
function end_timer {
  local elapsed=$SECONDS
  printf '%d minutes and %d seconds elapsed.\n' $((elapsed / 60)) $((elapsed % 60))
}

#######################################
# Prints the given description, runs the given test and prints how long
# the execution took.
# Arguments:
#   $1: description of the test
#   $2: command to execute (word-split on purpose so arguments work)
# Returns:
#   the exit code of the executed command
#######################################
function run_test {
  local description="$1"
  local command="$2"
  local exit_code

  printf "\n==============================================================================\n"
  printf "Running ${description}\n"
  printf "==============================================================================\n"

  start_timer
  ${command}
  exit_code="$?"
  end_timer

  return "${exit_code}"
}
# Shuts down the cluster and cleans up all temporary folders and files. Make sure to clean up even in case of failures.
function cleanup {
  stop_cluster
  tm_kill_all
  jm_kill_all
  rm -rf $TEST_DATA_DIR 2> /dev/null
  revert_default_config
  # check_all_pass exits non-zero on failure, so the log directory below
  # is only emptied after a fully successful run (logs kept for debugging).
  check_all_pass
  rm -rf $FLINK_DIR/log/* 2> /dev/null
}
# Run cleanup on every exit path (success, failure, signal).
trap cleanup EXIT
|
<filename>samples/keyvault/keyvault-examples.ts
import { DefaultAzureCredential } from "@azure/identity";
import {
KeyVaultManagementClient,
VaultAccessPolicyParameters,
VaultCreateOrUpdateParameters,
VaultPatchParameters,
} from "@azure/arm-keyvault";
// Subscription id is read from the environment and may be undefined if the
// variable is not set.
const subscriptionId = process.env.subscriptionId;
// DefaultAzureCredential resolves env vars / managed identity / CLI login.
const credential = new DefaultAzureCredential();
// Sample fixtures shared by every example below.
const resourceGroup = "myjstest";
const tenantId = "72f988bf-86f1-41af-91ab-2d7cd011db47";
const vaultName = "myvaultzzzz";
// Initialized in main() before any example function is called.
let client: KeyVaultManagementClient;
//--keyvaultExamples--
//vaults.beginCreateOrUpdateAndWait
// Creates (or updates) the sample vault and waits for the LRO to finish.
async function vaults_beginCreateOrUpdateAndWait() {
  const parameter: VaultCreateOrUpdateParameters = {
    location: "eastus",
    properties: {
      tenantId: tenantId,
      sku: { family: "A", name: "standard" },
      accessPolicies: [
        {
          tenantId: tenantId,
          objectId: "00000000-0000-0000-0000-000000000000",
          // Full data-plane rights for the sample principal.
          permissions: {
            keys: [
              "encrypt", "decrypt", "wrapKey", "unwrapKey", "sign", "verify",
              "get", "list", "create", "update", "import", "delete",
              "backup", "restore", "recover", "purge",
            ],
            secrets: [
              "get", "list", "set", "delete",
              "backup", "restore", "recover", "purge",
            ],
            certificates: [
              "get", "list", "delete", "create", "import", "update",
              "managecontacts", "getissuers", "listissuers", "setissuers",
              "deleteissuers", "manageissuers", "recover", "purge",
            ],
          },
        },
      ],
      enabledForDeployment: true,
      enabledForDiskEncryption: true,
      enabledForTemplateDeployment: true,
    },
  };
  const result = await client.vaults.beginCreateOrUpdateAndWait(
    resourceGroup,
    vaultName,
    parameter
  );
  console.log(result);
}
//vaults.updateAccessPolicy
// Adds a minimal access policy entry to the vault.
async function vaults_updateAccessPolicy() {
  const parameter: VaultAccessPolicyParameters = {
    properties: {
      accessPolicies: [
        {
          tenantId: tenantId,
          objectId: "00000000-0000-0000-0000-000000000000",
          permissions: {
            keys: ["encrypt"],
            secrets: ["get"],
            certificates: ["get"],
          },
        },
      ],
    },
  };
  const result = await client.vaults.updateAccessPolicy(
    resourceGroup,
    vaultName,
    "add",
    parameter
  );
  console.log(result);
}
//vaults.get
// Fetches and prints the vault's management-plane representation.
async function vaults_get() {
  const vault = await client.vaults.get(resourceGroup, vaultName);
  console.log(vault);
}
//vaults.listByResourceGroup
// Iterates vaults of the resource group, requesting one item per page.
async function vaults_listByResourceGroup() {
  const vaults = client.vaults.listByResourceGroup(resourceGroup, { top: 1 });
  for await (const vault of vaults) {
    console.log(vault);
  }
}
//vaults.listDeleted
// Iterates soft-deleted vaults in the subscription.
async function vaults_listDeleted() {
  const deleted = client.vaults.listDeleted();
  for await (const vault of deleted) {
    console.log(vault);
  }
}
//vaults.listBySubscription
// Iterates all vaults of the subscription, one item per page.
async function vaults_listBySubscription() {
  const vaults = client.vaults.listBySubscription({ top: 1 });
  for await (const vault of vaults) {
    console.log(vault);
  }
}
//vaults.update
// Patches the vault with the same policy set used at creation time.
async function vaults_update() {
  const parameter: VaultPatchParameters = {
    properties: {
      tenantId: tenantId,
      sku: { family: "A", name: "standard" },
      accessPolicies: [
        {
          tenantId: tenantId,
          objectId: "00000000-0000-0000-0000-000000000000",
          permissions: {
            keys: [
              "encrypt", "decrypt", "wrapKey", "unwrapKey", "sign", "verify",
              "get", "list", "create", "update", "import", "delete",
              "backup", "restore", "recover", "purge",
            ],
            secrets: [
              "get", "list", "set", "delete",
              "backup", "restore", "recover", "purge",
            ],
            certificates: [
              "get", "list", "delete", "create", "import", "update",
              "managecontacts", "getissuers", "listissuers", "setissuers",
              "deleteissuers", "manageissuers", "recover", "purge",
            ],
          },
        },
      ],
      enabledForDeployment: true,
      enabledForDiskEncryption: true,
      enabledForTemplateDeployment: true,
    },
  };
  const updated = await client.vaults.update(resourceGroup, vaultName, parameter);
  console.log(updated);
}
//vaults.checkNameAvailability
// Vault names are globally scoped, so availability must be checked first.
async function vaults_checkNameAvailability() {
  const result = await client.vaults.checkNameAvailability({
    name: "sample-vault",
    type: "Microsoft.KeyVault/vaults",
  });
  console.log(result);
}
//vaults.delete
// Deletes the vault (soft delete if enabled on the subscription).
async function vaults_delete() {
  const result = await client.vaults.delete(resourceGroup, vaultName);
  console.log(result);
}
//vaults.getDeleted
// Reads the soft-deleted vault record in the "eastus" location.
async function vaults_getDeleted() {
  const deleted = await client.vaults.getDeleted(vaultName, "eastus");
  console.log(deleted);
}
//vaults.beginPurgeDeletedAndWait
// Permanently purges the soft-deleted vault and waits for completion.
async function vaults_beginPurgeDeletedAndWait() {
  const result = await client.vaults.beginPurgeDeletedAndWait(vaultName, "eastus");
  console.log(result);
}
//operations.list
// Iterates the operations supported by the Microsoft.KeyVault provider.
async function operations_list() {
  const operations = client.operations.list();
  for await (const operation of operations) {
    console.log(operation);
  }
}
// Entry point: builds the client and runs the create-vault example.
async function main() {
  // Fail fast with a clear error instead of constructing the client with
  // `undefined` when the environment variable is missing.
  if (!subscriptionId) {
    throw new Error("Environment variable 'subscriptionId' must be set.");
  }
  client = new KeyVaultManagementClient(credential, subscriptionId);
  await vaults_beginCreateOrUpdateAndWait();
}
main();
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.mobilestartup.controllers
import javax.inject.{Inject, Named, Singleton}
import play.api.mvc._
import uk.gov.hmrc.auth.core._
import uk.gov.hmrc.auth.core.retrieve.v2.Retrievals
import uk.gov.hmrc.http.{HeaderCarrier, HttpException}
import uk.gov.hmrc.mobilestartup.model.types.ModelTypes.JourneyId
import uk.gov.hmrc.mobilestartup.services.StartupService
import uk.gov.hmrc.play.bootstrap.backend.controller.BackendBaseController
import scala.concurrent.{ExecutionContext, Future}
@Singleton()
class LiveStartupController @Inject() (
  service:                  StartupService[Future],
  val controllerComponents: ControllerComponents,
  override val authConnector: AuthConnector,
  @Named("controllers.confidenceLevel") val confLevel: Int
)(implicit ec: ExecutionContext)
    extends BackendBaseController
    with AuthorisedFunctions {

  // 401 wrapper so grant-access failures flow through the same recover
  // block as the other auth exceptions below.
  class GrantAccessException(message: String) extends HttpException(message, 401)

  // Resolves the caller's NINO from the auth session and delegates to
  // StartupService to assemble the startup payload, returned as 200 OK.
  def startup(journeyId: JourneyId): Action[AnyContent] =
    Action.async { implicit request =>
      withNinoFromAuth { verifiedNino =>
        service.startup(verifiedNino, journeyId).map(Ok(_))
      }
    }

  /**
   * Check that the user is authorised to at least a confidence level of 200 and retrieve the NINO associated
   * with their account. Run the supplied function with that NINO.
   *
   * Various failure scenarios are translated to appropriate Play `Result` types:
   *  - no NINO on the auth record            -> 401 Unauthorized
   *  - no active session / grant failure     -> 401 Unauthorized
   *  - any other authorisation failure       -> 403 Forbidden
   *
   * @param f - the function to be run with the NINO, if it is successfully retrieved from the auth data
   */
  private def withNinoFromAuth(f: String => Future[Result])(implicit hc: HeaderCarrier): Future[Result] =
    authConnector
      .authorise(ConfidenceLevel.L200, Retrievals.nino)
      .flatMap {
        case Some(ninoFromAuth) => f(ninoFromAuth)
        case None => Future.successful(Unauthorized("Authorization failure [user is not enrolled for NI]"))
      }
      .recover {
        case e: NoActiveSession => Unauthorized(s"Authorisation failure [${e.reason}]")
        case e: GrantAccessException => Unauthorized(s"Authorisation failure [${e.message}]")
        case e: AuthorisationException => Forbidden(s"Authorisation failure [${e.reason}]")
      }
}
|
def throttle_controller_step(self, cte, sample_time, linear_velocity=0.0, current_velocity=0.0):
    """Compute throttle and brake commands for one control step.

    The original version referenced ``linear_velocity`` and
    ``current_velocity`` as undefined names and always raised
    ``NameError``; they are now explicit, defaulted parameters so existing
    two-argument calls keep working.

    Args:
        cte: control error; negative values trigger braking.
        sample_time: time since the previous step (currently unused, kept
            for interface compatibility).
        linear_velocity: commanded linear velocity.
        current_velocity: measured vehicle velocity.

    Returns:
        ``(throttle, brake)`` tuple. Brake is a torque computed from
        ``self.vehicle_mass`` and ``self.wheel_radius``.
    """
    throttle = 0
    brake = 0
    if linear_velocity == 0 and current_velocity < 0.1:
        # Commanded to stop and nearly stationary: hold with constant
        # brake torque (the 400 constant is inherited from the original).
        throttle = 0
        brake = 400
    elif throttle < 0.1 and cte < 0:
        # Decelerate: clamp to the deceleration limit and convert the
        # desired deceleration into a brake torque.
        throttle = 0
        decel = max(cte, self.decel_limit)
        brake = abs(decel) * self.vehicle_mass * self.wheel_radius
    return throttle, brake
<gh_stars>1-10
# -*- coding: utf-8 -*-
"""Batch generator definition."""
import cv2
import numpy as np
class ImageBatchGenerator(object):
    """Batch generator for training on general images.

    Yields float32 batches of shape (batch_size, channel, height, width)
    with pixel values scaled to [-1, 1]. Iterates over the input files
    forever (epoch after epoch).
    """

    def __init__(self, input_files, batch_size, height, width, channel=3,
                 shuffle=False, flip_h=False):
        # batch_size must be positive; channel selects color (3) or gray (1).
        assert batch_size > 0, batch_size
        assert channel == 3 or channel == 1, channel
        if channel == 3:
            self._imread_flag = cv2.IMREAD_COLOR
        else:
            self._imread_flag = cv2.IMREAD_GRAYSCALE
        self._input_files = input_files
        self._batch_size = batch_size
        self._height = height
        self._width = width
        self._shuffle = shuffle
        self._flip_h = flip_h
        # Eagerly read every file once so unreadable images or size
        # mismatches fail here instead of mid-training.
        for ifile in input_files:
            image = cv2.imread(ifile, cv2.IMREAD_UNCHANGED)
            assert isinstance(image, np.ndarray)
            assert image.shape[:2] == (
                height, width), (image.shape[:2], (height, width))
            print('verify ' + ifile)
        self._batch_generator = self.__get_batch_generator()

    def __get_batch_generator(self):
        # Infinite generator: optionally reshuffles the file order at the
        # start of each pass over the dataset.
        batch = []
        while True:
            if self._shuffle:
                file_index = np.random.permutation(self.n_samples)
            else:
                file_index = range(self.n_samples)
            for idx in file_index:
                image = cv2.imread(self._input_files[idx], self._imread_flag)
                if self._flip_h:
                    # Random horizontal flip with probability 0.5.
                    if np.random.randint(2) == 0:
                        image = image[:, ::-1]
                # Convert HWC -> CHW; grayscale gets an explicit channel axis.
                if image.ndim == 2:
                    image = image.reshape((1,) + image.shape)
                else:
                    image = image.transpose((2, 0, 1))
                image = image.astype(np.float32)
                # Scale [0, 255] -> [-1, 1].
                image = ((image / 255.) - 0.5) * 2.
                batch.append(image)
                if len(batch) == self._batch_size:
                    yield np.asarray(batch)
                    batch = []

    @property
    def n_samples(self):
        # Number of input files (== samples per epoch).
        return len(self._input_files)

    def __next__(self):
        # Python 3 iterator protocol; keeps the last batch on self._batch.
        self._batch = next(self._batch_generator)
        return self._batch

    def next(self):
        # Python 2 compatibility alias.
        return self.__next__()
|
package test;
import java.util.Calendar;
import java.util.Date;
import util.DateUtil;
import util.StringUtil;
/**
 * Small manual smoke test for DateUtil and StringUtil.
 */
public class Test {

    public static final int SECONDS_PER_YEAR = 60 * 60 * 24 * 365;
    public static final int SECONDS_PER_MONTH = 60 * 60 * 24 * 30;

    public static void main(String[] args) {
        // Timestamp three 30-day months from now, formatted for display.
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(new Date(System.currentTimeMillis()));
        calendar.add(Calendar.SECOND, 3 * SECONDS_PER_MONTH);
        String overTime = DateUtil.date2String(calendar.getTime(), "yyyy-MM-dd HH:mm:ss");
        System.out.println(overTime);

        // Compare two zero-padded numeric strings.
        int cmp = StringUtil.CompareString("000123", "001220");
        System.out.println(cmp);
    }
}
|
import React from 'react';
import { Switch, Route } from 'react-router-dom';
import ItemsContainer from './ItemsContainer';
import About from '../components/About'
import NotFound from '../components/NotFound'
import PrinciplesContainer from './PrinciplesContainer'
export default function MainContainer({language}) {
return (
<main className='main-container'>
<Switch>
<Route exact path="/" component={About} />
<Route path="/about" component={About} />
<Route path="/principles" component={(props) => <PrinciplesContainer language={language}/>} />
<Route path="/(resources|online-dialogues|response-tracker)/"
component={(props) => <ItemsContainer {...props} language={language}/>} />
<Route component={NotFound} />
</Switch>
</main>
)
}
|
class DataProcessor:
    """Loads, transforms and persists a JSON data file."""

    def __init__(self, path):
        # Path of the JSON file backing this processor; its content is
        # loaded eagerly.
        self.path = path
        self.data = self.load_data()

    def load_data(self):
        """Read and return the parsed JSON content of ``self.path``."""
        # Local import: `json` was used without a visible import in the
        # original; importing here keeps the class self-contained.
        import json
        with open(self.path) as f:
            return json.load(f)

    def process_data(self):
        """Apply ``do_something`` to every record and return the results.

        NOTE(review): ``do_something`` is defined elsewhere — confirm it is
        in scope wherever this class is used.
        """
        return [do_something(d) for d in self.data]

    def save_data(self, data):
        """Overwrite ``self.path`` with ``data`` serialized as JSON."""
        import json
        with open(self.path, 'w') as f:
            json.dump(data, f)
<gh_stars>0
import * as mongoose from 'mongoose';

// Mongoose schema for a travel-booking user record.
export const UserSchema = new mongoose.Schema({
  name: String,
  gender: String,
  // e-mail and phone each uniquely identify a user
  email: { type: String, unique: true },
  phone: { type: Number, unique: true },
  from: String,
  // NOTE(review): capitalised key kept as-is — renaming it would change the
  // persisted document shape; confirm whether `To` is intentional.
  To: String,
  seatnumber: Number
})
|
<filename>vst3sdk/public.sdk/samples/vst/mda-vst3/source/mdaThruZeroController.cpp
/*
* mdaThruZeroController.cpp
* mda-vst3
*
* Created by <NAME> on 6/14/08.
*
* mda VST Plug-ins
*
* Copyright (c) 2008 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
#include "mdaThruZeroController.h"
#include <cmath>
namespace Steinberg {
namespace Vst {
namespace mda {
#ifdef SMTG_MDA_VST2_COMPATIBILITY
//-----------------------------------------------------------------------------
FUID ThruZeroController::uid (0x5653456D, 0x64615A6D, 0x64612074, 0x6872757A);
#else
//-----------------------------------------------------------------------------
FUID ThruZeroController::uid (0x4A552BC9, 0x3F484476, 0x87608F28, 0xE34736DE);
#endif
//-----------------------------------------------------------------------------
// Default constructor — all parameter setup happens in initialize().
ThruZeroController::ThruZeroController ()
{
}
//-----------------------------------------------------------------------------
// Destructor — nothing to release; parameters are owned by the base class.
ThruZeroController::~ThruZeroController ()
{
}
//-----------------------------------------------------------------------------
// Registers the plug-in's parameters after base-class initialization.
// The sequential ParamID (pid) fixes the parameter order, so new
// parameters must only be appended.
tresult PLUGIN_API ThruZeroController::initialize (FUnknown* context)
{
	tresult res = BaseController::initialize (context);
	if (res == kResultTrue)
	{
		ParamID pid = 0;
		parameters.addParameter (USTRING("Rate"), USTRING("sec"), 0, 0.15, ParameterInfo::kCanAutomate, pid++);
		parameters.addParameter (USTRING("Depth"), USTRING("ms"), 0, 0.6, ParameterInfo::kCanAutomate, pid++);
		// ScaledParameter maps the normalized 0..1 value onto the given
		// display range (0..100 %, and -100..100 % for DepthMod).
		parameters.addParameter (new ScaledParameter (USTRING("Mix"), USTRING("%"), 0, 0.5, ParameterInfo::kCanAutomate, pid++, 0, 100, true));
		parameters.addParameter (new ScaledParameter (USTRING("DepthMod"), USTRING("%"), 0, 0.15, ParameterInfo::kCanAutomate, pid++, -100, 100, true));
		parameters.addParameter (new ScaledParameter (USTRING("Feedback"), USTRING("%"), 0, 0.15, ParameterInfo::kCanAutomate, pid++, 0, 100, true));
	}
	return res;
}
//-----------------------------------------------------------------------------
// No controller-specific teardown; delegates to the base class.
tresult PLUGIN_API ThruZeroController::terminate ()
{
	return BaseController::terminate ();
}
//-----------------------------------------------------------------------------
// Formats a normalized parameter value for display. Only Rate (tag 0) and
// Depth (tag 1) need custom formatting; everything else falls through to
// the base/ScaledParameter formatting.
tresult PLUGIN_API ThruZeroController::getParamStringByValue (ParamID tag, ParamValue valueNormalized, String128 string)
{
	UString128 result;
	switch (tag)
	{
		case 0:
		{
			// Rate: show "-" for (near) zero, otherwise the log mapping
			// 10^(2 - 3*x).
			if (valueNormalized < 0.01)
				result.fromAscii("-");
			else
				result.printFloat (pow (10.0f, (float)(2.0f - 3.0f * valueNormalized)));
			break;
		}
		case 1:
		{
			// Depth: quadratic taper (in samples), converted to
			// milliseconds via the current sample rate.
			float dep = 2000.0f * valueNormalized * valueNormalized;
			result.printFloat (1000.f * dep / getSampleRate ());
			break;
		}
		default:
			return BaseController::getParamStringByValue (tag, valueNormalized, string);
	}
	result.copyTo (string, 128);
	return kResultTrue;
}
//-----------------------------------------------------------------------------
// Parses a display string back into a normalized value. No parameter
// needs custom parsing, so this delegates straight to the base class.
tresult PLUGIN_API ThruZeroController::getParamValueByString (ParamID tag, TChar* string, ParamValue& valueNormalized)
{
	return BaseController::getParamValueByString (tag, string, valueNormalized);
	/*
	// Template for future custom parsing, kept for reference:
	switch (tag)
	{
		default:
			return BaseController::getParamValueByString (tag, string, valueNormalized);
	}
	return kResultFalse;*/
}
}}} // namespaces
|
/*
* Copyright 2019 Wultra s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.getlime.security.powerauth.lib.dataadapter.model.request;
import io.getlime.security.powerauth.lib.dataadapter.model.enumeration.AfsAction;
import io.getlime.security.powerauth.lib.dataadapter.model.enumeration.AfsAuthInstrument;
import io.getlime.security.powerauth.lib.dataadapter.model.enumeration.AfsType;
import io.getlime.security.powerauth.lib.dataadapter.model.enumeration.OperationTerminationReason;
import io.getlime.security.powerauth.lib.nextstep.model.enumeration.AuthStepResult;
import java.util.ArrayList;
import java.util.List;
/**
* Request parameters for AFS action.
*
* @author <NAME>, <EMAIL>
*/
public class AfsRequestParameters {

    /**
     * AFS product type.
     */
    private AfsType afsType;

    /**
     * AFS action.
     */
    private AfsAction afsAction;

    /**
     * Client IP address.
     */
    private String clientIpAddress;

    /**
     * Index counter for this authentication step.
     */
    private int stepIndex;

    /**
     * Username filled in by the user. This value is used for user identification before user is authenticated.
     */
    private String username;

    /**
     * Authentication instruments used during this authentication step.
     */
    private final List<AfsAuthInstrument> authInstruments = new ArrayList<>();

    /**
     * Authentication step result.
     */
    private AuthStepResult authStepResult;

    /**
     * Reason why operation was terminated.
     */
    private OperationTerminationReason operationTerminationReason;

    /**
     * Default constructor.
     */
    public AfsRequestParameters() {
    }

    /**
     * Constructor with all details.
     * @param afsType AFS product type.
     * @param afsAction AFS action.
     * @param clientIpAddress Client IP address.
     * @param stepIndex Index counter for this authentication step.
     * @param username Username filled in by the user, which is used before user is authenticated.
     * @param authInstruments Authentication instruments used during this authentication step, may be null.
     * @param authStepResult Authentication step result.
     * @param operationTerminationReason Reason why operation was terminated.
     */
    public AfsRequestParameters(AfsType afsType, AfsAction afsAction, String clientIpAddress, int stepIndex, String username, List<AfsAuthInstrument> authInstruments, AuthStepResult authStepResult, OperationTerminationReason operationTerminationReason) {
        this.afsType = afsType;
        this.afsAction = afsAction;
        this.clientIpAddress = clientIpAddress;
        this.stepIndex = stepIndex;
        this.username = username;
        // Null-safe: the original addAll threw NullPointerException when no
        // instruments were supplied.
        if (authInstruments != null) {
            this.authInstruments.addAll(authInstruments);
        }
        this.authStepResult = authStepResult;
        this.operationTerminationReason = operationTerminationReason;
    }

    /**
     * Get the AFS product type.
     * @return AFS product type.
     */
    public AfsType getAfsType() {
        return afsType;
    }

    /**
     * Set the AFS product type.
     * @param afsType AFS product type.
     */
    public void setAfsType(AfsType afsType) {
        this.afsType = afsType;
    }

    /**
     * Get the AFS action.
     * @return AFS action.
     */
    public AfsAction getAfsAction() {
        return afsAction;
    }

    /**
     * Set the AFS action.
     * @param afsAction AFS action.
     */
    public void setAfsAction(AfsAction afsAction) {
        this.afsAction = afsAction;
    }

    /**
     * Get client IP address.
     * @return Client IP address.
     */
    public String getClientIpAddress() {
        return clientIpAddress;
    }

    /**
     * Set client IP address.
     * @param clientIpAddress IP address.
     */
    public void setClientIpAddress(String clientIpAddress) {
        this.clientIpAddress = clientIpAddress;
    }

    /**
     * Get index counter for this authentication step.
     * @return Index counter for this authentication step.
     */
    public int getStepIndex() {
        return stepIndex;
    }

    /**
     * Set index counter for this authentication step.
     * @param stepIndex Index counter for this authentication step.
     */
    public void setStepIndex(int stepIndex) {
        this.stepIndex = stepIndex;
    }

    /**
     * Get username filled in by the user.
     * @return Username filled in by the user.
     */
    public String getUsername() {
        return username;
    }

    /**
     * Set username filled in by the user.
     * @param username Username filled in by the user.
     */
    public void setUsername(String username) {
        this.username = username;
    }

    /**
     * Get authentication instruments used during this step.
     * @return Authentication instruments used during this step.
     */
    public List<AfsAuthInstrument> getAuthInstruments() {
        return authInstruments;
    }

    /**
     * Get authentication step result.
     * @return Authentication step result.
     */
    public AuthStepResult getAuthStepResult() {
        return authStepResult;
    }

    /**
     * Set authentication step result.
     * @param authStepResult Authentication step result.
     */
    public void setAuthStepResult(AuthStepResult authStepResult) {
        this.authStepResult = authStepResult;
    }

    /**
     * Get reason why operation was terminated, use null for active operations.
     * @return Reason why operation was terminated.
     */
    public OperationTerminationReason getOperationTerminationReason() {
        return operationTerminationReason;
    }

    /**
     * Set reason why operation was terminated, use null for active operations.
     * @param operationTerminationReason Reason why operation was terminated.
     */
    public void setOperationTerminationReason(OperationTerminationReason operationTerminationReason) {
        this.operationTerminationReason = operationTerminationReason;
    }
}
|
<filename>src/project/enums/LoginResult.java
package project.enums;
/**
 * Possible outcomes of a login attempt.
 */
public enum LoginResult {
    /** Login succeeded. */
    SUCCESS,
    /** Rejected because of invalid credentials. */
    FAILED_BY_CREDENTIALS,
    /** Could not reach the authentication service. */
    FAILED_BY_NETWORK,
    /** Any other, unexpected failure. */
    FAILED_BY_UNEXPECTED_ERROR
}
|
<reponame>risq/emaproject-mobile
/**
* Created by jerek0 on 02/04/2015.
*/
let $ = require('jquery');
let uiManager = require('./UIManager');
function init() {
// fix scroll header
$('header .bg2').height($(window).height() + 60);
$('.dimension .content .dimensionLauncher').on('click', uiManager.goToDimension);
window.scrollTo(0, 0);
}
module.exports = {
init: init
}; |
<gh_stars>1-10
const { ServiceBroker } = require("moleculer");
const Swagger = require("../src/swagger.mixin");

describe("Create a Swagger service without settings", () => {
  const broker = new ServiceBroker({
    transporter: "Fake",
    nodeID: "node-1",
    logger: false,
  });
  const SwaggerService = {
    name: "swagger",
    mixins: [Swagger()],
  };
  broker.createService(SwaggerService);

  beforeAll(() => broker.start());
  afterAll(() => broker.stop());

  // Fixed: the three tests below previously all shared the exact same
  // title ("Mixin should register an openapi action"), two of them being
  // verbatim duplicates — which makes failures and snapshot identities
  // ambiguous. Each test now has a distinct, descriptive title.
  test("Mixin should register an openapi action", () => {
    const localService = broker.getLocalService("swagger");
    expect(localService.fullName).toEqual("swagger");
    expect(localService.actions.openapi).not.toBeUndefined();
  });

  test("openapi action should return the default document", async () => {
    const response = await broker.call("swagger.openapi");
    expect(response).toMatchInlineSnapshot(`
      Object {
        "components": Object {
          "responses": Object {
            "NotFound": Object {
              "description": "The specified resource was not found",
            },
            "ServiceUnaviable": Object {
              "description": "Service Unaviable",
            },
            "Success": Object {
              "content": Object {
                "application/json": Object {
                  "schema": Object {
                    "type": "string",
                  },
                },
                "text/html": Object {
                  "schema": Object {
                    "type": "string",
                  },
                },
              },
              "description": "Successful operation",
            },
            "Unauthorized": Object {
              "description": "Unauthorized",
            },
          },
        },
        "info": Object {
          "contact": Object {
            "email": "<EMAIL>",
          },
          "description": "A short description of the API. CommonMark syntax MAY be used for rich text representation.",
          "license": Object {
            "name": "Apache 2.0",
            "url": "http://www.apache.org/licenses/LICENSE-2.0.html",
          },
          "termsOfService": "http://swagger.io/terms/",
          "title": "The title of the API.",
          "version": "3.0.3",
        },
        "openapi": "3.0.3",
        "paths": Object {},
      }
    `);
  });

  describe("Create service-1 and service-2 with decorated actions", () => {
    beforeAll(async () => {
      await broker.createService({
        name: "service-1",
        actions: {
          list: {
            rest: "GET /",
            handler() {},
          },
        },
      });
      await broker.createService({
        name: "service-2",
        actions: {
          list: {
            rest: "GET /",
            handler() {},
          },
          get: {
            rest: "GET /:id",
            params: { id: "string" },
            handler() {},
          },
          complex: {
            rest: "GET /complex/:id/case/:type",
            params: {
              id: "string",
              type: "number",
              filter: { type: "array", items: "string" },
            },
            handler() {},
          },
        },
      });
      await broker.start();
    });
    afterAll(() => broker.stop());

    test("openapi document should include paths from the decorated services", async () => {
      const response = await broker.call("swagger.openapi");
      expect(response).toMatchSnapshot();
    });

    test("openapi document should be stable across repeated calls", async () => {
      const response = await broker.call("swagger.openapi");
      expect(response).toMatchSnapshot();
    });
  });
});
|
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed so results are
# reproducible.
#   -d 15          : run test number 15 (numeric test ids are
#                    version-dependent — confirm with `dieharder -l`)
#   -g 22          : use generator number 22 (also version-dependent;
#                    confirm with `dieharder -g -1`)
#   -S 538031746   : seed the generator deterministically
dieharder -d 15 -g 22 -S 538031746
|
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.Queue;
import java.util.LinkedList;
class Producer implements Runnable {
    // ... (same as in the problem description)

    /**
     * Continuously produces random items into the shared buffer.
     * Blocks (via bufferNotFull) while the buffer holds 10 items and
     * signals consumers after every successful insert.
     */
    public void run() {
        while (true) {
            consumer_lock.lock();
            try {
                // await() atomically releases the lock while waiting and
                // re-acquires it before returning.
                while (buffer.size() == 10) {
                    bufferNotFull.await();
                }
                int item = (int) (Math.random() * 100);
                buffer.add(item);
                System.out.println("Produced: " + item);
                bufferNotEmpty.signalAll(); // Notify consumers that the buffer is not empty
            } catch (InterruptedException e) {
                // Preserve the interrupt status so callers can observe it.
                Thread.currentThread().interrupt();
            } finally {
                // Defensive check before unlocking.
                if (consumer_lock.isHeldByCurrentThread())
                    consumer_lock.unlock();
            }
        }
    }
}
class Consumer implements Runnable {
    // ... (same as in the problem description)

    /**
     * Continuously consumes items from the shared buffer.
     * Blocks (via bufferNotEmpty) while the buffer is empty and signals
     * producers after every successful removal.
     */
    public void run() {
        while (true) {
            consumer_lock.lock();
            try {
                // await() atomically releases the lock while waiting and
                // re-acquires it before returning.
                while (buffer.isEmpty()) {
                    bufferNotEmpty.await();
                }
                int item = buffer.poll();
                System.out.println("Consumed: " + item);
                bufferNotFull.signalAll(); // Notify producers that the buffer is not full
            } catch (InterruptedException e) {
                // Preserve the interrupt status so callers can observe it.
                Thread.currentThread().interrupt();
            } finally {
                // Defensive check before unlocking.
                if (consumer_lock.isHeldByCurrentThread())
                    consumer_lock.unlock();
            }
        }
    }
}
// Entry point that wires the shared buffer, lock and conditions together
// and starts the producer/consumer threads (body elided in this excerpt).
public class Main {
    // ... (same as in the problem description)
}
#!/bin/bash
# ANSI colour escape sequences: "0;3x" are the regular foreground colours,
# "1;3x" are their bold/bright variants (FG_* / DARK_GRAY).
BLACK="\033[0;30m"
RED="\033[0;31m"
GREEN="\033[0;32m"
YELLOW="\033[0;33m"
BLUE="\033[0;34m"
MAGENTA="\033[0;35m"
CYAN="\033[0;36m"
DEFAULT="\033[0;37m"
DARK_GRAY="\033[1;30m"
FG_RED="\033[1;31m"
FG_GREEN="\033[1;32m"
FG_YELLOW="\033[1;33m"
FG_BLUE="\033[1;34m"
FG_MAGENTA="\033[1;35m"
FG_CYAN="\033[1;36m"
FG_WHITE="\033[1;37m"
# Print a message prefixed with a green check mark.
_success () {
    local msg="$1"
    echo -e "${FG_GREEN}✔ ${FG_WHITE}${msg}${DEFAULT}"
}
# Print an informational message prefixed with a cyan "i".
_info () {
    local msg="$1"
    echo -e "${FG_CYAN}i ${FG_WHITE}${msg}${DEFAULT}"
}
# Resolve the directory this script lives in and work from there so the
# script behaves the same regardless of the caller's cwd.
script_path="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# Abort if the cd fails — otherwise `npm i` and the conf/ files below
# would be created in whatever directory we happened to be in.
cd "${script_path}" || exit 1

if [ -d node_modules ]; then
    _success "Found node modules/"
else
    _info "Installing node modules"
    npm i
fi

mkdir -p conf
if [ -f conf/user_config.toml ]; then
    _success "Found user_config.toml"
else
    # First run: seed the user config from the shipped template.
    _info "Creating user_config.toml ..."
    cp conf/template_config.toml conf/user_config.toml
    _success "Created user_config.toml"
    _info "Please make the necessary changes in conf/user_config.toml"
fi
<gh_stars>1-10
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.refresh = void 0;
var refresh = {
"viewBox": "0 0 512 512",
"children": [{
"name": "path",
"attribs": {
"d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M354.554,236.532c6.422,31.797-2.75,66.047-27.484,90.703c-39.234,39.203-102.907,39.203-142.172,0\r\n\tc-39.281-39.188-39.266-102.688,0-141.859c39.094-38.984,102.782-38.641,141.594-0.484l-13.828,14.391l63.609,14.469l-17.875-62.016\r\n\tl-12.891,13.375c-51.719-50.594-130.329-48.641-180.016,0.906c-49.984,49.875-49.984,130.703,0,180.562\r\n\tc50,49.891,131.016,49.891,181,0c28.25-28.125,40.5-66.125,36.891-102.828L354.554,236.532z"
},
"children": []
}]
};
exports.refresh = refresh; |
#!/bin/bash
# Copyright 2018 Google LLC
#
# Use of this source code is governed by an MIT-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/MIT.
# This script tests that a build with multiple versions of the Mundane crate in
# the same build graph works properly. It performs the following steps:
# - Create a temporary directory
# - Create two copies of Mundane - mundane-v1, and mundane-v2 - which directly
# expose the boringssl::ffi module so that dependent crates can access the raw
# symbols
# - Create two crates, one depending on mundane-v1, and one on mundane-v2, each
# of which exposes all of the BoringSSL symbols from Mundane
# - Create a top-level program which depends on both of these crates
# - Have the top-level program's main link all of the Mundane functions from
# each of the crates
# - Produce a release build, which forces linking, to make sure that linking
# these two versions of the library at the same time works properly
set -e

# the directory this script lives in
readonly SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

CRATE_ROOT="${SCRIPT_DIR}/.."
TMP="$(mktemp -d)"
# Single quotes defer expansion to trap time, and quoting "$TMP" keeps the
# cleanup correct even if the temp path contains spaces (custom $TMPDIR).
trap 'rm -rf "$TMP"' EXIT
cd "$TMP"

# NOTE: The -L means to follow symlinks
cp -LR "$CRATE_ROOT" mundane-v1
cp -LR "$CRATE_ROOT" mundane-v2
# Print the working directory for debugging failed runs.
echo "$TMP"

#
# Make Mundane crates
#

# Update the Cargo.toml versions and names in place to be distinct
sed -i'' -e 's/^name =.*/name = "mundane-v1"/' mundane-v1/Cargo.toml
sed -i'' -e 's/^version =.*/version = "1.0.0"/' mundane-v1/Cargo.toml
sed -i'' -e 's/^name =.*/name = "mundane-v2"/' mundane-v2/Cargo.toml
sed -i'' -e 's/^version =.*/version = "2.0.0"/' mundane-v2/Cargo.toml
# Update the link directive to use the right version number
sed -i'' -e 's/#[link(name = "crypto_[0-9]*_[0-9]*_[0-9]*")]/#[link(name = "crypto_1_0_0")]/' mundane-v1/boringssl/boringssl.rs
sed -i'' -e 's/#[link(name = "crypto_[0-9]*_[0-9]*_[0-9]*")]/#[link(name = "crypto_2_0_0")]/' mundane-v2/boringssl/boringssl.rs
# Update the link_name directives to use the right version number
sed -i'' -e 's/__RUST_MUNDANE_[0-9]*_[0-9]*_[0-9]*_/__RUST_MUNDANE_1_0_0_/' mundane-v1/boringssl/boringssl.rs
sed -i'' -e 's/__RUST_MUNDANE_[0-9]*_[0-9]*_[0-9]*_/__RUST_MUNDANE_2_0_0_/' mundane-v2/boringssl/boringssl.rs
# Mark the ffi module as public
sed -i'' -e 's/^mod ffi;$/pub mod ffi;/' mundane-v1/src/boringssl/mod.rs
sed -i'' -e 's/^mod ffi;$/pub mod ffi;/' mundane-v2/src/boringssl/mod.rs
# Make Mundane directly expose the ffi module
echo "pub use boringssl::ffi;" >> mundane-v1/src/lib.rs
echo "pub use boringssl::ffi;" >> mundane-v2/src/lib.rs
# Usage: make_crate <crate name> <dep name>
#
# Creates a crate named <crate name> whose lib.rs re-exports every ffi
# symbol of its dependency <dep name>.
function make_crate {
    # Use locals so the two invocations cannot leak state to each other
    # or to the rest of the script.
    local crate_name="$1"
    local dep_name="$2"
    # Cargo package names use '-', but Rust identifiers use '_';
    # a parameter expansion replaces the echo|tr subshell pipeline.
    local dep_name_rs="${dep_name//-/_}"

    mkdir "$crate_name"
    mkdir "${crate_name}/src"
    # Re-export all symbols from Mundane
    cat >> "${crate_name}/src/lib.rs" <<EOF
extern crate ${dep_name_rs};
pub use ${dep_name_rs}::ffi::*;
EOF
    cat >> "${crate_name}/Cargo.toml" <<EOF
[package]
name = "${crate_name}"
version = "0.0.0"
[dependencies]
${dep_name} = { path = "../${dep_name}" }
EOF
}
make_crate depends-mundane-v1 mundane-v1
make_crate depends-mundane-v2 mundane-v2

#
# Make top-level crate
#

# The top-level package depends on both wrapper crates, forcing both
# Mundane versions into a single build graph.
cat >> Cargo.toml <<EOF
[package]
name = "mundane-version-test"
version = "0.0.0"
[dependencies]
depends-mundane-v1 = { path = "./depends-mundane-v1" }
depends-mundane-v2 = { path = "./depends-mundane-v2" }
EOF

mkdir src
# Open main(); its body is generated below from the scraped symbol list.
cat >> src/main.rs <<EOF
extern crate depends_mundane_v1;
extern crate depends_mundane_v2;
fn main() {
EOF

# Populate the body of main() with lines of the form:
# println!("{:?}", depends_mundane_v1::SYMBOL as *const ());
# println!("{:?}", depends_mundane_v2::SYMBOL as *const ());
#
# Find the functions to use by scraping boringssl.rs.
#
# TODO(joshlf): Are there other types of symbols we want to include (such as
# static variables)?
rg -U 'extern "C" \{\n[^\n]*\n pub fn [0-9A-Za-z_]*([^)]*)' "${SCRIPT_DIR}/boringssl.rs" | \
grep '^ *pub fn' | sed -e 's/.*pub fn \([^(]*\).*/println!("{:?}", depends_mundane_v1::\1 as *const ());/' >> src/main.rs
rg -U 'extern "C" \{\n[^\n]*\n pub fn [0-9A-Za-z_]*([^)]*)' "${SCRIPT_DIR}/boringssl.rs" | \
grep '^ *pub fn' | sed -e 's/.*pub fn \([^(]*\).*/println!("{:?}", depends_mundane_v2::\1 as *const ());/' >> src/main.rs
echo '}' >> src/main.rs

# A release build forces linking, verifying both crypto libraries can be
# linked into the same binary.
cargo build --release
|
def factorial(n):
    """Return n! for an integer n.

    Implemented iteratively so large inputs do not hit Python's recursion
    limit (the previous recursive version raised RecursionError around
    n ~ 1000). As before, any n <= 1 — including negative values — yields 1.
    """
    result = 1
    for k in range(2, n + 1):
        result *= k
    return result
/**
 * Simple mutable value object pairing a fruit name with a quantity.
 */
public class Fruit {

    private String name;
    private int quantity;

    /**
     * @param name     display name of the fruit
     * @param quantity number of items on hand
     */
    public Fruit(String name, int quantity) {
        this.name = name;
        this.quantity = quantity;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getQuantity() {
        return quantity;
    }

    public void setQuantity(int quantity) {
        this.quantity = quantity;
    }
}
class RouteItem:
    """Value object describing a single routed service entry."""

    def __init__(self, namespace, host, path, openapi_ui):
        # Kubernetes namespace the route belongs to.
        self.namespace = namespace
        # Hostname the route is served under.
        self.host = host
        # URL path prefix for the route.
        self.path = path
        # OpenAPI UI setting for the route (opaque here — presumably a
        # flavor/flag; confirm against callers).
        self.openapi_ui = openapi_ui
class RouteManager:
    """Class-level registry of routes keyed by uid.

    The registry is shared across the process: all access goes through
    classmethods operating on the single ``routes`` dict.
    """

    routes = {}

    @classmethod
    def load_kube_config(cls):
        # Implement the logic to load Kubernetes configuration settings
        pass

    @classmethod
    def add_route(cls, uid, namespace, host, path, openapi_ui):
        """Register (or overwrite) the route stored under uid."""
        cls.routes[uid] = RouteItem(namespace, host, path, openapi_ui)

    @classmethod
    def update_route(cls, uid, namespace, host, path, openapi_ui):
        """Replace an existing route; raise ValueError if uid is unknown."""
        if uid not in cls.routes:
            raise ValueError(f"Route with uid {uid} does not exist")
        cls.routes[uid] = RouteItem(namespace, host, path, openapi_ui)

    @classmethod
    def get_route(cls, uid):
        """Return the route stored under uid; raise ValueError if unknown."""
        if uid not in cls.routes:
            raise ValueError(f"Route with uid {uid} does not exist")
        return cls.routes[uid]
<filename>bitly/kgs/spec/services/hash_service/finder_spec.rb
# frozen_string_literal: true

# Shared spec configuration (RSpec setup, requires for the code under test).
require_relative '../spec_helper'

# Placeholder spec for HashService::Finder — no examples written yet.
describe HashService::Finder do
end
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2020 Xilinx, Inc. All Rights Reserved.
#
# NOTE(review): this script was generated on Windows (see the C:/ paths
# and ';' separators below). It warns and exits immediately; everything
# after the bare 'exit' is dead code until the paths are fixed for UNIX.
echo "This script was generated under a different operating system."
echo "Please update the PATH and LD_LIBRARY_PATH variables below, before executing this script"
exit

# Prepend Xilinx Vitis/Vivado tool directories to PATH.
# (Dead code — see the unconditional 'exit' above.)
if [ -z "$PATH" ]; then
  PATH=C:/Xilinx/Vitis/2020.2/bin;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2020.2/bin
else
  PATH=C:/Xilinx/Vitis/2020.2/bin;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2020.2/bin:$PATH
fi
export PATH

# Seed LD_LIBRARY_PATH (left empty by the generator on this platform).
if [ -z "$LD_LIBRARY_PATH" ]; then
  LD_LIBRARY_PATH=
else
  LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH

# Run directory for the synth_1 run; logs accumulate in runme.log there.
HD_PWD='C:/Users/danie/OneDrive/Documentos/CR/PRATICAL/CustomHardwareDisplays/CustomHardwareDisplays.runs/synth_1'
cd "$HD_PWD"
HD_LOG=runme.log
/bin/touch $HD_LOG

# EAStep: run one tool step via the ISE wrapper, appending all output to
# the run log; abort the whole script if the step exits non-zero.
ISEStep="./ISEWrap.sh"
EAStep()
{
  $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
  if [ $? -ne 0 ]
  then
    exit
  fi
}

# Launch Vivado in batch mode to run the generated synthesis Tcl script.
EAStep vivado -log mb_design_wrapper.vds -m64 -product Vivado -mode batch -messageDb vivado.pb -notrace -source mb_design_wrapper.tcl
|
def remove_duplicates(ls):
    """Return a new list with duplicates removed, preserving first-seen order.

    Bug fix: the original ended with ``return list(set(result))``, which
    discarded the insertion order the loop had carefully preserved (set
    iteration order is arbitrary) and required hashable elements even
    though the loop itself does not. Returning the de-duplicated list
    directly keeps order stable and also supports unhashable elements
    (e.g. lists), at the cost of O(n^2) membership checks — same as the
    original loop.
    """
    result = []
    for item in ls:
        # Linear membership test keeps unhashable elements working.
        if item not in result:
            result.append(item)
    return result
<reponame>bytecodeio/migration_tool<gh_stars>0
// /* tslint:disable */
// /*
// * The MIT License (MIT)
// *
// * Copyright (c) 2020 Looker Data Sciences, Inc.
// *
// * Permission is hereby granted, free of charge, to any person obtaining a copy
// * of this software and associated documentation files (the "Software"), to deal
// * in the Software without restriction, including without limitation the rights
// * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// * copies of the Software, and to permit persons to whom the Software is
// * furnished to do so, subject to the following conditions:
// *
// * The above copyright notice and this permission notice shall be included in
// * all copies or substantial portions of the Software.
// *
// * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// * THE SOFTWARE.
// */
// import React, { useContext, useEffect, useState } from "react"
// import {
// ActionList,
// ActionListItemAction,
// ActionListColumns,
// ActionListItem,
// ActionListItemColumn,
// Banner,
// Box,
// Button,
// ButtonOutline,
// FieldText,
// Form,
// Prompt,
// Text,
// } from "@looker/components"
// import { FetchProxyDemoProps } from "./types"
// import {
// ExtensionContext,
// ExtensionContextData
// } from "@looker/extension-sdk-react"
// import {
// updateName,
// updatePosts,
// updateTitle,
// updateErrorMessage,
// updatePostsServer,
// } from '../../data/DataReducer'
// import { extractMessageFromError } from '../../../../utils'
// /**
// * Demonstration of Looker extension SDK external API use, fetchProxy
// *
// * A note on state. This component is rendered in a tab panel and such
// * can get unloaded while an asynchronous operation is in progress. Rather
// * than attempt to update state in this component after the component is
// * unmounted and get a nasty message in the console, state is held in the
// * parent component. Thus if the component is unloaded, no messages appear
// * in the console. The added advantage is that data will be ready to
// * display should the component be remounted.
// *
// * A note on data. A simple json server is provided. This server must be
// * started in order for this demo to work.
// */
// export const FetchProxyDemo: React.FC<FetchProxyDemoProps> = ({ dataDispatch, dataState }) => {
// const extensionContext = useContext<ExtensionContextData>(ExtensionContext)
// // Get access to the extension SDK and the looker API SDK.
// const { extensionSDK, core40SDK } = extensionContext
// // Component state
// // const { posts, name, title, errorMessage, postsServer } = dataState
// // postsServer isn't used anymore.
// const [fetching,changeFetching] = useState(true)
// useEffect(() => {
// onCreatePostSubmit()
// }, [fetching])
// // const { name, title, errorMessage, url, key, secret } = dataState
// const name = ''
// const title = ''
// const postsServer = ''
// const errorMessage = ''
// const posts : Array<any>= []
// const subdomain = 'looker'
// const domain = 'bytecode.io'
// const api_port = '19999'
// const api_version = '4.0'
// const client_id = 'X'
// const client_secret = 'X'
// // https://{{subdomain}}.{{domain}}:{{api_port}}/api/{{api_version}}/login?client_id={{client_id}}&client_secret={{client_secret}}
// const onCreatePostSubmit = async () => {
// // Need to prevent default processing for event from occurring.
// // The button is rendered in a form and default action is to
// // submit the form.
// // event.preventDefault()
// var body = `{client_id:${encodeURIComponent(client_id)}&client_secret:${encodeURIComponent(client_secret)}}`
// try {
// // A more complex use of the fetch proxy. In this case the
// // content type must be included in the headers as the json server
// // will not process it otherwise.
// // Note the that JSON object in the string MUST be converted to
// // a string.
// let response = await extensionSDK.serverProxy(
// // `https://${subdomain}.${domain}:${api_port}/api/${api_version}/login?client_id=${client_id}&client_secret=${client_secret}`,
// // `https://${subdomain}.${domain}:${api_port}/api/${api_version}/login`,
// `https://${subdomain}.${domain}/api/${api_version}/login`,
// method: 'POST',
// headers: {
// "Content-Type": 'application/x-www-form-urlencoded;charset=UTF-8',
// },
// body: body
// })
// if (response.ok) {
// console.dir(response)
// updateTitle(dataDispatch, "")
// updateErrorMessage(dataDispatch, undefined)
// fetchPosts()
// } else {
// console.error("Failed to create post", response)
// updateErrorMessage(dataDispatch, "Failed to create post")
// }
// } catch(error) {
// console.error("An unexpected error occured", error)
// updateErrorMessage(dataDispatch, `An unexpected error occured: ${extractMessageFromError(error)}`)
// }
// }
// const onPostDelete = async (post: any) => {
// // Slightly more complex use of the fetch method. In this case
// // the DELETE method is used.
// try {
// let response: any = await extensionSDK.fetchProxy(
// `${postsServer}/posts/${post.id}`,
// {
// method: 'DELETE',
// })
// if (response.ok) {
// updateTitle(dataDispatch, "")
// updateErrorMessage(dataDispatch, undefined)
// fetchPosts()
// } else {
// console.error("Failed to delete post", response)
// updateErrorMessage(dataDispatch, "Failed to delete post")
// }
// }
// catch(error) {
// console.error("An unexpected error occured:", error)
// updateErrorMessage(dataDispatch, `An unexpected error occured: ${extractMessageFromError(error)}`)
// }
// }
// const fetchPosts = async(firstTime = false) => {
// try {
// // Use the extension SDK external API fetch method. A simple GET call.
// // Note the response body is determined from the fetch response. The
// // fetch call can take a third argument that indicates what type of
// // response is expected.
// const response = await extensionSDK.fetchProxy(`${postsServer}/posts`)
// if (response.ok) {
// updatePosts(dataDispatch, response.body.reverse())
// updateErrorMessage(dataDispatch, undefined)
// } else {
// updateErrorMessage(dataDispatch, "Has the data server been started? yarn start start-data-server")
// }
// } catch(error) {
// const errorMessage = extractMessageFromError(error)
// if (errorMessage.startsWith("Extension not entitled to access external ")) {
// updateErrorMessage(dataDispatch, errorMessage)
// } else if (firstTime && errorMessage.startsWith("Required Looker version ")) {
// updateErrorMessage(dataDispatch, "This version of Looker does not support external API functions")
// } else if (firstTime && errorMessage.startsWith("Entitlements must be defined")) {
// updateErrorMessage(dataDispatch, "Entitlements must be defined to use external API functionality")
// } else if (firstTime) {
// updateErrorMessage(dataDispatch, "Has the data server been started? yarn start start-data-server")
// } else {
// updateErrorMessage(dataDispatch, `An unexpected error occured: ${errorMessage}`)
// }
// }
// }
// const onTitleChange = (e: any) => {
// updateTitle(dataDispatch, e.currentTarget.value)
// }
// const onDismiss = () => {
// updateErrorMessage(dataDispatch, undefined)
// }
// const onChangeServerClick = (value: string) => {
// // Allow server to be changed to facilitate integration tests.
// // Integration do not have access to 127.0.0.1 so server can be
// // changed during the test.
// try {
// new URL(value)
// updatePosts(dataDispatch, [])
// updatePostsServer(dataDispatch, value.endsWith('/') ? value.substring(0, value.length - 1) : value)
// }
// catch(error) {
// updateErrorMessage(dataDispatch, 'Invalid URL')
// }
// }
// const postsColumns = [
// {
// id: 'id',
// primaryKey: true,
// title: 'ID',
// type: 'number',
// widthPercent: 10,
// },
// {
// id: 'title',
// title: 'Title',
// type: 'string',
// widthPercent: 60,
// },
// {
// id: 'author',
// title: 'Author',
// type: 'string',
// widthPercent: 30,
// },
// ] as ActionListColumns
// const postsItems = posts.map((post: any) => {
// const actions = (
// <>
// <ActionListItemAction onClick={onPostDelete.bind(null, post)}>
// Delete
// </ActionListItemAction>
// </>
// )
// const { id, title, author } = post
// return (
// <ActionListItem key={id} id={id} actions={actions}>
// <ActionListItemColumn>{id}</ActionListItemColumn>
// <ActionListItemColumn>{title}</ActionListItemColumn>
// <ActionListItemColumn>{author}</ActionListItemColumn>
// </ActionListItem>
// )
// })
// return (
// <>
// {errorMessage &&
// <Banner intent="error" onDismiss={onDismiss} canDismiss>
// {errorMessage}
// </Banner>
// }
// <Box display="flex" flexDirection="row" justifyContent="space-between" mb="medium" alignItems="baseline">
// <Text>Posts data is being served from {postsServer}</Text>
// <Prompt
// title="Change server"
// inputLabel='Server'
// defaultValue={postsServer}
// onSave={onChangeServerClick}
// >
// {(open) => <ButtonOutline onClick={open}>Change server</ButtonOutline>}
// </Prompt>
// </Box>
// <Box mb="medium" px="xlarge" pt="small" border="1px solid" borderColor="palette.charcoal200" borderRadius="4px">
// <Form onSubmit={onCreatePostSubmit}>
// <FieldText label="Title" name="title" value={title} onChange={onTitleChange} required />
// <FieldText label="Author" name="author" value={name} readOnly />
// <Button disabled={title.length === 0}>Create Post</Button>
// </Form>
// </Box>
// <ActionList columns={postsColumns}>{postsItems}</ActionList>
// </>
// )
// }
|
(function () {
  // Controller: currently just logs that the dashboard view loaded.
  function dashboardCtrl($rootScope) {
    console.log('we are at dashboard');
  }

  // Register the dashboard module (no dependencies) and its controller.
  angular
    .module('dashboardModule', [])
    .controller('dashboardCtrl', dashboardCtrl);
})();
|
def fibonacci(n):
    """Return the n-th Fibonacci number (fib(0) = 0, fib(1) = 1).

    For n <= 1 the input itself is returned, exactly matching the
    original recursive base case (including negative inputs).

    Improvement: iterative O(n) computation replaces the original naive
    double recursion, which was O(2^n) and impractical beyond n ~ 35;
    results are identical for every input.
    """
    if n <= 1:
        return n
    prev, curr = 0, 1
    # After k iterations: curr == fib(k + 1); loop runs n - 1 times.
    for _ in range(n - 1):
        prev, curr = curr, prev + curr
    return curr
<reponame>francisuloko/todo-list
/**
 * @jest-environment jsdom
 */
// Verifies that dispatching a 'change' event on a task's checkbox marks
// that task completed and persists it through the mock storage layer.
import MockStorage from '../src/__mocks__/local-storage.js';
import MockDOM from '../src/__mocks__/DOM.js';

describe('Update task completion', () => {
  // Seed data: three incomplete sample tasks.
  const temp = [
    {
      description: 'Sample 1',
      completed: false,
      index: 0,
    },
    {
      description: 'Sample 2',
      completed: false,
      index: 1,
    },
    {
      description: 'Sample 3',
      completed: false,
      index: 2,
    },
  ];
  const change = new Event('change');
  let element = '';
  const mockDOM = new MockDOM();
  const mockStorage = new MockStorage();
  // Arrange: store and render the list, then simulate the user toggling
  // the first task's checkbox. NOTE(review): this runs at describe-time,
  // before any test callback executes.
  mockStorage.setList(temp);
  const list = mockStorage.getList();
  mockDOM.displayTask(list);
  element = document.querySelector('.checkbox');
  // dispatchEvent returns true when the event was not cancelled.
  if (element.dispatchEvent(change)) {
    list[0].completed = true;
    mockStorage.setList(list);
    mockDOM.displayTask(mockStorage.list);
  }
  test('Task completion is updated successfully', () => {
    expect(list[0].completed).toBeTruthy();
  });
  test('Expect completed not to be false', () => {
    expect(list[0].completed).not.toBe(false);
  });
  test('Task completion is false', () => {
    // Untouched tasks must remain incomplete.
    expect(list[1].completed).toBeFalsy();
  });
});
#!/bin/bash
# Back up the Eclipse workspace to a timestamped gzip archive and prune
# old backups so at most 10 remain.
#
# Fixes over the original:
#  - shebang was /bin/sh but the script used the bash-only 'function'
#    keyword; now plain POSIX-style definitions under /bin/bash
#  - 'tar cf' produced an UNcompressed archive named .tar.gz; 'czf'
#    actually gzips it
#  - success message was printed unconditionally; now only on success
#  - retention counted 'ls -l' lines, which includes the 'total' header
#    (off-by-one) and parsed ls output; now counts files via find
#  - variables quoted throughout

backupdir=/home/guest/Documents/Backups
basedir=/home/guest/Documents/Eclipse

# Print a [date:time] tag for log lines.
header(){
  date '+[%F:%T]'
}

# Print a timestamped message.
message(){
  echo "$(header) $1"
}

datestring=$(date +%F_%T)

if tar czf "$backupdir/backup$datestring.tar.gz" "$basedir" \
    --exclude="$basedir/.metadata" --exclude="$basedir/Testing"; then
  message "Backup successful, filename: backup$datestring.tar.gz"
else
  message "Backup failed" >&2
  exit 1
fi

# Find oldest file and delete it until at most 10 backups remain.
while [ "$(find "$backupdir" -maxdepth 1 -type f | wc -l)" -gt 10 ]; do
  oldestFile=$(find "$backupdir" -type f -printf '%T+ %p\n' | sort | head -n 1 | awk '{print $2}')
  rm -- "$oldestFile"
  message "The oldest file was: $oldestFile"
done
|
#!/bin/bash
# Generates configurations for 1 node.
# We can't pack several nodes without having `--home` flag working
# so reducing nodes count to 1 for now and creating follow up ticket.

# Abort on errors (-e) and unset vars (-u), trace commands (-x), and
# fail pipelines on any stage (-o pipefail).
set -euox pipefail

# sed in macos requires extra argument
# NOTE(review): if $OSTYPE is neither linux-gnu* nor darwin*,
# sed_extension stays unset and 'set -u' aborts at its first use —
# confirm whether other platforms need support.
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
sed_extension=''
elif [[ "$OSTYPE" == "darwin"* ]]; then
sed_extension='.orig'
fi

CHAIN_ID="cheqd"

# Initialize the node and capture its identity for the gentx below.
echo "##### [Node 0] Generating key"
cheqd-noded init node0 --chain-id $CHAIN_ID
NODE_0_ID=$(cheqd-noded tendermint show-node-id)
NODE_0_VAL_PUBKEY=$(cheqd-noded tendermint show-validator)

echo "##### [Node 0] Setting fee"
sed -i $sed_extension 's/minimum-gas-prices = ""/minimum-gas-prices = "25ncheq"/g' "$HOME/.cheqdnode/config/app.toml"

echo "##### [Validator operator] Generating key"
cheqd-noded keys add alice --keyring-backend test

echo "##### [Validator operator] Initializing genesis"
GENESIS="$HOME/.cheqdnode/config/genesis.json"
# Switch the default staking denom to ncheq everywhere in genesis.
sed -i $sed_extension 's/"stake"/"ncheq"/' $GENESIS

echo "##### [Validator operator] Creating genesis account"
cheqd-noded add-genesis-account alice 20000000000000000ncheq --keyring-backend test

echo "##### Adding test accounts to the genesis"
# Each account is spliced into genesis.json in two steps via jq:
# a bank balance entry and a matching auth account entry.
BASE_ACCOUNT_1="cheqd1rnr5jrt4exl0samwj0yegv99jeskl0hsxmcz96"
# Mnemonic: sketch mountain erode window enact net enrich smoke claim kangaroo another visual write meat latin bacon pulp similar forum guilt father state erase bright
cat <<< "$(jq '.app_state.bank.balances += [{"address": "'${BASE_ACCOUNT_1}'", "coins": [{"denom": "ncheq", "amount": "100001000000000000"}] }]' "$GENESIS")" > "$GENESIS"
cat <<< "$(jq '.app_state.auth.accounts += [{"@type": "/cosmos.auth.v1beta1.BaseAccount","address": "'${BASE_ACCOUNT_1}'", "pub_key": null,"account_number": "0","sequence": "0"}]' "$GENESIS")" > "$GENESIS"

BASE_ACCOUNT_2="cheqd1l9sq0se0jd3vklyrrtjchx4ua47awug5vsyeeh"
# Mnemonic: ugly dirt sorry girl prepare argue door man that manual glow scout bomb pigeon matter library transfer flower clown cat miss pluck drama dizzy
cat <<< "$(jq '.app_state.bank.balances += [{"address": "'${BASE_ACCOUNT_2}'", "coins": [{"denom": "ncheq", "amount": "100001000000000000"}] }]' "$GENESIS")" > $GENESIS
cat <<< "$(jq '.app_state.auth.accounts += [{"@type": "/cosmos.auth.v1beta1.BaseAccount","address": "'${BASE_ACCOUNT_2}'", "pub_key": null,"account_number": "0","sequence": "0"}]' "$GENESIS")" > "$GENESIS"

# Vesting accounts: one of each Cosmos SDK vesting flavor, all sharing
# the same original_vesting coin definition.
BASE_VESTING_ACCOUNT="cheqd1lkqddnapqvz2hujx2trpj7xj6c9hmuq7uhl0md"
# Mnemonic: coach index fence broken very cricket someone casino dial truth fitness stay habit such three jump exotic spawn planet fragile walk enact angry great
BASE_VESTING_COIN="{\"denom\":\"ncheq\",\"amount\":\"10001000000000000\"}"
cat <<< "$(jq '.app_state.bank.balances += [{"address": "'${BASE_VESTING_ACCOUNT}'", "coins": [{"denom": "ncheq", "amount": "5000000000000000"}] }]' "$GENESIS")" > "$GENESIS"
cat <<< "$(jq '.app_state.auth.accounts += [{"@type": "/cosmos.vesting.v1beta1.BaseVestingAccount", "base_account": {"address": "'${BASE_VESTING_ACCOUNT}'","pub_key": null,"account_number": "0","sequence": "0"}, "original_vesting": ['${BASE_VESTING_COIN}'], "delegated_free": [], "delegated_vesting": [], "end_time": "1630362459"}]' "$GENESIS")" > "$GENESIS"

CONTINOUS_VESTING_ACCOUNT="cheqd1353p46macvn444rupg2jstmx3tmz657yt9gl4l"
# Mnemonic: phone worry flame safe panther dirt picture pepper purchase tiny search theme issue genre orange merit stove spoil surface color garment mind chuckle image
cat <<< "$(jq '.app_state.bank.balances += [{"address": "'${CONTINOUS_VESTING_ACCOUNT}'", "coins": [{"denom": "ncheq", "amount": "5000000000000000"}] }]' "$GENESIS")" > "$GENESIS"
cat <<< "$(jq '.app_state.auth.accounts += [{"@type": "/cosmos.vesting.v1beta1.ContinuousVestingAccount", "base_vesting_account": { "base_account": {"address": "'${CONTINOUS_VESTING_ACCOUNT}'","pub_key": null,"account_number": "0","sequence": "0"}, "original_vesting": ['${BASE_VESTING_COIN}'], "delegated_free": [], "delegated_vesting": [], "end_time": "1630362459"}, "start_time": "1630352459"}]' "$GENESIS")" > "$GENESIS"

DELAYED_VESTING_ACCOUNT="cheqd1njwu33lek5jt4kzlmljkp366ny4qpqusahpyrj"
# Mnemonic: pilot text keen deal economy donkey use artist divide foster walk pink breeze proud dish brown icon shaft infant level labor lift will tomorrow
cat <<< "$(jq '.app_state.bank.balances += [{"address": "'${DELAYED_VESTING_ACCOUNT}'", "coins": [{"denom": "ncheq", "amount": "5000000000000000"}] }]' "$GENESIS")" > "$GENESIS"
cat <<< "$(jq '.app_state.auth.accounts += [{"@type": "/cosmos.vesting.v1beta1.DelayedVestingAccount", "base_vesting_account": { "base_account": {"address": "'${DELAYED_VESTING_ACCOUNT}'","pub_key": null,"account_number": "0","sequence": "0"}, "original_vesting": ['${BASE_VESTING_COIN}'], "delegated_free": [], "delegated_vesting": [], "end_time": "1630362459"}}]' "$GENESIS")" > "$GENESIS"

PERIODIC_VESTING_ACCOUNT="cheqd1uyngr0l3xtyj07js9sdew9mk50tqeq8lghhcfr"
# Mnemonic: want merge flame plate trouble moral submit wing whale sick meat lonely yellow lens enable oyster slight health vast weird radar mesh grab olive
cat <<< "$(jq '.app_state.bank.balances += [{"address": "'${PERIODIC_VESTING_ACCOUNT}'", "coins": [{"denom": "ncheq", "amount": "5000000000000000"}] }]' "$GENESIS")" > "$GENESIS"
cat <<< "$(jq '.app_state.auth.accounts += [{"@type": "/cosmos.vesting.v1beta1.PeriodicVestingAccount", "base_vesting_account": { "base_account": {"address": "'${PERIODIC_VESTING_ACCOUNT}'","pub_key": null,"account_number": "0","sequence": "0"}, "original_vesting": ['${BASE_VESTING_COIN}'], "delegated_free": [], "delegated_vesting": [], "end_time": "1630362459"}, "start_time": "1630362439", "vesting_periods": [{"length": "20", "amount": ['${BASE_VESTING_COIN}']}]}]' "$GENESIS")" > "$GENESIS"

# Register the validator and fold its gentx into genesis, then sanity-check.
echo "##### [Validator operator] Creating genesis validator"
cheqd-noded gentx alice 1000000000000000ncheq --chain-id $CHAIN_ID --node-id "$NODE_0_ID" --pubkey "$NODE_0_VAL_PUBKEY" --keyring-backend test

echo "##### [Validator operator] Collect gentxs"
cheqd-noded collect-gentxs
cheqd-noded validate-genesis
|
#!/bin/bash
# Build script for a bare-metal Raspberry Pi (ARM1176) kernel image.
# Usage:
#   ./build.sh          - compile only
#   ./build.sh clean    - delete build artifacts and exit
#   ./build.sh flash    - compile, then copy kernel.img to the sdcard

i=$1

# "clean": remove artifacts and stop before any compilation.
if [ "$i" = "clean" ]
then
	echo "Deleting all object files and binaries..."
	rm *.o
	rm *.elf
	rm *.bin
	rm *.*~
	exit
fi

if [ "$i" = "" ]
then
	echo "The source files have been built..."
	echo -e "To copy files to sdcard \n Use: ./build.sh flash"
else
	echo "Action requested: $i"
	echo "Building all source files... and copying image into sdcard"
fi

# Cross-compile each source for ARM1176JZF-S, freestanding (no libc).
arm-none-eabi-gcc -mcpu=arm1176jzf-s -fpic -ffreestanding -c boot.S -o boot.o
arm-none-eabi-gcc -mcpu=arm1176jzf-s -fpic -ffreestanding -std=gnu99 -c uart.c -o uart.o -O2 -Wall -Wextra
arm-none-eabi-gcc -mcpu=arm1176jzf-s -fpic -ffreestanding -std=gnu99 -c base64.c -o base64.o -O2 -Wall -Wextra
arm-none-eabi-gcc -mcpu=arm1176jzf-s -fpic -ffreestanding -std=gnu99 -c ascii_hex.c -o ascii_hex.o -O2 -Wall -Wextra
# Link with the custom linker script, then strip to a raw binary image.
arm-none-eabi-gcc -T linker.ld -o myos.elf -ffreestanding -O2 -nostdlib boot.o uart.o base64.o ascii_hex.o
arm-none-eabi-objcopy myos.elf -O binary myos.bin

# "flash": copy the image onto the mounted sdcard boot partition.
if [ "$i" = "flash" ]
then
	if [ -e /media/b-ak/boot/ ]
	then
		echo "sdcard present"
	else
		echo -e "FIXME:\nsdcard not present or\npath of the sdcard/boot/ partition not correct"
		exit
	fi
	cp myos.bin /media/b-ak/boot/kernel.img
	ls -l /media/b-ak/boot/kernel.img
	# NOTE(review): umount runs before sync; umount itself flushes the
	# filesystem, but confirm this ordering is intentional.
	umount /media/b-ak/*
	sync
fi
|
#!/bin/bash
# Remove generated artifacts beneath data/: JSON metadata and PNG images
# (case-insensitive name match, regular files only).
find data -iname '*.json' -type f -delete
find data -iname '*.png' -type f -delete
|
<filename>src/components/Card/index.js<gh_stars>1-10
import styled from 'styled-components';
import { breakpoints, colors, sizes } from '../../styles/variables';

// Card: clickable media tile. A translucent dark overlay (::before)
// sits above the cover image (.image-card, absolutely centered), with
// an optional country badge (.image-country), a logo (.image-logo) and
// a screen-reader-only text slot (.sr-only). Width/height come from
// props with sensible defaults. NOTE: the template literal below is
// runtime CSS — left byte-identical.
const Card = styled.div`
position: relative;
display: inline-flex;
align-items: center;
justify-content: center;
width: ${({ width }) => width || '100%'};
height: ${({ height }) => height || '175px'};
background-color: ${colors.LIGHTEST_GREY};
border-width: 1px;
border-style: solid;
border-color: ${colors.DARKEST_GREY_2};
box-sizing: border-box;
cursor: pointer;
padding: ${sizes.HALF};
margin: 0;
overflow: hidden;
border-radius: ${sizes.RADIUS};
z-index: 1;
object-fit: fill;
::before {
content: '';
position: absolute;
top: 0;
right: 0;
bottom: 0;
left: 0;
width: 100%;
height: 100%;
background-color: rgba(0, 0, 0, .3);
z-index: 1;
transition: background-color .2s ease-out;
@media screen and (min-width: ${breakpoints.SMALL}) {
background-color: rgba(0, 0, 0, .55);
}
}
:hover,
:focus {
::before {
background-color: rgba(0, 0, 0, .4);
}
}
.image-card,
.image-country,
.sr-only {
position: absolute;
}
.image-card {
top: 50%;
left: 50%;
width: calc(100% + 10px);
height: auto;
min-height: ${({ height }) => (height ? `calc(${height} + 10px)` : '180px')};
transform: translate3d(-50%, -50%, 0);
}
.image-country,
.image-logo {
z-index: 2;
}
.image-country {
top: ${sizes.HALF};
right: ${sizes.HALF};
width: ${sizes.DEFAULT};
height: ${sizes.DEFAULT};
}
.image-logo {
position: relative;
}
.sr-only {
width: 0;
height: 0;
overflow: hidden;
}
`;

export default Card;
|
<filename>src/com/horowitz/mickey/trainScanner/TrainManagementWindow.java
package com.horowitz.mickey.trainScanner;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.imageio.ImageIO;
import javax.swing.AbstractAction;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.JToggleButton;
import javax.swing.JToolBar;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import com.horowitz.mickey.DateUtils;
import com.horowitz.mickey.ImageManager;
import com.horowitz.mickey.JCanvas;
import com.horowitz.mickey.common.Scheduler;
import com.horowitz.mickey.data.DataStore;
public class TrainManagementWindow extends JFrame {
  /**
   * View-model for one train row: the panel it renders into, the train
   * it shows, the per-contractor toggle buttons and a selection checkbox
   * (used as the merge source marker).
   */
  class TrainView {
    // One toggle per active contractor (plus "XP" and "Special").
    public List<JToggleButton> _buttons;
    JPanel _panel;
    Train _train;
    public JCheckBox _checkBox;
  }
  // Human-readable schedule countdown text (presumably set by the
  // schedule loop — not assigned in this excerpt; confirm).
  public String _fancyTime;
  private JScrollPane _jScrollPane;
  // Background thread for the send schedule (not used in this excerpt).
  private Thread _scheduleThread;
  private JLabel _timeLabel;
  // Milliseconds remaining until the next scheduled send (see _timeLabel).
  private long _timeLeft;
  private JTextField _timeTF;
  // Model: all known trains and the row views built from them.
  private List<Train> _trains;
  private List<TrainView> _trainViews;
  TrainScanner _tscanner;
  // Toolbar checkbox: restrict scans to locomotives only.
  private JCheckBox _locoOnly;
  // Running count of rows built; shown as the row number label.
  private int _numberTrains;
  /**
   * Builds the train manager window.
   *
   * @param trains   initial list of scanned trains to display
   * @param tscanner scanner used for rescans; forced into loco-only mode
   */
  public TrainManagementWindow(List<Train> trains, TrainScanner tscanner) {
    super();
    _trains = trains;
    _tscanner = tscanner;
    _tscanner.setLocoOnly(true);
    // Hide (don't dispose) so the window can be reopened with its state.
    setDefaultCloseOperation(HIDE_ON_CLOSE);
    setTitle("Int. Train Manager");
    init();
    setSize(740, 550);
    setLocationRelativeTo(null);
  }
  /**
   * Builds the UI row for one train: train image plus selection checkbox
   * on top, additional-info image on the left, and contractor toggles
   * with row actions (Clear / Remove / Merge) in the center.
   */
  private TrainView buildTrainView(Train t) {
    _numberTrains++;
    final TrainView trainView = new TrainView();
    JPanel panel = new JPanel(new BorderLayout());
    trainView._panel = panel;
    trainView._train = t;
    trainView._checkBox = new JCheckBox();
    // train image
    JCanvas trainCanvas = new JCanvas();
    trainCanvas.setImage(t.getFullImage());
    Box b = Box.createHorizontalBox();
    b.add(trainCanvas);
    b.add(trainView._checkBox);
    b.add(Box.createHorizontalGlue());
    panel.add(b, BorderLayout.NORTH);
    // additional info image
    JCanvas addInfoCanvas = new JCanvas();
    if (!t.isIdle()) {
      addInfoCanvas.setImage(t.getAdditionalInfoShort());
    } else {
      // Idle trains have no info image; show a blank placeholder.
      try {
        BufferedImage read = ImageIO.read(ImageManager.getImageURL("int/empty.png"));
        addInfoCanvas.setImage(read);
      } catch (IOException e) {
        // Best effort: a missing placeholder just leaves the canvas empty.
      }
    }
    panel.add(addInfoCanvas, BorderLayout.WEST);
    // contractor buttons
    Box box = Box.createHorizontalBox();
    trainView._buttons = new ArrayList<>();
    try {
      List<String> activeContractorNames = new DataStore().getActiveContractorNames();
      activeContractorNames.add("XP");
      activeContractorNames.add("Special");
      for (String cname : activeContractorNames) {
        JToggleButton cbutton = createContractorButton(cname);
        box.add(Box.createHorizontalStrut(2));
        box.add(cbutton);
        trainView._buttons.add(cbutton);
      }
      // Pre-select the toggles matching the train's stored contractors.
      selectContractors(trainView);
    } catch (IOException e) {
      e.printStackTrace();
    }
    JPanel buttonsPanel = new JPanel(new BorderLayout());
    buttonsPanel.add(new JScrollPane(box), BorderLayout.NORTH);
    Box box2 = Box.createHorizontalBox();
    box2.add(new JLabel(" " + _numberTrains + " "));
    JButton clearButton = new JButton(new AbstractAction("Clear") {
      @Override
      public void actionPerformed(ActionEvent evt) {
        clear(trainView);
      }
    });
    box2.add(Box.createHorizontalStrut(10));
    box2.add(clearButton);
    JButton removeButton = new JButton(new AbstractAction("Remove this train") {
      @Override
      public void actionPerformed(ActionEvent evt) {
        removeThisTrain(trainView);
      }
    });
    box2.add(Box.createHorizontalStrut(10));
    box2.add(removeButton);
    JButton mergeButton = new JButton(new AbstractAction("Merge") {
      @Override
      public void actionPerformed(ActionEvent evt) {
        mergeWithSelected(trainView);
      }
    });
    box2.add(Box.createHorizontalStrut(10));
    box2.add(mergeButton);
    // Show schedule info only if a send is queued for the future.
    long time = trainView._train.getTimeToSendNext() - System.currentTimeMillis();
    if (time > 0)
      box2.add(new JLabel("Scheduled for " + DateUtils.fancyTime2(time)));
    buttonsPanel.add(box2, BorderLayout.SOUTH);
    panel.add(buttonsPanel); // center
    return trainView;
  }
protected void removeThisTrain(TrainView trainView) {
save();
// trainView._panel;
for (Train train : _trains) {
if (train.getFullImageFileName().equals(trainView._train.getFullImageFileName())) {
_trains.remove(train);
break;
}
}
updateView();
}
  /**
   * Merges the first checkbox-selected row's train into trainView's
   * train, then removes the selected row. Only the first selected row
   * is considered; others are ignored.
   */
  protected void mergeWithSelected(TrainView trainView) {
    // trainView._panel;
    for (TrainView tv : _trainViews) {
      if (tv._checkBox.isSelected()) {
        // tv._train vs trainView._train
        trainView._train.mergeWith(tv._train);
        // removeThisTrain also saves and refreshes the view.
        removeThisTrain(tv);
        break;
      }
    }
    // updateView();
  }
private void clear(TrainView tv) {
for (JToggleButton button : tv._buttons) {
button.setSelected(false);
}
}
  /**
   * Creates a toggle button for one contractor, showing its icon and a
   * thick red border painted only while the toggle is selected.
   */
  private JToggleButton createContractorButton(String name) {
    JToggleButton button = new JToggleButton();
    button.setName(name);
    // button.setText(name);
    ImageIcon icon1 = ImageManager.getImage("int/" + name + "3.png");
    button.setIcon(icon1);
    button.setBorderPainted(false);
    button.setContentAreaFilled(false);
    // button.setPreferredSize(new Dimension(40+4, 54+4));
    button.setMargin(new Insets(0, 0, 0, 0));
    button.setBorder(BorderFactory.createLineBorder(new Color(224, 10, 2), 7));
    // button.setPreferredSize(new Dimension(80, 80));
    // button.setMinimumSize(new Dimension(70, 70));
    // Make the red border track the selection state.
    button.addChangeListener(new ChangeListener() {
      @Override
      public void stateChanged(ChangeEvent e) {
        JToggleButton b = (JToggleButton) e.getSource();
        b.setBorderPainted(b.isSelected());
        b.revalidate();
      }
    });
    return button;
  }
  /**
   * Assembles the frame: toolbar at the top, scrollable train list
   * below, then kicks off an asynchronous reload of the train data.
   */
  private void init() {
    JPanel mainPanel = new JPanel();
    mainPanel.setLayout(new BorderLayout());
    initToolbar(mainPanel);
    Box box = Box.createVerticalBox();
    _jScrollPane = new JScrollPane(box);
    mainPanel.add(_jScrollPane);
    getContentPane().add(mainPanel);
    reload();
  }
  /**
   * Builds the toolbar: reload/scan/save/send/schedule actions, the
   * "only locos" filter checkbox, the schedule time field and the
   * countdown status label.
   */
  private void initToolbar(JPanel mainPanel) {
    JToolBar toolbar = new JToolBar();
    toolbar.setFloatable(false);
    {
      JButton button = new JButton(new AbstractAction("Reload") {
        @Override
        public void actionPerformed(ActionEvent e) {
          reload();
        }
      });
      toolbar.add(button);
    }
    {
      JButton button = new JButton(new AbstractAction("Put default to all") {
        @Override
        public void actionPerformed(ActionEvent e) {
          putDefaultToAll();
        }
      });
      toolbar.add(button);
    }
    {
      // scan(flag1, flag2, locoOnly) — exact flag semantics live in the
      // scanner; presumably (rescanAll, rescanIdle, locoOnly). Confirm.
      JButton button = new JButton(new AbstractAction("Rescan ALL") {
        @Override
        public void actionPerformed(ActionEvent e) {
          scan(true, false, _locoOnly.isSelected());
        }
      });
      toolbar.add(button);
    }
    // {
    // JButton button = new JButton(new AbstractAction("Rescan Idle") {
    //
    // @Override
    // public void actionPerformed(ActionEvent e) {
    // scan(false, true, _locoOnly.isSelected());
    // }
    // });
    //
    // toolbar.add(button);
    // }
    {
      JButton button = new JButton(new AbstractAction("ADD Idle") {
        @Override
        public void actionPerformed(ActionEvent e) {
          scan(false, false, _locoOnly.isSelected());
        }
      });
      toolbar.add(button);
    }
    {
      _locoOnly = new JCheckBox("only locos");
      _locoOnly.setSelected(true);
      toolbar.add(_locoOnly);
    }
    {
      JButton button = new JButton(new AbstractAction("Remove all") {
        @Override
        public void actionPerformed(ActionEvent e) {
          removeAllTrains();
        }
      });
      toolbar.add(button);
    }
    {
      JButton button = new JButton(new AbstractAction("Save") {
        @Override
        public void actionPerformed(ActionEvent e) {
          save();
        }
      });
      toolbar.add(button);
    }
    {
      JButton button = new JButton(new AbstractAction("Send Now") {
        @Override
        public void actionPerformed(ActionEvent e) {
          sendTrains();
        }
      });
      toolbar.add(button);
    }
    {
      JButton button = new JButton(new AbstractAction("Schedule") {
        @Override
        public void actionPerformed(ActionEvent e) {
          schedule();
        }
      });
      toolbar.add(button);
    }
    // Schedule delay input plus a status label for the countdown.
    _timeTF = new JTextField(8);
    _timeTF.setMaximumSize(new Dimension(50, 20));
    _timeTF.setMinimumSize(new Dimension(50, 20));
    _timeLabel = new JLabel(" no schedule at the moment");
    toolbar.add(_timeTF);
    toolbar.add(_timeLabel);
    mainPanel.add(toolbar, BorderLayout.NORTH);
  }
private boolean isRunning(String threadName) {
boolean isRunning = false;
Set<Thread> threadSet = Thread.getAllStackTraces().keySet();
for (Iterator<Thread> it = threadSet.iterator(); it.hasNext();) {
Thread thread = it.next();
if (thread.getName().equals(threadName)) {
isRunning = true;
break;
}
}
return isRunning;
}
// Rebuild the in-memory train list from persisted data, lazily loading any
// images not carried by the deserialized trains, then refresh the UI.
// Runs synchronously on the calling thread; see reload() for the async variant.
public void reloadNow() {
_trains = new ArrayList<>();
try {
Train[] trains = new DataStore().readTrains();
if (trains != null)
for (Train train : trains) {
// Only load images from disk when the train does not already carry them.
if (train.getFullImage() == null) {
BufferedImage image = ImageIO.read(new File(train.getFullImageFileName()));
train.setFullImage(image);
String additionalInfoShortFileName = train.getAdditionalInfoShortFileName();
// The short additional-info image is optional (file name may be null).
if (additionalInfoShortFileName != null) {
image = ImageIO.read(new File(additionalInfoShortFileName));
train.setAdditionalInfoShort(image);
}
image = ImageIO.read(new File(train.getScanImageFileName()));
train.setScanImage(image);
}
_trains.add(train);
}
} catch (IOException e) {
//JOptionPane.showMessageDialog(null, "I/O Error!");
// Best-effort: a failed read leaves _trains partially filled.
e.printStackTrace();
}
// TrainManagementWindow.this.setVisible(true);
updateView();
}
// Reload all trains from disk, assign the scanner's default contractor to
// every train, then persist and refresh the view. Blocking; see
// putDefaultToAll() for the background-thread variant.
public void putDefaultNow() {
reloadNow();
if (_trains != null)
for (Train train : _trains) {
train.getContractors().add(_tscanner.getDefaultContractor());
}
save();
updateView();
}
// Reload the train list on a background thread so the EDT is not
// blocked by file and image I/O.
public void reload() {
    Runnable reloadTask = new Runnable() {
        public void run() {
            reloadNow();
        }
    };
    new Thread(reloadTask).start();
}
// Assign the default contractor to every train on a background thread.
public void putDefaultToAll() {
    Runnable assignTask = new Runnable() {
        public void run() {
            putDefaultNow();
        }
    };
    new Thread(assignTask).start();
}
// Clear the whole train list, persist the now-empty state and refresh
// the view. No-op when the list was never loaded.
private void removeAllTrains() {
    if (_trains == null) {
        return;
    }
    _trains.clear();
    save();
    updateView();
}
// Persist the current train list: push the UI toggle state back into the
// model, write all trains via DataStore, then prune images that are no
// longer referenced by any train.
private void save() {
if (_trains != null)
try {
updateTrainStatus();
new DataStore().writeTrains(_trains.toArray(new Train[0]));
deleteUnUsedImages();
// new Thread(new Runnable() {
// public void run() {
// }
// }).start();
} catch (IOException e) {
e.printStackTrace();
}
}
// Delete every file under data/int that is no longer referenced by any
// train, keeping *.json metadata files untouched.
private void deleteUnUsedImages() {
    // Defensive: save() already checks _trains, but guard for other callers.
    if (_trains == null) {
        return;
    }
    // Collect every image path still referenced by some train.
    final List<String> usedImages = new ArrayList<>();
    for (Train t : _trains) {
        if (t.getAdditionalInfoFileName() != null) {
            usedImages.add(t.getAdditionalInfoFileName());
        }
        if (t.getAdditionalInfoShortFileName() != null) {
            usedImages.add(t.getAdditionalInfoShortFileName());
        }
        if (t.getFullImageFileName() != null) {
            usedImages.add(t.getFullImageFileName());
        }
        if (t.getScanImageFileName() != null) {
            usedImages.add(t.getScanImageFileName());
        }
    }
    File d = new File("data/int");
    File[] listFiles = d.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File f, String fn) {
            // Keep referenced images and all JSON metadata files.
            return !usedImages.contains("data/int/" + fn) && !fn.endsWith("json");
        }
    });
    // listFiles() returns null when the directory is missing or unreadable;
    // the previous code threw a NullPointerException in that case.
    if (listFiles == null) {
        return;
    }
    for (File file : listFiles) {
        if (!file.delete()) {
            System.err.println("Could not delete unused image: " + file);
        }
    }
}
// Run a train scan on a background thread.
//   all            - rescan everything instead of only adding new trains
//   removeNotFound - merge results, dropping trains no longer found
//   locoOnly       - restrict the scanner to locomotives
// The window is hidden while the scanner runs (presumably so it does not
// interfere with whatever the scanner captures -- TODO confirm).
// NOTE(review): reload() is itself asynchronous, so the merge below may
// race with the background reload -- confirm the intended ordering.
protected void scan(final boolean all, final boolean removeNotFound, boolean locoOnly) {
_tscanner.setLocoOnly(locoOnly);
Thread t = new Thread(new Runnable() {
public void run() {
TrainManagementWindow.this.setVisible(false);
reload();
List<Train> newTrains = _tscanner.analyzeIntTrains(all);
if (_trains != null) {
if (removeNotFound)
_tscanner.mergeTrains(_trains, newTrains);
else
_tscanner.addNewTrains(_trains, newTrains);
} else {
_trains = newTrains;
}
save();
reload();
TrainManagementWindow.this.setVisible(true);
}
});
t.start();
}
/**
 * Start the "SCHEDULER" thread that counts down to the absolute epoch-millis
 * deadline {@code time} and then triggers sendTrains() (unless a "MAGIC"
 * thread is already running). Updates the countdown label roughly every
 * 20 seconds until the deadline passes.
 *
 * NOTE(review): _timeLabel.setText(...) is invoked from this background
 * thread rather than the Swing EDT -- confirm this is acceptable here.
 *
 * @deprecated
 * @param time absolute epoch-millis deadline for sending
 */
private void runScheduleThread(final long time) {
// long timeNext = time + System.currentTimeMillis();
// if (_trains != null) {
// for (Train train : _trains) {
// train.setTimeToSendNext(timeNext);
// }
// }
// save();
_scheduleThread = new Thread(new Runnable() {
public void run() {
while (true) {
try {
Thread.sleep(200);
} catch (InterruptedException e) {
System.err.println("Scheduler interrupted");
break;
}
_timeLeft = time - System.currentTimeMillis();
if (_timeLeft <= 0) {
// it's time
_timeLabel.setText("");
// Only fire when no "MAGIC" worker thread is already running.
if (!isRunning("MAGIC"))
sendTrains();
break;
} else {
_fancyTime = DateUtils.fancyTime2(_timeLeft) + " left to send int. trains...";
_tscanner.LOGGER.info(">>> " + _fancyTime);
System.err.println(_fancyTime);
_timeLabel.setText(_fancyTime);
}
try {
// Together with the 200 ms sleep above: one status update ~every 20 s.
Thread.sleep(19800);
} catch (InterruptedException e) {
System.err.println("Scheduler interrupted!");
break;
}
}
}
}, "SCHEDULER");
_scheduleThread.start();
}
// Sync a TrainView's toggle buttons with the contractors stored on its
// Train: clear all toggles, then re-select the button matching each
// assigned contractor name.
private void selectContractors(TrainView tv) {
    clear(tv);
    for (String contractorName : tv._train.getContractors()) {
        for (JToggleButton toggle : tv._buttons) {
            if (contractorName.equals(toggle.getName())) {
                toggle.setSelected(true);
                break; // first matching button wins
            }
        }
    }
}
// Push the UI state back into the model: for every TrainView, replace the
// train's contractor list with the names of its selected toggle buttons.
private void updateTrainStatus() {
    for (TrainView view : _trainViews) {
        List<String> contractorNames = view._train.getContractors();
        contractorNames.clear();
        for (JToggleButton toggle : view._buttons) {
            if (toggle.isSelected()) {
                contractorNames.add(toggle.getName());
            }
        }
    }
}
// Kick off sending on a background "SENDING" thread, unless one is
// already running (duplicate-send guard via isRunning()).
public void sendTrains() {
    if (isRunning("SENDING")) {
        return;
    }
    Runnable sendTask = new Runnable() {
        public void run() {
            sendTrainsNow();
        }
    };
    new Thread(sendTask, "SENDING").start();
}
// Synchronously send all trains. Persists the list only when the scanner
// reports a successful send; returns that success flag.
public boolean sendTrainsNow() {
    updateTrainStatus();
    if (!_tscanner.sendTrains(_trains)) {
        return false;
    }
    save();
    return true;
}
// Return true when at least one train is due: it has a contractor assigned
// and its scheduled send time has already passed.
public boolean isTrainWaiting() {
    // Guard against the not-yet-loaded state; the other methods of this
    // class also treat a null _trains as "no trains" (previously this
    // method alone could throw a NullPointerException).
    if (_trains == null) {
        return false;
    }
    long now = System.currentTimeMillis();
    for (Train t : _trains) {
        if (!t.getContractors().isEmpty() && t.getTimeToSendNext() - now <= 0) {
            return true;
        }
    }
    return false;
}
// Rebuild the scroll pane contents: one TrainView panel per train, stacked
// vertically with 5px spacing, and reset the TrainView bookkeeping.
private void updateView() {
Box box = Box.createVerticalBox();
_trainViews = new ArrayList<>();
_numberTrains = 0;
if (_trains != null) {
for (Train t : _trains) {
TrainView tv = buildTrainView(t);
box.add(tv._panel);
box.add(Box.createVerticalStrut(5));
_trainViews.add(tv);
}
}
_jScrollPane.getViewport().setView(box);
_jScrollPane.getVerticalScrollBar().setUnitIncrement(20);
}
// Current value of _timeLeft. NOTE(review): runScheduleThread stores a
// remaining-millis value here while reschedule() stores an absolute
// timestamp -- the two usages differ; confirm which callers expect.
public long getTimeLeft() {
return _timeLeft;
}
// Plain setter for _timeLeft; reschedule() passes an absolute epoch-millis
// deadline here.
public void setTimeLeft(long timeLeft) {
_timeLeft = timeLeft;
}
// Parse the delay the user typed into the toolbar text field and (re)arm
// the send schedule with it.
protected void schedule() {
    reschedule(Scheduler.parse(_timeTF.getText()));
}
// Schedule sending `time` millis from now: stores the absolute deadline in
// _timeLeft and logs the relative delay. NOTE(review): nothing here starts
// a countdown thread -- presumably something else polls getTimeLeft(); confirm.
public void reschedule(long time) {
setTimeLeft(time + System.currentTimeMillis());
_tscanner.LOGGER.info("Scheduling sending for " + DateUtils.fancyTime2(time));
}
} |
import tensorflow as tf
def generate_random_tensor(batch_size, feature_num):
    """Return a tensor of shape [batch_size, feature_num + 1] sampled from a
    standard normal distribution.

    The extra ``+ 1`` column is reproduced from the original code;
    presumably it reserves a bias/label slot -- TODO confirm with callers.
    """
    tensor_shape = [batch_size, feature_num + 1]
    return tf.random.normal(shape=tensor_shape)
<filename>tfc_web/bikes/urls.py
# URL configuration for the bikes app: routes current-bikes requests to
# bikes.views.current_bikes.
from django.conf.urls import url
from bikes.views import current_bikes
# NOTE(review): the pattern has no trailing anchor, so any path beginning
# with "current-bikes" matches (e.g. "current-bikes-foo") -- confirm intended.
urlpatterns = [
url(r'^current-bikes', current_bikes, name='current-bikes'),
]
|
def generate_sql(tableName, user_id, first_name, last_name, age, city):
    """Build a SELECT statement for ``tableName`` filtered by the given fields.

    A filter is appended only when a value is provided: numeric fields
    (``user_id``, ``age``) are included whenever they are not None (so a
    legitimate 0 is no longer silently dropped, as it was with the old
    truthiness check), string fields when they are non-empty.

    SECURITY WARNING: this builds SQL by string interpolation. Single quotes
    inside string values are escaped below, but the table name and numeric
    values are still interpolated directly -- prefer a parameterized query
    API (DB-API placeholders) wherever the driver allows it.
    """
    def _quote(value):
        # Standard SQL escaping: double any embedded single quote.
        return str(value).replace("'", "''")

    query = f"""SELECT * FROM {tableName} WHERE 1=1"""
    if user_id is not None:
        query += f""" and user_id = {user_id}"""
    if first_name:
        query += f""" and first_name = '{_quote(first_name)}'"""
    if last_name:
        query += f""" and last_name = '{_quote(last_name)}'"""
    if age is not None:
        query += f""" and age = {age}"""
    if city:
        query += f""" and city = '{_quote(city)}'"""
    return query
<reponame>prasanthbendra/ng-2
'use strict';
// Shared lint-result helpers (addUnique etc.).
var helpers = require('../helpers');
// Operators whose surrounding spacing this rule checks.
var operators = ['+', '-', '/', '*', '%', '<', '>', '==', '!=', '<=', '>='];
/**
 * Expand the first half of a two-token relational operator
 * ('<' or '>' followed by '=') into its combined form.
 *
 * @param {Object} node - The operator node
 * @returns {string|boolean} '<=' / '>=', or false when not applicable
 */
var getRelationalOperator = function (node) {
  switch (node.content) {
    case '<':
      return '<=';
    case '>':
      return '>=';
    default:
      return false;
  }
};
/**
 * Decide whether a '-' token is the sign of a negative number rather
 * than a subtraction operator, based on its neighbouring nodes.
 *
 * @param {string} operator - The operator text
 * @param {Object} next - The following node
 * @param {Object} previous - The preceding node (or false)
 * @param {Object} doublePrevious - Two nodes back (or false)
 * @returns {boolean} true when the token is a sign, not an operator
 */
var isNegativeNumber = function (operator, next, previous, doublePrevious) {
  if (operator !== '-') {
    return false;
  }
  // `$foo: -20;` — leading minus glued to the value
  if (!previous && !next.is('space')) {
    return true;
  }
  // `property: -16px;`
  if (next.is('dimension') || next.is('percentage')) {
    return true;
  }
  // `property: 2 / -16;` — minus right after another operator + space
  if (doublePrevious && doublePrevious.is('operator')) {
    return true;
  }
  // `property: 2 /-16px;` — minus glued to another operator
  if (previous && previous.is('operator')) {
    return true;
  }
  return false;
};
/**
 * Decide whether a '/' token is a CSS divider (e.g. inside calc())
 * rather than a Sass division operator.
 *
 * @param {string} operator - The operator text
 * @param {Object} next - The following node (or false)
 * @param {Object} previous - The preceding node (or false)
 * @returns {boolean} true when the slash divides a dimension
 */
var isDivider = function (operator, next, previous) {
  if (operator !== '/' || !previous || !next) {
    return false;
  }
  // `property: calc(100% / 2);` — dimension divided by number/dimension
  return previous.is('dimension') && (next.is('dimension') || next.is('number'));
};
/**
 * Decide whether a '+' token is part of a unicode-range value
 * (e.g. `unicode-range: U+26;`) rather than an addition operator.
 *
 * @param {string} operator - The operator text
 * @param {Object} previous - The preceding node
 * @returns {boolean} true when the plus follows a 'U' identifier
 */
var isUnicode = function (operator, previous) {
  return operator === '+'
    && previous.is('ident')
    && previous.content === 'U';
};
/**
 * Decide whether a '/' token is part of an @import path rather than
 * a division operator.
 *
 * @param {string} operator - The operator text
 * @param {Object} parent - The parent node
 * @returns {boolean} true inside an `@import` at-rule
 */
var isImport = function (operator, parent) {
  if (operator !== '/') {
    return false;
  }
  if (!parent.is('atrule') || !parent.contains('atkeyword')) {
    return false;
  }
  var keyword = parent.first('atkeyword');
  if (!keyword.contains('ident')) {
    return false;
  }
  return keyword.first('ident').content === 'import';
};
/**
 * Decide whether the operator at child index i of parent is one of the
 * cases this rule must not flag: a negative-number sign, a CSS divider,
 * part of a unicode-range, or part of an @import path.
 *
 * @param {string} operator - The operator text
 * @param {Object} parent - The parent node
 * @param {int} i - The operator's child index
 * @returns {boolean} true when the operator is exempt from spacing checks
 */
var isException = function (operator, parent, i) {
  var previous = parent.content[i - 1] || false,
      doublePrevious = parent.content[i - 2] || false,
      next = parent.content[i + 1] || false;
  return isNegativeNumber(operator, next, previous, doublePrevious)
    || isDivider(operator, next, previous)
    || isUnicode(operator, previous)
    || isImport(operator, parent);
};
/**
 * Check the spacing around an operator
 *
 * First reconstructs two-token relational operators ('<'/'>' + '='),
 * then skips the documented exceptions, and finally reports either a
 * missing space (include mode) or a forbidden/duplicated space
 * (exclude mode / multi-column space nodes).
 *
 * @param {Object} node - The node to check the spacing around
 * @param {int} i - The node's child index of it's parent
 * @param {Object} parent - The parent node
 * @param {Object} parser - The parser object
 * @param {Object} result - The result object
 * @returns {bool|null} false if exception
 */
var checkSpacing = function (node, i, parent, parser, result) {
if (node.is('operator') || node.is('unaryOperator')) {
var previous = parent.content[i - 1] || false,
next = parent.content[i + 1] || false,
operator = node.content;
//////////////////////////
// Multi-part operators
//////////////////////////
// If second part of relational operator move on
if (node.content === '=' && previous) {
if (previous.content === '<' || previous.content === '>') {
return false;
}
}
// If first part of relational operator, carry on and build it
if ((node.content === '<' || node.content === '>') && next) {
if (next.content === '=') {
operator = getRelationalOperator(node);
// Skip over the '=' so spacing is checked after the full operator.
next = parent.content[i + 2] || false;
}
}
//////////////////////////
// Exceptions
//////////////////////////
if (isException(operator, parent, i)) {
return false;
}
// If the operator checks out in our valid operator list
if (operators.indexOf(operator) !== -1) {
if (parser.options.include) {
// Include mode: a space node is required on each existing side.
if (
(previous && !previous.is('space'))
|| (next && !next.is('space'))
) {
// NOTE(review): `result` is reassigned locally; reporting appears to
// rely on helpers.addUnique mutating the passed-in array -- confirm.
result = helpers.addUnique(result, {
'ruleId': parser.rule.name,
'line': node.start.line,
'column': node.start.column,
'message': 'Space expected around operator',
'severity': parser.severity
});
}
else {
// Both sides are space nodes: flag ones wider than a single column.
if (
(previous && (previous.end.line >= previous.start.line) && (previous.end.column > previous.start.column))
|| (next && (next.end.line >= next.start.line) && (next.end.column > next.start.column))
) {
result = helpers.addUnique(result, {
'ruleId': parser.rule.name,
'line': node.start.line,
'column': node.start.column,
'message': 'Multiple spaces not allowed around operator',
'severity': parser.severity
});
}
}
}
else {
// Exclude mode: no spaces allowed around the operator at all.
if (
(previous && previous.is('space'))
|| (next && next.is('space'))
) {
result = helpers.addUnique(result, {
'ruleId': parser.rule.name,
'line': node.start.line,
'column': node.start.column,
'message': 'No spaces allowed around operator',
'severity': parser.severity
});
}
}
}
}
return true;
};
// sass-lint rule definition: walks conditions, at-rules and value nodes,
// descending one extra level into parentheses, and runs checkSpacing on
// each child; collected problems are returned to the linter.
module.exports = {
'name': 'space-around-operator',
'defaults': {
'include': true
},
'detect': function (ast, parser) {
var result = [];
ast.traverseByTypes(['condition', 'atrule', 'value'], function (node) {
node.forEach(function (item, i, parent) {
// Perform another loop of the children if we come across a parenthesis
// parent node
if (item.is('parentheses')) {
item.forEach(function (child, j, childParent) {
// Do the spacing checks
checkSpacing(child, j, childParent, parser, result);
});
}
else {
// Do the spacing checks
checkSpacing(item, i, parent, parser, result);
}
});
});
return result;
}
};
|
import java.io.IOException;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;
/**
 * Minimal BasicFileAttributes implementation backed only by a file name.
 * The original skeleton did not compile: every overridden method lacked a
 * return statement and FileTime was never imported. All time/size/type
 * queries now return neutral defaults; replace them with real data as needed.
 */
public class CustomFile implements BasicFileAttributes {
    private String fileName;
    // other file attributes

    public CustomFile(String fileName) {
        this.fileName = fileName;
        // initialize other file attributes
    }

    /** @return the file name this instance was created with */
    public String name() {
        return fileName;
    }

    @Override
    public FileTime lastModifiedTime() {
        // No backing file-system data yet; report the epoch.
        return FileTime.fromMillis(0L);
    }

    @Override
    public FileTime lastAccessTime() {
        return FileTime.fromMillis(0L);
    }

    @Override
    public FileTime creationTime() {
        return FileTime.fromMillis(0L);
    }

    @Override
    public boolean isRegularFile() {
        // Treat every CustomFile as a plain file by default.
        return true;
    }

    @Override
    public boolean isDirectory() {
        return false;
    }

    @Override
    public boolean isSymbolicLink() {
        return false;
    }

    @Override
    public boolean isOther() {
        return false;
    }

    @Override
    public long size() {
        // Unknown size; 0 is the conventional placeholder.
        return 0L;
    }

    @Override
    public Object fileKey() {
        // No unique key available for a name-only attribute view.
        return null;
    }

    /**
     * @return this instance; the IOException clause is retained for
     *         compatibility with callers that expect it.
     */
    public BasicFileAttributes readAttributes() throws IOException {
        return this;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.