text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
#
# Copyright 2021 Merck & Co., Inc. Kenilworth, NJ, USA.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
set -e
# Work from the application root so yarn picks up the right package.json.
cd /opt/app
# Database host is configurable via TABLES_DB; defaults to "db"
# (presumably the compose service name — verify against deployment).
db_host=${TABLES_DB:-db}
# Block until Postgres accepts TCP connections (30s timeout per probe).
while ! nc -z -v -w30 "${db_host}" 5432; do
echo "Waiting for database connection..."
sleep 5
done
# Apply pending migrations, then hand PID 1 over to the container command.
yarn run migrate
exec "$@"
|
#!/bin/bash
# Run HAProxy with host networking so it binds the host's ports directly,
# reading its config from the bind-mounted directory.
# NOTE: the original command also passed `-p 88:80`, but Docker ignores -p
# when --net=host is used (the container shares the host's network stack),
# so the flag is dropped to avoid implying a port mapping that never existed.
docker run --restart=always --net=host -d -v /home/architekt/docker-configs/haproxy-config:/usr/local/etc/haproxy haproxy
|
#!/bin/sh
# Scan local network for connected devices (ping scan, no port scan) and
# print "<host> => <MAC>" pairs.
# -sn is the current spelling of the deprecated -sP flag; behavior is the same.
# NOTE(review): MAC addresses only appear when nmap runs with raw-socket
# privileges (root) on the local segment — verify in your environment.
nmap -sn 192.168.1.0/24 | awk '/Nmap scan report for/{printf $5;}/MAC Address:/{print " => "$3;}'
<reponame>futjikato/electron-docker-gui<filename>app/actions/image.js
// @flow
import type { imageStateType, actionType } from '../reducers/image';
import Image from './../struct/Image';
import { CHANNEL_IMAGES, IMAGES_DELETE_ID, IMAGES_LOAD, IMAGES_CREATE_FROM } from './../backend/constants';
const { ipcRenderer } = require('electron');
export const SET_IMAGE = 'SET_IMAGE';
export const ADD_IMAGE = 'ADD_IMAGE';
export const FINISH_IMAGE = 'FINISH_IMAGE';
export const IMAGE_STATUS = 'IMAGE_STATUS';
export const IMAGE_SUBMODULE_STATUS = 'IMAGE_SUBMODULE_STATUS';
export const DELETE_IMAGE = 'DELETE_IMAGE';
const LOAD_REQUEST_BREAK = 60000;
export function setImage(image: Array<Image> = []): actionType {
return {
type: SET_IMAGE,
image: image
};
}
export function setImageStatus(reference: string, status: string): actionType {
return {
type: IMAGE_STATUS,
ref: reference,
status: status
};
}
export function setImageSubmoduleStatus(
reference: string,
subModuleId: string,
status: string,
progress: ?{
current: ?number,
total: ?number
}
) {
let percentageProgress = 0;
if (progress && progress.total > 0) {
percentageProgress = progress.current / progress.total * 100;
}
return {
type: IMAGE_SUBMODULE_STATUS,
ref: reference,
submoduleRef: subModuleId,
status: status,
progress: percentageProgress
};
}
export function startImageLoad(ref: string): actionType {
return {
type: ADD_IMAGE,
ref: ref
};
}
export function finishImageLoad(ref: string, success: boolean, id: ?string) {
return (dispatch: () => void, getState: () => imageStateType) => {
if (success) {
dispatch({
type: FINISH_IMAGE,
ref: ref,
id: id
});
} else {
dispatch(deletedRef(ref));
}
}
}
export function loadImage() {
return (dispatch: () => void, getState: () => imageStateType) => {
const { image } = getState();
if (image.lastLoad > (Date.now() - LOAD_REQUEST_BREAK)) {
console.log('Just loaded images so skip this request', Date.now() - image.lastLoad, LOAD_REQUEST_BREAK);
return;
}
ipcRenderer.send(CHANNEL_IMAGES, {type: IMAGES_LOAD});
};
}
export function createImage(from: string, tag: string) {
return (dispatch: () => void, getState: () => imageStateType) => {
const { image } = getState();
if (image.list.find((img: Image) => {
return img.name === from+':'+tag;
})) {
console.log('image already found. dont build', from);
return;
}
const ref = from+':'+tag;
dispatch(startImageLoad(ref));
ipcRenderer.send(CHANNEL_IMAGES, {type: IMAGES_CREATE_FROM, ref: ref, from: from, tag: tag});
}
}
export function deleteImage(ref: string, id: string) {
return (dispatch: () => void, getState: () => imageStateType) => {
const { image } = getState();
let foundImage;
if (undefined === (foundImage = image.list.find((img: Image) => {
return img.id === id;
}))) {
console.log('image id not found. do not send delete request.', id);
return;
}
ipcRenderer.send(CHANNEL_IMAGES, {type: IMAGES_DELETE_ID, ref, id});
}
}
export function deletedRef(reference: string): actionType {
return {
type: DELETE_IMAGE,
ref: reference
};
} |
#!/usr/bin/env bash
# Deploy the packaged CloudFormation template for the S3 uncompress Lambda.
# CAPABILITY_IAM is required because the stack creates IAM resources.
set -e
# Fail fast with a clear message instead of deploying with an empty
# DestinationBucket parameter when the variable is unset.
: "${UNCOMPRESSOR_DESTINATION_BUCKET:?UNCOMPRESSOR_DESTINATION_BUCKET must be set}"
aws cloudformation deploy \
  --template-file ./packaged-template.yaml \
  --stack-name S3UncompressLambdaStack \
  --capabilities CAPABILITY_IAM \
  --parameter-overrides "DestinationBucket=${UNCOMPRESSOR_DESTINATION_BUCKET}"
#!/bin/sh
set -e -x
CURDIR="$PWD"
# Point this module's build at the EPICS base checkout cloned below.
cat << EOF > $CURDIR/configure/RELEASE.local
EPICS_BASE=$HOME/.source/epics-base
EOF
install -d "$HOME/.source"
cd "$HOME/.source"
# Clone an EPICS module from GitHub (flat layout) and register it.
# $1 = module dir name, $2 = repo owner, $3 = repo name, $4 = branch
add_gh_flat() {
MODULE=$1
REPOOWNER=$2
REPONAME=$3
BRANCH=$4
# Upper-cased module name becomes the RELEASE.local variable name.
MODULE_UC=$(echo $MODULE | tr 'a-z' 'A-Z')
( git clone --quiet --depth 5 --branch $BRANCH https://github.com/$REPOOWNER/$REPONAME.git $MODULE && \
cd $MODULE && git log -n1 )
# Seed the module's RELEASE.local with the entries accumulated so far,
# then append this module's own path so later modules can depend on it.
cat < $CURDIR/configure/RELEASE.local > $MODULE/configure/RELEASE.local
cat << EOF >> $CURDIR/configure/RELEASE.local
${MODULE_UC}=$HOME/.source/$MODULE
EOF
}
# not recursive
git clone --quiet --depth 5 --branch "$BRBASE" https://github.com/${REPOBASE:-epics-base}/epics-base.git epics-base
(cd epics-base && git log -n1 )
add_gh_flat pvData ${REPOPVD:-epics-base} pvDataCPP ${BRPVD:-master}
add_gh_flat pvAccess ${REPOPVA:-epics-base} pvAccessCPP ${BRPVA:-master}
# Show the final dependency list for the build log.
if [ -e $CURDIR/configure/RELEASE.local ]
then
cat $CURDIR/configure/RELEASE.local
fi
EPICS_HOST_ARCH=`sh epics-base/startup/EpicsHostArch`
# requires wine and g++-mingw-w64-i686
if [ "$WINE" = "32" ]
then
echo "Cross mingw32"
# Drop any preset CMPLR_PREFIX before appending the mingw32 cross prefix.
sed -i -e '/CMPLR_PREFIX/d' epics-base/configure/os/CONFIG_SITE.linux-x86.win32-x86-mingw
cat << EOF >> epics-base/configure/os/CONFIG_SITE.linux-x86.win32-x86-mingw
CMPLR_PREFIX=i686-w64-mingw32-
EOF
cat << EOF >> epics-base/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS+=win32-x86-mingw
EOF
fi
if [ "$STATIC" = "YES" ]
then
echo "Build static libraries/executables"
cat << EOF >> epics-base/configure/CONFIG_SITE
SHARED_LIBRARIES=NO
STATIC_BUILD=YES
EOF
fi
case "$CMPLR" in
clang)
echo "Host compiler is clang"
cat << EOF >> epics-base/configure/os/CONFIG_SITE.Common.$EPICS_HOST_ARCH
GNU = NO
CMPLR_CLASS = clang
CC = clang
CCC = clang++
EOF
# hack
sed -i -e 's/CMPLR_CLASS = gcc/CMPLR_CLASS = clang/' epics-base/configure/CONFIG.gnuCommon
clang --version
;;
*)
echo "Host compiler is default"
gcc --version
;;
esac
# Forward any user-supplied compiler flags into the EPICS build.
cat <<EOF >> epics-base/configure/CONFIG_SITE
USR_CPPFLAGS += $USR_CPPFLAGS
USR_CFLAGS += $USR_CFLAGS
USR_CXXFLAGS += $USR_CXXFLAGS
EOF
# set RTEMS to eg. "4.9" or "4.10"
# requires qemu, bison, flex, texinfo, install-info
if [ -n "$RTEMS" ]
then
echo "Cross RTEMS${RTEMS} for pc386"
curl -L "https://github.com/mdavidsaver/rsb/releases/download/20171203-${RTEMS}/i386-rtems${RTEMS}-trusty-20171203-${RTEMS}.tar.bz2" \
| tar -C / -xmj
sed -i -e '/^RTEMS_VERSION/d' -e '/^RTEMS_BASE/d' epics-base/configure/os/CONFIG_SITE.Common.RTEMS
cat << EOF >> epics-base/configure/os/CONFIG_SITE.Common.RTEMS
RTEMS_VERSION=$RTEMS
RTEMS_BASE=$HOME/.rtems
EOF
cat << EOF >> epics-base/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS += RTEMS-pc386-qemu
EOF
fi
# Build base first, then the modules that depend on it.
make -j2 -C epics-base $EXTRA
make -j2 -C pvData $EXTRA
make -j2 -C pvAccess $EXTRA
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Build font instances from Glyphs source.
# Arguments:
# Path to Glyphs source.
# Output formats, as separate strings.
################################################################################
function build_glyphs() {
# -i builds every instance defined in the Glyphs source; "${@:2}" forwards
# the requested output formats to fontmake's -o flag.
fontmake -g "$1" -o "${@:2}" -i
}
################################################################################
# Build font instances from plist source, which designates separate MTI feature
# files to use with a corresponding Glyphs source.
# Arguments:
# Path to plist source.
# Output formats, as separate strings.
################################################################################
function build_plist() {
glyphs="$(glyphs_from_plist "$1")"
family="$(family_from_plist "$1")"
# Two fontmake passes per source: one with the MTI feature files, one
# building interpolated instances (-i). When the plist is special-cased,
# --family-name overrides the family taken from the Glyphs source.
if [[ -n "$family" ]]; then
fontmake -g "$glyphs" -o "${@:2}" --mti-source "$1"\
    --no-production-names --family-name "$family"
fontmake -g "$glyphs" -o "${@:2}" -i --interpolate-binary-layout\
    --no-production-names --family-name "$family"
else
fontmake -g "$glyphs" -o "${@:2}" --mti-source "$1"\
    --no-production-names
fontmake -g "$glyphs" -o "${@:2}" -i --interpolate-binary-layout\
    --no-production-names
fi
}
################################################################################
# Build variable font from Glyphs source.
# Arguments:
# Path to Glyphs source.
################################################################################
function build_glyphs_variable() {
# Single variable-font build; no static instances are generated.
fontmake -g "$1" -o variable
}
################################################################################
# Build variable font from plist source.
# Arguments:
# Path to plist source.
################################################################################
function build_plist_variable() {
glyphs="$(glyphs_from_plist "$1")"
family="$(family_from_plist "$1")"
# Same as build_plist but producing a variable font; --family-name is only
# passed for special-cased sources.
if [[ -n "${family}" ]]; then
fontmake -g "${glyphs}" -o variable --mti-source "$1"\
    --family-name "${family}"
else
fontmake -g "${glyphs}" -o variable --mti-source "$1"
fi
}
################################################################################
# Build from UFO source, assuming that the source contains quadratic curves (for
# which BooleanOperations is unable to remove overlaps, and only TTFs can be
# generated).
# Arguments:
# Path to UFO source.
################################################################################
function build_ufo() {
# Quadratic-curve UFO sources: overlaps cannot be removed (see header note),
# so keep them and emit TTF only.
fontmake -u "$1" -o 'ttf' --keep-overlaps
}
# Map a .plist path to its corresponding .glyphs path. The Devanagari UI
# plist is a special case: it shares the non-UI Glyphs source, so "UI" is
# stripped from the derived name.
function glyphs_from_plist() {
  local derived="${1/%.plist/.glyphs}"
  if [[ "$1" == */NotoSansDevanagariUI-MM.plist ]]; then
    echo "${derived/UI/}"
  else
    echo "${derived}"
  fi
}
# Family-name override for special-cased plist sources. An empty result
# means "use the family name from the Glyphs source".
function family_from_plist() {
  if [[ "$1" == */NotoSansDevanagariUI-MM.plist ]]; then
    echo 'Noto Sans Devanagari UI'
  else
    echo ''
  fi
}
|
#!/usr/bin/env bash
#
# Waits until request to given URI returns 200 or timeout threshold is reached.
# Can be given a command to run when done waiting.
#
SCRIPT_NAME=${0##*/}
# Print to stderr unless --quiet was given.
echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
# Print usage to stderr and exit with failure.
usage()
{
cat << USAGE >&2
Usage:
$SCRIPT_NAME uri [-s] [-t timeout] [-- COMMAND ARGS]
uri a valid http(s) URI
-s | --strict Only execute COMMAND if the test succeeds
-q | --quiet Don't output any status messages
-c | --code Expected HTTP status code
-t TIMEOUT | --timeout=TIMEOUT
Timeout in seconds, zero for no timeout
-- COMMAND ARGS Command with args to run after the test finishes
USAGE
exit 1
}
# Poll $URI once per second until it answers with $EXPECTEDCODE.
# NOTE(review): this loop has no timeout of its own — it relies on being run
# under the `timeout` wrapper (wait_for_wrapper) when TIMEOUT > 0.
wait_for()
{
if [[ $TIMEOUT -gt 0 ]]; then
echoerr "$SCRIPT_NAME: waiting $TIMEOUT seconds for $URI to get a $EXPECTEDCODE HTTP code"
else
echoerr "$SCRIPT_NAME: waiting for $URI without a timeout"
fi
WAIT_START_TS=$(date +%s)
while :
do
# --insecure: TLS certificate problems must not fail the probe; only the
# HTTP status code matters here.
STATUS_CODE=$(curl --connect-timeout 2 --insecure -s -o /dev/null -w ''%{http_code}'' $URI)
test "$STATUS_CODE" == "$EXPECTEDCODE"
OUTCOME=$?
if [[ $OUTCOME -eq 0 ]]; then
WAIT_END_TS=$(date +%s)
echoerr "$SCRIPT_NAME: $URI is alive after $((WAIT_END_TS - WAIT_START_TS)) seconds"
break
fi
sleep 1
done
return $OUTCOME
}
# passes this script and its arguments to timeout (the script calls itself inside a timeout context)
# The child invocation is marked with --child so it performs the wait itself.
wait_for_wrapper()
{
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
if [[ $QUIET -eq 1 ]]; then
timeout $BUSY_BOX_TIMEFLAG $TIMEOUT $0 $URI --quiet --child --timeout=$TIMEOUT --code=$EXPECTEDCODE &
else
timeout $BUSY_BOX_TIMEFLAG $TIMEOUT $0 $URI --child --timeout=$TIMEOUT --code=$EXPECTEDCODE &
fi
SUBPROCESS_PID=$!
# Forward Ctrl-C to the child's process group so the wait can be aborted.
trap "kill -INT -$SUBPROCESS_PID" INT
wait $SUBPROCESS_PID
OUTCOME=$?
if [[ $OUTCOME -ne 0 ]]; then
echoerr "$SCRIPT_NAME: timeout occurred after waiting $TIMEOUT seconds for $URI"
fi
return $OUTCOME
}
# Sanity-check the URI syntax only: curl exit code 3 means "URL malformed".
# Any other outcome (including connection refused) is accepted here, since
# the target may simply not be up yet.
validate_uri()
{
curl --connect-timeout 1 --insecure -s -o /dev/null $URI
curl_exit_code=$?
if [[ $curl_exit_code -eq 3 ]]; then # exit code 3 indicates an invalid URI
echoerr "Error: you need to provide a VALID URI to test."
usage
fi
}
# process arguments
while [[ $# -gt 0 ]]
do
case "$1" in
http://*)
URI="$1"
if [[ $URI == "" ]]; then break; fi
shift 1
;;
https://*)
URI="$1"
if [[ $URI == "" ]]; then break; fi
shift 1
;;
--child)
# Internal flag: set when this script re-executes itself under `timeout`.
CHILD=1
shift 1
;;
-q | --quiet)
QUIET=1
shift 1
;;
-s | --strict)
STRICT=1
shift 1
;;
-c)
EXPECTEDCODE="$2"
if [[ $EXPECTEDCODE == "" ]]; then break; fi
shift 2
;;
--code=*)
EXPECTEDCODE="${1#*=}"
shift 1
;;
-t)
TIMEOUT="$2"
if [[ $TIMEOUT == "" ]]; then break; fi
shift 2
;;
--timeout=*)
TIMEOUT="${1#*=}"
shift 1
;;
--)
# Everything after -- is the command to exec once the wait finishes.
shift
COMMAND=("$@")
break
;;
-h)
usage
;;
--help)
usage
;;
*)
echoerr "Unknown argument: $1"
usage
;;
esac
done
# make sure that uri was given and is valid (by testing for curl exit code 3)
if [[ "$URI" == "" ]]; then
echoerr "Error: you need to provide a URI to test."
usage
fi
validate_uri
# Defaults for anything not supplied on the command line.
TIMEOUT=${TIMEOUT:-15}
STRICT=${STRICT:-0}
EXPECTEDCODE=${EXPECTEDCODE:-200}
CHILD=${CHILD:-0}
QUIET=${QUIET:-0}
# Check to see if timeout is from busybox?
# busybox `timeout` historically needed `-t SECONDS`; newer Alpine drops -t.
TIMEOUT_PATH=$(type -p timeout)
TIMEOUT_PATH=$(realpath $TIMEOUT_PATH 2>/dev/null || readlink -f $TIMEOUT_PATH)
BUSY_BOX_TIMEFLAG=""
if [[ $TIMEOUT_PATH =~ "busybox" ]]; then
ON_BUSY_BOX=1
# Check if busybox timeout uses -t flag
# (recent Alpine versions don't support -t anymore)
if timeout &>/dev/stdout | grep -q -e '-t '; then
BUSY_BOX_TIMEFLAG="-t"
fi
else
ON_BUSY_BOX=0
fi
# Child invocations perform the wait directly; the parent wraps the wait in
# `timeout` unless the timeout is disabled (0).
if [[ $CHILD -gt 0 ]]; then
wait_for
OUTCOME=$?
exit $OUTCOME
else
if [[ $TIMEOUT -gt 0 ]]; then
wait_for_wrapper
OUTCOME=$?
else
wait_for
OUTCOME=$?
fi
fi
# Run the optional command; in strict mode only when the wait succeeded.
if [[ $COMMAND != "" ]]; then
if [[ $OUTCOME -ne 0 && $STRICT -eq 1 ]]; then
echoerr "$SCRIPT_NAME: strict mode, refusing to execute subprocess"
exit $OUTCOME
fi
exec "${COMMAND[@]}"
else
exit $OUTCOME
fi
|
/* Copyright (c) 2001-2011, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb;
import org.hsqldb.HsqlNameManager.HsqlName;
import org.hsqldb.lib.OrderedHashSet;
import org.hsqldb.rights.Grantee;
/**
* SQL schema object interface
*
* @author <NAME> (<EMAIL> dot sourceforge.net)
* @version 1.9.0
* @since 1.9.0
*/
public interface SchemaObject {

    // Schema object type codes used throughout the engine.
    int CATALOG = 1;
    int SCHEMA = 2;
    int TABLE = 3;
    int VIEW = 4;
    int CONSTRAINT = 5;
    int ASSERTION = 6;
    int SEQUENCE = 7;
    int TRIGGER = 8;
    int COLUMN = 9;
    int TRANSITION = 10;
    int GRANTEE = 11;
    int TYPE = 12;
    int DOMAIN = 13;
    int CHARSET = 14;
    int COLLATION = 15;
    int FUNCTION = 16;
    int PROCEDURE = 17;
    int ROUTINE = 18;
    int CURSOR = 19;
    int INDEX = 20;
    int LABEL = 21;
    int VARIABLE = 22;
    int PARAMETER = 23;
    int SPECIFIC_ROUTINE = 24;
    int WRAPPER = 25;
    int SERVER = 26;
    int SUBQUERY = 27;
    int SEARCH = 28;

    //
    SchemaObject[] emptyArray = new SchemaObject[]{};

    int getType();

    HsqlName getName();

    HsqlName getSchemaName();

    HsqlName getCatalogName();

    Grantee getOwner();

    OrderedHashSet getReferences();

    OrderedHashSet getComponents();

    void compile(Session session, SchemaObject parentObject);

    String getSQL();

    long getChangeTimestamp();

    interface ConstraintTypes {

        int FOREIGN_KEY = 0;
        int MAIN = 1;
        int UNIQUE = 2;
        int CHECK = 3;
        int PRIMARY_KEY = 4;
        int TEMP = 5;
    }

    /*
     SQL CLI codes

     Referential Constraint 0 CASCADE
     Referential Constraint 1 RESTRICT
     Referential Constraint 2 SET NULL
     Referential Constraint 3 NO ACTION
     Referential Constraint 4 SET DEFAULT
     */
    interface ReferentialAction {

        int CASCADE = 0;
        int RESTRICT = 1;
        int SET_NULL = 2;
        int NO_ACTION = 3;
        int SET_DEFAULT = 4;
    }

    interface Deferable {

        int NOT_DEFERRABLE = 0;
        int INIT_DEFERRED = 1;
        int INIT_IMMEDIATE = 2;
    }

    interface ViewCheckModes {

        int CHECK_NONE = 0;
        int CHECK_LOCAL = 1;
        int CHECK_CASCADE = 2;
    }

    // Values mirror java.sql.ParameterMetaData: the original comments on
    // PARAM_OUT and PARAM_INOUT were swapped (parameterModeInOut is 2,
    // parameterModeOut is 4).
    interface ParameterModes {

        byte PARAM_UNKNOWN = 0; // java.sql.ParameterMetaData.parameterModeUnknown
        byte PARAM_IN = 1;      // java.sql.ParameterMetaData.parameterModeIn
        byte PARAM_OUT = 4;     // java.sql.ParameterMetaData.parameterModeOut
        byte PARAM_INOUT = 2;   // java.sql.ParameterMetaData.parameterModeInOut
    }

    interface Nullability {

        byte NO_NULLS = 0;          // java.sql.ResultSetMetaData.columnNoNulls
        byte NULLABLE = 1;          // java.sql.ResultSetMetaData.columnNullable
        byte NULLABLE_UNKNOWN = 2;  // java.sql.ResultSetMetaData.columnNullableUnknown
    }
}
|
#!/bin/bash
set -e
echo "add admin user with password 1423 if no users"
# Seed the open5gs accounts collection with a default admin account, but only
# when no account exists yet. The salt/hash pair below is the precomputed
# credential for the announced default password — verify it matches "1423".
cat << EOF > /tmp/account.js
db = db.getSiblingDB('open5gs')
cursor = db.accounts.find()
if ( cursor.count() == 0 ) {
db.accounts.insert({ salt: 'f5c15fa72622d62b6b790aa8569b9339729801ab8bda5d13997b5db6bfc1d997', hash: '402223057db5194899d2e082aeb0802f6794622e1cbc47529c419e5a603f2cc592074b4f3323b239ffa594c8b756d5c70a4e1f6ecd3f9f0d2d7328c4cf8b1b766514effff0350a90b89e21eac54cd4497a169c0c7554a0e2cd9b672e5414c323f76b8559bc768cba11cad2ea3ae704fb36abc8abc2619231ff84ded60063c6e1554a9777a4a464ef9cfdfa90ecfdacc9844e0e3b2f91b59d9ff024aec4ea1f51b703a31cda9afb1cc2c719a09cee4f9852ba3cf9f07159b1ccf8133924f74df770b1a391c19e8d67ffdcbbef4084a3277e93f55ac60d80338172b2a7b3f29cfe8a36738681794f7ccbe9bc98f8cdeded02f8a4cd0d4b54e1d6ba3d11792ee0ae8801213691848e9c5338e39485816bb0f734b775ac89f454ef90992003511aa8cceed58a3ac2c3814f14afaaed39cbaf4e2719d7213f81665564eec02f60ede838212555873ef742f6666cc66883dcb8281715d5c762fb236d72b770257e7e8d86c122bb69028a34cf1ed93bb973b440fa89a23604cd3fefe85fbd7f55c9b71acf6ad167228c79513f5cfe899a2e2cc498feb6d2d2f07354a17ba74cecfbda3e87d57b147e17dcc7f4c52b802a8e77f28d255a6712dcdc1519e6ac9ec593270bfcf4c395e2531a271a841b1adefb8516a07136b0de47c7fd534601b16f0f7a98f1dbd31795feb97da59e1d23c08461cf37d6f2877d0f2e437f07e25015960f63', username: 'admin', roles: [ 'admin' ], "__v" : 0})
}
EOF
# Run the seeding script against the configured MongoDB, then clean up.
mongo $DB_URI /tmp/account.js
rm -f /tmp/account.js
#!/bin/sh -x
set -e
# Smoke test for `rados clonedata` using an explicit object locator.
# Ensure a clean slate; ignore failure if the object does not exist yet.
rados -p data rm foo || true
# Write the source object, keyed to locator "foo".
rados -p data put foo.tmp /etc/passwd --object-locator foo
# Clone foo.tmp -> foo within the same locator.
rados -p data clonedata foo.tmp foo --object-locator foo
# Read the clone back and verify it matches the original file byte-for-byte.
rados -p data get foo /tmp/foo
cmp /tmp/foo /etc/passwd
rados -p data rm foo.tmp --object-locator foo
rados -p data rm foo
echo OK
<gh_stars>0
/**
 * Converts an integer in the range 0..999 to its English words representation,
 * e.g. 42 -> "forty two", 105 -> "one hundred five", 512 -> "five hundred twelve".
 *
 * Numbers with four or more digits are not supported and yield `undefined`,
 * matching the original implementation's behavior.
 *
 * Fix: the original logged every input via a leftover debug `console.log`;
 * that side effect has been removed.
 *
 * @param {number} number non-negative integer, 0..999
 * @returns {string|undefined} English words, or undefined for 4+ digit input
 */
function toReadable(number) {
  const units = ["zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"];
  const teens = ["ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen", "sixteen", "seventeen", "eighteen", "nineteen"];
  // Indices 0 and 1 are unreachable placeholders: two-digit numbers starting
  // with 1 go through `teens`, and leading zeros never occur.
  const tens = [null, null, "twenty", "thirty", "forty", "fifty", "sixty", "seventy", "eighty", "ninety"];

  const digits = number.toString().split("").map(Number);

  if (digits.length === 1) {
    return units[digits[0]];
  }

  if (digits.length === 2) {
    const [d10, d1] = digits;
    if (d10 === 1) return teens[d1];        // 10..19
    if (d1 === 0) return tens[d10];         // round tens: 20, 30, ..., 90
    return tens[d10] + " " + units[d1];     // 21..99 excluding round tens
  }

  if (digits.length === 3) {
    const [d100, d10, d1] = digits;
    const hundred = units[d100] + " hundred";
    if (d10 === 1) return hundred + " " + teens[d1];       // x10..x19
    if (d10 === 0 && d1 === 0) return hundred;             // round hundreds
    if (d10 === 0) return hundred + " " + units[d1];       // x01..x09
    if (d1 === 0) return hundred + " " + tens[d10];        // x20, x30, ...
    return hundred + " " + tens[d10] + " " + units[d1];
  }
  // 4+ digits: fall through and return undefined (original behavior).
}

module.exports = toReadable;
|
##########################################################################
# Author: Jane Curry, jane.curry@skills-1st.co.uk
# Date: February 28th, 2011
# Revised: Extra debugging added Aug 23, 2011
#
# JuniperFPC modeler plugin
#
# This program can be used under the GNU General Public License version 2
# You can find full information here: http://www.zenoss.com/oss
#
##########################################################################
__doc__ = """JuniperFPCMap
Gather table information from Juniper Contents tables
"""
import re
from Products.DataCollector.plugins.CollectorPlugin import SnmpPlugin, GetMap, GetTableMap
class JuniperFPCMap(SnmpPlugin):
    """Map Juniper FPC table to model."""
    maptype = "JuniperFPCMap"
    modname = "ZenPacks.ZenSystems.Juniper.JuniperFPC"
    relname = "JuniperFP"
    compname = ""
    # Three SNMP tables are fetched: chassis contents, operating state and
    # the container hierarchy the contents entries hang off.
    snmpGetTableMaps = (
        GetTableMap('jnxContentsTable',
                    '.1.3.6.1.4.1.2636.3.1.8.1',
                    {
                        '.1': 'containerIndex',
                        '.5': 'FPCType',
                        '.6': 'FPCDescr',
                        '.7': 'FPCSerialNo',
                        '.8': 'FPCRevision',
                        '.10': 'FPCPartNo',
                        '.11': 'FPCChassisId',
                        '.12': 'FPCChassisDescr',
                        '.13': 'FPCChassisCLEI',
                    }
                    ),
        GetTableMap('jnxOperatingTable',
                    '.1.3.6.1.4.1.2636.3.1.13.1',
                    {
                        '.6': 'FPCState',
                        '.7': 'FPCTemp',
                        '.8': 'FPCCPU',
                        '.13': 'FPCUpTime',
                        '.15': 'FPCMemory',
                    }
                    ),
        GetTableMap('jnxContainersTable',
                    '.1.3.6.1.4.1.2636.3.1.6.1',
                    {
                        '.1': 'containerIndex',
                        '.3': 'containerLevel',
                        '.4': 'containerNextLevel',
                        '.5': 'containerType',
                        '.6': 'containerDescr',
                    }
                    ),
    )

    def process(self, device, results, log):
        """collect snmp information from this device"""
        log.info('processing %s for device %s', self.name(), device.id)
        getdata, tabledata = results
        rm = self.relMap()
        contentsTable = tabledata.get('jnxContentsTable')
        operatingTable = tabledata.get('jnxOperatingTable')
        containersTable = tabledata.get('jnxContainersTable')
        # If no data supplied then simply return
        if not contentsTable:
            log.warn( 'No SNMP response from %s for the %s plugin for contents', device.id, self.name() )
            log.warn( "Data= %s", tabledata )
            return
        if not operatingTable:
            log.warn( 'No SNMP response from %s for the %s plugin for operating system', device.id, self.name() )
            log.warn( "Data= %s", tabledata )
            return
        if not containersTable:
            log.warn( 'No SNMP response from %s for the %s plugin for containers', device.id, self.name() )
            log.warn( "Data= %s", tabledata )
            return
        for oid, data in contentsTable.items():
            try:
                om = self.objectMap(data)
                FPCDescr = om.FPCDescr
                # log.info(' FPCDescr is %s ' % (om.FPCDescr))
                # Only entries whose description mentions FPC are modelled.
                isaFPC = re.match(r'(.*FPC.*)', FPCDescr.upper())
                if not isaFPC:
                    continue
                else:
                    # Merge operating-state fields from the row whose OID
                    # matches this contents row.
                    for oid1, data1 in operatingTable.items():
                        if oid1 == oid:
                            om.FPCState = data1['FPCState']
                            om.FPCTemp = data1['FPCTemp']
                            om.FPCCPU = data1['FPCCPU']
                            om.FPCUpTime = data1['FPCUpTime']
                            om.FPCMemory = data1['FPCMemory']
                    # Attach container description/parent, indenting the
                    # description with dots per container level for display.
                    for oid2, data2 in containersTable.items():
                        # log.info( ' oid is %s - oid2 is %s - data is %s' % (oid, oid2 , data2))
                        if oid.startswith(oid2):
                            om.containerDescr = data2['containerDescr']
                            if data2['containerLevel'] == 1:
                                om.containerDescr = '....' + om.containerDescr
                            elif data2['containerLevel'] == 2:
                                om.containerDescr = '........' + om.containerDescr
                            om.containerParentIndex = data2['containerNextLevel']
                            if om.containerParentIndex != 0:
                                for oid3, data3 in containersTable.items():
                                    if oid3.endswith(str(om.containerParentIndex)):
                                        om.containerParentDescr = data3['containerDescr']
                    om.snmpindex = oid1.strip('.')
                    # Convert FPCUpTime from milliseconds to days
                    # (/1000 ms /60 s /60 min /24 h); the original comment
                    # said "hours" but the arithmetic yields days.
                    om.FPCUpTime = om.FPCUpTime / 1000 / 60 / 60 /24
                    # Transform numeric FPCState into a status string via operatingStateLookup
                    if (om.FPCState < 1 or om.FPCState > 7):
                        om.FPCState = 1
                    om.FPCState = self.operatingStateLookup[om.FPCState]
                    om.id = self.prepId( om.FPCDescr.replace(' ','_') + '_' + str( om.snmpindex.replace('.','_') ) )
            except (KeyError, IndexError, AttributeError, TypeError), errorInfo:
                log.warn( ' Error in %s modeler plugin %s' % ( self.name(), errorInfo))
                continue
            rm.append(om)
        # log.info('rm %s' % (rm) )
        return rm

    # Numeric jnxOperatingState -> human readable status string.
    operatingStateLookup = { 1: 'Unknown',
                             2: 'Running',
                             3: 'Ready',
                             4: 'Reset',
                             5: 'RunningAtFullSpeed (Fan)',
                             6: 'Down',
                             7: 'Standby'
                             }
<reponame>openstreetcam/android
package com.telenav.osv.item.metadata;
import com.telenav.osv.utils.Log;
/**
* Data object which contains a track id and a frame index within that track. This object's {@link #toString()} knows how to format its data s.t. it's appropriate to write in
* the metadata file of a sequence.
*
* Metadata format version 1.1.6.
* @see <a href="http://spaces.telenav.com:8080/display/TELENAVEU/Metadata+Format+Protocol">Metadata format</a>
*/
public class VideoData {
private static final String LINE_SEPARATOR = "\n";
private static final String TAG = "VideoData";
private final long mTimeStamp;
private int[] mIndex;
private int[] mVideoIndex;
public VideoData(int index, int videoIndex, long millis) {
mIndex = new int[1];
mVideoIndex = new int[1];
mIndex[0] = index;
mVideoIndex[0] = videoIndex;
mTimeStamp = millis;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
long seconds = mTimeStamp / 1_000L;
long partial = mTimeStamp - (seconds * 1_000L);
builder.append(seconds);
builder.append(".");
builder.append((int) partial);
builder.append(";");
//null gps data
builder.append(";;;;;");
//null rotation
builder.append(";;;");
//null accelerometer
builder.append(";;;");
//pressure
builder.append(";");
//compass
builder.append(";");
if (mVideoIndex != null) {
builder.append(mVideoIndex[0]);
}
builder.append(";");
if (mIndex != null) {
builder.append(mIndex[0]);
}
builder.append(";");
//gravity
builder.append(";;;");
builder.append(";");
//todo builder.append(vertical_accuracy);
builder.append(LINE_SEPARATOR);
String str = builder.toString();
if (mIndex != null && mVideoIndex != null) {
Log.d(TAG, "toString: created for video file = " + mVideoIndex[0] + " and frame = " + mIndex[0]);
}
return str;
}
} |
class ImportError(Exception):
    # NOTE(review): this name shadows Python's builtin ImportError within this
    # module — any `except ImportError` here catches this class, not the
    # builtin. Renaming would change the public interface, so it is only
    # flagged here.
    pass
class ContentImporter:
    """Holds the most recently imported content.

    Content is stored as-is on the instance; no validation is performed.
    """

    def __init__(self):
        # No content imported yet.
        self.content = None

    def import_content(self, content):
        """Store ``content`` on the importer.

        The original wrapped this assignment in ``try/except ValueError``
        re-raised as ``ImportError``, but a plain attribute assignment cannot
        raise ValueError, so the handler was dead code and has been removed.
        Observable behavior is unchanged.
        """
        self.content = content
#!/usr/bin/env bash
set -e
cd "$(dirname "$0")/.."
# Annotate the Buildkite build when running in CI; no-op locally
# (BUILDKITE defaults to false).
annotate() {
${BUILDKITE:-false} && {
buildkite-agent annotate "$@"
}
}
# Skip the whole bench run when none of the Rust-related files changed.
ci/affects-files.sh \
.rs$ \
Cargo.lock$ \
Cargo.toml$ \
ci/test-bench.sh \
|| {
annotate --style info --context test-bench \
"Bench skipped as no .rs files were modified"
exit 0
}
genesis ci/_
genesis ci/upload-ci-artifact.sh
eval "$(ci/channel-info.sh)"
genesis ci/rust-version.sh nightly
set -o pipefail
export RUST_BACKTRACE=1
# Metrics are only uploaded for pushes to a real branch, not for PRs.
UPLOAD_METRICS=""
TARGET_BRANCH=$BUILDKITE_BRANCH
if [[ -z $BUILDKITE_BRANCH ]] || ./ci/is-pr.sh; then
TARGET_BRANCH=$EDGE_CHANNEL
else
UPLOAD_METRICS="upload"
fi
BENCH_FILE=bench_output.log
BENCH_ARTIFACT=current_bench_results.log
# Ensure all dependencies are built
_ cargo +$rust_nightly build --all --release
# Remove "BENCH_FILE", if it exists so that the following commands can append
rm -f "$BENCH_FILE"
# Run sdk benches
_ cargo +$rust_nightly bench --manifest-path sdk/Cargo.toml ${V:+--verbose} \
-- -Z unstable-options --format=json | tee -a "$BENCH_FILE"
# Run runtime benches
_ cargo +$rust_nightly bench --manifest-path runtime/Cargo.toml ${V:+--verbose} \
-- -Z unstable-options --format=json | tee -a "$BENCH_FILE"
# Run core benches
_ cargo +$rust_nightly bench --manifest-path core/Cargo.toml ${V:+--verbose} \
-- -Z unstable-options --format=json | tee -a "$BENCH_FILE"
# Run bpf benches
_ cargo +$rust_nightly bench --manifest-path controllers/bpf/Cargo.toml ${V:+--verbose} --features=bpf_c \
-- -Z unstable-options --format=json --nocapture | tee -a "$BENCH_FILE"
# TODO: debug why morgan-upload-perf takes over 30 minutes to complete.
# NOTE: everything below this exit is intentionally unreachable until the
# TODO above is resolved.
exit 0
_ cargo +$rust_nightly run --release --package morgan-upload-perf \
-- "$BENCH_FILE" "$TARGET_BRANCH" "$UPLOAD_METRICS" | tee "$BENCH_ARTIFACT"
upload-ci-artifact "$BENCH_FILE"
upload-ci-artifact "$BENCH_ARTIFACT"
|
import pytest
from unittest.mock import patch
# Assume the function to be tested is named 'interact_with_pokedex'
# Import the function to be tested
from your_module import interact_with_pokedex
# Define the unit test using the provided fixtures
def test_interact_with_pokedex(mock_query_server_publish, mock_pokedex_functions):
    """interact_with_pokedex should publish '<name> - <type>' for the mocked Pokemon.

    Relies on two externally provided fixtures (presumably defined in
    conftest.py — verify): a mock of the query server's ``_publish`` and a
    dict of mocked Pokedex functions.
    """
    # Set up any necessary mock data or behavior for the Pokedex functions
    mock_pokedex_functions['get_pokemon_info'].return_value = {'name': 'Pikachu', 'type': 'Electric'}
    # Call the function to be tested
    interact_with_pokedex()
    # Assert that the query server's _publish method was called with the expected data
    mock_query_server_publish.assert_called_with('Pikachu - Electric')
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-old/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-old/13-1024+0+512-pad-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function truncate_and_pad_first_two_thirds_sixth --eval_function last_sixth_eval |
#!/bin/sh
# Recreate the `post` table from scratch in posts.db.
# NOTE(review): after `drop table if exists`, the `if not exists` guard on the
# create is redundant but harmless. Also, `id int primary key` is not a rowid
# alias in SQLite (that requires the exact type INTEGER) — confirm intended.
sqlite3 posts.db "drop table if exists post;
create table if not exists post(
id int primary key not null,
userId int not null,
title text not null,
body text not null
);"
|
# Always enable colored `grep` output
# Note: `GREP_OPTIONS="--color=auto"` is deprecated, hence the alias usage.
alias grep='grep --color=auto'
alias fgrep='fgrep --color=auto'
alias egrep='egrep --color=auto'
alias diff='diff --color -u'
|
package javafx.scene.control.skin;
import com.sun.javafx.scene.control.behavior.ButtonBehavior;
import javafx.scene.control.Hyperlink;
/**
* A Skin for Hyperlinks.
*/
public class HyperlinkSkin extends LabeledSkinBase<Hyperlink, ButtonBehavior<Hyperlink>> {

    /***************************************************************************
     *                                                                         *
     * Constructors                                                            *
     *                                                                         *
     **************************************************************************/

    /**
     * Creates a skin for the given Hyperlink, wiring it to a standard
     * {@link ButtonBehavior} (an internal com.sun API) for input handling.
     *
     * @param link the Hyperlink control this skin renders
     */
    public HyperlinkSkin(Hyperlink link) {
        super(link, new ButtonBehavior<>(link));
    }
}
|
#!/usr/bin/env bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -e -x

# Build and package nightly pip wheels for every supported CPython version.
PYTHON_VERSIONS="python2.7 python3.5 python3.6 python3.7"
ln -sf /usr/bin/python3.5 /usr/bin/python3 # Py36 has issues with add-apt
# Bootstrap pip and enable the deadsnakes PPA (source of the non-default CPythons).
curl -sSOL https://bootstrap.pypa.io/get-pip.py
add-apt-repository -y ppa:deadsnakes/ppa
apt-get -y -qq update
for version in ${PYTHON_VERSIONS}; do
  # PYTHON_VERSION is exported because configure.sh presumably reads it -- confirm.
  export PYTHON_VERSION=${version}
  apt-get -y -qq install ${PYTHON_VERSION}
  ${PYTHON_VERSION} get-pip.py -q
  ${PYTHON_VERSION} -m pip --version
  #Link TF dependency
  yes 'y' | ./configure.sh --quiet
  # Build
  bazel build \
    -c opt \
    --noshow_progress \
    --noshow_loading_progress \
    --verbose_failures \
    --test_output=errors \
    --crosstool_top=//build_deps/toolchains/gcc7_manylinux2010-nvcc-cuda10.1:toolchain \
    build_pip_pkg
  # Package Whl
  bazel-bin/build_pip_pkg artifacts --nightly
  # Uncomment and use this command for release branches
  #bazel-bin/build_pip_pkg artifacts
done
# Clean up
rm get-pip.py
# Verify Wheels
./tools/ci_build/builds/wheel_verify.sh |
package com.yoloho.enhanced.data.dao.monitor;
import java.util.List;
/**
 * Consumer of periodically collected monitoring samples.
 */
public interface MonitorCallback {
    /**
     * Delivers one batch of monitor data.
     *
     * @param dataList          samples collected during the last interval
     * @param intervalInSeconds length of the collection interval, in seconds
     */
    void receive(List<MonitorData> dataList, int intervalInSeconds);
} |
<filename>src/locale/lang/zh-CN.js
// zh-CN (Simplified Chinese) UI strings, keyed by route/menu identifier.
// Keys must stay in sync with the other locale files under src/locale/lang/.
export default {
  home: '首页',
  login: '登录',
  application: '应用',
  log: '日志',
  application_list: '应用列表',
  application_config: '应用配置',
  log_list: '日志列表',
  log_detail: '日志详情',
  error_store_page: '错误收集',
  error_logger_page: '错误日志',
  query: '带参路由',
  params: '动态路由'
}
|
#!/bin/sh
set -e
set -u
set -o pipefail
function on_error {
echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
elif [ -L "${binary}" ]; then
echo "Destination binary is symlinked..."
dirname="$(dirname "${binary}")"
binary="${dirname}/$(readlink "${binary}")"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Copies the bcsymbolmap files of a vendored framework
# Copies the bcsymbolmap files of a vendored framework into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
  local bcsymbolmap_path="$1"
  local destination="${BUILT_PRODUCTS_DIR}"
  # Escape the inner quotes in the logged command like the sibling install_*
  # helpers do (the original left them unescaped, so the shell stripped them
  # and the echoed rsync line was not copy-pastable).
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  # Only sign when an identity was expanded by Xcode and signing is neither
  # marked not-required nor disallowed by the build settings.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    # Background the invocation when parallel signing is enabled; the
    # top-level script `wait`s for all signing jobs at the end.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    # 0 = "binary unusable"; install_dsym reads this flag (not an exit status).
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  # 1 = success; callers compare the variable's value, not a return code.
  STRIP_BINARY_RETVAL=1
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Matrix/Matrix.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Matrix/Matrix.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
// Doxygen-generated navigation index for unit_tests.cpp: each entry is
// [ symbol name, anchor URL within the generated XHTML page, initializer ].
// Do not edit by hand; this file is regenerated by Doxygen.
var _unit_tests_8cpp =
[
    [ "BOOST_TEST_MODULE", "_unit_tests_8cpp.xhtml#a6b2a3852db8bb19ab6909bac01859985", null ],
    [ "BOOST_AUTO_TEST_CASE", "_unit_tests_8cpp.xhtml#ad57096af99f473aca7909812a744b619", null ],
    [ "BOOST_GLOBAL_FIXTURE", "_unit_tests_8cpp.xhtml#a12aa278dd0bf585d562659e2b2e74014", null ]
]; |
# Demonstrates str.upper(): prints the message in all caps.
# Fix: the original used a C-style `//` comment, which is a SyntaxError in Python.
message = 'Hello world!'
print(message.upper())  # HELLO WORLD!
/**
 * Shared type declarations for the chat feature.
 *
 * Modernized: `module X {}` is the legacy internal-module syntax; `namespace`
 * is the current keyword with identical semantics and emit.
 */
namespace ChatModule {
    /** A chat room, including its live client-side state. */
    export interface IChat {
        id: string;
        name: string;
        /** When true, messages presumably need moderator approval -- confirm. */
        premoderated: boolean;
        direction: string;
        theme?: ITheme;
        socket?: SocketIOClient.Socket;
        messages?: IMessage[];
        status: boolean;
        /** Loads the chat's server-side state. */
        init(): ng.IPromise<IChatResponse>;
    }
    /** Server response payload resolved by {@link IChat.init}. */
    export interface IChatResponse {
        data: {
            id: string;
            name: string;
            premoderated: boolean;
        };
        status: number;
    }
    /** A single chat message. */
    export interface IMessage {
        accessToken: string;
        date: string;
        hidden: boolean;
        id: string;
        msg: string;
        user: string;
        userLink: string;
        userName?: string;
        avatarColor?: string;
    }
    /**
     * A chat participant.
     *
     * NOTE(review): both `userId` and `userID` are declared; they look like
     * duplicate spellings of the same field. Confirm which one the backend
     * actually sends before consolidating.
     */
    export interface IUser {
        accessToken: string;
        user: string;
        userLink: string;
        userName?: string;
        userId?: string;
        userID?: string;
    }
    /** Facebook OAuth response subset used for authentication. */
    export interface IFbAuth {
        accessToken: string;
        expiresIn?: number;
        signedRequest?: string;
        userID: string;
    }
    /** Visual customization applied to a chat widget. */
    export interface ITheme {
        avatar: string;
        backgroundColor: string;
        borderColor: string;
        fontColor: string;
        headerFontColor: string;
    }
}
|
<reponame>MrPepperoni/Reaping2-1
#ifndef INCLUDED_PLATFORM_TGATEXTURE_H
#define INCLUDED_PLATFORM_TGATEXTURE_H

#include "texture_base.h"
#include "rstdint.h"

namespace platform {

class File;

// Texture loaded from a TGA (Targa) image file. Decoding happens in the
// constructor; presumably TextureBase owns the resulting pixel data --
// TODO confirm against texture_base.h.
class TgaTexture : public TextureBase
{
public:
    // Parses the TGA payload of F and initializes the base texture.
    TgaTexture( File& F );
};

} // namespace platform

#endif//INCLUDED_PLATFORM_TGATEXTURE_H
|
import pygame as pg
# Initialize Pygame
pg.init()
# Set up the game window
window_width = 800
window_height = 600
window = pg.display.set_mode((window_width, window_height))
pg.display.set_caption('Maze Game')
# Game variables
running = True
# Player starts near the top-left; the 20x20 goal square sits bottom-right.
player_x = 50
player_y = 50
goal_x = 700
goal_y = 500
# Single rectangular obstacle in the middle of the field.
obstacle_x = 300
obstacle_y = 200
obstacle_width = 100
obstacle_height = 50
# Game loop
# NOTE(review): no pg.time.Clock().tick() call, so loop (and movement) speed
# depends entirely on the host CPU -- confirm this is intended.
while running:
    for event in pg.event.get():
        if event.type == pg.QUIT:
            running = False
    # Arrow-key movement, 5 px per iteration; positions are not clamped, so
    # the player can leave the visible window.
    keys = pg.key.get_pressed()
    if keys[pg.K_LEFT]:
        player_x -= 5
    if keys[pg.K_RIGHT]:
        player_x += 5
    if keys[pg.K_UP]:
        player_y -= 5
    if keys[pg.K_DOWN]:
        player_y += 5
    # Check for collision with obstacle: AABB overlap between the 20x20 player
    # square and the obstacle rectangle; any touch ends the game.
    if (player_x < obstacle_x + obstacle_width and
        player_x + 20 > obstacle_x and
        player_y < obstacle_y + obstacle_height and
        player_y + 20 > obstacle_y):
        running = False
    # Check for reaching the goal
    # NOTE(review): fires once the player's top-left passes the goal's
    # top-left corner, not on rectangle overlap -- verify desired behavior.
    if player_x > goal_x and player_y > goal_y:
        running = False
    # Update the display
    window.fill((0, 0, 0))  # Clear the screen
    pg.draw.rect(window, (255, 0, 0), (obstacle_x, obstacle_y, obstacle_width, obstacle_height))  # Draw obstacle
    pg.draw.rect(window, (0, 255, 0), (goal_x, goal_y, 20, 20))  # Draw goal
    pg.draw.rect(window, (0, 0, 255), (player_x, player_y, 20, 20))  # Draw player
    pg.display.update()
# Clean up
pg.quit() |
import "dotenv/config";
import mongoose from "mongoose";
import express, { NextFunction, Request, Response } from "express";
import { HttpError } from "http-errors";
import { User } from "database/models";
import cors from "cors";
import session from "express-session";
import path from "path";
import itineraryRouter from "./routes/itineraries";
import sharedItineraryRouter from "./routes/sharedItineraries";
import userRouter from "./routes/users";
import googleAuthRouter from "./routes/googleAuth";
import yelpFusionRouter from "./routes/yelpFusion";
import { redirectToHTTPS } from "express-http-to-https";
// Connect to MongoDB, then bring up the Express server. The server only
// starts once the database connection succeeds; a connection failure now
// exits the process instead of leaving an unhandled promise rejection.
mongoose
  .connect(process.env.DATABASE_URL!, {
    useNewUrlParser: true,
    useUnifiedTopology: true,
  })
  .then(() => {
    const app = express();
    const PORT = process.env.PORT || 4000;
    const corsOptions = {
      origin: process.env.ORIGIN,
      credentials: true,
      preflightContinue: false,
    };
    app.use(redirectToHTTPS([/localhost:(\d{4})/], [], 301));
    // Serve React app from express
    app.use(
      express.static(path.join(__dirname, "..", "..", "trippo", "build"))
    );
    // Any non-API GET falls through to the SPA's index.html.
    app.get("*", (req, res, next) => {
      if (req.path.startsWith("/api")) {
        return next();
      }
      res.sendFile(
        path.join(__dirname, "..", "..", "trippo", "build", "index.html")
      );
    });
    app.use(
      session({
        resave: true,
        secret: process.env.EXPRESS_SESSION_SECRET!,
        saveUninitialized: true,
        cookie: { secure: false },
      })
    );
    // Placed above middleware to allow un-authenticated users to view shareable itineraries
    app.use("/api/shared/itineraries", sharedItineraryRouter);
    // Auth gate: every request except the auth endpoints and CORS preflight
    // must resolve to a known user from the session.
    app.use(async (req: any, res, next) => {
      const user = await User.findById(req.session.userId).exec();
      if (
        req.originalUrl !== "/api/v1/auth/google" &&
        req.originalUrl !== "/api/v1/auth/logout" &&
        req.method !== "OPTIONS" &&
        !user
      ) {
        res.status(404).send({ error: "User not found" });
        return next("Invalid user");
      }
      req.user = user;
      next();
    });
    // NOTE(review): body parsing and CORS are registered after the auth
    // middleware above -- confirm preflight/credential handling is intended.
    app.use(express.json());
    app.use(express.urlencoded({ extended: true }));
    app.use(cors(corsOptions));
    app.use("/api/itineraries", itineraryRouter);
    app.use("/api/users", userRouter);
    app.use("/api/v1/auth", googleAuthRouter);
    app.use("/api/yelp", yelpFusionRouter);
    // Central error handler: surfaces HttpError status codes, defaults to 500.
    app.use(
      (err: HttpError, _req: Request, res: Response, _next: NextFunction) => {
        res.status(err.statusCode || 500).send(err.message);
      }
    );
    app.listen(PORT, () => console.log("Listening on port " + PORT));
  })
  .catch((err) => {
    // Fail fast: without a database the app cannot serve requests, and the
    // rejection would otherwise be silently swallowed (or crash with an
    // opaque warning, depending on the Node version).
    console.error("Failed to connect to MongoDB:", err);
    process.exit(1);
  });
|
#!/usr/bin/env bash
# Copyright 2020 Hewlett Packard Enterprise Development LP
# Compose RELEASE as "<name>-<version>", honoring any pre-set env overrides.
: "${RELEASE:="${RELEASE_NAME:="casmrel-630-psp-hotfix"}-${RELEASE_VERSION:="0.0.4"}"}"

# return if sourced
return 0 2>/dev/null

# otherwise print release information
if [[ $# -eq 0 ]]; then
    echo "$RELEASE"
else
    case "$1" in
    -n|--name) echo "$RELEASE_NAME" ;;
    -v|--version) echo "$RELEASE_VERSION" ;;
    *)
        # Fixed typo in the error message: "argumented" -> "argument".
        echo >&2 "error: unsupported argument: $1"
        echo >&2 "usage: ${0##*/} [--name|--version]"
        ;;
    esac
fi
|
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2018.3 (64-bit)
#
# Filename : fifo_weights.sh
# Simulator : Aldec Active-HDL Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Sat Dec 14 01:23:39 +0800 2019
# SW Build 2405991 on Thu Dec 6 23:38:27 MST 2018
#
# Copyright 1986-2018 Xilinx, Inc. All Rights Reserved.
#
# usage: fifo_weights.sh [-help]
# usage: fifo_weights.sh [-lib_map_path]
# usage: fifo_weights.sh [-noclean_files]
# usage: fifo_weights.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'fifo_weights.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info
echo -e "fifo_weights.sh - Script generated by export_simulation (Vivado v2018.3 (64-bit)-id)\n"
# Main steps
run()
{
check_args $# $1
setup $1 $2
compile
simulate
}
# RUN_STEP: <compile>
compile()
{
# Compile design files
source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <simulate>
simulate()
{
runvsimsa -l simulate.log -do "do {simulate.do}"
}
# STEP: setup
setup()
{
case $1 in
"-lib_map_path" )
if [[ ($2 == "") ]]; then
echo -e "ERROR: Simulation library directory path not specified (type \"./fifo_weights.sh -help\" for more information)\n"
exit 1
fi
map_setup_file $2
;;
"-reset_run" )
reset_run
echo -e "INFO: Simulation run files deleted.\n"
exit 0
;;
"-noclean_files" )
# do not remove previous data
;;
* )
map_setup_file $2
esac
# Add any setup/initialization commands here:-
# <user specific commands>
}
# Map library.cfg file
map_setup_file()
{
file="library.cfg"
if [[ ($1 != "") ]]; then
lib_map_path="$1"
else
lib_map_path="C:/AI/AIFPGA/sv/sv.cache/compile_simlib/activehdl"
fi
if [[ ($lib_map_path != "") ]]; then
src_file="$lib_map_path/$file"
if [[ -e $src_file ]]; then
vmap -link $lib_map_path
fi
fi
}
# Delete generated data from the previous run
reset_run()
{
  # Remove every artifact the previous compile/elaborate/simulate run produced.
  local artifacts=(compile.log elaboration.log simulate.log dataset.asdb work activehdl)
  for artifact in "${artifacts[@]}"; do
    if [[ -e $artifact ]]; then
      rm -rf "$artifact"
    fi
  done
}
# Check command line arguments
check_args()
{
  # $1: number of command-line arguments passed to the script.
  # $2: the first argument (may be empty).
  # Reject a single argument that is not one of the recognized switches.
  if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
    echo -e "ERROR: Unknown option specified '$2' (type \"./fifo_weights.sh -help\" for more information)\n"
    exit 1
  fi
  # -help/-h prints usage; usage() exits, so execution stops there.
  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}
# Script usage
usage()
{
msg="Usage: fifo_weights.sh [-help]\n\
Usage: fifo_weights.sh [-lib_map_path]\n\
Usage: fifo_weights.sh [-reset_run]\n\
Usage: fifo_weights.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
echo -e $msg
exit 1
}
# Launch script
run $1 $2
|
package interpreter.core.include;
import interpreter.core.elements.Element;
import interpreter.core.elements.Value;
import interpreter.core.include.prototypes.PreDefined;
import interpreter.exceptions.InvalidSyntaxError;
import interpreter.util.Context;
import java.util.ArrayList;
/**
* Implements Minus function
*
* @author ramilmsh
*/
public class Minus extends PreDefined<Double> {
    // NOTE(review): never read or written in this class -- possibly required
    // via reflection in PreDefined, otherwise dead; confirm before removing.
    private ArrayList<Element> arguments;

    /**
     * Creates a new instance of Minus function
     *
     * @param name:    function name
     * @param context: context
     * @throws InvalidSyntaxError: if there has been a syntax error
     */
    public Minus(String name, Context context) throws InvalidSyntaxError {
        // NUMBER result type, arity 1 (per the PreDefined super call).
        super(Value.Type.NUMBER, name, context, 1);
    }

    /**
     * Negates each input value element-wise (unary minus) and returns the
     * results wrapped in a single Value holding the list.
     */
    @Override
    protected Value evaluate(ArrayList<Value> inputs) {
        ArrayList<Value<Double>> result = new ArrayList<>();
        for (Value input : inputs)
            result.add(new Value<>(-(Double) input.getValue()));
        return new Value<>(result);
    }
}
|
<reponame>ooooo-youwillsee/leetcode
//
// Created by ooooo on 2020/1/23.
//
#ifndef CPP_0021__SOLUTION2_H_
#define CPP_0021__SOLUTION2_H_
#include "ListNode.h"
/**
 * Merges two sorted singly-linked lists by relinking the original nodes
 * (no per-element allocation -- "reuses the original nodes, efficient").
 * On equal keys, the node from l1 is appended before the node from l2.
 */
class Solution {
 public:
  ListNode *mergeTwoLists(ListNode *l1, ListNode *l2) {
    // Dummy head avoids special-casing the first appended node.
    // NOTE(review): the dummy node is heap-allocated and never freed --
    // typical for LeetCode snippets, but a leak if this code is reused.
    ListNode *dummyHead = new ListNode(0), *cur = dummyHead;
    while (l1 && l2) {
      if (l1->val == l2->val) {
        // Equal keys: splice l1's node then l2's node, advance both lists.
        ListNode *temp1 = l1->next;
        ListNode *temp2 = l2->next;
        cur->next = l1;
        cur = cur->next;
        cur->next = l2;
        cur = cur->next;
        l1 = temp1;
        l2 = temp2;
      } else if (l1->val < l2->val) {
        ListNode *temp1 = l1->next;
        cur->next = l1;
        cur = cur->next;
        l1 = temp1;
      } else {
        ListNode *temp2 = l2->next;
        cur->next = l2;
        cur = cur->next;
        l2 = temp2;
      }
    }
    // Append whatever remains of either list (at most one loop runs).
    while (l1) {
      ListNode *temp1 = l1->next;
      cur->next = l1;
      cur = cur->next;
      l1 = temp1;
    }
    while (l2) {
      ListNode *temp2 = l2->next;
      cur->next = l2;
      cur = cur->next;
      l2 = temp2;
    }
    return dummyHead->next;
  }
};
#endif //CPP_0021__SOLUTION2_H_
|
<reponame>Sadeeg/VirtualYouthClub<filename>frontend/VirtualYouthClubApp/src/app/basic/viedeo/viedeo.component.spec.ts
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { ViedeoComponent } from './viedeo.component';
// Angular CLI-generated smoke test for ViedeoComponent.
// NOTE(review): "Viedeo" looks like a typo for "Video"; renaming would touch
// the component class, selector and folder, so it is only flagged here.
describe('ViedeoComponent', () => {
  let component: ViedeoComponent;
  let fixture: ComponentFixture<ViedeoComponent>;

  // `async` from @angular/core/testing is deprecated in newer Angular
  // versions (replaced by `waitForAsync`) -- kept for this project's version.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ViedeoComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(ViedeoComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
/***************************************************************************
*
* Project _____ __ ____ _ _
* ( _ ) /__\ (_ _)_| |_ _| |_
* )(_)( /(__)\ )( (_ _)(_ _)
* (_____)(__)(__)(__) |_| |_|
*
*
* Copyright 2018-present, <NAME> <<EMAIL>>
* <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************************/
#include "LoggerTest.hpp"
namespace oatpp { namespace test { namespace base {
OATPP_LOG_CATEGORY(LoggerTest::TESTCATEGORY, "LogCategory", true);
void LoggerTest::onRun() {
  // The environment's logger is a DefaultLogger here, so the cast is safe.
  auto logger = std::static_pointer_cast<oatpp::base::DefaultLogger>(oatpp::base::Environment::getLogger());

  // Baseline: every priority enabled -- all five lines should be emitted.
  OATPP_LOGV("LoggerTest", "Verbose Log");
  OATPP_LOGD("LoggerTest", "Debug Log");
  OATPP_LOGI("LoggerTest", "Info Log");
  OATPP_LOGW("LoggerTest", "Warning Log");
  OATPP_LOGE("LoggerTest", "Error Log");

  // Globally disable the Debug priority and verify the flag flipped.
  OATPP_LOGI("LoggerTest", " --- Disabling Debug Log");
  logger->disablePriority(oatpp::base::DefaultLogger::PRIORITY_D);
  OATPP_ASSERT(!logger->isLogPriorityEnabled(oatpp::base::DefaultLogger::PRIORITY_D))
  OATPP_LOGV("LoggerTest", "Verbose Log");
  OATPP_LOGD("LoggerTest", "Debug Log");
  OATPP_LOGI("LoggerTest", "Info Log");
  OATPP_LOGW("LoggerTest", "Warning Log");
  OATPP_LOGE("LoggerTest", "Error Log");

  // Re-enable Debug globally and verify.
  OATPP_LOGI("LoggerTest", " --- Enabling Debug Log again");
  logger->enablePriority(oatpp::base::DefaultLogger::PRIORITY_D);
  OATPP_ASSERT(logger->isLogPriorityEnabled(oatpp::base::DefaultLogger::PRIORITY_D))
  OATPP_LOGV("LoggerTest", "Verbose Log");
  OATPP_LOGD("LoggerTest", "Debug Log");
  OATPP_LOGI("LoggerTest", "Info Log");
  OATPP_LOGW("LoggerTest", "Warning Log");
  OATPP_LOGE("LoggerTest", "Error Log");

  // Same checks against a per-category priority mask (TESTCATEGORY).
  OATPP_LOGI(TESTCATEGORY, " --- Log-Test with category");
  OATPP_LOGV(TESTCATEGORY, "Verbose Log");
  OATPP_LOGD(TESTCATEGORY, "Debug Log");
  OATPP_LOGI(TESTCATEGORY, "Info Log");
  OATPP_LOGW(TESTCATEGORY, "Warning Log");
  OATPP_LOGE(TESTCATEGORY, "Error Log");
  OATPP_LOGI(TESTCATEGORY, " --- Disabling Debug Log for category");
  TESTCATEGORY.disablePriority(oatpp::base::DefaultLogger::PRIORITY_D);
  OATPP_ASSERT(!TESTCATEGORY.isLogPriorityEnabled(oatpp::base::DefaultLogger::PRIORITY_D))
  OATPP_LOGV(TESTCATEGORY, "Verbose Log");
  OATPP_LOGD(TESTCATEGORY, "Debug Log");
  OATPP_LOGI(TESTCATEGORY, "Info Log");
  OATPP_LOGW(TESTCATEGORY, "Warning Log");
  OATPP_LOGE(TESTCATEGORY, "Error Log");
}
}}}
|
#!/bin/sh
# Submit the first matching benchmark executable to the batch scheduler of the
# machine it was built for. The executable suffix encodes machine + backend;
# exactly one branch fires, in the order listed below.
# Tsubame p100
if ls *.tsubame3.0_p100_kokkos > /dev/null 2>&1; then
    qsub -g jh200053 batch_scripts/sub_tsubame3.0_p100_kokkos.sh
elif ls *.tsubame3.0_p100_openacc > /dev/null 2>&1; then
    qsub -g jh200053 batch_scripts/sub_tsubame3.0_p100_acc.sh
# Tsubame broadwell
elif ls *.tsubame3.0_bdw_kokkos > /dev/null 2>&1; then
    qsub -g jh200053 batch_scripts/sub_tsubame3.0_bdw_kokkos.sh
elif ls *.tsubame3.0_bdw_openmp > /dev/null 2>&1; then
    qsub -g jh200053 batch_scripts/sub_tsubame3.0_bdw_omp.sh
# M100 v100
elif ls *.m100_v100_kokkos > /dev/null 2>&1; then
    sbatch batch_scripts/sub_m100_v100_kokkos.sh
elif ls *.m100_v100_openacc > /dev/null 2>&1; then
    sbatch batch_scripts/sub_m100_v100_acc.sh
elif ls *.m100_v100_omp4.5 > /dev/null 2>&1; then
    sbatch batch_scripts/sub_m100_v100_omp4.5.sh
elif ls *.m100_v100_stdpar > /dev/null 2>&1; then
    sbatch batch_scripts/sub_m100_v100_stdpar.sh
# Wisteria A100
elif ls *.A100_Wisteria_kokkos > /dev/null 2>&1; then
    pjsub batch_scripts/sub_Wisteria_A100_kokkos.sh
# NOTE(review): the three patterns below use "*WISTERIA_A100_..." (upper case,
# no leading dot) while the kokkos branch uses "*.A100_Wisteria_..."; confirm
# the suffixes match what the build system actually produces.
elif ls *WISTERIA_A100_openacc > /dev/null 2>&1; then
    pjsub batch_scripts/sub_Wisteria_A100_acc.sh
elif ls *WISTERIA_A100_omp4.5 > /dev/null 2>&1; then
    pjsub batch_scripts/sub_Wisteria_A100_omp4.5.sh
elif ls *WISTERIA_A100_stdpar > /dev/null 2>&1; then
    pjsub batch_scripts/sub_Wisteria_A100_stdpar.sh
# Oakforest pacs
elif ls *.pacs_knl_openmp > /dev/null 2>&1; then
    pjsub batch_scripts/sub_pacs_knl_omp.sh
# JFRS1 skx
elif ls *.jfrs1_skx_openmp > /dev/null 2>&1; then
    sbatch batch_scripts/sub_jfrs1_skx_omp.sh
elif ls *.jfrs1_skx_kokkos > /dev/null 2>&1; then
    sbatch batch_scripts/sub_jfrs1_skx_kokkos.sh
# Flow machine
elif ls *.flow_a64fx_kokkos > /dev/null 2>&1; then
    pjsub batch_scripts/sub_flow_a64fx_kokkos.sh
elif ls *.flow_a64fx_openmp > /dev/null 2>&1; then
    pjsub batch_scripts/sub_flow_a64fx_omp.sh
# Fugaku
elif ls *.fugaku_a64fx_kokkos > /dev/null 2>&1; then
    pjsub batch_scripts/sub_fugaku_a64fx_kokkos.sh
elif ls *.fugaku_a64fx_openmp > /dev/null 2>&1; then
    pjsub batch_scripts/sub_fugaku_a64fx_omp.sh
else
    echo "No executable!"
fi
|
#!/bin/bash
set -e

# Rebase the current branch onto upstream Kresus master, re-running the
# checks at every replayed commit (rebase -x).
TARGET_REPO="https://framagit.org/kresusapp/kresus"
TARGET_BRANCH="master"

# The brace group on the right-hand side of the pipe runs in a subshell;
# REMOTE_NAME set below is therefore local to it, which is fine since all
# uses are inside the braces.
git remote |
{
    while read remote
    do
        # NOTE(review): the unquoted backtick test relies on grep printing a
        # single word; a URL containing spaces would break it (unlikely here).
        if [ `git remote get-url $remote | grep -e $TARGET_REPO` ]
        then
            echo "Remote '$remote' already exists"
            REMOTE_NAME=$remote
            break;
        fi
    done
    # Fall back to creating the remote when none of the existing ones matched.
    if [ "$REMOTE_NAME" == '' ]
    then
        echo "Creating remote 'upstream-kresus'"
        git remote add upstream-kresus $TARGET_REPO
        REMOTE_NAME='upstream-kresus'
    fi
    # Ensure the remote is up to date.
    echo "Fetching '$REMOTE_NAME'"
    git fetch $REMOTE_NAME
    git rebase $REMOTE_NAME/$TARGET_BRANCH -x "git log -1 --oneline && yarn && yarn run check"
}
|
import { PipeTransform } from '@angular/core';
import * as ɵngcc0 from '@angular/core';
/**
 * Compiler-generated (ngcc) declaration for the `count` pipe.
 * The name suggests it counts items in the piped input -- the actual behavior
 * lives in the library source; hand edits here are overwritten on rebuild.
 */
export declare class CountPipe implements PipeTransform {
    transform(input: any): any;
    static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<CountPipe, never>;
    static ɵpipe: ɵngcc0.ɵɵPipeDeclaration<CountPipe, "count">;
}
/** NgModule that declares and exports {@link CountPipe}. */
export declare class NgCountPipeModule {
    static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<NgCountPipeModule, never>;
    static ɵmod: ɵngcc0.ɵɵNgModuleDeclaration<NgCountPipeModule, [typeof CountPipe], never, [typeof CountPipe]>;
    static ɵinj: ɵngcc0.ɵɵInjectorDeclaration<NgCountPipeModule>;
}
//# sourceMappingURL=count.pipe.d.ts.map |
<filename>packages/sitecore-jss-vue/src/components/Link.test.ts
/* eslint-disable no-unused-expressions */
import { mount } from '@vue/test-utils';
import { generalLinkField as eeLinkData } from '../test/data/field-data-EE-on';
import { Link } from './Link';
describe('<Link />', () => {
it('should render nothing with missing field', () => {
// Need to mock console.error as Vue will log an error for the missing "field" prop
// that is marked as required.
const errorSpy = jest.spyOn(console, 'error');
errorSpy.mockImplementation(() => {});
const rendered = mount(Link);
expect(rendered.isEmpty()).toBe(true);
errorSpy.mockRestore();
});
it('should render nothing with missing editable and value', () => {
const props: { field: null } = {
field: null,
};
// Need to mock console.error as Vue will log an error for the null "field" prop
// that is marked as an Object.
const errorSpy = jest.spyOn(console, 'error');
errorSpy.mockImplementation(() => {});
const rendered = mount(Link, { context: { props } });
expect(rendered.isEmpty()).toBe(true);
errorSpy.mockRestore();
});
it('should render editable with an editable value', () => {
const props = {
field: {
editableFirstPart: '<a href="/services" class="yo">Lorem',
editableLastPart: '</a>',
},
};
const rendered = mount(Link, { context: { props } }).find('.sc-link-wrapper > a');
expect(rendered.html()).toContain(props.field.editableFirstPart);
});
it('should render value with editing explicitly disabled', () => {
const props = {
field: {
value: {
href: '/lorem',
text: 'ipsum',
},
editableFirstPart: '<a href="/services" class="yo">Lorem',
editableLastPart: '</a>',
},
editable: false,
};
const rendered = mount(Link, { context: { props } }).find('a');
expect(rendered.attributes().href).toBe(props.field.value.href);
expect(rendered.html()).toContain(props.field.value.text);
});
it('should render with href directly on provided field', () => {
const props = {
field: {
href: '/lorem',
text: 'ipsum',
},
};
const rendered = mount(Link, { context: { props } }).find('a');
expect(rendered.attributes().href).toBe(props.field.href);
expect(rendered.text()).toBe(props.field.text);
});
it('should render ee HTML', () => {
const props = {
field: {
editableFirstPart: eeLinkData,
editableLastPart: '</a>',
},
};
const rendered = mount(Link, { context: { props } }).find('span');
expect(rendered.html().indexOf('<input')).toBeGreaterThan(-1);
expect(rendered.html().indexOf('chrometype="field"')).toBeGreaterThan(-1);
});
it('should render all value attributes', () => {
const props = {
field: {
value: {
href: '/lorem',
text: 'ipsum',
class: 'my-link',
title: 'My Link',
target: '_blank',
},
},
};
const rendered = mount(Link, { context: { props } }).find('a');
const renderedAttrs = rendered.attributes();
// note: order of comparison is important for `toMatchObject` as renderedAttrs won't fully match props.field.value
expect(props.field.value).toMatchObject(renderedAttrs);
});
it('should render other attributes with other props provided', () => {
const props = {
field: {
value: {
href: '/lorem',
text: 'ipsum',
},
},
};
const attrs = {
id: 'my-link',
disabled: true,
};
const rendered = mount(Link, { context: { props, attrs } }).find('a');
const renderedAttrs = rendered.attributes();
expect(renderedAttrs.id).toBe(attrs.id);
expect(renderedAttrs.disabled).toBe('disabled');
});
it('should render other attributes on wrapper span with other props provided with editable', () => {
const props = {
field: {
editableFirstPart: '<a href="/services" class="yo">Lorem',
editableLastPart: '</a>',
},
};
const attrs = {
id: 'my-link',
};
const rendered = mount(Link, { context: { props, attrs } }).find('span.sc-link-wrapper');
expect(rendered.attributes().id).toBe(attrs.id);
});
});
|
package cn.zbx1425.resourcepackupdater;
import java.io.IOException;
import java.io.OutputStream;
public class ProgressOutputStream extends OutputStream {
public interface WriteListener {
void registerWrite(long amountOfBytesWritten);
}
private final OutputStream outstream;
private long bytesWritten = 0;
private final WriteListener writeListener;
public ProgressOutputStream(OutputStream outstream, WriteListener writeListener) {
this.outstream = outstream;
this.writeListener = writeListener;
}
@Override
public void write(int b) throws IOException {
outstream.write(b);
bytesWritten++;
writeListener.registerWrite(bytesWritten);
}
@Override
public void write(byte[] b) throws IOException {
outstream.write(b);
bytesWritten += b.length;
writeListener.registerWrite(bytesWritten);
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
outstream.write(b, off, len);
bytesWritten += len;
writeListener.registerWrite(bytesWritten);
}
@Override
public void flush() throws IOException {
outstream.flush();
}
@Override
public void close() throws IOException {
outstream.close();
}
} |
<gh_stars>0
import * as React from "react";
import * as ReactModal from "react-modal";
import { IPlayPhase, ITrickFormat, IHands, TrickDrawPolicy } from "./types";
import Header from "./Header";
import Beeper from "./Beeper";
import Trump from "./Trump";
import Friends from "./Friends";
import Trick from "./Trick";
import Cards from "./Cards";
import Points, { calculatePoints } from "./Points";
import LabeledPlay from "./LabeledPlay";
import Players from "./Players";
import ArrayUtils from "./util/array";
import AutoPlayButton from "./AutoPlayButton";
import BeepButton from "./BeepButton";
import { WebsocketContext } from "./WebsocketProvider";
import WasmContext, {
IFoundViablePlay,
ISortedAndGroupedCards,
} from "./WasmContext";
import InlineCard from "./InlineCard";
const contentStyle: React.CSSProperties = {
position: "absolute",
top: "50%",
left: "50%",
transform: "translate(-50%, -50%)",
};
interface IProps {
playPhase: IPlayPhase;
name: string;
beepOnTurn: boolean;
showLastTrick: boolean;
unsetAutoPlayWhenWinnerChanges: boolean;
showTrickInPlayerOrder: boolean;
}
const Play = (props: IProps): JSX.Element => {
const { send } = React.useContext(WebsocketContext);
const [selected, setSelected] = React.useState<string[]>([]);
const [grouping, setGrouping] = React.useState<IFoundViablePlay[]>([]);
const {
findViablePlays,
canPlayCards,
nextThresholdReachable,
sortAndGroupCards,
} = React.useContext(WasmContext);
const playCards = (): void => {
send({ Action: { PlayCardsWithHint: [selected, grouping[0].grouping] } });
setSelected([]);
setGrouping([]);
};
const sendEvent = (event: {}) => () => send(event);
const takeBackCards = sendEvent({ Action: "TakeBackCards" });
const endTrick = sendEvent({ Action: "EndTrick" });
const endGameEarly = sendEvent({ Action: "EndGameEarly" });
const startNewGame = sendEvent({ Action: "StartNewGame" });
const { playPhase } = props;
// TODO: instead of telling who the player is by checking the name, pass in
// the Player object
let isSpectator = true;
let currentPlayer = playPhase.propagated.players.find(
(p) => p.name === props.name
);
if (currentPlayer === undefined) {
currentPlayer = playPhase.propagated.observers.find(
(p) => p.name === props.name
);
} else {
isSpectator = false;
}
if (currentPlayer === undefined) {
currentPlayer = {
id: -1,
name: props.name,
level: "",
metalevel: 0,
};
}
React.useEffect(() => {
// When the hands change, our `selected` cards may become invalid, since we
// could have raced and selected cards that we just played.
//
// In that case, let's fix the selected cards.
const hand =
currentPlayer.id in playPhase.hands.hands
? { ...playPhase.hands.hands[currentPlayer.id] }
: {};
selected.forEach((card) => {
if (card in hand) {
hand[card] = hand[card] - 1;
} else {
hand[card] = -1;
}
});
const toRemove = Object.entries(hand)
.filter((x) => x[1] < 0)
.map((x) => x[0]);
const newSelected = ArrayUtils.minus(selected, toRemove);
setSelected(newSelected);
setGrouping(findViablePlays(playPhase.trump, newSelected));
}, [playPhase.hands.hands, currentPlayer.id]);
const nextPlayer = playPhase.trick.player_queue[0];
const lastPlay =
playPhase.trick.played_cards[playPhase.trick.played_cards.length - 1];
const isCurrentPlayerTurn = currentPlayer.id === nextPlayer;
let canPlay = false;
if (!isSpectator) {
canPlay = canPlayCards({
trick: playPhase.trick,
id: currentPlayer.id,
hands: playPhase.hands,
cards: selected,
trick_draw_policy: playPhase.propagated.trick_draw_policy,
});
// In order to play the first trick, the grouping must be disambiguated!
if (lastPlay === undefined) {
canPlay = canPlay && grouping.length === 1;
}
}
canPlay = canPlay && !playPhase.game_ended_early;
const canTakeBack =
lastPlay !== undefined &&
currentPlayer.id === lastPlay.id &&
!playPhase.game_ended_early;
const shouldBeBeeping =
props.beepOnTurn && isCurrentPlayerTurn && !playPhase.game_ended_early;
const remainingCardsInHands = ArrayUtils.sum(
Object.values(playPhase.hands.hands).map((playerHand) =>
ArrayUtils.sum(Object.values(playerHand))
)
);
const { totalPointsPlayed, nonLandlordPointsWithPenalties } = calculatePoints(
playPhase.propagated.players,
playPhase.landlords_team,
playPhase.points,
playPhase.penalties
);
const noCardsLeft =
remainingCardsInHands === 0 && playPhase.trick.played_cards.length === 0;
const canFinish = noCardsLeft || playPhase.game_ended_early;
const canEndGameEarly =
!canFinish &&
!nextThresholdReachable({
decks: playPhase.decks,
params: playPhase.propagated.game_scoring_parameters,
non_landlord_points: nonLandlordPointsWithPenalties,
observed_points: totalPointsPlayed,
});
const landlordSuffix =
playPhase.propagated.landlord_emoji !== undefined &&
playPhase.propagated.landlord_emoji !== null &&
playPhase.propagated.landlord_emoji !== ""
? playPhase.propagated.landlord_emoji
: "(当庄)";
const landlordTeamSize = playPhase.landlords_team.length;
let configFriendTeamSize = 0;
let smallerTeamSize = false;
if (playPhase.game_mode !== "Tractor") {
configFriendTeamSize =
playPhase.game_mode.FindingFriends.num_friends != null
? playPhase.game_mode.FindingFriends.num_friends + 1
: playPhase.propagated.players.length / 2;
smallerTeamSize = landlordTeamSize < configFriendTeamSize;
}
const getCardsFromHand = (pid: number): ISortedAndGroupedCards[] => {
const cardsInHand =
pid in playPhase.hands.hands
? Object.entries(playPhase.hands.hands[pid]).flatMap(([c, ct]) =>
Array(ct).fill(c)
)
: [];
return sortAndGroupCards({
cards: cardsInHand,
trump: props.playPhase.trump,
});
};
return (
<div>
{shouldBeBeeping ? <Beeper /> : null}
<Header
gameMode={playPhase.propagated.game_mode}
chatLink={playPhase.propagated.chat_link}
/>
<Players
players={playPhase.propagated.players}
observers={playPhase.propagated.observers}
landlord={playPhase.landlord}
landlords_team={playPhase.landlords_team}
name={props.name}
next={nextPlayer}
/>
<Trump trump={playPhase.trump} />
<Friends gameMode={playPhase.game_mode} showPlayed={true} />
{playPhase.removed_cards.length > 0 ? (
<p>
Note:{" "}
{playPhase.removed_cards.map((c) => (
<InlineCard key={c} card={c} />
))}{" "}
have been removed from the deck
</p>
) : null}
<Trick
trick={playPhase.trick}
players={playPhase.propagated.players}
landlord={playPhase.landlord}
landlord_suffix={landlordSuffix}
landlords_team={playPhase.landlords_team}
next={nextPlayer}
name={props.name}
showTrickInPlayerOrder={props.showTrickInPlayerOrder}
/>
<AutoPlayButton
onSubmit={playCards}
playDescription={
grouping.length === 1 && lastPlay === undefined
? grouping[0].description
: null
}
canSubmit={canPlay}
currentWinner={playPhase.trick.current_winner}
unsetAutoPlayWhenWinnerChanges={props.unsetAutoPlayWhenWinnerChanges}
isCurrentPlayerTurn={isCurrentPlayerTurn}
/>
{playPhase.propagated.play_takeback_policy === "AllowPlayTakeback" && (
<button onClick={takeBackCards} disabled={!canTakeBack}>
Take back last play
</button>
)}
<button
onClick={endTrick}
disabled={
playPhase.trick.player_queue.length > 0 || playPhase.game_ended_early
}
>
Finish trick
</button>
{canEndGameEarly && (
<button
onClick={() => {
confirm(
"Do you want to end the game early? There may still be points in the bottom..."
) && endGameEarly();
}}
>
End game early
</button>
)}
{canFinish && <button onClick={startNewGame}>Finish game</button>}
<BeepButton />
{canFinish && !noCardsLeft && (
<div>
<p>Cards remaining (that were not played):</p>
{playPhase.propagated.players.map((p) => (
<LabeledPlay
key={p.id}
label={p.name}
cards={getCardsFromHand(p.id).flatMap((g) => g.cards)}
/>
))}
</div>
)}
{!canFinish && (
<>
{playPhase.trick.trick_format !== null &&
!isSpectator &&
playPhase.trick.player_queue.includes(currentPlayer.id) ? (
<TrickFormatHelper
format={playPhase.trick.trick_format}
hands={playPhase.hands}
playerId={currentPlayer.id}
trickDrawPolicy={playPhase.propagated.trick_draw_policy}
/>
) : null}
{lastPlay === undefined &&
isCurrentPlayerTurn &&
grouping.length > 1 && (
<div>
<p>
It looks like you are making a play that can be interpreted in
multiple ways!
</p>
<p>Which of the following did you mean?</p>
{grouping.map((g, gidx) => (
<button
key={gidx}
onClick={(evt) => {
evt.preventDefault();
setGrouping([g]);
}}
className="normal"
>
{g.description}
</button>
))}
</div>
)}
<Cards
hands={playPhase.hands}
playerId={currentPlayer.id}
trump={playPhase.trump}
selectedCards={selected}
onSelect={(newSelected) => {
setSelected(newSelected);
setGrouping(findViablePlays(playPhase.trump, newSelected));
}}
notifyEmpty={isCurrentPlayerTurn}
/>
</>
)}
{playPhase.last_trick !== undefined &&
playPhase.last_trick !== null &&
props.showLastTrick ? (
<div>
<p>Previous trick</p>
<Trick
trick={playPhase.last_trick}
players={playPhase.propagated.players}
landlord={playPhase.landlord}
landlord_suffix={landlordSuffix}
landlords_team={playPhase.landlords_team}
name={props.name}
showTrickInPlayerOrder={props.showTrickInPlayerOrder}
/>
</div>
) : null}
<Points
points={playPhase.points}
penalties={playPhase.penalties}
decks={playPhase.decks}
players={playPhase.propagated.players}
landlordTeam={playPhase.landlords_team}
landlord={playPhase.landlord}
hideLandlordPoints={playPhase.propagated.hide_landlord_points}
gameScoringParameters={playPhase.propagated.game_scoring_parameters}
smallerTeamSize={smallerTeamSize}
/>
<LabeledPlay className="kitty" cards={playPhase.kitty} label="底牌" />
</div>
);
};
const TrickFormatHelper = (props: {
format: ITrickFormat;
hands: IHands;
playerId: number;
trickDrawPolicy: TrickDrawPolicy;
}): JSX.Element => {
const [modalOpen, setModalOpen] = React.useState<boolean>(false);
const { decomposeTrickFormat } = React.useContext(WasmContext);
const decomp = decomposeTrickFormat({
trick_format: props.format,
hands: props.hands,
player_id: props.playerId,
trick_draw_policy: props.trickDrawPolicy,
});
const trickSuit = props.format.suit;
const bestMatch = decomp.findIndex((d) => d.playable.length > 0);
return (
<>
<button
onClick={(evt) => {
evt.preventDefault();
setModalOpen(true);
}}
>
?
</button>
<ReactModal
isOpen={modalOpen}
onRequestClose={() => setModalOpen(false)}
shouldCloseOnOverlayClick
shouldCloseOnEsc
style={{ content: contentStyle }}
>
<p>
In order to win, you have to play {decomp[0].description} in{" "}
{trickSuit}
</p>
{decomp[0].playable.length > 0 && (
<p>
It looks like you are able to match this format, e.g. with
{decomp[0].playable.map((c, cidx) => (
<InlineCard key={cidx} card={c} />
))}
</p>
)}
{decomp.length > 1 && props.trickDrawPolicy !== "NoFormatBasedDraw" && (
<>
<p>
If you can't play that, but you <em>can</em> play one of the
following, you have to play it
</p>
<ol>
{decomp.slice(1).map((d, idx) => (
<li
key={idx}
style={{
fontWeight: idx === bestMatch - 1 ? "bold" : "normal",
}}
>
{d.description} in {trickSuit}
{idx === bestMatch - 1 && (
<>
{" "}
(for example:{" "}
{d.playable.map((c, cidx) => (
<InlineCard key={cidx} card={c} />
))}
)
</>
)}
</li>
))}
</ol>
</>
)}
<p>
Otherwise, you have to play as many {trickSuit} as you can. The
remaining cards can be anything.
</p>
{trickSuit !== "Trump" && (
<p>
If you have no cards in {trickSuit}, you can play{" "}
{decomp[0].description} in Trump to potentially win the trick.
</p>
)}
</ReactModal>
</>
);
};
export default Play;
|
def is_anagram(str1, str2):
# Defining a dict to store the character count
char_map = {}
# Iterating over the characters of first string
for char in str1:
if char in char_map.keys():
# Incrementing character count
char_map[char] += 1
else:
char_map[char] = 1
# Iterating over the characters of second string
for char in str2:
if char in char_map.keys():
# Incrementing character count
char_map[char] -= 1
else:
char_map[char] = 1
# Iterating over the character map
for count in char_map.values():
if count != 0:
# Return false if all values are not 0
return False
# Otherwise, return true
return True
# Testcase
str1 = "army"
str2 = "mary"
# Function call
is_anagram(str1, str2)
# Output
True |
#!/bin/sh
### BEGIN INIT INFO
# Provides: reparaService
### END INIT INFO
DAEMON=/home/repara/ScopeControlService/scopeControlService
DAEMONARGS=" -c /home/repara/ScopeControlService/scopeControlService.cfg"
. /lib/lsb/init-functions
start_reparaService_daemon() {
start-stop-daemon --start --quiet --exec $DAEMON --$DAEMONARGS
}
stop_reparaService_daemon() {
start-stop-daemon --stop --quiet --signal TERM --oknodo --exec $DAEMON
}
case "$1" in
start)
log_daemon_msg "Starting distributed compiler daemon" "reparaService"
start_reparaService_daemon
log_end_msg $?
;;
stop)
log_daemon_msg "Stopping distributed compiler daemon" "reparaService"
stop_reparaService_daemon
log_end_msg $?
;;
restart|force-reload)
log_daemon_msg "Restarting distributed compiler daemon" "reparaService"
stop_reparaService_daemon
sleep 1
start_reparaService_daemon
log_end_msg $?
;;
status)
status_of_proc "$DAEMON" "reparaService" && exit 0 || exit $?
;;
*)
N=/etc/init.d/reparaService
echo "Usage: $N {start|stop|restart|force-reload|status}" >&2
exit 1
;;
esac
exit 0
|
<reponame>dvinubius/meta-multisig
import { List, Spin } from 'antd';
import React, { FC, useContext, useState } from 'react';
import './MultiSig.css';
import { MsVaultContext } from './MultiSig';
import { CheckCircleOutlined } from '@ant-design/icons';
import { primaryColor } from '~~/styles/styles';
import { Address } from '~~/eth-components/ant/Address';
import OwnerMark from '../Shared/OwnerMark';
import { useEthersContext } from 'eth-hooks/context';
export interface IOwnersProps {
confirmations?: boolean[];
}
const Owners: FC<IOwnersProps> = (props) => {
const { account } = useEthersContext();
const { owners } = useContext(MsVaultContext);
const singleColumn = (
<>
{owners && owners.length && (
<>
<div>
<List size="small">
{owners.map((owner) => (
<List.Item key={owner} style={{ padding: '0.25rem 2rem', display: 'flex', justifyContent: 'center' }}>
<div style={{ position: 'relative' }}>
<Address address={owner} fontSize={14} />
<div style={{ position: 'absolute', right: '-2rem', top: 0 }}>
{owner === account ? <OwnerMark /> : ''}
</div>
</div>
</List.Item>
))}
</List>
</div>
</>
)}
</>
);
const twoColumns = (
<>
{owners && owners.length && props.confirmations && (
<>
<div>
<List size="small">
{owners.map((owner, idx) => (
<List.Item key={owner} style={{ padding: '0.25rem 2rem', display: 'flex', justifyContent: 'center' }}>
<div style={{ display: 'flex', justifyContent: 'space-between', width: '100%' }}>
<div style={{ display: 'flex', gap: '1rem' }}>
<Address address={owner} fontSize={14} />
{owner === account ? <OwnerMark /> : ''}
</div>
<div style={{ opacity: props.confirmations && props.confirmations[idx] ? 1 : 0.5 }}>
{
<CheckCircleOutlined
style={{ color: props.confirmations && props.confirmations[idx] ? primaryColor : '#bebebe' }}
/>
}
</div>
</div>
</List.Item>
))}
</List>
</div>
</>
)}
</>
);
return (
<div style={{ display: 'flex', flexDirection: 'column' }} className="OwnersCard">
{(!owners || !owners.length) && (
<div style={{ height: '8rem', display: 'flex', alignItems: 'center' }}>
<Spin></Spin>
</div>
)}
<div
style={{
overflowY: 'auto',
display: 'flex',
alignItems: 'stretch',
flexDirection: 'column',
}}>
{props.confirmations ? twoColumns : singleColumn}
</div>
</div>
);
};
export default Owners;
|
<filename>script/preprocess_backup_v2.py
#!/usr/bin/env python3
# Copyright 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
"""Preprocess the SQuAD dataset for training."""
import sys
sys.path.append('.')
import argparse
import os
try:
import ujson as json
except ImportError:
import json
import time
from multiprocessing import Pool, cpu_count
from multiprocessing.util import Finalize
from functools import partial
from spacy_tokenizer import SpacyTokenizer
import nltk
# ------------------------------------------------------------------------------
# Tokenize + annotate.
# ------------------------------------------------------------------------------
TOK = None
ANNTOTORS = {'lemma', 'pos', 'ner'}
def init():
global TOK
TOK = SpacyTokenizer(annotators=ANNTOTORS)
Finalize(TOK, TOK.shutdown, exitpriority=100)
def tokenize(text):
"""Call the global process tokenizer on the input text."""
global TOK
tokens = TOK.tokenize(text)
output = {
'words': tokens.words(),
'chars': tokens.chars(),
'offsets': tokens.offsets(),
'pos': tokens.pos(),
'lemma': tokens.lemmas(),
'ner': tokens.entities(),
}
return output
# ------------------------------------------------------------------------------
# Process dataset examples
# ------------------------------------------------------------------------------
def load_dataset(path):
"""Load json file and store fields separately."""
with open(path) as f:
data = json.load(f)['data']
output = {'qids': [], 'questions': [], 'answers': [],
'contexts': [], 'qid2cid': [], 'sentences': []}
for article in data:
for paragraph in article['paragraphs']:
output['contexts'].append(paragraph['context'])
context=paragraph['context']
sentenceList=nltk.sent_tokenize(context)
output['sentences'].append(sentenceList)
for qa in paragraph['qas']:
output['qids'].append(qa['id'])
output['questions'].append(qa['question'])
output['qid2cid'].append(len(output['contexts']) - 1)
if 'answers' in qa:
output['answers'].append(qa['answers'])
return output
def find_answer(offsets, begin_offset, end_offset):
"""Match token offsets with the char begin/end offsets of the answer."""
start = [i for i, tok in enumerate(offsets) if tok[0] == begin_offset]
end = [i for i, tok in enumerate(offsets) if tok[1] == end_offset]
assert(len(start) <= 1)
assert(len(end) <= 1)
if len(start) == 1 and len(end) == 1:
return start[0], end[0]
def process_dataset(data, tokenizer, workers=None):
"""Iterate processing (tokenize, parse, etc) dataset multithreaded."""
make_pool = partial(Pool, workers, initializer=init)
workers = make_pool(initargs=())
q_tokens = workers.map(tokenize, data['questions'])
workers.close()
workers.join()
workers = make_pool(initargs=())
c_tokens = workers.map(tokenize, data['contexts'])
workers.close()
workers.join()
workers = make_pool(initargs=())
s_tokensList = []
for sent in data['sentences']:
s_tokens = workers.map(tokenize, sent)
s_tokensList.append(s_tokens)
workers.close()
workers.join()
for idx in range(len(data['qids'])):
question = q_tokens[idx]['words']
question_char = q_tokens[idx]['chars']
qlemma = q_tokens[idx]['lemma']
qpos = q_tokens[idx]['pos']
qner = q_tokens[idx]['ner']
document = c_tokens[data['qid2cid'][idx]]['words']
document_char = c_tokens[data['qid2cid'][idx]]['chars']
offsets = c_tokens[data['qid2cid'][idx]]['offsets']
clemma = c_tokens[data['qid2cid'][idx]]['lemma']
cpos = c_tokens[data['qid2cid'][idx]]['pos']
cner = c_tokens[data['qid2cid'][idx]]['ner']
ans_tokens = []
if len(data['answers']) > 0:
for ans in data['answers'][idx]:
found = find_answer(offsets,
ans['answer_start'],
ans['answer_start'] + len(ans['text']))
if found:
ans_tokens.append(found)
senIdxList = []
senBeginIndex =0
for sen in range(len(s_tokensList[data['qid2cid'][idx]])):
senTokenList = s_tokensList[data['qid2cid'][idx]][sen]['words']
senEndIndex = senBeginIndex+len(senTokenList)
ansSenIdx = [senBeginIndex,senEndIndex]
senIdxList.append(ansSenIdx)
senBeginIndex += len(senTokenList)
ansSenIdxList = []
for i in range(len(ans_tokens)):
ans_begin_idx, ans_end_idx = ans_tokens[i]
for j in range(len(senIdxList)):
senBeginIndex, senEndIndex = senIdxList[j]
if(senBeginIndex <= ans_begin_idx and senEndIndex > ans_end_idx):
ansSenIdxList.append(j)
yield {
'id': data['qids'][idx],
'question': question,
'question_char': question_char,
'document': document,
'document_char': document_char,
'offsets': offsets,
'answers': ans_tokens,
'qlemma': qlemma,
'qpos': qpos,
'qner': qner,
'clemma': clemma,
'cpos': cpos,
'cner': cner,
'document_sentence': senIdxList,
'sentence_answers': ansSenIdxList,
}
# -----------------------------------------------------------------------------
# Commandline options
# -----------------------------------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument('data_dir', type=str, help='Path to SQuAD data directory')
parser.add_argument('out_dir', type=str, help='Path to output file dir')
parser.add_argument('--split', type=str, help='Filename for train/dev split')
parser.add_argument('--num-workers', type=int, default=1)
parser.add_argument('--tokenizer', type=str, default='spacy')
args = parser.parse_args()
t0 = time.time()
in_file = os.path.join(args.data_dir, args.split + '.json')
print('Loading dataset %s' % in_file, file=sys.stderr)
dataset = load_dataset(in_file)
out_file = os.path.join(
args.out_dir, '%s-processed-%s.txt' % (args.split, args.tokenizer)
)
print('Will write to file %s' % out_file, file=sys.stderr)
with open(out_file, 'w') as f:
for ex in process_dataset(dataset, args.tokenizer, args.num_workers):
f.write(json.dumps(ex) + '\n')
print('Total time: %.4f (s)' % (time.time() - t0))
|
package com.example.entity;
import javax.persistence.*;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
@Entity
//@JsonIgnoreProperties("authors")
public class Book implements Serializable
{
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue
private Integer id;
private String name;
@ManyToMany(cascade = CascadeType.ALL)
@JoinTable(name = "BOOK_AUTHOR", joinColumns = {
@JoinColumn(name = "BOOK_ID", referencedColumnName = "ID") }, inverseJoinColumns = {
@JoinColumn(name = "AUTHOR_ID", referencedColumnName = "ID") })
private Set<Author> authors;
public Book()
{
super();
}
public Book(String name)
{
super();
this.name = name;
this.authors = new HashSet<>();
}
public Book(String name, Set<Author> authors)
{
super();
this.name = name;
this.authors = authors;
}
public Integer getId()
{
return id;
}
public void setId(Integer id)
{
this.id = id;
}
public String getName()
{
return name;
}
public void setName(String name)
{
this.name = name;
}
public Set<Author> getAuthors()
{
return authors;
}
public void setAuthors(Set<Author> authors)
{
this.authors = authors;
}
@Override
public String toString()
{
return String.format("Book [id=%s, name=%s, authors=%s]", id, name, authors);
}
}
|
<reponame>amogilev/yagson<gh_stars>1-10
/*
* Copyright (C) 2017 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gilecode.yagson.tests;
import com.gilecode.yagson.tests.util.BindingTestCase;
import com.gilecode.yagson.tests.util.EqualityCheckMode;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
/**
* Test for serializable.
*
* @author <NAME>
*/
public class TestSerializableLambdas extends BindingTestCase {
public void testSerializableLambda1() throws Exception {
Supplier s1 = (Supplier & Serializable) () -> "foo";
// NOTE: actual JSON is fragile and may change on test changes
Supplier s2 = test(s1, jsonStr("{'@type':'java.lang.invoke.SerializedLambda','@val':{" +
"'capturingClass':'com.gilecode.yagson.tests.TestSerializableLambdas'," +
"'functionalInterfaceClass':'java/util/function/Supplier','functionalInterfaceMethodName':'get'," +
"'functionalInterfaceMethodSignature':'()Ljava/lang/Object;'," +
"'implClass':'com/gilecode/yagson/tests/TestSerializableLambdas'," +
"'implMethodName':'lambda$testSerializableLambda1$c9303e5c$1'," +
"'implMethodSignature':'()Ljava/lang/Object;'," +
"'implMethodKind':6,'instantiatedMethodType':'()Ljava/lang/Object;'," +
"'capturedArgs':[]}}"),
EqualityCheckMode.NONE);
assertEquals("foo", s2.get());
}
public void testSerializableLambda2() throws Exception {
AtomicInteger cnt = new AtomicInteger(1);
Supplier s1 = (Supplier & Serializable) () -> "foo" + cnt;
Supplier s2 = test(s1, null, EqualityCheckMode.NONE);
assertEquals("foo1", s2.get());
// however, dynamic changes of counter are not supported:
cnt.set(2);
assertEquals("foo2", s1.get());
assertEquals("foo1", s2.get());
}
public void testSerializableLambda3() throws Exception {
Supplier<ArrayList> s1 = (Supplier & Serializable) ArrayList::new;
Supplier<ArrayList> s2 = test(s1, null, EqualityCheckMode.NONE);
ArrayList list1 = s2.get();
ArrayList list2 = s2.get();
assertFalse(list1 == list2);
assertNotNull(list1);
assertNotNull(list2);
}
}
|
<reponame>ch1huizong/learning
class TraceNormal(object):
' state for normal level of verbosity '
def startMessage(self):
self.nstr = self.characters = 0
def emitString(self, s):
self.nstr += 1
self.characters += len(s)
def endMessage(self):
print '%d characters in %d strings' % (self.characters, self.nstr)
class TraceChatty(object):
' state for high level of verbosity '
def startMessage(self):
self.msg = []
def emitString(self, s):
self.msg.append(repr(s))
def endMessage(self):
print 'Message: ', ', '.join(self.msg)
class TraceQuiet(object):
' state for zero level of verbosity '
def startMessage(self): pass
def emitString(self, s): pass
def endMessage(self): pass
class Tracer(object):
def __init__(self, state): self.state = state
def setState(self, state): self.state = state
def emitStrings(self, strings):
self.state.startMessage()
for s in strings: self.state.emitString(s)
self.state.endMessage()
if __name__ == '__main__':
t = Tracer(TraceNormal())
t.emitStrings('some example strings here'.split())
# emits: 21 characters in 4 strings
t.setState(TraceQuiet())
t.emitStrings('some example strings here'.split())
# emits nothing
t.setState(TraceChatty())
t.emitStrings('some example strings here'.split())
# emits: Message: 'some', 'example', 'strings', 'here'
|
#!/bin/bash
export DEBIAN_FRONTEND=noninteractive
sudo apt-get update
sudo apt-get install -y \
libgl1-mesa-glx \
libgl1-mesa-dev \
libglapi-mesa \
libosmesa6-dev \
libxt-dev
|
<gh_stars>0
package sec.project;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cache.annotation.EnableCaching;
@EnableCaching
@SpringBootApplication
public class CyberSecurityBaseProjectApplication {
public static void main(String[] args) throws Throwable {
SpringApplication.run(CyberSecurityBaseProjectApplication.class);
}
}
|
/* **** Notes
Initialise
Remarks:
Refer at fn. cli_init_property.
*/
# define CLI_SYM
# define CAR
# include <stdio.h>
# include "./../../../incl/config.h"
signed(__cdecl cli_init_text(signed(arg),cli_text_t(*argp))) {
auto signed char *b;
auto signed i,r;
auto signed char CUE_SYM[] = {
SYM_EXCLAMATION_MARK,
SYM_QUOTATION_MARK,
SYM_NUMBER_SIGN,
SYM_DOLLAR_SIGN,
SYM_PERCENT_SIGN,
SYM_AMPERSAND,
SYM_APOSTROPHE,
SYM_LEFT_PARENTHESIS,
SYM_RIGHT_PARENTHESIS,
SYM_ASTERISK,
SYM_PLUS_SIGN,
SYM_COMMA,
SYM_HYPHEN_MINUS,
SYM_FULL_STOP,
SYM_SOLIDUS,
SYM_COLON,
SYM_SEMICOLON,
SYM_LESS_THAN_SIGN,
SYM_EQUALS_SIGN,
SYM_GREATER_THAN_SIGN,
SYM_QUESTION_MARK,
SYM_COMMERCIAL_AT,
SYM_LEFT_SQUARE_BRACKET,
SYM_REVERSE_SOLIDUS,
SYM_RIGHT_SQUARE_BRACKET,
SYM_CIRCUMFLEX_ACCENT,
SYM_LOW_LINE,
SYM_GRAVE_ACCENT,
SYM_LEFT_CURLY_BRACKET,
SYM_VERTICAL_LINE,
SYM_RIGHT_CURLY_BRACKET,
SYM_TILDE,
0x00,
};
if(!argp) return(0x00);
// initialise
if(!(CLI_INIT&(R(flag,*argp)))) AND(arg,0x00);
AND(i,0x00);
OR(i,CLI_OBJS);
if(!arg) {
r = ct(CUE_SYM);
if(!r) return(0x00);
r++;
r = (r*(sizeof(*CUE_SYM)));
b = (signed char(*)) alloc(r);
if(!b) return(0x00);
r = cpy(b,CUE_SYM);
if(!r) return(0x00);
}
else {
b = (*(CLI_BASE+(R(sym,*argp))));
embed(0x00,b);
if(b) rl(b);
b = (0x00);
}
while(i) *(--i+(R(sym,*argp))) = (b);
OR(i,CLI_OBJS);
while(i) *(--i+(R(cf,*argp))) = (0x00);
OR(i,CLI_OBJS);
while(i) {
r = cli_init_rule(arg,--i+(R(rule,*argp)));
if(!r) {
printf("%s%d%s \n","<< Error at fn. cli_init_rule(arg,",i,"+(R(rule,*argp)))");
return(0x00);
}}
AND(R(width,*argp),0x00);
AND(R(height,*argp),0x00);
AND(R(flag,*argp),0x00);
AND(R(attribute,*argp),0x00);
R(eol,*argp) = (CR|LF);
R(align,*argp) = (0x08);
R(optl,*argp) = (0x00);
if(!arg) OR(R(flag,*argp),CLI_INIT);
if(b) b = (0x00);
return(0x01);
}
|
<reponame>thenativeweb/dot-file<filename>src/dotFile.js
'use strict';
const fs = require('fs'),
os = require('os'),
path = require('path');
const promisify = require('util.promisify');
const readFile = promisify(fs.readFile),
writeFile = promisify(fs.writeFile);
const getFile = function (filename) {
if (!filename) {
throw new Error('File name is missing.');
}
return path.join(os.homedir(), filename);
};
const dotFile = {
async read (filename) {
if (!filename) {
throw new Error('File name is missing.');
}
if (!filename.startsWith('.')) {
throw new Error('File name does not start with a dot.');
}
const file = getFile(filename);
let data;
try {
data = await readFile(file, { encoding: 'utf8' });
} catch (ex) {
if (ex.code === 'ENOENT') {
// If the dot file does not exist, return an empty object anyway.
return {};
}
throw ex;
}
const json = JSON.parse(data);
return json;
},
async write (filename, json) {
if (!filename) {
throw new Error('File name is missing.');
}
if (!json) {
throw new Error('JSON is missing.');
}
if (!filename.startsWith('.')) {
throw new Error('File name does not start with a dot.');
}
const file = getFile(filename);
const data = JSON.stringify(json);
await writeFile(file, data, { encoding: 'utf8' });
}
};
module.exports = dotFile;
|
const express = require("express")
const app = express();
const PORT = process.env.PORT || 3000;
const chatbot = require("./chatbot.js");
app.get("/", (req, res) => {
res.send("Hello, world!");
});
app.post("/api/chatbot", (req, res) => {
const response = chatbot.getResponse(req.body.userInput);
res.send(response);
});
app.listen(PORT, () => {
console.log(`Chatbot is running on port ${PORT}`);
}); |
def interest(balance, rate)
interest = 0
for year in 1..3
interest += balance * rate
end
return interest
end
balance = 1000
rate = 0.05
puts "Interest over 3 years: $#{interest(balance,rate).round(2)}" |
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
cd "$(dirname "$0")"
# Initialize the Superset backend: migrate the DB, create test users,
# load example data, then finish app setup.
#run all the python steps in a background process
time superset db upgrade
time superset load_test_users
time superset load_examples --load-test-data
time superset init
echo "[completed python build steps]"
# Start the dev server in the background (job %1) so the JS build proceeds.
flask run -p 8081 --with-threads --reload --debugger &
#block on the longer running javascript process
time npm ci
time npm run install-cypress
time npm run build
echo "[completed js build steps]"
# $1 selects which Cypress integration suite directory to run.
CYPRESS_PATH='cypress/integration/'${1}'/*'
time npm run cypress run -- --spec "$CYPRESS_PATH" --record false --config video=false
# Stop the background flask dev server started above.
kill %1
|
#!/bin/bash
# NOTE(review): relies on initialize_dataset, BASE_URL_WRITABLE,
# ENDPOINT_URL_WRITABLE, STATUS_NO_CONTENT and STATUS_OK being provided by
# the calling environment — confirm before running standalone.
# re-initialize writable dataset
initialize_dataset "$BASE_URL_WRITABLE" "../dataset.trig" "$ENDPOINT_URL_WRITABLE"
# delete default graph, expecting the "no content" status code
curl -w "%{http_code}\n" -f -s -G \
  -X DELETE \
  "${BASE_URL_WRITABLE}service" \
  --data-urlencode "default=true" \
  | grep -q "${STATUS_NO_CONTENT}"
# read the (now empty) default graph back, expecting the "OK" status code
curl -w "%{http_code}\n" -f -s -G \
  -H "Accept: application/n-triples" \
  "${BASE_URL_WRITABLE}service" \
  --data-urlencode "default=true" \
  | grep -q "${STATUS_OK}"
#! /bin/bash
# Launch the node server on port 3001, pointing it at the test video folder.
PORT=3001 SERVERURI="http://localhost:3001" VIDEOFOLDER="$PWD/test/video/" node server
// <filename>src/heap/store-buffer-inl.h
// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_STORE_BUFFER_INL_H_
#define V8_HEAP_STORE_BUFFER_INL_H_
#include "src/heap/store-buffer.h"
#include "src/heap/heap-inl.h"
namespace v8 {
namespace internal {
void StoreBuffer::InsertIntoStoreBuffer(Address slot) {
  // If appending one more Address would run past the current chunk's limit,
  // call the overflow handler first (presumably flushes/rotates the buffer —
  // confirm in store-buffer.cc) so the store below has room.
  if (top_ + sizeof(Address) > limit_[current_]) {
    StoreBufferOverflow(heap_->isolate());
  }
  // Record the slot address and advance the insertion cursor.
  *top_ = slot;
  top_++;
}
} // namespace internal
} // namespace v8
#endif // V8_HEAP_STORE_BUFFER_INL_H_
|
package com.ruoyi.project.system.spdeptXm.service;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.ruoyi.common.constant.UserConstants;
import com.ruoyi.common.support.Convert;
import com.ruoyi.common.utils.StringUtils;
import com.ruoyi.common.utils.security.ShiroUtils;
import com.ruoyi.project.system.spdeptXm.domain.SpdeptXm;
import com.ruoyi.project.system.spdeptXm.mapper.SpdeptXmMapper;
/**
 * Service implementation for special-department operation items
 * (特殊科室操作项目).
 *
 * @author panda
 * @date 2018-12-18
 */
@Service
public class SpdeptXmServiceImpl implements ISpdeptXmService
{
    @Autowired
    private SpdeptXmMapper spdeptXmMapper;

    /**
     * Fetch one operation item by primary key.
     *
     * @param id operation item ID
     * @return the matching item, or null when none exists
     */
    @Override
    public SpdeptXm selectSpdeptXmById(Integer id)
    {
        return spdeptXmMapper.selectSpdeptXmById(id);
    }

    /**
     * Query operation items matching the given example object.
     *
     * @param spdeptXm query-by-example filter
     * @return list of matching items
     */
    @Override
    public List<SpdeptXm> selectSpdeptXmList(SpdeptXm spdeptXm)
    {
        return spdeptXmMapper.selectSpdeptXmList(spdeptXm);
    }

    /**
     * Insert a new operation item, stamping createBy with the current
     * Shiro login name.
     *
     * @param spdeptXm item to insert
     * @return affected row count
     */
    @Override
    public int insertSpdeptXm(SpdeptXm spdeptXm)
    {
        spdeptXm.setCreateBy(ShiroUtils.getLoginName());
        return spdeptXmMapper.insertSpdeptXm(spdeptXm);
    }

    /**
     * Update an existing operation item.
     *
     * @param spdeptXm item carrying the updated field values
     * @return affected row count
     */
    @Override
    public int updateSpdeptXm(SpdeptXm spdeptXm)
    {
        return spdeptXmMapper.updateSpdeptXm(spdeptXm);
    }

    /**
     * Delete operation items by a comma-separated ID string.
     *
     * @param ids comma-separated IDs of the rows to delete
     * @return affected row count
     */
    @Override
    public int deleteSpdeptXmByIds(String ids)
    {
        return spdeptXmMapper.deleteSpdeptXmByIds(Convert.toStrArray(ids));
    }

    /**
     * Check whether an item name is unique. A record with the same name is
     * tolerated only when it is the record being edited (same ID).
     *
     * @param spdeptXm item carrying the name (and, when editing, the ID)
     * @return UserConstants.SPDEPTXM_NAME_UNIQUE or SPDEPTXM_NAME_NOT_UNIQUE
     */
    @Override
    public String checkSpdeptXmNameUnique(SpdeptXm spdeptXm) {
        // -1 stands in for "new record", which can never collide with itself.
        Long xmId = StringUtils.isNull(spdeptXm.getId()) ? -1L : spdeptXm.getId();
        SpdeptXm info = spdeptXmMapper.checkSpdeptXmNameUnique(spdeptXm.getXmname());
        if (StringUtils.isNotNull(info) && info.getId().longValue() != xmId.longValue())
        {
            return UserConstants.SPDEPTXM_NAME_NOT_UNIQUE;
        }
        return UserConstants.SPDEPTXM_NAME_UNIQUE;
    }

    /**
     * Check whether an item code is unique (same rules as the name check).
     *
     * @param spdeptXm item carrying the code (and, when editing, the ID)
     * @return UserConstants.SPDEPTXM_CODE_UNIQUE or SPDEPTXM_CODE_NOT_UNIQUE
     */
    @Override
    public String checkSpdeptXmCodeUnique(SpdeptXm spdeptXm) {
        Long xmId = StringUtils.isNull(spdeptXm.getId()) ? -1L : spdeptXm.getId();
        SpdeptXm info = spdeptXmMapper.checkSpdeptXmCodeUnique(spdeptXm.getXmcode());
        if (StringUtils.isNotNull(info) && info.getId().longValue() != xmId.longValue())
        {
            return UserConstants.SPDEPTXM_CODE_NOT_UNIQUE;
        }
        return UserConstants.SPDEPTXM_CODE_UNIQUE;
    }

    /** NOTE(review): unimplemented stub — always returns null. */
    @Override
    public String checkSpdeptXmCodeList(String xmcode) {
        // TODO Auto-generated method stub
        return null;
    }
}
|
#import <UIKit/UIKit.h>
#import <CoreData/CoreData.h>

// Table view controller that appears to list managed objects of a single
// Core Data entity, optionally filtered by a predicate.
@interface CCLEntityManagedObjectListViewController : UITableViewController

// Designated initializer taking the context used for the listing.
- (instancetype)initWithManagedObjectContext:(NSManagedObjectContext *)managedObjectContext;

// Context supplied at init time; read-only afterwards.
@property (nonatomic, strong, readonly) NSManagedObjectContext *managedObjectContext;
// Entity whose instances are listed.
@property (nonatomic, strong) NSEntityDescription *entity;
// Optional filter applied to the listed objects.
@property (nonatomic, strong) NSPredicate *predicate;
@end
#!/bin/sh
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Codesign the app bundle and carve the resulting signatures (plus the
# CodeResources plists) out into a tarball, so they can later be re-applied
# to an unsigned copy of the same bundle.
export LC_ALL=C
set -e
ROOTDIR=dist
BUNDLE="${ROOTDIR}/Agecoin-Qt.app"
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature-osx.tar.gz
OUTROOT=osx
# "[ ! -n ... ]" is equivalent to "[ -z ... ]": codesign args are required.
if [ ! -n "$1" ]; then
  echo "usage: $0 <codesign args>"
  echo "example: $0 -s MyIdentity"
  exit 1
fi
rm -rf ${TEMPDIR} ${TEMPLIST}
mkdir -p ${TEMPDIR}
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"
# For every signed file except CodeResources: locate the embedded signature's
# offset/size with pagestuff and extract those bytes into a ".sign" file.
grep -v CodeResources < "${TEMPLIST}" | while read i; do
  TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
  SIZE=`pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
  OFFSET=`pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
  SIGNFILE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}.sign"
  DIRNAME="`dirname "${SIGNFILE}"`"
  mkdir -p "${DIRNAME}"
  echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
  dd if="$i" of="${SIGNFILE}" bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done
# CodeResources files are copied verbatim into the output tree.
grep CodeResources < "${TEMPLIST}" | while read i; do
  TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
  RESOURCE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}"
  DIRNAME="`dirname "${RESOURCE}"`"
  mkdir -p "${DIRNAME}"
  echo "Adding resource for: \"${TARGETFILE}\""
  cp "${i}" "${RESOURCE}"
done
rm ${TEMPLIST}
tar -C "${TEMPDIR}" -czf "${OUT}" .
rm -rf "${TEMPDIR}"
echo "Created ${OUT}"
-- Return the 10 most recently added rows.
-- NOTE(review): "table" is a reserved word in most SQL dialects — the real
-- table name presumably replaces it (or needs quoting); confirm before use.
SELECT *
FROM table
ORDER BY added_date DESC
LIMIT 10;
#!/bin/bash
# Capture a "screenshot" of a telnet banner: run telnet inside an xterm on
# display :99, grab the window with xwd, and convert the dump to JPEG.
# $1 is the target IP address.
if [ $# -eq 0 ]
then
  echo "use: $0 ip_address"
  exit 0;
fi
mkdir -p screenshots/telnet/raw/
mkdir -p screenshots/telnet/jpg/
mkdir -p screenshots/telnet/text/
echo "screenshooting $1"
# The telnet session is also tee'd into a plain-text transcript.
CMD="telnet $1 2>&1 | tee screenshots/telnet/text/$1.txt"
#CMD="telnet $1 2>&1 | tee screenshots/telnet/text/$1.txt"
temp_file=$(mktemp)
echo $CMD > $temp_file
chmod +x $temp_file
# NOTE(review): assumes an X server (e.g. Xvfb) is already listening on :99 —
# nothing in this script starts it; confirm.
export DISPLAY=:99
#echo running: timeout 10s xterm -T scrot-$1 -e $temp_file -name scrot-$1 &
timeout 10s xterm -name scrot-$1 -T scrot-$1 -e $temp_file &
#timeout 10s xfce4-terminal -T scrot-$1 -e $temp_file &
sleep 5
#SCROTCMD="xwd -root -out screenshots/telnet/raw/$1"
SCROTCMD="xwd -name scrot-$1 -out screenshots/telnet/raw/$1"
echo $SCROTCMD
timeout 0.5s $SCROTCMD
sleep 5
# rm $temp_file
cp screenshots/telnet/raw/$1 screenshots/telnet/jpg/$1.xwd
convert screenshots/telnet/jpg/$1.xwd screenshots/telnet/jpg/$1.jpg
rm screenshots/telnet/jpg/$1.xwd
echo "done grabbing $1"
// Function to perform wildcard string matching: `*` matches any run of
// characters; every other character matches literally.
// Fixes vs. original: regex metacharacters in the pattern are escaped (so
// "a.c" no longer matches "abc"), the regex is anchored (so the pattern must
// match the WHOLE string, not a substring), and the demo call uses concrete
// values instead of undefined `pattern`/`string` variables.
function wildcardMatch(pattern, string) {
  // Escape everything regex-special except `*`, which is our wildcard.
  const escaped = pattern.replace(/[.+?^${}()|[\]\\]/g, "\\$&");
  const re = new RegExp("^" + escaped.replace(/\*/g, ".*") + "$");
  return re.test(string);
}

console.log(wildcardMatch("foo*", "foobar"));
// Output: true
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-SS/13-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-SS/13-512+0+512-common-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_common_words_first_half_full --eval_function last_element_eval |
#!/usr/bin/env bash
# Generate an RSA key pair for ssh access to the VPNC container, record the
# public key in .env (VPNC_SSHD_AUTHORIZED_KEYS), and emit a bin/sshvpnc
# helper script that logs in with the private key.
# Args: $1 overwrite ("true" regenerates the keys), $2 key file name,
#       $3 ssh port, $4 ssh user.
function createContainerKeys () {
  local keyname
  local pubkey
  local overwrite
  local sshport
  local sshuser
  local sshalias
  local envfile
  overwrite="${1:-true}"
  keyname="${2:-vpncsshkey}"
  sshport="${3:-2299}"
  sshuser="${4:-root}"
  sshalias="sshvpnc"
  envfile=".env"
  if [ -x "$(command -v ssh-keygen)" ]; then
    if [[ $overwrite = "true" ]]; then
      echo "Creating VPNC ssh key ${keyname} to access container..."
      rm -rf "${keyname}"
      rm -rf "${keyname}.pub"
      # 2048-bit RSA, empty passphrase, quiet mode.
      ssh-keygen -t rsa -b 2048 -f "${keyname}" -C "${keyname}" -N "" -q
    fi
    if [ -s "${keyname}" ] && [ -s "${keyname}".pub ];then
      chmod 0600 "${keyname}"
      echo "Updating the VPNC_SSHD_AUTHORIZED_KEYS value in the .env"
      pubkey=$(< "${keyname}".pub)
      # Rewrite the .env line via a temp file (portable alternative to sed -i).
      sed "s#VPNC_SSHD_AUTHORIZED_KEYS=.*#VPNC_SSHD_AUTHORIZED_KEYS=${pubkey}#"\
      "${envfile}" > "${envfile}".new && \
      mv -- "${envfile}".new "${envfile}"
      grep VPNC_SSHD_AUTHORIZED_KEYS "${envfile}"
      echo "Creating ssh alias script to use key and login to container..."
      cat > bin/${sshalias} <<EOF
#!/usr/bin/env bash
set -xv
ssh -o "StrictHostKeyChecking=no" -i "${keyname}" -p ${sshport} ${sshuser}@127.0.0.1 -A
EOF
      [[ -s bin/"${sshalias}" ]] && chmod a+x bin/"${sshalias}"
      echo "Run the following to access the container after start:"
      echo "bin/${sshalias}"
    fi
  fi
}
# Create the keys with the default settings, then tell the user what to do next.
createContainerKeys "true" "vpncsshkey" "2299" "root"
echo ""
echo "Container keys should be created now you can run the following command to start the container."
echo "docker-compose up -d"
# Fixed user-facing typo: "commadn" -> "command".
echo "Once container is running you can execute the bin/sshvpnc command to access it"
echo ""
echo "To stop container once done run"
echo "docker-compose kill && docker-compose rm -f"
|
#!/usr/bin/env bash
# Script Name: remove_branch.sh
# Description: Removes a branch from a git repository, both locally and remotely.
# Usage: remove_branch.sh [<branch_name>]
# [<branch_name>] - the name of the branch to remove.
# Example: ./remove_branch.sh test
main() {
    # Require exactly one argument: the branch name.
    if [ $# -eq 0 ]; then
        echo "you have to provide the branch name!"
        exit 1
    elif [ $# -eq 1 ]; then
        branch_name="$1"
    else
        echo "currently not supported!"
        exit 1
    fi

    # Bug fix: the original captured grep's OUTPUT (redirected to /dev/null,
    # hence always empty) and then ran a numeric test on an empty string.
    # Use the grep EXIT STATUS instead: 0 = branch found.
    git branch -r | grep -Fw "$branch_name" > /dev/null
    is_remote_branch=$?
    git branch -l | grep -Fw "$branch_name" > /dev/null
    is_local_branch=$?

    if [ $is_remote_branch -ne 0 ] && [ $is_local_branch -ne 0 ]; then
        echo "provided branch doesn't exists"
        exit 1
    fi

    # Bug fix: the original had the actions swapped (pushed a remote delete
    # when the LOCAL branch existed, and vice versa).
    if [ $is_remote_branch -eq 0 ]; then
        git push -d origin "$branch_name"
    fi
    if [ $is_local_branch -eq 0 ]; then
        git branch -D "$branch_name"
    fi
}
main "$@"
|
#!/bin/bash
# End-to-end ASR recipe (ESPnet style) for the "yesno" toy corpus.
# Stages: -1 data download, 0 data preparation, 1 feature generation,
# 2 dictionary/JSON preparation, 3 network training, 4 decoding + scoring.
# All variables below can be overridden on the command line via
# utils/parse_options.sh (e.g. ./run.sh --stage 3 --ngpu 1).
# general configuration
backend=pytorch
stage=-1       # start from -1 if you need to start from data download
stop_stage=100
ngpu=0         # number of gpus ("0" uses cpu, otherwise use gpu)
debugmode=1
dumpdir=dump   # directory to dump full features
N=0            # number of minibatches to be used (mainly for debugging). "0" uses all minibatches.
verbose=1      # verbose option
resume=        # Resume the training from snapshot
# feature configuration
do_delta=false
# network architecture
# encoder related
etype=blstmp   # encoder architecture type
elayers=3
eunits=128
eprojs=128
subsample=1_2_2_1_1 # skip every n frame from input to nth layers
# decoder related
dlayers=1
dunits=128
# attention related
atype=location
adim=128
aconv_chans=10
aconv_filts=100
# hybrid CTC/attention
mtlalpha=0.5
# minibatch related
batchsize=5
maxlen_in=800  # if input length  > maxlen_in, batchsize is automatically reduced
maxlen_out=150 # if output length > maxlen_out, batchsize is automatically reduced
# optimization related
sortagrad=0 # Feed samples from shortest to longest ; -1: enabled for all epochs, 0: disabled, other: enabled for 'other' epochs
opt=adadelta
epochs=20
patience=3
# decoding parameter
beam_size=5
penalty=0.0
maxlenratio=0.0
minlenratio=0.0
ctc_weight=0.5
recog_model=model.acc.best # set a model to be used for decoding: 'model.acc.best' or 'model.loss.best'
# scheduled sampling option
samp_prob=0.0
# exp tag
tag="" # tag for managing experiments.
# Apply command-line overrides, then load the environment and cluster config.
. utils/parse_options.sh || exit 1;
. ./path.sh
. ./cmd.sh
set -euo pipefail
fbankdir=fbank
train_set=train_nodev
train_dev=train_dev
eval_set=test_yesno
recog_set="$eval_set"
if [ ${stage} -le -1 ] && [ ${stop_stage} -ge -1 ]; then
    echo "stage -1: Data Download"
    if [ ! -d waves_yesno ]; then
        wget http://www.openslr.org/resources/1/waves_yesno.tar.gz || exit 1;
        # was:
        # wget http://sourceforge.net/projects/kaldi/files/waves_yesno.tar.gz || exit 1;
        tar -xvzf waves_yesno.tar.gz || exit 1;
        rm ./waves_yesno/README*
    fi
fi
if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
    ### Task dependent. You have to make data the following preparation part by yourself.
    ### But you can utilize Kaldi recipes in most cases
    echo "stage 0: Data preparation"
    local/prepare_data.sh waves_yesno
fi
feat_tr_dir=${dumpdir}/${train_set}/delta${do_delta}; mkdir -p ${feat_tr_dir}
feat_dt_dir=${dumpdir}/${train_dev}/delta${do_delta}; mkdir -p ${feat_dt_dir}
feat_ev_dir=${dumpdir}/${eval_set}/delta${do_delta}; mkdir -p ${feat_ev_dir}
if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
    ### Task dependent. You have to design training and dev sets by yourself.
    ### But you can utilize Kaldi recipes in most cases
    echo "stage 1: Feature Generation"
    # Feature extraction
    for x in train_yesno test_yesno; do
        steps/make_fbank_pitch.sh --nj 1 --write_utt2num_frames true data/${x} exp/make_fbank/${x} ${fbankdir}
        utils/fix_data_dir.sh data/${x}
        steps/compute_cmvn_stats.sh data/${x} exp/make_fbank/${x} ${fbankdir}
    done
    # make a dev set: first 2 utterances -> dev, the rest -> training
    utils/subset_data_dir.sh --first data/train_yesno 2 data/${train_dev}
    n=$(($(wc -l < data/train_yesno/text) - 2))
    utils/subset_data_dir.sh --last data/train_yesno ${n} data/${train_set}
    # compute global CMVN
    compute-cmvn-stats scp:data/${train_set}/feats.scp data/${train_set}/cmvn.ark
    # dump features
    dump.sh --cmd "$train_cmd" --nj 2 --do_delta ${do_delta} \
        data/${train_set}/feats.scp data/${train_set}/cmvn.ark exp/dump_feats/train ${feat_tr_dir}
    dump.sh --cmd "$train_cmd" --nj 2 --do_delta ${do_delta} \
        data/${train_dev}/feats.scp data/${train_set}/cmvn.ark exp/dump_feats/dev ${feat_dt_dir}
    for rtask in ${recog_set}; do
        feat_recog_dir=${dumpdir}/${rtask}/delta${do_delta}; mkdir -p ${feat_recog_dir}
        dump.sh --cmd "$train_cmd" --nj 2 --do_delta ${do_delta} \
            data/${rtask}/feats.scp data/${train_set}/cmvn.ark exp/dump_feats/recog/${rtask} \
            ${feat_recog_dir}
    done
fi
dict=data/lang_1char/${train_set}_units.txt
echo "dictionary: ${dict}"
if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
    ### Task dependent. You have to check non-linguistic symbols used in the corpus.
    echo "stage 2: Dictionary and Json Data Preparation"
    mkdir -p data/lang_1char/
    echo "<unk> 1" > ${dict} # <unk> must be 1, 0 will be used for "blank" in CTC
    text2token.py -s 1 -n 1 data/${train_set}/text | cut -f 2- -d" " | tr " " "\n" \
    | sort | uniq | grep -v -e '^\s*$' | awk '{print $0 " " NR+1}' >> ${dict}
    wc -l ${dict}
    # make json labels
    data2json.sh --feat ${feat_tr_dir}/feats.scp \
         data/${train_set} ${dict} > ${feat_tr_dir}/data.json
    data2json.sh --feat ${feat_dt_dir}/feats.scp \
         data/${train_dev} ${dict} > ${feat_dt_dir}/data.json
    for rtask in ${recog_set}; do
        feat_recog_dir=${dumpdir}/${rtask}/delta${do_delta}
        data2json.sh --feat ${feat_recog_dir}/feats.scp \
            data/${rtask} ${dict} > ${feat_recog_dir}/data.json
    done
fi
# Experiment name: either derived from all hyperparameters, or from the tag.
if [ -z ${tag} ]; then
    expname=${train_set}_${backend}_${etype}_e${elayers}_subsample${subsample}_unit${eunits}_proj${eprojs}_d${dlayers}_unit${dunits}_${atype}_aconvc${aconv_chans}_aconvf${aconv_filts}_mtlalpha${mtlalpha}_${opt}_sampprob${samp_prob}_bs${batchsize}_mli${maxlen_in}_mlo${maxlen_out}
    if ${do_delta}; then
        expname=${expname}_delta
    fi
else
    expname=${train_set}_${backend}_${tag}
fi
expdir=exp/${expname}
mkdir -p ${expdir}
if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then
    echo "stage 3: Network Training"
    ${cuda_cmd} --gpu ${ngpu} ${expdir}/train.log \
        asr_train.py \
        --ngpu ${ngpu} \
        --backend ${backend} \
        --outdir ${expdir}/results \
        --tensorboard-dir tensorboard/${expname} \
        --debugmode ${debugmode} \
        --dict ${dict} \
        --debugdir ${expdir} \
        --minibatches ${N} \
        --verbose ${verbose} \
        --resume ${resume} \
        --train-json ${feat_tr_dir}/data.json \
        --valid-json ${feat_dt_dir}/data.json \
        --etype ${etype} \
        --elayers ${elayers} \
        --eunits ${eunits} \
        --eprojs ${eprojs} \
        --subsample ${subsample} \
        --dlayers ${dlayers} \
        --dunits ${dunits} \
        --atype ${atype} \
        --adim ${adim} \
        --aconv-chans ${aconv_chans} \
        --aconv-filts ${aconv_filts} \
        --mtlalpha ${mtlalpha} \
        --batch-size ${batchsize} \
        --maxlen-in ${maxlen_in} \
        --maxlen-out ${maxlen_out} \
        --sampling-probability ${samp_prob} \
        --opt ${opt} \
        --sortagrad ${sortagrad} \
        --epochs ${epochs} \
        --patience ${patience}
fi
if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then
    echo "stage 4: Decoding"
    nj=2
    # Decode each recognition set in a background subshell; collect the pids
    # so failures can be counted at the end.
    pids=() # initialize pids
    for rtask in ${recog_set}; do
    (
        decode_dir=decode_${rtask}_beam${beam_size}_e${recog_model}_p${penalty}_len${minlenratio}-${maxlenratio}_ctcw${ctc_weight}
        feat_recog_dir=${dumpdir}/${rtask}/delta${do_delta}
        # split data
        splitjson.py --parts ${nj} ${feat_recog_dir}/data.json
        #### use CPU for decoding
        ngpu=0
        ${decode_cmd} JOB=1:${nj} ${expdir}/${decode_dir}/log/decode.JOB.log \
            asr_recog.py \
            --ngpu ${ngpu} \
            --backend ${backend} \
            --debugmode ${debugmode} \
            --verbose ${verbose} \
            --recog-json ${feat_recog_dir}/split${nj}utt/data.JOB.json \
            --result-label ${expdir}/${decode_dir}/data.JOB.json \
            --model ${expdir}/results/${recog_model} \
            --beam-size ${beam_size} \
            --penalty ${penalty} \
            --maxlenratio ${maxlenratio} \
            --minlenratio ${minlenratio} \
            --ctc-weight ${ctc_weight}
        score_sclite.sh ${expdir}/${decode_dir} ${dict}
    ) &
    pids+=($!) # store background pids
    done
    i=0; for pid in "${pids[@]}"; do wait ${pid} || ((++i)); done
    [ ${i} -gt 0 ] && echo "$0: ${i} background jobs are failed." && false
    echo "Finished"
fi
|
# <gh_stars>0
#This will print some common text
print("I will now count my chickens.")
#The division runs before the addition, so 30/6 = 5.0 and 25 + 5.0 = 30.0.
#Python 3's "/" is true division: it always produces a float, hence the ".0".
print("Hens",25+30/6)
#The result is 97: "*" and "%" share the same precedence and evaluate left
#to right before "-", so 25*3 = 75, 75%4 = 3, and 100-3 = 97.
#Remember that "%" here means the remainder of the division.
print("Roosters",100-25*3%4)
#Next should only print common text again.
print("Now I will count the eggs.")
#The result is 6.75: "%" and "/" evaluate before "+" and "-", left to right,
#so 4%2 = 0 and 1/4 = 0.25, giving 3+2+1-5+0-0.25+6 = 6.75.
print(3+2+1-5+4%2-1/4+6)
#Next should print common text
print("Is it true that 3 + 2 < 5 - 7?")
#Prints False because 5 < -2 is false.
print(3+2<5-7)
print("What is 3 + 2?",3+2)
print("What is 5 - 7?",5-7)
print("Oh, that's why it's False.")
print("How about some more.")
#Each comparison below prints True or False accordingly.
print("Is it greater?",5>-2)
print("Is it greater or equal?",5>=-2)
print("Is it less or equal?",5<=-2)
|
// <reponame>v55448330/cattle<gh_stars>0
package io.cattle.platform.process.host;
import io.cattle.platform.core.constants.ClusterConstants;
import io.cattle.platform.core.dao.ClusterHostMapDao;
import io.cattle.platform.core.model.Host;
import io.cattle.platform.core.model.tables.records.ClusterHostMapRecord;
import io.cattle.platform.engine.handler.HandlerResult;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.process.base.AbstractDefaultProcessHandler;
import java.util.Collections;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Named;
@Named
public class HostPurge extends AbstractDefaultProcessHandler {

    @Inject
    ClusterHostMapDao clusterHostMapDao;

    /**
     * Handles the host purge process: schedules removal of every
     * cluster-host mapping referencing the purged resource.
     *
     * @param state   process state whose resource is the Host being purged
     * @param process running process instance
     * @return always null (no result data to report)
     */
    @Override
    public HandlerResult handle(final ProcessState state, ProcessInstance process) {
        final Host host = (Host) state.getResource();

        // TODO: Implement some bulk operation
        List<ClusterHostMapRecord> clusterHostMaps = Collections.emptyList();
        if (host.getKind().equals(ClusterConstants.KIND)) {
            // Resource is itself a cluster: find mappings belonging to it.
            clusterHostMaps = clusterHostMapDao.findClusterHostMapsForCluster(host);
        } else {
            // Plain host: find mappings of clusters that contain this host.
            clusterHostMaps = clusterHostMapDao.findClusterHostMapsHavingHost(host);
        }
        for (ClusterHostMapRecord mapping : clusterHostMaps) {
            objectProcessManager.scheduleProcessInstance(ClusterConstants.CLUSTER_HOST_MAP_REMOVE_PROCESS, mapping, null);
        }

        // Removed a dead conditional from the original:
        // `if (host.getAgentId() == null) return null;` followed by
        // `return null;` — both paths returned null, so the branch had no effect.
        return null;
    }
}
|
#!/bin/bash
# Post a commit status to GitHub. Args: $1 = state, $2 = context,
# $3 = description.
# Get SHAs of commits which were merged for pull request
parents=`git -C "$GITHUB_WORKSPACE/src" log -1 --merges --pretty=format:%P`
# If no commits were merged (ie test was ran on standalone commit) use the hash of that commit
parents=${parents:-$GITHUB_SHA}
# Call Github's statuses API. "${parents#* }" strips everything up to the
# first space, i.e. targets the merge commit's SECOND parent (the PR head).
# NOTE(review): the JSON body is built by string interpolation — a double
# quote inside $3 would produce invalid JSON; consider escaping or jq.
curl --request POST \
  --url "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/statuses/${parents#* }" \
  --header "Authorization: Bearer $GITHUB_TOKEN" \
  --header "Content-Type: application/json" \
  --header "Accept: application/vnd.github.v3+json" \
  --data '{
    "state": "'"$1"'",
    "context": "'"$2"'",
    "description": "'"$3"'"
  }'
#!/usr/bin/env zsh
# Show, side by side (diff -y), the libsass/sass_* symbols imported by the
# Haskell FFI bindings (left) versus the ADDAPI functions declared in the
# libsass C headers (right), so missing/extra bindings stand out.
set -euo pipefail
setopt extended_glob
# Run from the repository root (parent of this script's directory).
pushd "${0:A:h}/.." >/dev/null
diff -y \
  <(cat Bindings/Libsass/**.hs | grep -E 'foreign import .+ (libsass|sass)_' | sed -E 's/^.+ (libsass|sass)_(.+)$/\1_\2/gm' | tr -d '"' | sort) \
  <(cat libsass/include/sass.h libsass/include/sass/*.h | grep -E 'ADDAPI .+ ADDCALL (sass|libsass).+ ?\(' | sed -E 's/^.+ (libsass|sass)_(.+?) ?\(.+$/\1_\2/gm' | awk '{$1=$1};1' | sort)
popd >/dev/null
# coding: utf-8
# # Convert all daily JSON log files for a deployment to a single NetCDF file
# using CF-1.6, Discrete Sampling Geometry (DSG) conventions, **`featureType=timeSeries`**
# In[26]:
import json
import pandas as pd
import numpy as np
import glob
import os
from pyaxiom.netcdf.sensors import TimeSeries
# In[27]:
# Data stream to convert: daily "pwrsys" (power system) JSON logs.
var = 'pwrsys'
# Mooring position/depth keyed by buoy ID.
# NOTE(review): all four buoys share the same lon/lat here — looks like a
# copy/paste placeholder; confirm the real coordinates per mooring.
buoys = {'ce02shsm':{'lon':-124.31, 'lat':44.64, 'depth':0.0},
         'ce04ossm':{'lon':-124.31, 'lat':44.64, 'depth':0.0},
         'ce07shsm':{'lon':-124.31, 'lat':44.64, 'depth':0.0},
         'ce09ossm':{'lon':-124.31, 'lat':44.64, 'depth':0.0}
        }
# pick the last deployment
deployment_index = -1
# In[28]:
# NetCDF global attributes; 'title' is rewritten per buoy in the loop below.
global_attributes = {
    'institution':'Oregon State University',
    'title':'OOI CE02SHSM Pwrsys Data',
    'summary':'OOI data from Coastal Endurance',
    'creator_name':'<NAME>',
    'creator_email':'<EMAIL>',
    'creator_url':'http://ceoas.oregonstate.edu/ooi'
}
# In[29]:
def json2df(infile):
    """Load one daily JSON log file into a pandas DataFrame."""
    with open(infile) as handle:
        records = json.load(handle)
    return pd.DataFrame(records)
# In[31]:
ipath = '/sand/usgs/users/rsignell/data/ooi/endurance/cg_proc/'
odir = '/usgs/data2/notebook/data/nc'
for buoy in buoys.keys():
    # Pick the deployment directory (D*) selected by deployment_index.
    deployment_path = glob.glob(os.path.join(ipath,buoy,'D*'))[deployment_index]
    deployment = deployment_path.split('/')[-1]
    path = os.path.join(deployment_path,'buoy',var,'*.{}.json'.format(var))
    ofile = '{}_{}_{}.nc'.format(buoy,var,deployment)
    print(path)
    # Concatenate every daily JSON log into one frame, indexed by epoch time.
    df = pd.concat([json2df(file) for file in glob.glob(path)])
    df['time'] = pd.to_datetime(df.time, unit='s')
    df.index = df['time']
    df['depth'] = buoys[buoy]['depth']
    global_attributes['title']='OOI {} {} Data'.format(buoy,var)
    # Create the CF/DSG timeSeries NetCDF skeleton for this deployment.
    # NOTE(review): no data variables are added inside this loop — the
    # add_variable pass further below only touches the LAST buoy; confirm.
    ts = TimeSeries(output_directory=odir,
                    latitude=buoys[buoy]['lat'],
                    longitude=buoys[buoy]['lon'],
                    station_name=buoy,
                    global_attributes=global_attributes,
                    times=df.time.values.astype(np.int64) // 10**9,
                    verticals=df.depth.values,
                    output_filename=ofile,
                    vertical_positive='down'
                    )
df.columns.tolist();
# In[22]:
# In[2]:
# Bug fix: the next line was a bare, unterminated-string notebook fragment
# that made the whole module a SyntaxError; preserved as a comment:
# ce02shsm/D00004/buoy/pwrsys/*.pwrsys.json'
# In[7]:
# Split the last glob path from the loop above into directory and pattern.
path, filename = os.path.split(path)
print(path,filename)
# In[5]:
# ### Define the NetCDF global attributes
# In[7]:
global_attributes = {
'institution':'Oregon State University',
'title':'OOI CE02SHSM Pwrsys Data',
'summary':'OOI Pwrsys data from Coastal Endurance Oregon Shelf Surface Mooring',
'creator_name':'<NAME>',
'creator_email':'<EMAIL>',
'creator_url':'http://ceoas.oregonstate.edu/ooi'
}
# ### Create initial file
# In[8]:
# ### Add data variables
# In[9]:
df.columns.tolist()
# In[10]:
for c in df.columns:
if c in ts._nc.variables:
print("Skipping '{}' (already in file)".format(c))
continue
if c in ['time', 'lat', 'lon', 'depth', 'cpm_date_time_string']:
print("Skipping axis '{}' (already in file)".format(c))
continue
print("Adding {}".format(c))
try:
ts.add_variable(c, df[c].values)
except:
print('skipping, hit object')
# In[ ]:
|
# <gh_stars>1-10
# This will guess the User class
FactoryBot.define do
  factory :gallery, class: MediaGallery::Gallery do
    sequence(:name) { |n| "user#{n}_gallery" }
    # Attribute values are wrapped in blocks: static attribute syntax was
    # deprecated in factory_bot 4.11 and removed in 5.0.
    description { "a test gallery" }
    created_at { DateTime.parse("2018-03-31T10:36:57.813Z") }
    updated_at { DateTime.parse("2018-03-31T11:36:57.813Z") }
    association :ownable, factory: :user
  end
end
|
def sieve_of_eratosthenes(n):
    """Return the list of all primes <= n via the Sieve of Eratosthenes."""
    if n < 2:
        return []
    # is_prime[k] tracks whether k is still a prime candidate.
    is_prime = [False, False] + [True] * (n - 1)
    for candidate in range(2, int(n ** 0.5) + 1):
        if is_prime[candidate]:
            # Cross off multiples starting at candidate^2 (smaller multiples
            # were already crossed off by smaller factors).
            for multiple in range(candidate * candidate, n + 1, candidate):
                is_prime[multiple] = False
    return [value for value, flag in enumerate(is_prime) if flag]
/* tslint:disable:no-unused-variable */
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { DebugElement } from '@angular/core';
import { PictureFrameComponent } from './picture-frame.component';
describe('PictureFrameComponent', () => {
  let component: PictureFrameComponent;
  let fixture: ComponentFixture<PictureFrameComponent>;

  // Compile the component's template/styles before each spec.
  // NOTE(review): `async` from @angular/core/testing is deprecated in favor
  // of `waitForAsync` (Angular 11+) — confirm the project's Angular version
  // before migrating.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ PictureFrameComponent ]
    })
    .compileComponents();
  }));

  // Fresh fixture plus an initial change-detection pass for every test.
  beforeEach(() => {
    fixture = TestBed.createComponent(PictureFrameComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
package localfs
import (
"encoding/json"
"fmt"
"io"
"io/ioutil"
"os"
"path"
"path/filepath"
"strings"
"time"
"github.com/araddon/gou"
"github.com/lytics/cloudstorage"
"github.com/lytics/cloudstorage/csbufio"
"github.com/pborman/uuid"
"golang.org/x/net/context"
"google.golang.org/api/iterator"
)
// init registers this provider under StoreType so cloudstorage can build a
// localfs store from a Config.
func init() {
	cloudstorage.Register(StoreType, localProvider)
}

// localProvider adapts NewLocalStore to the cloudstorage provider signature.
func localProvider(conf *cloudstorage.Config) (cloudstorage.Store, error) {
	store, err := NewLocalStore(conf.LocalFS, conf.TmpDir)
	if err != nil {
		return nil, err
	}
	return store, nil
}

var (
	// Ensure Our LocalStore implement CloudStorage interfaces
	_ cloudstorage.StoreReader = (*LocalStore)(nil)
)

const (
	// AuthFileSystem Authentication Method
	AuthFileSystem cloudstorage.AuthMethod = "localfiles"

	// StoreType name of our Local Storage provider = "localfs"
	StoreType = "localfs"
)

// LocalStore is client to local-filesystem store.
type LocalStore struct {
	storepath   string // possibly is relative ./tables
	pathCleaned string // cleaned removing ./ = "tables"
	cachepath   string // scratch directory for cached object copies
	Id          string // random instance id, used to namespace cache paths
}
// NewLocalStore create local store from storage path on local filesystem, and cachepath.
// Both directories are created (0775) if missing; storepath must be
// non-empty and must differ from cachepath.
func NewLocalStore(storepath, cachepath string) (*LocalStore, error) {
	if storepath == "" {
		return nil, fmt.Errorf("storepath=%q cannot be empty", storepath)
	}
	if storepath == cachepath {
		return nil, fmt.Errorf("storepath=%q cannot be the same as cachepath=%q", storepath, cachepath)
	}
	pathCleaned := strings.TrimPrefix(storepath, "./")
	err := os.MkdirAll(storepath, 0775)
	if err != nil {
		return nil, fmt.Errorf("unable to create path. path=%s err=%v", storepath, err)
	}
	err = os.MkdirAll(cachepath, 0775)
	if err != nil {
		return nil, fmt.Errorf("unable to create path. path=%s err=%v", cachepath, err)
	}
	// Dash-free uuid keeps this store's cache files distinct from other
	// instances sharing the same cachepath.
	uid := uuid.NewUUID().String()
	uid = strings.Replace(uid, "-", "", -1)
	return &LocalStore{
		storepath:   storepath,
		pathCleaned: pathCleaned,
		cachepath:   cachepath,
		Id:          uid,
	}, nil
}
// Type is store type = "localfs"
func (l *LocalStore) Type() string {
	return StoreType
}

// Client returns the underlying client; for localfs that is the store itself.
func (l *LocalStore) Client() interface{} {
	return l
}
// NewObject create new object of given name.
// Returns cloudstorage.ErrObjectExists if an object by that name already
// exists; other Get errors are passed through.
func (l *LocalStore) NewObject(objectname string) (cloudstorage.Object, error) {
	obj, err := l.Get(context.Background(), objectname)
	if err != nil && err != cloudstorage.ErrObjectNotFound {
		return nil, err
	} else if obj != nil {
		return nil, cloudstorage.ErrObjectExists
	}
	of := path.Join(l.storepath, objectname)
	err = cloudstorage.EnsureDir(of)
	if err != nil {
		return nil, err
	}
	cf := cloudstorage.CachePathObj(l.cachepath, objectname, l.Id)
	return &object{
		name:      objectname,
		storepath: of,
		cachepath: cf,
	}, nil
}
// List objects at Query location.
// Walks the store tree under query.Prefix; ".metadata" sidecar files are
// folded into their object's metadata map instead of being listed.
func (l *LocalStore) List(ctx context.Context, query cloudstorage.Query) (*cloudstorage.ObjectsResponse, error) {
	resp := cloudstorage.NewObjectsResponse()
	objects := make(map[string]*object)
	metadatas := make(map[string]map[string]string)
	spath := path.Join(l.storepath, query.Prefix)
	if !cloudstorage.Exists(spath) {
		return resp, nil
	}
	err := filepath.Walk(spath, func(fo string, f os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		// Object key = walked path with the cleaned store root stripped.
		obj := strings.Replace(fo, l.pathCleaned, "", 1)
		if f.IsDir() {
			return nil
		} else if filepath.Ext(f.Name()) == ".metadata" {
			// Sidecar file: parse JSON and remember it for its object.
			b, err := ioutil.ReadFile(fo)
			if err != nil {
				return err
			}
			md := make(map[string]string)
			err = json.Unmarshal(b, &md)
			if err != nil {
				return err
			}
			mdkey := strings.Replace(obj, ".metadata", "", 1)
			metadatas[mdkey] = md
		} else {
			oname := strings.TrimPrefix(obj, "/")
			objects[obj] = &object{
				name:      oname,
				updated:   f.ModTime(),
				storepath: fo,
				cachepath: cloudstorage.CachePathObj(l.cachepath, oname, l.Id),
			}
		}
		return err
	})
	if err != nil {
		return nil, fmt.Errorf("localfile: error occurred listing files. searchpath=%v err=%v", spath, err)
	}
	// Attach collected sidecar metadata and build the response list.
	for objname, obj := range objects {
		if md, ok := metadatas[objname]; ok {
			obj.metadata = md
		}
		resp.Objects = append(resp.Objects, obj)
	}
	resp.Objects = query.ApplyFilters(resp.Objects)
	return resp, nil
}
// Objects returns an iterator over the objects in the local folder that match the Query q.
// If q is nil, no filtering is done.
func (l *LocalStore) Objects(ctx context.Context, csq cloudstorage.Query) (cloudstorage.ObjectIterator, error) {
	resp, err := l.List(ctx, csq)
	if err != nil {
		return nil, err
	}
	return &objectIterator{objects: resp.Objects}, nil
}

// Folders list of folders for given path query.
// Returns each immediate sub-directory of the prefix, with a trailing slash.
func (l *LocalStore) Folders(ctx context.Context, csq cloudstorage.Query) ([]string, error) {
	spath := path.Join(l.storepath, csq.Prefix)
	if !cloudstorage.Exists(spath) {
		return nil, fmt.Errorf("That folder %q does not exist", spath)
	}
	// Honor cancellation before touching the filesystem.
	select {
	case <-ctx.Done():
		return nil, ctx.Err()
	default:
	}
	folders := make([]string, 0)
	// NOTE(review): ReadDir's error is silently discarded — confirm that an
	// unreadable directory should yield an empty folder list.
	files, _ := ioutil.ReadDir(spath)
	for _, f := range files {
		if f.IsDir() {
			folders = append(folders, fmt.Sprintf("%s/", path.Join(csq.Prefix, f.Name())))
		}
	}
	return folders, nil
}

// NewReader create local file-system store reader.
func (l *LocalStore) NewReader(o string) (io.ReadCloser, error) {
	return l.NewReaderWithContext(context.Background(), o)
}

// NewReaderWithContext opens the named object for reading;
// ErrObjectNotFound if it does not exist on disk.
func (l *LocalStore) NewReaderWithContext(ctx context.Context, o string) (io.ReadCloser, error) {
	fo := path.Join(l.storepath, o)
	if !cloudstorage.Exists(fo) {
		return nil, cloudstorage.ErrObjectNotFound
	}
	return csbufio.OpenReader(fo)
}
}
// NewWriter creates a writer for the named object with background context.
func (l *LocalStore) NewWriter(o string, metadata map[string]string) (io.WriteCloser, error) {
	return l.NewWriterWithContext(context.Background(), o, metadata)
}

// NewWriterWithContext creates the object's file (and parent directories),
// writes the ".metadata" sidecar file, and returns a buffered writer that
// truncates any existing content. With Opts.IfNotExists, creation fails if
// the file already exists (O_EXCL).
func (l *LocalStore) NewWriterWithContext(ctx context.Context, o string, metadata map[string]string, opts ...cloudstorage.Opts) (io.WriteCloser, error) {
	fo := path.Join(l.storepath, o)
	err := cloudstorage.EnsureDir(fo)
	if err != nil {
		return nil, err
	}
	// Bug fix: the original condition was inverted
	// (`if metadata != nil && len(metadata) > 0`), which REPLACED any
	// caller-supplied, non-empty metadata with a fresh empty map.
	// Only allocate a map when none was provided.
	if metadata == nil {
		metadata = make(map[string]string)
	}
	fmd := fo + ".metadata"
	if err := writemeta(fmd, metadata); err != nil {
		return nil, err
	}
	flag := os.O_RDWR | os.O_CREATE | os.O_TRUNC
	if len(opts) > 0 && opts[0].IfNotExists {
		flag = flag | os.O_EXCL
	}
	// NOTE(review): permission 0665 is unusual (0664 was probably intended);
	// kept as-is — confirm before changing.
	f, err := os.OpenFile(fo, flag, 0665)
	if err != nil {
		return nil, err
	}
	return csbufio.NewWriter(f), nil
}
// Get returns a handle to the named object, or ErrObjectNotFound if the
// backing file does not exist. The Updated time is taken from the
// file's mtime when the file can be stat'ed, zero otherwise.
func (l *LocalStore) Get(ctx context.Context, o string) (cloudstorage.Object, error) {
	fullpath := path.Join(l.storepath, o)
	if !cloudstorage.Exists(fullpath) {
		return nil, cloudstorage.ErrObjectNotFound
	}
	var modified time.Time
	stat, statErr := os.Stat(fullpath)
	if statErr == nil {
		modified = stat.ModTime()
	}
	return &object{
		name:      o,
		updated:   modified,
		storepath: fullpath,
		cachepath: cloudstorage.CachePathObj(l.cachepath, o, l.Id),
	}, nil
}
// Delete the object from underlying store. Removes both the object's
// file and its ".metadata" sidecar.
func (l *LocalStore) Delete(ctx context.Context, obj string) error {
	fo := path.Join(l.storepath, obj)
	// BUG FIX: removal failures were silently ignored; report them so
	// callers don't believe a still-present object was deleted. A file
	// that is already gone still counts as success.
	if err := os.Remove(fo); err != nil && !os.IsNotExist(err) {
		return err
	}
	mf := fo + ".metadata"
	if cloudstorage.Exists(mf) {
		if err := os.Remove(mf); err != nil && !os.IsNotExist(err) {
			return err
		}
	}
	return nil
}
// String renders the store as "[id:<Id> file://<storepath>/]" for logs.
func (l *LocalStore) String() string {
	return fmt.Sprintf("[id:%s file://%s/]", l.Id, l.storepath)
}
// objectIterator steps through an in-memory list of objects produced by
// a single List call.
type objectIterator struct {
	objects cloudstorage.Objects // snapshot of the listing
	err     error                // sticky error returned by every Next
	cursor  int                  // index of the next object to return
}
// Next returns the next object in the listing, iterator.Done once the
// listing is exhausted, or any sticky error recorded on the iterator.
func (l *objectIterator) Next() (cloudstorage.Object, error) {
	switch {
	case l.err != nil:
		return nil, l.err
	case l.cursor >= len(l.objects):
		return nil, iterator.Done
	}
	next := l.objects[l.cursor]
	l.cursor++
	return next, nil
}
// Close is a no-op; the iterator holds no resources.
func (l *objectIterator) Close() {}

// object is a local file-system backed cloudstorage.Object. Reads and
// writes operate on a cached working copy (cachepath) that is synced
// back to the canonical file (storepath) on Sync/Close.
type object struct {
	name       string
	updated    time.Time
	metadata   map[string]string
	storepath  string   // canonical file under the store root
	cachepath  string   // working-copy location used while opened
	cachedcopy *os.File // open handle to the working copy, nil when closed
	readonly   bool
	opened     bool
}
// StorageSource identifies the backing store type.
func (o *object) StorageSource() string {
	return StoreType
}

// Name returns the object's store-relative name.
func (o *object) Name() string {
	return o.name
}

// String implements fmt.Stringer using the object name.
func (o *object) String() string {
	return o.name
}

// Updated returns the last-modified time captured when this object
// handle was created.
func (o *object) Updated() time.Time {
	return o.updated
}

// MetaData returns the object's metadata map (may be nil).
func (o *object) MetaData() map[string]string {
	return o.metadata
}

// SetMetaData replaces the object's metadata map.
func (o *object) SetMetaData(meta map[string]string) {
	o.metadata = meta
}
// Delete releases any cached copy, then removes the store file and its
// ".metadata" sidecar. Release failures are logged but do not stop the
// deletion.
func (o *object) Delete() error {
	if releaseErr := o.Release(); releaseErr != nil {
		gou.Errorf("could not release %v", releaseErr)
	}
	if err := os.Remove(o.storepath); err != nil {
		return err
	}
	meta := o.storepath + ".metadata"
	if !cloudstorage.Exists(meta) {
		return nil
	}
	return os.Remove(meta)
}
// Open copies the canonical store file into the cache location and
// returns an open *os.File on that cached copy. With ReadOnly access
// the handle is reopened read-only; otherwise it is rewound to the
// start of the file, ready for read/write. Fails if already open.
func (o *object) Open(accesslevel cloudstorage.AccessLevel) (*os.File, error) {
	if o.opened {
		return nil, fmt.Errorf("the store object is already opened. %s", o.storepath)
	}
	var readonly = accesslevel == cloudstorage.ReadOnly
	// O_CREATE ensures a store file exists even for brand new objects,
	// so the copy below always has a source.
	storecopy, err := os.OpenFile(o.storepath, os.O_RDWR|os.O_CREATE, 0665)
	if err != nil {
		return nil, fmt.Errorf("localfs: local=%q could not create storecopy err=%v", o.storepath, err)
	}
	defer storecopy.Close()
	err = cloudstorage.EnsureDir(o.cachepath)
	if err != nil {
		return nil, fmt.Errorf("localfs: cachepath=%s could not create cachedcopy dir err=%v", o.cachepath, err)
	}
	cachedcopy, err := os.Create(o.cachepath)
	if err != nil {
		return nil, fmt.Errorf("localfs: cachepath=%s could not create cachedcopy err=%v", o.cachepath, err)
	}
	// Populate the cache from the store.
	_, err = io.Copy(cachedcopy, storecopy)
	if err != nil {
		return nil, fmt.Errorf("localfs: storepath=%s cachedcopy=%v could not copy from store to cache err=%v", o.storepath, cachedcopy.Name(), err)
	}
	if readonly {
		// Reopen read-only so the caller cannot write to the cache.
		cachedcopy.Close()
		cachedcopy, err = os.Open(o.cachepath)
		if err != nil {
			return nil, fmt.Errorf("localfs: storepath=%s cachedcopy=%v could not opencache err=%v", o.storepath, cachedcopy.Name(), err)
		}
	} else {
		// Rewind so reads/writes start at the beginning of the copy.
		if _, err := cachedcopy.Seek(0, os.SEEK_SET); err != nil {
			return nil, fmt.Errorf("error seeking to start of cachedcopy err=%v", err) //don't retry on local fs errors
		}
	}
	o.cachedcopy = cachedcopy
	o.readonly = readonly
	o.opened = true
	return o.cachedcopy, nil
}
// File returns the open cached-copy handle (nil until Open/Write).
func (o *object) File() *os.File {
	return o.cachedcopy
}

// Read reads from the cached copy; the object must already be open.
func (o *object) Read(p []byte) (n int, err error) {
	return o.cachedcopy.Read(p)
}

// Write the given bytes to object. Won't be written to the store until
// Close() or Sync() is called.
func (o *object) Write(p []byte) (n int, err error) {
	// Lazily open the object read/write on first Write.
	if o.cachedcopy == nil {
		_, err := o.Open(cloudstorage.ReadWrite)
		if err != nil {
			return 0, err
		}
	}
	return o.cachedcopy.Write(p)
}
// Sync flushes the cached copy back to the canonical store file and
// rewrites the ".metadata" sidecar. Returns an error if the object was
// never opened, was opened read-only, or any file operation fails.
func (o *object) Sync() error {
	if !o.opened {
		return fmt.Errorf("object isn't opened %s", o.name)
	}
	if o.readonly {
		return fmt.Errorf("trying to Sync a readonly object %s", o.name)
	}

	cachedcopy, err := os.OpenFile(o.cachepath, os.O_RDONLY, 0664)
	if err != nil {
		return err
	}
	defer cachedcopy.Close()

	storecopy, err := os.OpenFile(o.storepath, os.O_CREATE|os.O_TRUNC|os.O_RDWR, 0664)
	if err != nil {
		return err
	}
	defer storecopy.Close()

	if _, err = io.Copy(storecopy, cachedcopy); err != nil {
		return err
	}

	// BUG FIX: the old code reset o.metadata to an empty map whenever it
	// was non-empty, discarding the object's metadata on every Sync.
	// Only allocate when nil so writemeta gets a valid map while
	// caller-provided metadata is preserved.
	if o.metadata == nil {
		o.metadata = make(map[string]string)
	}
	fmd := o.storepath + ".metadata"
	return writemeta(fmd, o.metadata)
}
func writemeta(filename string, meta map[string]string) error {
bm, err := json.MarshalIndent(meta, "", " ")
if err != nil {
return err
}
err = ioutil.WriteFile(filename, bm, 0664)
if err != nil {
return err
}
return nil
}
// Close flushes and closes the cached copy, syncs it back to the store
// for writable objects, and always removes the cache file afterwards
// (via defer), leaving the object in the "not opened" state.
func (o *object) Close() error {
	if !o.opened {
		return nil
	}
	// Regardless of outcome, drop the cache file and reset open state.
	defer func() {
		if o.cachedcopy != nil {
			n := o.cachedcopy.Name()
			os.Remove(n)
		}
		o.cachedcopy = nil
		o.opened = false
	}()
	if !o.readonly {
		// Flush OS buffers before closing a writable copy.
		err := o.cachedcopy.Sync()
		if err != nil {
			return err
		}
	}
	err := o.cachedcopy.Close()
	if err != nil {
		// A double-close is tolerated; any other error is reported.
		if !strings.Contains(err.Error(), os.ErrClosed.Error()) {
			return err
		}
	}
	// Copy the cached bytes back into the canonical store file.
	// (o.opened is still true here; the deferred reset runs at return.)
	if o.opened && !o.readonly {
		err := o.Sync()
		if err != nil {
			return err
		}
	}
	return nil
}
// Release closes and removes the cached working copy, resetting the
// object to the "not opened" state. The canonical store file is left
// untouched.
func (o *object) Release() error {
	if o.cachedcopy != nil {
		o.cachedcopy.Close()
		o.cachedcopy = nil
		o.opened = false
		// FIX: the old code removed o.cachepath here and then removed it
		// again unconditionally below; one removal is enough.
		if err := os.Remove(o.cachepath); err != nil {
			return err
		}
		return nil
	}
	// No cached copy was open; the cache file most likely doesn't
	// exist, so ignore any removal error.
	os.Remove(o.cachepath)
	return nil
}
|
package com.ctrip.persistence.entity;
import javax.persistence.Entity;
import javax.persistence.Table;
/**
 * History record for element parameters, persisted to the
 * "t_element_param_history" table. All columns and behavior are
 * inherited from {@link AbstractElementParam}; this subclass only
 * supplies the table mapping.
 *
 * @author minglei
 */
@Entity
@Table(name = "t_element_param_history")
public class ElementParamHistory extends AbstractElementParam {
}
|
#!/bin/bash
# Build, code-sign and package HappyMac into dist/happymac.dmg.
set -e
rm -rf ~/HappyMacApp build dist
RED='\x1B[0;31m'
NC='\x1B[0m'
IDENTITY='Developer ID Application: LAFFRA JOHANNES (29P9D64BXJ)'
# FIX: terminate colored echoes with ${NC} so the terminal isn't left red.
echo -e "${RED}-1. install dependencies${NC}"
python -m pip install -r requirements.txt
echo -e "${RED}0. Probably need a new version?${NC}"
python src/version.py
echo -e "${RED}1. run pyinstaller - this takes 15 seconds...${NC}"
cp app/pyinstaller.spec .
pyinstaller --onefile --windowed --osx-bundle-identifier com.chrislaffra.osx.happymac pyinstaller.spec
rm pyinstaller.spec
rm -rf build
echo -e "${RED}2. cp -R happymac.app dist${NC}"
cp -R app/happymac.app.template dist/happymac.app
echo -e "${RED}3. mv dist/happymac dist/happymac.app/Contents/MacOS${NC}"
mv dist/happymac dist/happymac.app/Contents/MacOS
echo -e "${RED}4. code sign package${NC}"
# BUG FIX: the binary path used "MacOs" while the bundle directory is
# "MacOS" — signing fails on case-sensitive filesystems.
codesign -v -f --deep -i com.chrislaffra.osx.happymac -s "${IDENTITY}" dist/happymac.app/Contents/MacOS/happymac
codesign -v -f -i com.chrislaffra.osx.happymac -s "${IDENTITY}" dist/happymac.app
pushd dist
echo -e "${RED}5. create-dmg HappyMac.app${NC}"
src/vendor/create-dmg HappyMac.app
mv happymac\ 0.1.0.dmg happymac.dmg
popd
echo -e "${RED}6. code sign dmg${NC}"
ls -l dist
codesign -v -f -i com.chrislaffra.osx.happymac -s "${IDENTITY}" dist/happymac.dmg
echo -e "${RED}7. done building${NC}"
echo -e "${RED}8. Final step: open dist/happymac.dmg${NC}"
open dist/happymac.dmg
|
#!/bin/bash
# Report the total size of every database on a MySQL server, in MB.
USR="Your Username goes here"
PASSWD="Your Password goes here"
HOST="Your Hostname or IP Address goes here"
PORT="Your database port number goes here"
# BUG FIX: the mysql client does not accept "host:port" in -h; the port
# must be passed separately with -P. Variables are quoted so values
# containing spaces survive word splitting.
mysql -u "${USR}" -p"${PASSWD}" -h "${HOST}" -P "${PORT}" -e 'SELECT table_schema AS "Database", ROUND(SUM(data_length + index_length) / 1024 / 1024, 2) AS "Size (MB)" FROM information_schema.TABLES GROUP BY table_schema;'
|
<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package javabluetoothserver;
import java.awt.AWTException;
import java.awt.MouseInfo;
import java.awt.Point;
import java.awt.Robot;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.Platform;
import java.awt.Desktop;
import java.io.DataOutputStream;
import java.io.File;
import java.io.OutputStream;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.microedition.io.StreamConnection;
import javax.swing.JLabel;
import javax.swing.JTextArea;
import jdk.nashorn.internal.parser.JSONParser;
import ui.ControlPanel;
/**
*
* @author user
*/
//this thread processes all kinds of inputs recoeved to the server
public class ProcessConnectionThread implements Runnable {
public static boolean connectionDropped = false;
private StreamConnection mConnection;
private DataInputStream dis;
private DataOutputStream dos;
private static final int EXIT_CMD = -1;
public Thread t;
private String command;
public static boolean destroyed = false; //boolean to check if the thread is destroyed or not
public Robot robot;
InputStream inputStream;
OutputStream outputStream;
//private static final int \
JTextArea message;
public ProcessConnectionThread(StreamConnection connection) throws AWTException {
mConnection = connection;
// t = new Thread(this);
// t.start();
robot = new Robot();
}
ProcessConnectionThread(StreamConnection connection, JTextArea msg) {
mConnection = connection;
message=msg;
message.setText(message.getText()+"\nProcess thread started");
}
@Override
public void run() {
try {
//throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
message.setText("Process thread running");
inputStream = mConnection.openInputStream();
outputStream = mConnection.openOutputStream(); // for sending the output
} catch (IOException ex) {
Logger.getLogger(ProcessConnectionThread.class.getName()).log(Level.SEVERE, null, ex);
message.setText(message.getText()+"\n error opening input stream");
}
dis = new DataInputStream(inputStream);
while (!connectionDropped){
// System.out.println ("waiting to recieve input");
// message.setText(message.getText()+"\nWaiting to recieve input");
// message.setText(message.getText()+"\nWorking... ");
command="";
try {
command = dis.readUTF();
final String command2 = command;
} catch (IOException ex) {
//Logger.getLogger(ProcessConnectionThread.class.getName()).log(Level.SEVERE, null, ex);
message.setText(message.getText()+"\n error reading input");
connectionDropped = true;
break;
//Thread.currentThread().interrupt();
}
// if (connectionDropped){
// message.setText(message.getText()+"\n connection droppped");
// //Thread t = Thread.currentThread();
// break;
// }
// System.out.println("the given command is "+command+connectionDropped);
processCommand(command);
}
// t.interrupt();
//message.setText(message.getText()+"\n out of while");
}
private void processSpecialChar (char x){
try{
Robot robot = new Robot();
System.out.println("processing "+x);
//message.setText(message.getText()+"\nProcessing "+x);
switch (x){
case ':':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_SEMICOLON);
robot.keyRelease(KeyEvent.VK_SEMICOLON);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case ';':
robot.keyPress(KeyEvent.VK_SEMICOLON);
robot.keyRelease(KeyEvent.VK_SEMICOLON);
break;
case ',':
robot.keyPress(KeyEvent.VK_COMMA);
robot.keyRelease(KeyEvent.VK_COMMA);
break;
case '/':
System.out.println ("printing "+x);
//message.setText(message.getText()+"\nPrinting"+x);
robot.keyPress(KeyEvent.VK_SLASH);
robot.keyRelease(KeyEvent.VK_SLASH);
break;
case '.':
System.out.println ("printing "+x);
//message.setText(message.getText()+"\nPrinting"+x);
robot.keyPress(KeyEvent.VK_PERIOD);
robot.keyRelease(KeyEvent.VK_PERIOD);
break;
case '@':
System.out.println ("printing "+x);
//message.setText(message.getText()+"\nPrinting"+x);
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_2);
robot.keyRelease(KeyEvent.VK_SHIFT);
robot.keyRelease(KeyEvent.VK_2);
break;
case '$':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_4);
robot.keyRelease(KeyEvent.VK_4);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '&':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_7);
robot.keyRelease(KeyEvent.VK_7);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '!':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_1);
robot.keyRelease(KeyEvent.VK_SHIFT);
robot.keyRelease(KeyEvent.VK_1);
break;
case '#':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_3);
robot.keyRelease(KeyEvent.VK_SHIFT);
robot.keyRelease(KeyEvent.VK_3);
break;
case '%':
//robot.keyPress(KeyEvent.);
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_5);
robot.keyRelease(KeyEvent.VK_5);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '^':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_6);
robot.keyRelease(KeyEvent.VK_SHIFT);
robot.keyRelease(KeyEvent.VK_6);
break;
case '*':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_8);
robot.keyRelease(KeyEvent.VK_8);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '(':
//robot.keyPress;
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_9);
robot.keyRelease(KeyEvent.VK_9);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case ')':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_0);
robot.keyRelease(KeyEvent.VK_0);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '+':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_EQUALS);
robot.keyRelease(KeyEvent.VK_EQUALS);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '=':
robot.keyPress(KeyEvent.VK_EQUALS);
robot.keyRelease(KeyEvent.VK_EQUALS);
break;
case '-':
robot.keyPress(KeyEvent.VK_MINUS);
robot.keyRelease(KeyEvent.VK_MINUS);
break;
case '[':
robot.keyPress(KeyEvent.VK_OPEN_BRACKET);
robot.keyRelease(KeyEvent.VK_OPEN_BRACKET);
break;
case ']':
robot.keyPress(KeyEvent.VK_CLOSE_BRACKET);
robot.keyRelease(KeyEvent.VK_CLOSE_BRACKET);
break;
case '\\':
robot.keyPress(KeyEvent.VK_BACK_SLASH);
robot.keyRelease(KeyEvent.VK_BACK_SLASH);
break;
case '"':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_QUOTE);
robot.keyRelease(KeyEvent.VK_SHIFT);
robot.keyRelease(KeyEvent.VK_QUOTE);
break;
case '\'' :
robot.keyPress(KeyEvent.VK_QUOTE);
robot.keyRelease(KeyEvent.VK_QUOTE);
break;
case '?' :
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_SLASH);
robot.keyRelease(KeyEvent.VK_SLASH);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '{':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_OPEN_BRACKET);
robot.keyRelease(KeyEvent.VK_OPEN_BRACKET);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '}':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_CLOSE_BRACKET);
robot.keyRelease(KeyEvent.VK_CLOSE_BRACKET);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '_':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_MINUS);
robot.keyRelease(KeyEvent.VK_MINUS);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '<':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_COMMA);
robot.keyRelease(KeyEvent.VK_COMMA);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
case '>':
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_PERIOD);
robot.keyRelease(KeyEvent.VK_PERIOD);
robot.keyRelease(KeyEvent.VK_SHIFT);
break;
}
}
catch (AWTException e){
System.out.println ("Caught Exception");
message.setText(message.getText()+"\nCaught Exception");
e.printStackTrace();
}
}
private void typeWord (String word){
try{
Robot robot = new Robot();
if (word.length() == 1){
char x = word.charAt(0);
if (Character.isLetter(x)){
if (Character.isUpperCase(x)){
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
}
robot.keyPress(Character.toUpperCase(x));
robot.keyRelease(Character.toUpperCase(x));
if (Character.isUpperCase(x)){
robot.keyRelease(KeyEvent.VK_SHIFT);
}
}
else if (Character.isWhitespace(x)){
robot.keyPress(KeyEvent.VK_SPACE);
robot.keyRelease(KeyEvent.VK_SPACE);
}
else if (Character.isDigit(x)){
robot.keyPress(x);
robot.keyRelease(x);
}
else{
processSpecialChar(x);
}
}
else {
for (int i=0;i<word.length();i++){
char x = word.charAt(i);
if (Character.isLetter(x)){
if (Character.isUpperCase(x)){
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
}
robot.keyPress(Character.toUpperCase(x));
robot.keyRelease(Character.toUpperCase(x));
if (Character.isUpperCase(x)){
robot.keyRelease(KeyEvent.VK_SHIFT);
}
}
else if (Character.isWhitespace(x)){
robot.keyPress(KeyEvent.VK_SPACE);
robot.keyRelease(KeyEvent.VK_SPACE);
}
else if (Character.isDigit(x)){
robot.keyPress(x);
robot.keyRelease(x);
}
else{
processSpecialChar(x);
}
}
}
}
catch (AWTException e){
e.printStackTrace();
}
catch (Exception e){
e.printStackTrace();
}
}
public void mouseGlide(int x1, int y1, int x2, int y2, int t, int n) {
try {
Robot r = new Robot();
double dx = (x2 - x1) / ((double) n);
double dy = (y2 - y1) / ((double) n);
double dt = t / ((double) n);
for (int step = 1; step <= n; step++) {
r.delay((int) dt);
r.mouseMove((int) (x1 + dx * step), (int) (y1 + dy * step));
}
} catch (AWTException e) {
e.printStackTrace();
}
// catch (InterruptedException ex) {
// Logger.getLogger(ProcessConnectionThread.class.getName()).log(Level.SEVERE, null, ex);
// }
}
private void processCommand (String command){
try {
Runtime r = Runtime.getRuntime();
Robot robot = new Robot ();
if (command.contains(" ")){
if (command.contains("mouse")){
int wheel = Integer.parseInt(command.split("mouse")[1]);
if (wheel > 0)
robot.mouseWheel(1 * (1 + 50 / 100));
else
robot.mouseWheel(-1);
}
else{
float movex=Float.parseFloat(command.split(" ")[0]);//extract movement in x direction
float movey=Float.parseFloat(command.split(" ")[1]);//extract movement in y direction
Point point = MouseInfo.getPointerInfo().getLocation(); //Get current mouse position
float nowx=point.x;
float nowy=point.y;
//starting a new thread here
// mouseGlide((int)nowx, (int)nowy, (int)(nowx+(int)movex), (int)(nowy + (int)movey), 1, 100);
//robot.mouseMove((int) (nowx+(int)movex),(int)(nowy+(int)movey));//Move mouse pointer to new location
robot.setAutoDelay(5);
robot.mouseMove((int)(nowx+movex), (int)(nowy+movey));
}
}
else if (command.equals ("pauseThread")){
destroyed = true;
Thread.currentThread().destroy();
System.out.println ("process thread destroyed by the user");
}
else if (command.equals("taskview")){
robot.keyPress(KeyEvent.VK_WINDOWS);
robot.delay(100);
robot.keyPress(KeyEvent.VK_TAB);
robot.keyRelease(KeyEvent.VK_TAB);
robot.keyRelease(KeyEvent.VK_WINDOWS);
}
else if (command.equals("doubletap")){
// robot.mouseRelease(InputEvent.BUTTON1_MASK);
// robot.mousePress(InputEvent.BUTTON1_MASK);
// robot.mouseRelease(InputEvent.BUTTON1_MASK);
// robot.delay(50);
// robot.mousePress(InputEvent.BUTTON1_MASK);
// robot.mousePress(InputEvent.BUTTON1_MASK);
robot.mousePress(InputEvent.BUTTON1_MASK);
}
else if (command.equals("mouserelease")){
robot.mouseRelease(InputEvent.BUTTON1_MASK);
robot.delay(50);
robot.mousePress(InputEvent.BUTTON1_MASK);
robot.mouseRelease(InputEvent.BUTTON1_MASK);
}
else if (command.equals("backspace")){
robot.keyPress(KeyEvent.VK_BACK_SPACE);
robot.keyRelease(KeyEvent.VK_BACK_SPACE);
}
else if (command.equals("enter")){
robot.keyPress(KeyEvent.VK_ENTER);
robot.keyRelease(KeyEvent.VK_ENTER);
}
else if (command.equals("STARTTHREAD")){
//start a new thread to send results to android
//pass the current StreamConnection instance and the JTEXTarea instance
//open the output Stream of the current Stream Connection
dos = new DataOutputStream(outputStream);
// these are the default directories
dos.writeUTF("Root Directory\nC:\\\nDirectory");
dos.writeUTF("Root Directory\nD:\\\nDirectory");
dos.writeUTF("END_OF_REQUEST_REACHED"); //command so that the client stops listenning any further.f
}
else if (command.contains("keyboard")){
// keyboard input events
if (command.equals("keyboard")){
command +=" ";
}
if (command.length()>8){
String word="";
word = command.substring(8);
//type the word by simulating keystrokes
typeWord(word);
}
}
else if (command.equals("lmb")||command.equals("rmb")){
if (command.equals("lmb")){
robot.mousePress(InputEvent.BUTTON1_MASK);
robot.mouseRelease(InputEvent.BUTTON1_MASK);
}
else{
robot.mousePress(InputEvent.BUTTON3_MASK);
robot.mouseRelease(InputEvent.BUTTON3_MASK);
}
}
else if (command.equals("lmb_press")){
robot.mousePress(InputEvent.BUTTON1_MASK);
}
else if (command.equals("lmb_release")){
robot.mouseRelease(InputEvent.BUTTON1_MASK);
}
else if (command.equals("longClickUp")){
robot.mouseRelease(InputEvent.BUTTON1_MASK);
}
else if (command.contains("power")){
switch (command) {
case "power_shutdown":
r.exec("shutdown -s -f -t 05");
break;
case "power_restart":
r.exec("Shutdown.exe -r -t 05");
break;
case "power_lock":
r.exec("rundll32.exe user32.dll, LockWorkStation");
break;
case "power_hibernate":
r.exec("rundll32.exe PowrProf.dll,SetSuspendState");
break;
}
}
else if (command.contains("explorer")){
r.exec(command);
}
else if (command.equals("volup_press"))
{
//new A().vol_up();
new MediaHandler().pressVolUpKey();
}
else if (command.equals("voldown_press")){
new MediaHandler().pressVolDownKey();
}
else if (command.equals("volup_release")){
new MediaHandler().releaseVolUpKey();
}
else if (command.equals("voldown_release")){
new MediaHandler().releaseVolDownKey();
}
else if (command.equals("play_press")){
new MediaHandler().pressPlayKey();
}
else if (command.equals("play_release")){
//do nothing
}
else if (command.equals("fast_forward_press")){
new MediaHandler().pressFastForwardKey();
}
else if (command.equals("fast_forward_release")){
new MediaHandler().releaseFastForwardKey();
}
else if (command.equals("fast_reverse_press")){
new MediaHandler().pressPreviousKey();
}
else if (command.equals("fast_reverse_release")){
new MediaHandler().releasePreviousKey();
}
else if (command.equals("seek_forward_press")){
//new MediaHandler().pressSeekForwardKey();
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(20);
robot.keyPress(KeyEvent.VK_RIGHT);
}
else if (command.equals("seek_forward_release")){
//new MediaHandler().releaseSeekForwardKey();
robot.keyRelease(KeyEvent.VK_RIGHT);
robot.keyRelease(KeyEvent.VK_SHIFT);
}
else if (command.equals("seek_backward_press")){
//new MediaHandler().pressSeekBackwardKey();
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(20);
robot.keyPress(KeyEvent.VK_LEFT);
}
else if (command.equals("seek_backward_release")){
//new MediaHandler().releaseSeekBackwardKey();
robot.keyRelease(KeyEvent.VK_SHIFT);
robot.keyRelease(KeyEvent.VK_LEFT);
}
else if (command.contains("explore")){
//The Desktop class allows a Java application to launch associated applications registered
//on the native desktop to handle a URI or a file.
File folder = new File (command.substring(7,command.length()));
File files[] = folder.listFiles();
String fileOrDirectory="";
try{
for (File file : files){
if (file.isFile()){
fileOrDirectory = "file";
// execute the file
// try to open the file display a download option in the android app
// Desktop dt = Desktop.getDesktop();
// dt.open(file);
}
else{
fileOrDirectory = "directory";
}
dos.writeUTF(file.getName()+"\n"+file.getAbsolutePath()+"\n"+fileOrDirectory);
}
}
catch (NullPointerException e){
System.out.println ("Printing end of request...");
dos.writeUTF("END_OF_REQUEST_REACHED");
}
System.out.println ("Printing end of request...");
dos.writeUTF("END_OF_REQUEST_REACHED");
}
else if (command.contains("open")){
File file = new File(command.substring(4,command.length()));
Desktop d = Desktop.getDesktop();
try{
d.open(file);
}
catch (IOException e){
System.out.println("UNABLE TO OPEN FILE");
}
}
else if (command.equals("LASER_ON")){
// robot.mouseRelease(InputEvent.BUTTON1_MASK);
robot.keyPress(KeyEvent.VK_CONTROL);
robot.mousePress(InputEvent.BUTTON1_MASK);
robot.mouseRelease(InputEvent.BUTTON1_MASK);
robot.delay(5);
robot.mousePress(InputEvent.BUTTON1_MASK);
}
else if (command.equals("LASER_OFF")){
robot.mouseRelease(InputEvent.BUTTON1_MASK);
robot.keyRelease(KeyEvent.VK_CONTROL);
}
else if (command.equals("presenatation_next_slide")){
// press the right arrow key
robot.keyPress(KeyEvent.VK_RIGHT);
robot.keyRelease(KeyEvent.VK_RIGHT);
}
else if (command.equals("presenation_back_slide")){
// press the left arrow key
robot.keyPress(KeyEvent.VK_LEFT);
robot.keyRelease(KeyEvent.VK_LEFT);
}
else if (command.equals("presentation_start_slideshow")){
robot.keyPress(KeyEvent.VK_SHIFT);
robot.delay(100);
robot.keyPress(KeyEvent.VK_F5);
robot.keyRelease(KeyEvent.VK_F5);
robot.keyRelease(KeyEvent.VK_SHIFT);
}
else if (command.equals("presentation_pause_slide_show")){
robot.keyPress(KeyEvent.VK_ESCAPE);
robot.keyRelease(KeyEvent.VK_ESCAPE);
}
else if (command.equals("presenation_toggle_annotation")){
robot.keyPress(KeyEvent.VK_CONTROL);
robot.delay(100);
robot.keyPress(KeyEvent.VK_P);
robot.keyRelease(KeyEvent.VK_P);
robot.keyRelease(KeyEvent.VK_CONTROL);
}
}
catch (AWTException | IOException e){
{
e.printStackTrace();
System.out.println ("Error parsing the command");
message.setText(message.getText()+"\nError parsing the command");
try {
dos.writeUTF("END_OF_REQUEST_REACHED"); // in case any error occours, send an end of request.
} catch (IOException ex) {
Logger.getLogger(ProcessConnectionThread.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
}
} |
# Pick two random integers in [1, 10] and report their product.
import random

first_number = random.randint(1, 10)
second_number = random.randint(1, 10)
print(f"The product of {first_number} and {second_number} is {first_number * second_number}")
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR

# Defaults for all recognized parameters.
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE

# Parsing command line arguments.
# BUG FIX: '[[ $# > 0 ]]' compares lexically (string comparison); use
# the arithmetic -gt test to count remaining arguments.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
    -h|--help)
    echo "Usage: run_dispnet_training_philly.sh [run_options]"
    echo "Options:"
    echo "  -d|--data-dir <path> - directory path to input data (default NONE)"
    echo "  -l|--log-dir <path> - directory path to save the log files (default NONE)"
    echo "  -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
    echo "  -m|--model-dir <path> - directory path to output model file (default NONE)"
    exit 1
    ;;
    -d|--data-dir)
    DATA_DIR="$2"
    shift # pass argument
    ;;
    -p|--config-file-dir)
    CONFIG_DIR="$2"
    shift # pass argument
    ;;
    -m|--model-dir)
    MODEL_DIR="$2"
    shift # pass argument
    ;;
    -l|--log-dir)
    LOG_DIR="$2"
    shift
    ;;
    *)
    echo "Unknown option $key" # typo fixed ("Unkown"); quoted to keep spacing intact
    ;;
esac
shift # past argument or value
done

# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"

# Add the root folder of the code to the PYTHONPATH
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR

# Run the actual job
python $CONFIG_DIR/examples/AnytimeNetwork/imagenet-dense-ann.py \
--data_dir=$DATA_DIR \
--log_dir=$LOG_DIR \
--model_dir=$MODEL_DIR \
--load=${MODEL_DIR}/checkpoint \
-f=2 --densenet_depth=169 -s=14 --batch_size=256 --nr_gpu=8 --densenet_version=dense --min_predict_unit=10 --opt_at=-1 --samloss=6 -g=32 --num_classes=1000 --reduction_ratio=0.5
|
(function() {
'use strict';
/**
* App handles the communication among GUI, Game and Network.
*/
var App = function() {
  this.game = null;    // soccer.Game instance (set in initialize)
  this.network = null; // soccer.Network instance (set in initialize)
  // Connected client ids: players[0] => robot1, players[1] => robot2.
  this.players = [null, null];
  this.state = 'DISCONNECT';
  this._waiting_moves = false; // true while waiting for clients' moves
  this._move_timout = 0;       // move-timeout accumulator
};
/** Initialize all objects and start the render/update ticker. */
App.prototype.initialize = function() {
  this.game = new soccer.Game();
  this.network = new soccer.Network();
  this.update_info();
  // Drive the app from the createjs ticker at the configured FPS;
  // the tick delta arrives in ms and is converted to seconds.
  var self = this;
  createjs.Ticker.framerate = config.display.fps;
  createjs.Ticker.on('tick', function(e) {
    self.on_update(e.delta / 1000.0);
  });
};
// CALLBACKS ----------------------------------------------------------------
App.prototype.on_update = function(tick) {
  if (config.debug.show_fps) stats.begin();

  if (this.state === 'PLAY') {
    this.update_play(tick);
  } else {
    // Outside of play, still fire the sensors so raycasts stay visible.
    this.game.robot1.get_sensors();
    this.game.robot2.get_sensors();
  }

  // On desktop only the renderer runs; otherwise step the simulation.
  if (desktop) {
    this.game._render();
  } else {
    this.game.update(tick);
  }

  if (config.debug.show_fps) stats.end();
};
App.prototype.on_goal = function(robot_who_scored) {
  // Credit the scorer, then restart from kickoff positions.
  var scorer = (robot_who_scored === 1) ? this.game.robot1 : this.game.robot2;
  scorer.score += 1;
  this.game.reset();
  this.update_info();
};
// Register a newly connected network client into the first free player
// slot (players[0] => robot1, players[1] => robot2).
App.prototype.on_connection = function(client) {
  var i;
  if (this.players[0] == null) {
    i = 0;
  } else if (this.players[1] == null) {
    i = 1;
  } else {
    logger.error('Trying to register more players than supported.');
    return;
  }
  this.players[i] = client;
  // NOTE(review): after the assignment above at least one slot is always
  // non-null, so this condition is always true — was '&&' (both players
  // connected) intended? Confirm before changing.
  if (this.players[0] !== null || this.players[1] !== null) {
    this.do_stop();
  }
  this.update_info();
}
App.prototype.on_disconnection = function(client) {
  var slot = this.players.indexOf(client);
  if (slot < 0) {
    // Unknown client: nothing registered to remove.
    return;
  }
  this.players[slot] = null;
  // With a player still connected, stop the match; otherwise disconnect.
  var anyLeft = this.players[0] || this.players[1];
  if (anyLeft) {
    this.do_stop();
  } else {
    this.do_disconnect();
  }
  this.update_info();
};
// A client changed its name; just refresh the scoreboard labels.
App.prototype.on_rename = function() {
  this.update_info();
}
// --------------------------------------------------------------------------
// Single PLAY-state tick: once the previously requested moves arrive,
// apply them and advance the simulation; then (re)request moves so the
// next tick has fresh input.
App.prototype.update_play = function(tick) {
  // if app already asked for movement and is waiting
  if (this._waiting_moves) {
    var moves = this.network.get_moves();
    // movements ready
    if (moves) {
      var move1 = moves[this.players[0]];
      var move2 = moves[this.players[1]];
      if (move1) { this.game.robot1.act(move1.force, move1.steer); }
      if (move2) { this.game.robot2.act(move2.force, move2.steer); }
      this.game.update(tick);
      this._move_timout = 0;
      this._waiting_moves = false;
    }
  }
  // notice that the previous IF can set waiting_moves to false
  if (!this._waiting_moves) {
    var info = this._get_game_info();
    this.network.ask_for_moves(info);
    this._waiting_moves = true;
  }
}
App.prototype._get_game_info = function() {
  // Snapshot of the ball plus one entry per registered player id.
  var info = {};
  info['ball'] = this.game.get_ball_info();
  if (this.players[0]) { info[this.players[0]] = this.game.get_robot_info(1); }
  if (this.players[1]) { info[this.players[1]] = this.game.get_robot_info(2); }
  return info;
};
// Refresh the scoreboard: scores, client names and connection status.
App.prototype.update_info = function() {
  var robot1 = this.game.robot1;
  var robot2 = this.game.robot2;
  gui.score1.html(robot1.score);
  gui.score2.html(robot2.score);
  // Defaults shown while a slot is empty.
  var default_name = config.network.host+':'+config.network.port;
  gui.name1.html(default_name)
  gui.name2.html(default_name)
  var default_status = 'Disconnected';
  gui.status1.html(default_status)
  gui.status2.html(default_status)
  var clients = this.network.clients;
  // NOTE(review): assumes the two registered ids live exactly in
  // clients[0]/clients[1] — confirm against Network's client list.
  if (this.players[0]) {
    var c = clients[ (clients[0].id===this.players[0])? 0:1 ];
    gui.name1.html(c.name);
    gui.status1.html('Connected');
  }
  if (this.players[1]) {
    var c = clients[ (clients[0].id===this.players[1])? 0:1 ];
    gui.name2.html(c.name);
    gui.status2.html('Connected');
  }
}
// ACTIONS (generally from GUI) ---------------------------------------------
// Full restart: kick every client, reset the field, fall back to the
// DISCONNECT state, clear both player slots and redraw the panels.
// The order matters: clients must be gone before state/slots are cleared.
App.prototype.do_newgame = function() {
    logger.info('Creating new game.');
    this.network.disconnect_all();
    this.game.reset();
    this.do_disconnect();
    this.players = [null, null];
    this.update_info();
}
// Enter the PLAY state: restart the move-request cycle and disable any
// robot that has no player attached to it.
App.prototype.do_play = function() {
    logger.debug('Changing app state to PLAY.');
    this.state = 'PLAY';
    this._move_timout = 0;
    this._waiting_moves = false;
    if (!this.players[0]) { this.game.robot1.disable(); }
    if (!this.players[1]) { this.game.robot2.disable(); }
    // Toolbar while playing: play and invert are locked, the rest usable.
    var flags = {
        btn_play: 'disabled',
        btn_pause: false,
        btn_stop: false,
        btn_reset: false,
        btn_randomreset: false,
        btn_invert: 'disabled'
    };
    for (var name in flags) { gui[name].attr('disabled', flags[name]); }
}
// Enter the PAUSE state; the simulation stops advancing until Play or Stop.
App.prototype.do_pause = function() {
    logger.debug('Changing app state to PAUSE.');
    this.state = 'PAUSE';
    // Toolbar while paused: only pause and invert stay locked.
    var flags = {
        btn_play: false,
        btn_pause: 'disabled',
        btn_stop: false,
        btn_reset: false,
        btn_randomreset: false,
        btn_invert: 'disabled'
    };
    for (var name in flags) { gui[name].attr('disabled', flags[name]); }
}
// Enter the STOP state: start a fresh match, re-enable both robots and
// leave only Play and Invert available on the toolbar.
App.prototype.do_stop = function() {
    logger.debug('Changing app state to STOP.');
    this.game.new_game();
    this.update_info();
    this.state = 'STOP';
    this.game.robot1.enable();
    this.game.robot2.enable();
    var flags = {
        btn_play: false,
        btn_pause: 'disabled',
        btn_stop: 'disabled',
        btn_reset: 'disabled',
        btn_randomreset: 'disabled',
        btn_invert: false
    };
    for (var name in flags) { gui[name].attr('disabled', flags[name]); }
}
// Put ball and robots back to their standard kickoff positions.
App.prototype.do_reset = function() {
    this.game.reset();
}
// Reset the field, then scatter ball and robots to random positions.
App.prototype.do_randomreset = function() {
    this.game.reset();
    this.game.random_reset();
}
// Enter the DISCONNECT state: no clients attached, so every match control
// is locked except player inversion.
App.prototype.do_disconnect = function() {
    logger.debug('Changing app state to DISCONNECT.');
    this.state = 'DISCONNECT';
    this.game.robot1.enable();
    this.game.robot2.enable();
    var flags = {
        btn_play: 'disabled',
        btn_pause: 'disabled',
        btn_stop: 'disabled',
        btn_reset: 'disabled',
        btn_randomreset: 'disabled',
        btn_invert: false
    };
    for (var name in flags) { gui[name].attr('disabled', flags[name]); }
}
// Swap which connected client controls robot1 vs robot2.  Refused while a
// match is running (anything other than DISCONNECT or STOP).
App.prototype.do_invert = function() {
    if (['DISCONNECT', 'STOP'].indexOf(this.state) === -1) {
        logger.warn('Trying to invert players during game.');
        return;
    }
    var first = this.players[0];
    this.players[0] = this.players[1];
    this.players[1] = first;
    this.update_info();
}
// --------------------------------------------------------------------------
soccer.App = App;
})() |
-- For every user, return that user's most recent comment.
-- The tables MUST be aliased: in the original, the correlated predicate
-- "user_id = comments.user_id" resolved BOTH sides to the inner table
-- (the outer reference was shadowed), making it trivially true, so the
-- query returned only comments at the GLOBAL maximum comment_time.
SELECT
    c.user_id,
    c.comment_time,
    c.comment_text
FROM comments AS c
WHERE c.comment_time = (
    SELECT MAX(c2.comment_time)
    FROM comments AS c2
    WHERE c2.user_id = c.user_id
)
/*
Copyright (c) 2020-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include "oneapi/tbb/cache_aligned_allocator.h"
#include "oneapi/tbb/detail/_small_object_pool.h"
#include "oneapi/tbb/detail/_task.h"
#include "governor.h"
#include "thread_data.h"
#include "task_dispatcher.h"
#include <cstddef>
namespace tbb {
namespace detail {
namespace r1 {
// Sentinel stored into m_public_list once the owning thread has destroyed
// the pool; remote deallocations that observe it free memory directly (see
// deallocate_impl).  The value 1 can never be a valid object address.
small_object_pool_impl::small_object* const small_object_pool_impl::dead_public_list =
    reinterpret_cast<small_object_pool_impl::small_object*>(1);
// Allocation fast path: the execution_data already carries the current task
// dispatcher, so the calling thread's pool is reached without a TLS lookup.
void* __TBB_EXPORTED_FUNC allocate(d1::small_object_pool*& allocator, std::size_t number_of_bytes, const d1::execution_data& ed) {
    auto& tls = static_cast<const execution_data_ext&>(ed).task_disp->get_thread_data();
    auto pool = tls.my_small_object_pool;
    return pool->allocate_impl(allocator, number_of_bytes);
}
// Allocation entry point used when no execution_data is available; falls
// back to the governor's TLS lookup to locate the calling thread's pool.
void* __TBB_EXPORTED_FUNC allocate(d1::small_object_pool*& allocator, std::size_t number_of_bytes) {
    // TODO: optimize if the allocator contains a valid pool.
    auto tls = governor::get_thread_data();
    auto pool = tls->my_small_object_pool;
    return pool->allocate_impl(allocator, number_of_bytes);
}
// Hands out a chunk of at least `number_of_bytes` and records this pool in
// `allocator` so the matching deallocate() can route back here.  Small
// requests are served from two free lists: m_private_list (owner thread
// only, no synchronization) and m_public_list (filled by other threads,
// drained here with a single atomic exchange).
void* small_object_pool_impl::allocate_impl(d1::small_object_pool*& allocator, std::size_t number_of_bytes)
{
    small_object* obj{nullptr};

    if (number_of_bytes <= small_object_size) {
        // Try the private free list first: owner-thread access, no atomics.
        if (m_private_list) {
            obj = m_private_list;
            m_private_list = m_private_list->next;
        } else if (m_public_list.load(std::memory_order_relaxed)) {
            // No fence required for read of my_public_list above, because std::atomic::exchange() has a fence.
            obj = m_public_list.exchange(nullptr);
            __TBB_ASSERT( obj, "another thread emptied the my_public_list" );
            // Keep the head for the caller; the rest becomes the private list.
            m_private_list = obj->next;
        } else {
            // Both lists empty: grow the pool by one cache-aligned chunk.
            obj = new (cache_aligned_allocate(small_object_size)) small_object{nullptr};
            ++m_private_counter;
        }
    } else {
        // Oversized request: bypass the pool entirely.
        obj = new (cache_aligned_allocate(number_of_bytes)) small_object{nullptr};
    }
    allocator = this;

    // Return uninitialized memory for further construction on user side.
    obj->~small_object();
    return obj;
}
// Deallocation entry point without execution_data: resolve the calling
// thread via the governor's TLS, then let the pool decide which list (or
// direct free) the memory goes to.
void __TBB_EXPORTED_FUNC deallocate(d1::small_object_pool& allocator, void* ptr, std::size_t number_of_bytes) {
    auto pool = static_cast<small_object_pool_impl*>(&allocator);
    auto tls = governor::get_thread_data();
    pool->deallocate_impl(ptr, number_of_bytes, *tls);
}
// Deallocation fast path: thread identity is taken from the execution_data's
// task dispatcher, avoiding the TLS lookup.
void __TBB_EXPORTED_FUNC deallocate(d1::small_object_pool& allocator, void* ptr, std::size_t number_of_bytes, const d1::execution_data& ed) {
    auto& tls = static_cast<const execution_data_ext&>(ed).task_disp->get_thread_data();
    auto pool = static_cast<small_object_pool_impl*>(&allocator);
    pool->deallocate_impl(ptr, number_of_bytes, tls);
}
// Returns `ptr` to the pool.  If the deallocating thread owns the pool the
// object goes on the private list (no synchronization); otherwise it is
// pushed onto the public list with a CAS loop.  If the owner has already
// destroyed the pool (public list holds dead_public_list), the memory is
// freed directly, and the deallocation that brings m_public_counter back to
// zero also releases the pool object itself.
void small_object_pool_impl::deallocate_impl(void* ptr, std::size_t number_of_bytes, thread_data& td) {
    __TBB_ASSERT(ptr != nullptr, "pointer to deallocate should not be null");
    __TBB_ASSERT(number_of_bytes >= sizeof(small_object), "number of bytes should be at least sizeof(small_object)");

    if (number_of_bytes <= small_object_size) {
        // Re-link the storage as a free-list node.
        auto obj = new (ptr) small_object{nullptr};
        if (td.my_small_object_pool == this) {
            // Owner thread: cheap push onto the private list.
            obj->next = m_private_list;
            m_private_list = obj;
        } else {
            auto old_public_list = m_public_list.load(std::memory_order_relaxed);
            for (;;) {
                if (old_public_list == dead_public_list) {
                    // Owner is gone; free directly instead of enqueueing.
                    obj->~small_object();
                    cache_aligned_deallocate(obj);
                    // destroy() made the counter negative; reaching zero here
                    // means this was the last outstanding object, so this
                    // thread must release the pool itself.
                    if (++m_public_counter == 0)
                    {
                        this->~small_object_pool_impl();
                        cache_aligned_deallocate(this);
                    }
                    break;
                }
                obj->next = old_public_list;
                if (m_public_list.compare_exchange_strong(old_public_list, obj)) {
                    break;
                }
            }
        }
    } else {
        // Oversized chunks were never pooled; free them directly.
        cache_aligned_deallocate(ptr);
    }
}
// Destroy every node of a singly linked free list and report how many
// nodes were released.
std::int64_t small_object_pool_impl::cleanup_list(small_object* list)
{
    std::int64_t removed_count{};
    for (small_object* next = nullptr; list != nullptr; list = next) {
        next = list->next;
        list->~small_object();
        cache_aligned_deallocate(list);
        ++removed_count;
    }
    return removed_count;
}
// Called by the owner thread when it abandons the pool.  Frees every node it
// can reach, marks the public list dead so future remote deallocations free
// memory themselves, and hands off responsibility for freeing the pool
// object to whichever side observes the counter reach zero.
void small_object_pool_impl::destroy()
{
    // clean up private list and subtract the removed count from private counter
    m_private_counter -= cleanup_list(m_private_list);
    // Grab public list and place dead mark
    small_object* public_list = m_public_list.exchange(dead_public_list);
    // clean up public list and subtract from private (intentionally) counter
    m_private_counter -= cleanup_list(public_list);
    __TBB_ASSERT(m_private_counter >= 0, "Private counter may not be less than 0");
    // Equivalent to fetch_sub(m_private_counter) - m_private_counter. But we need to do it
    // atomically with operator-= not to access m_private_counter after the subtraction.
    auto new_value = m_public_counter -= m_private_counter;
    // check if this method is responsible to clean up the resources
    if (new_value == 0) {
        this->~small_object_pool_impl();
        cache_aligned_deallocate(this);
    }
}
} // namespace r1
} // namespace detail
} // namespace tbb
|
/*-
* Copyright (c) 2019 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifndef _ARM_NORDICSEMI_NRF9160_UICR_H_
#define _ARM_NORDICSEMI_NRF9160_UICR_H_

/* Register offsets of the nRF9160 UICR (User Information Configuration
 * Registers) peripheral, relative to the peripheral base address. */
#define UICR_APPROTECT 0x000 /* Access port protection */
#define UICR_UNUSED0 0x004 /* Reserved */
#define UICR_UNUSED1 0x008 /* Reserved */
#define UICR_UNUSED2 0x00C /* Reserved */
#define UICR_UNUSED3 0x010 /* Reserved */
#define UICR_XOSC32M 0x014 /* Oscillator control */
#define UICR_HFXOSRC 0x01C /* HFXO clock source selection */
#define UICR_HFXOCNT 0x020 /* HFXO startup counter */
#define UICR_SECUREAPPROTECT 0x02C /* Secure access port protection */
#define UICR_ERASEPROTECT 0x030 /* Erase protection */
#define UICR_OTP(n) (0x108 + (n) * 0x4) /* OTP bits [31+n*32:0+n*32]. */
#define UICR_KEYSLOT_CONFIG_DEST(n) (0x400 + (n) * 0x8) /* Destination address where content of the key value registers */
#define UICR_KEYSLOT_CONFIG_PERM(n) (0x404 + (n) * 0x8) /* Define permissions for the key slot with ID=n+1. Bits 0-15 and 16-31 can only be written once. */
#define UICR_KEYSLOT_KEY_VALUE(n, o) (0x800 + (n) * 0x10 + (o) * 0x4) /* Define bits [31+o*32:0+o*32] of value assigned to KMU key slot ID=n+1 */

/* Driver software context; holds the peripheral's mapped base address. */
struct nrf_uicr_softc {
	size_t base;
};

/* Initialize the UICR driver context with the peripheral base address. */
void nrf_uicr_init(struct nrf_uicr_softc *sc, uint32_t base);

#endif /* !_ARM_NORDICSEMI_NRF9160_UICR_H_ */
|
<filename>library/src/main/java/com/smoothsync/smoothsetup/services/providerservice/ProviderService.java<gh_stars>0
/*
* Copyright (c) 2020 dmfs GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.smoothsync.smoothsetup.services.providerservice;
import com.smoothsync.api.model.Provider;
import androidx.annotation.Keep;
import io.reactivex.rxjava3.core.Maybe;
import io.reactivex.rxjava3.core.Observable;
/**
 * A service to provide {@link Provider} information.
 */
@Keep
public interface ProviderService
{
    /**
     * Returns a specific provider by its id. Completes empty when the id is
     * unknown (this is the {@link Maybe} contract).
     */
    Maybe<Provider> byId(String id);

    /**
     * Returns all providers supporting a given domain.
     */
    Observable<Provider> byDomain(String domain);

    /**
     * Returns all providers known to this {@link ProviderService}.
     */
    Observable<Provider> all();

    /**
     * Auto-completes the given domain name fragment to known domains.
     */
    Observable<String> autoComplete(String domainFragment);
}
|
# Prompt the user for an integer and report whether it lies in [0, 5].
num = int(input('Enter a number from 0 to 5: '))

# Chained comparison is equivalent to `num in range(0, 6)` for ints.
if 0 <= num <= 5:
    print("Input accepted!")
else:
    print("Input not accepted.")
#!/bin/bash
# Find all R packages loaded in a codebase
# Jacob Levernier
# 2016
# Released under an MIT license
############################
# Settings
############################
file_to_save="./markdown_draft_examples/R_Package_Version_Numbers_AUTOMATICALLY_GENERATED_DO_NOT_EDIT_MANUALLY.md" # This file should be located in the markdown drafts folder (the script "Markdown_to_LaTeX_PDF_Build_Script.sh" expects that).
code_directory_to_search="/path/to/your/code"
############################
# End Settings
############################
# Get all 'library()' calls, and save them to a file:
grep --recursive --include="*.R*" --only-matching --no-filename "library(.*)" "$code_directory_to_search" > "$file_to_save"
# Also get all 'require()' calls, and append them to the file:
grep --recursive --include="*.R*" --only-matching --no-filename "require(.*)" "$code_directory_to_search" >> "$file_to_save"
# The perl passes below progressively strip the call syntax so that only
# bare package names remain, one per line.
# Remove all 'library' and 'require' start-of-line strings:
perl -pi -e 's/^(require|library)//g' "$file_to_save"
# Remove all lines beginning with 'Binary file' (which is a false positive):
perl -pi -e 's/^Binary file.*$//g' "$file_to_save"
# Delete anything after the first encountered closing parenthesis (this assumes that there isn't more than one library() call per line of code in the codebase):
perl -pi -e 's/\).*$//g' "$file_to_save"
# Replace all commas with newlines
perl -pi -e 's/,/\n/g' "$file_to_save"
# Delete all opening parentheses:
perl -pi -e 's/\(//g' "$file_to_save"
# Remove all single- and double-quote marks:
perl -pi -e 's/"//g' "$file_to_save"
perl -pi -e "s/'//g" "$file_to_save"
# Get only unique values from the file:
unique_values=$(cat "$file_to_save" | sort --unique)
echo "$unique_values" > "$file_to_save"
# Run the R Script
markdown_table=$(Rscript "/home/jacoblevernier/Primary_Syncing_Folder/Documents/Files in Transit/Dissertation_Project/Dissertation_Proposal/Written_Report/Get_All_R_Library_Version_Numbers_and_Create_Draft_From_Them_R_Script_Portion.R")
# (Over)Write the final markdown file:
echo -e "% Generated using Get_All_R_Library_Version_Numbers_and_Create_Draft_From_Them.sh\n\n" > "$file_to_save" # Add a comment (which will be ignored by Pandoc) re: the source of the file.
echo "\chapter{R base and library version numbers}" >> "$file_to_save"
#echo "\section{(Generated from package documentation within R)}" >> "$file_to_save"
echo "\begin{center}Version numbers of R base and R packages used in this project. This table was generated automatically from package documentation within R; author names are therefore as the authors wished them to be printed.\end{center}" >> "$file_to_save"
echo -e "\n\n" >> "$file_to_save"
echo "$markdown_table" >> "$file_to_save"
|
#!/bin/sh
# Install Node.js 4.x from the NodeSource repository, plus the toolchain
# required to build native addons.
curl -sL https://deb.nodesource.com/setup_4.x | sudo -E bash -;
sudo apt-get install -y nodejs;
sudo apt-get install -y build-essential;
# Debian/Ubuntu package the binary as "nodejs"; expose it as "node" too.
# sudo is required to write into /usr/bin (the original unprivileged ln
# would fail), and -f makes the step idempotent when the link already exists.
sudo ln -sf /usr/bin/nodejs /usr/bin/node
|
#!/usr/bin/env sh
# Post-install/upgrade hook for PufferPanel.
systemctl daemon-reload
# If a backup database was left behind by a previous upgrade, restore it.
if [ -f "/var/lib/pufferpanel/database-RESTORE.db" ]; then
    mv /var/lib/pufferpanel/database-RESTORE.db /var/lib/pufferpanel/database.db
fi
# Make sure a database file exists so the service can start.
if [ ! -f "/var/lib/pufferpanel/database.db" ]; then
    touch /var/lib/pufferpanel/database.db
fi
chown -R pufferpanel:pufferpanel /etc/pufferpanel /var/log/pufferpanel /var/lib/pufferpanel /var/www/pufferpanel
# Restart only when the service is currently active.
systemctl is-active --quiet pufferpanel && systemctl restart pufferpanel
exitCode=$?
# Exit code 3 comes from `is-active` on an inactive unit; treat it as success.
[ $exitCode -eq 0 ] || [ $exitCode -eq 3 ] || exit $exitCode
|
/*
* Copyright [2020-2030] [https://www.stylefeng.cn]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Guns采用APACHE LICENSE 2.0开源协议,您在使用过程中,需要注意以下几点:
*
* 1.请不要删除和修改根目录下的LICENSE文件。
* 2.请不要删除和修改Guns源码头部的版权声明。
* 3.请保留源码和相关描述文件的项目出处,作者声明等。
* 4.分发源码时候,请注明软件出处 https://gitee.com/stylefeng/guns
* 5.在修改包名,模块名称,项目代码等时,请注明软件出处 https://gitee.com/stylefeng/guns
* 6.若您的项目无法满足以上几点,可申请商业授权
*/
package cn.stylefeng.roses.kernel.customer.api.constants;
/**
 * Constants for the customer-facing (C-end) business module.
 *
 * @author fengshuonan
 * @date 2021/6/7 11:28
 */
public interface CustomerConstants {

    /**
     * Name of the customer module.
     */
    String CUSTOMER_MODULE_NAME = "kernel-s-customer";

    /**
     * Step value for this module's exception error codes.
     */
    String CUSTOMER_EXCEPTION_STEP_CODE = "31";

    /**
     * Cache key prefix for customer entries.
     */
    String CUSTOMER_CACHE_PREFIX = "customer:";

    /**
     * Default bcrypt hash used for an empty password.
     * NOTE(review): the original value was redacted from this source
     * (`= <PASSWORD>";` did not compile). TODO: restore the real bcrypt
     * hash of the empty password before release.
     */
    String DEFAULT_EMPTY_PASSWORD = "";
}
|
<reponame>benrandall/idb
import React from 'react';
import PropTypes from 'prop-types';
import { Col } from 'reactstrap';
import './RSTeamMember.css';
const RSTeamMember = (props) => {
return (
<Col lg='4' sm='6' className="text-center mb-4">
<img className="rounded-circle img-responsive img-center" src={`${process.env.REACT_APP_API_HOST}/images/`+ props.icon } alt="" />
<h4>{ props.name }
<small className="text-muted"> { props.role }</small>
</h4>
<p> { props.bio }</p>
<p><b>Number of commits:</b> { props.commits || -1 }</p>
<p><b>Number of issues:</b> { props.issues || -1 }</p>
<p><b>Number of unit tests: </b> { props.tests || 0 }</p>
</Col>
);
};
// Prop contract for RSTeamMember.  `bio` was rendered by the component but
// missing here; adding it is backward-compatible (all props are optional).
RSTeamMember.propTypes = {
  icon: PropTypes.string,
  name: PropTypes.string,
  role: PropTypes.string,
  bio: PropTypes.string,
  commits: PropTypes.number,
  issues: PropTypes.number,
  tests: PropTypes.number
};
export default RSTeamMember; |
# Build the "tileserver" image from the Dockerfile in the current directory.
docker build -t tileserver .
|
#!/bin/sh
# Script for Running all the Tests one after other

# Where are we called from?
P=`dirname $0`
# Current dir
ORIGPWD=`pwd`
# File for Storing Log Of UnitTest.
logUT=UnitTestResult
logSRT=SeleniumTestResult

###########################################################
##
## Create log for the tests
##
###########################################################
# Method to Create Log Folder if it does not Exists.
# Side effects: changes into ../test and sets the PATH4LOG global
# used by the run_* functions below.
create_log()
{
    cd $ORIGPWD/../test/
    PATH4LOG=`pwd`
    if [ ! -d "Result" ] ; then
        mkdir Result
    fi
}
###########################################################
##
## Following methods are used to run the different tests
##
###########################################################
# Method to Run Unit Tests.  Output is captured into the Result log dir
# created by create_log().
run_UnitTest()
{
    cd $ORIGPWD/../test
    # Running Unit Tests
    php UnitTests.php > $PATH4LOG/Result/$logUT
}

# Method to Run Selenium Ruby Tests.  Prompts for a sub-menu choice and
# forwards it to the ruby test driver.
run_seleniumTest()
{
    sub_menu
    echo "Enter Your Option: "
    read choice
    cd $ORIGPWD/../test/selenium-ruby/CRM
    # Running Selenium (ruby) Tests
    ruby ruby_unit_tests.rb $choice
}

# Method to Run Stress Test.  Delegates to the sibling script.
run_stressTest()
{
    cd $ORIGPWD/
    # running stress test
    ./runStressTest.sh
}
###########################################################
##
## Menu system for different purpos
##
###########################################################
# Top-level menu: pick which test suite (or all of them) to run.
main_menu()
{
    clear
    echo
    echo " *********************** Select Method for Test *********************** "
    echo
    echo "Options available: "
    echo "  UT  - Carry out Unit Tests"
    echo "  ST  - Carry out Stress Tests"
    echo "  SRT - Carry out Selenium (Ruby) Tests"
    echo "  All - Carry out all the above mentioned Tests i.e. Unit Tests, Stress Test, Selenium Test"
    echo
    echo
}

# Selenium sub-menu: the number entered is passed straight to
# ruby_unit_tests.rb by run_seleniumTest().
sub_menu()
{
    clear
    echo
    echo " *********************** Select the Option *********************** "
    echo
    echo "Options available: "
    echo "  1 : Contact Individual"
    echo "  2 : Contact Household"
    echo "  3 : Contact Organization"
    echo "  4 : New Group"
    echo "  5 : Manage Group"
    echo "  6 : Administer - Configuration Section"
    echo "  7 : Administer - Configuration Custom Data"
    echo "  8 : Administer - Configuration Profile"
    echo "  9 : Administer - Setup Section"
    echo " 10 : Administer - CiviContribute"
    echo " 11 : Administer - CiviMember"
    echo " 12 : Administer - CiviEvent"
    echo " 13 : Find Contact - Basic Search"
    echo " 14 : Advanced Search"
    echo " 15 : Search Builder"
    echo " 16 : Import - Contacts"
    echo " 17 : Import - Activity History"
    echo " 18 : CiviContribute - Find Contribution"
    echo " 19 : CiviContribute - Import Contribution"
    echo " 20 : CiviMember - Find Memberships"
    echo " 21 : CiviMember - Import Memberships"
    echo " 22 : CiviMail"
    echo " 23 : CiviEvent"
}
###########################################################
##
## Main execution method.
##
## All test scripts will run usnig this method
##
###########################################################
# Dispatch on the main-menu choice; unknown input exits the script.
run_option()
{
    # Following Case Structure is used for Executing Menuing System.
    case $1 in
        # Unit Tests
        "UT" | "ut" | "Ut")
            echo "Running Unit Tests"; echo;
            run_UnitTest
            echo "Unit Tests Successfully Completed. Log stored in the File : " $PATH4LOG/Result/$logUT; echo;
            echo " **************************************************************************** ";
            ;;

        # Stress Tests
        "ST" | "st" | "St")
            echo "Running Stress Tests"; echo;
            run_stressTest
            echo "Stress Tests Successfully Completed."; echo;
            echo " **************************************************************************** ";
            ;;

        # Selenium (Ruby) Tests
        "SRT" | "srt" | "Srt")
            echo "Running Selenium (Ruby) Tests"; echo;
            run_seleniumTest
            #echo "Selenium (Ruby) Testing Successfully Completed. Log stored in the File : " $PATH4LOG/Result/$logSRT; echo;
            echo " **************************************************************************** ";
            ;;

        # All the Tests will be Executed one after other
        "All" | "all" )
            echo "Running all three Tests i.e. Unit Tests, Web Tests, maxQ Tests, Stress Test and Selenium(Ruby) Tests"; echo;
            echo "Running Unit Tests"; echo;
            run_UnitTest
            echo "Unit Tests Successfully Completed. Log stored in the File : " $PATH4LOG/Result/$logUT; echo;
            echo "Running Stress Tests"; echo;
            run_stressTest
            echo "Stress Tests Successfully Completed."; echo;
            echo " **************************************************************************** ";
            echo "Running Selenium (ruby) Tests"; echo;
            run_seleniumTest
            #echo "Selenium (Ruby) Testing Successfully Completed. Log stored in the File : " $PATH4LOG/Result/$logSRT; echo;
            echo " **************************************************************************** ";
            ;;

        *)
            echo "You have entered Invalid Option."; echo;
            exit
            ;;
    esac
}

###########################################################
##
## Start of the script.
##
###########################################################
# Entry point: prepare the log dir, show the menu, read a choice, dispatch.
start()
{
    create_log
    main_menu
    echo "Enter Your Option: "
    read option
    run_option $option
    echo;
}

###########################################################
##
## Call to start of the script
##
###########################################################
start
<reponame>EntropyHaos/KOREAHAOS_diagramo_tester_C9_VM
"use strict";
/*
Copyright [2014] [Diagramo]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* Small library to smooth the dashed line support in <canvas>
* Dotted line is made available through:
*
* Basic canvas support for dashed lines is based on following CanvasRenderingContext2D's
* methods and properties:
* - CanvasRenderingContext2D.setLineDash();
* - CanvasRenderingContext2D.getLineDash();
* - CanvasRenderingContext2D.lineDashOffset;
*
* @see http://www.rgraph.net/blog/2013/january/html5-canvas-dashed-lines.html
* */
// Set to true once the polyfill below has patched the canvas context.
var dashSupport = false;

// Polyfill IIFE: patches CanvasRenderingContext2D.prototype so that
// setLineDash()/getLineDash()/lineDashOffset exist on browsers that only
// expose the Mozilla-prefixed mozDash/mozDashOffset variants.  Browsers
// with neither API get no-op stubs (a logged TODO).
(function() {
    /*Add setLineDash(...) to Canvas context
     * */
    if (typeof CanvasRenderingContext2D.prototype.setLineDash !== 'function') {
        CanvasRenderingContext2D.prototype.setLineDash = function() {
            if (CanvasRenderingContext2D.prototype.hasOwnProperty("mozDash")) { //Mozilla
                /*If you got an error "NS_ERROR_ILLEGAL_VALUE:" this is because
                 * Util.operaReplacer from main.js->save() breaks serialization*/
                this.mozDash = arguments[0];
            }
            else { //others
                console.info("no setLineDash");
                //TODO: implement fall back option. Right now do nothing :)
                /**You are more than welcome to offer a fallback implementation. */
            }
        };
    }

    /*Add getLineDash(...) to Canvas context*/
    if (typeof CanvasRenderingContext2D.prototype.getLineDash !== 'function') {
        CanvasRenderingContext2D.prototype.getLineDash = function() {
            var _dash = null;
            //check if is a Mozilla variant
            if (CanvasRenderingContext2D.prototype.hasOwnProperty("mozDash")) {
                _dash = this.mozDash;
            }
            else {
                console.info("no setLineDash");
                /**You are more than welcome to offer a fallback implementation. */
            }
            return _dash;
        };
    }

    /**Creates lineDashOffset property for context that miss it*/
    if (!CanvasRenderingContext2D.prototype.hasOwnProperty("lineDashOffset")) {
        if (CanvasRenderingContext2D.prototype.hasOwnProperty("mozDashOffset")) { //Mozilla
            Object.defineProperty(CanvasRenderingContext2D.prototype, "lineDashOffset", {
                /**ATTENTION: You must first set mozDash in order to use mozDashOffset*/
                get: function() {
                    // console.info("mozDashOffset get");
                    return this.mozDashOffset;
                },
                set: function(val) {
                    this.mozDashOffset = val;
                    // console.info("mozDashOffset set");
                },
                enumerable: true
            });
        }
        else{ //other browsers
            Object.defineProperty(CanvasRenderingContext2D.prototype, "lineDashOffset", {
                get: function() {
                    //TODO: add your own implementation
                },
                set: function(val) {
                    //TODO: add your own implementation
                },
                enumerable: true
            });
        }
    }

    // console.info("Context enhanced with dotted line");
    dashSupport = true;
})();
<gh_stars>100-1000
/*
* Copyright © 2019 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*/
/* eslint-disable @typescript-eslint/no-floating-promises */
import { Sequence } from '../../../../src/node/utils/sequence';
// Unit tests for the Sequence utility: an async FIFO queue whose workers
// are executed one at a time by an internal _tick() loop.
describe('Sequence', () => {
	// Fake timers let the tests drive setTimeout-based workers deterministically.
	jest.useFakeTimers();
	let sequence: Sequence;

	beforeEach(() => {
		sequence = new Sequence();
	});

	describe('#constructor', () => {
		it('should have the default config', () => {
			// Bracket access reaches the private _config member.
			expect(sequence['_config'].warningLimit).toEqual(50);
			expect(sequence['_config'].onWarning).toBeUndefined();
		});

		it('should call the _tick after event loop', () => {
			const tickSpy = jest.spyOn(sequence, '_tick' as any);
			jest.advanceTimersByTime(3);
			expect(tickSpy).toHaveBeenCalledTimes(1);
		});
	});

	describe('#count', () => {
		it('should register worker and count is correct', () => {
			sequence.add(async () => Promise.resolve(true));
			sequence.add(async () => Promise.resolve(1));
			sequence.add(async () => Promise.resolve('new'));
			expect(sequence.count()).toEqual(3);
		});

		it('should register worker and count should decrease after tick', () => {
			sequence.add(async () => Promise.resolve(true));
			sequence.add(async () => Promise.resolve(1));
			sequence.add(async () => Promise.resolve('new'));
			// Manually driving the private _tick consumes exactly one worker.
			sequence['_tick']();
			expect(sequence.count()).toEqual(2);
		});
	});

	describe('#add', () => {
		it('should throw an error if the input is not async function', async () => {
			await expect(sequence.add((() => true) as any)).rejects.toThrow(
				'Worker must be an async function.',
			);
		});

		it('should enqueue the input to the sequence', () => {
			sequence.add(async () => Promise.resolve(true));
			expect(sequence['_queue']).toHaveLength(1);
		});
	});

	describe('#tick', () => {
		it('should resolve undefined when there is no task in the queue', async () => {
			const result = await sequence['_tick']();
			expect(result).toBeUndefined();
		});

		it('should resolve to the result of the fist function', async () => {
			const expectedResult = 'result';
			const resultPromise = sequence.add(async () => {
				return new Promise(resolve => {
					setTimeout(() => {
						return resolve(expectedResult);
					}, 10);
				});
			});
			jest.advanceTimersByTime(13);
			const result = await resultPromise;
			expect(result).toEqual(expectedResult);
		});

		it('should resolve to the result of in sequence', async () => {
			// The second worker's timeout (2ms) is shorter than the first's
			// (10ms); FIFO ordering must still resolve them in add() order.
			const expectedResult1 = 'result1';
			const expectedResult2 = 'result2';
			const result1Promise = sequence.add(async () => {
				return new Promise(resolve => {
					setTimeout(() => {
						return resolve(expectedResult1);
					}, 10);
				});
			});
			const result2Promise = sequence.add(async () => {
				return new Promise(resolve => {
					setTimeout(() => {
						return resolve(expectedResult2);
					}, 2);
				});
			});
			sequence['_tick']();
			sequence['_tick']();
			jest.runAllTimers();
			const [result1, result2] = await Promise.all([result1Promise, result2Promise]);
			expect(result1).toEqual(expectedResult1);
			expect(result2).toEqual(expectedResult2);
		});
	});
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.