text stringlengths 1 1.05M |
|---|
#!/bin/bash
# CocoaPods "Embed Pods Frameworks" build-phase script prolog.
#
# Fixed: the original shebang was /bin/sh, but this script relies on
# bash-only features (arrays like RSYNC_PROTECT_TMP_FILES, [[ ]], the
# `function` keyword, and the ERR trap). Under dash these fail outright,
# and `set -o pipefail` is not guaranteed by POSIX sh either.
set -e
set -u
set -o pipefail

# Report the script path and failing line number whenever a command errors.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework into the app's Frameworks folder,
# re-signing it and (pre-Xcode 7) embedding any Swift runtime dylibs it links.
install_framework()
{
# Resolve the framework source: prefer the full path under BUILT_PRODUCTS_DIR,
# then just its basename there, then the argument as a literal path.
# NOTE(review): if none of the three is readable, `source` stays unset and the
# rsync below fails via the ERR trap — confirm that is the intended behavior.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Dereference a symlinked source so rsync copies the real framework directory.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
# Locate the executable inside the copied bundle; fall back to a bare binary
# at the destination root, and dereference a symlinked binary before stripping.
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
elif [ -L "${binary}" ]; then
echo "Destination binary is symlinked..."
dirname="$(dirname "${binary}")"
binary="${dirname}/$(readlink "${binary}")"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM. Works in DERIVED_FILES_DIR first, then
# moves the (possibly stripped) bundle to DWARF_DSYM_FOLDER_PATH.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
# Path of the DWARF companion binary inside the copied dSYM bundle.
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
# strip_invalid_archs sets STRIP_BINARY_RETVAL=1 when the binary matched the
# build architectures (possibly after stripping), 0 when nothing matched.
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Copies the bcsymbolmap files of a vendored framework into BUILT_PRODUCTS_DIR.
# $1 - path to the .bcsymbolmap file.
install_bcsymbolmap() {
  local bcsymbolmap_path="$1"
  local destination="${BUILT_PRODUCTS_DIR}"
  # Fixed: the log line previously nested unescaped double quotes, which
  # terminated the string early so the printed command did not match the one
  # actually executed. Escape them as the other install_* helpers do.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity.
# $1 - path to the bundle or dylib to sign.
# Honors COCOAPODS_PARALLEL_CODE_SIGN by backgrounding the codesign call
# (the script's trailing `wait` collects those jobs).
code_sign_if_enabled() {
  # Fixed: replaced the deprecated, ambiguity-prone `[ ... -a ... ]` form with
  # `[[ ... && ... ]]`, and expand CODE_SIGNING_ALLOWED with a default so the
  # check cannot trip `set -u` when Xcode does not export it.
  if [[ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" && "${CODE_SIGNING_REQUIRED:-}" != "NO" && "${CODE_SIGNING_ALLOWED:-}" != "NO" ]]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Background the invocation; the caller waits for all jobs at the end.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures from a fat binary in place, leaving only the
# slices present in the build's ARCHS. Communicates its outcome through the
# global STRIP_BINARY_RETVAL (1 = usable/stripped, 0 = no matching arch).
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
# 0 tells the caller the binary is unusable and was not stripped.
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary"
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
# 1 tells the caller the binary now matches the build architectures.
STRIP_BINARY_RETVAL=1
}
# Embed the pod frameworks. The Debug and Release lists are identical, so a
# single combined condition covers both configurations.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Bolts/Bolts.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/DateTools/DateTools.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Parse/Parse.framework"
fi

# When signing runs in parallel, block until every backgrounded codesign exits.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
#!/bin/bash
# Generates a SLURM job directory for a tsgemm run: computes the process grid
# and node count, then writes job.sh (sbatch file) and cmd.sh (per-rank
# launcher) into a freshly created, date-stamped job directory.
#
# Fixed vs. the original: the shebang is bash (the script and the generated
# files use `[[ ]]` and `&>`, which POSIX sh lacks), failures abort early via
# `set -eu`, directory names are quoted, and the error messages no longer
# print a literal "\n" (bash echo does not interpret escapes without -e).
set -eu

# ------------------------------- input
build_dir=$HOME/build/tsgemm
source_dir=$HOME/code/tsgemm
job_time=10 # [min]
exe_name=v1 # v2 v2_pool v2_priority
arch=mc # or 'gpu'
apex=1 # or 0 for no APEX
len_m=3000
len_n=3000
len_k=523000
tile_m=512
tile_n=512
tile_k=10000
pgrid_rows=5
pgrid_cols=2
blk_rows=512
blk_cols=512
# --------------------------------

# Per-node process/thread layout depends on the node architecture.
if [[ ${arch} == mc ]]; then
  job_nprocs_per_node=2
  job_threads_per_proc=18
elif [[ ${arch} == gpu ]]; then
  job_nprocs_per_node=1
  job_threads_per_proc=12
else
  echo "arch must be gpu or mc!" >&2
  exit 1
fi

nprocs=$(( pgrid_rows * pgrid_cols ))
# Each node hosts job_nprocs_per_node ranks, so the total must divide evenly.
if (( nprocs % job_nprocs_per_node != 0 )); then
  echo "nprocs must be divisible by job_nprocs_per_node!" >&2
  exit 1
fi

job_queue=normal
job_nodes=$(( nprocs / job_nprocs_per_node ))
date_str=$(date '+%Y.%m.%d')
job_name="${date_str}__${exe_name}_\
${len_m}.${len_n}.${len_k}_${tile_m}.${tile_n}.${tile_k}_\
${pgrid_rows}.${pgrid_cols}_${blk_rows}.${blk_cols}"

# exe parameters
# "--hpx:print-bind --hpx:threads=${job_threads_per_proc}"
params="--hpx:queuing=shared-priority --hpx:use-process-mask \
--len_m ${len_m} --len_n ${len_n} --len_k ${len_k} \
--tile_m ${tile_m} --tile_n ${tile_n} --tile_k ${tile_k} \
--pgrid_rows ${pgrid_rows} --pgrid_cols ${pgrid_cols} \
--blk_rows ${blk_rows} --blk_cols ${blk_cols}"

exe_path=${build_dir}/apps/${exe_name}

# mkdir without -p fails loudly (via set -e) if the job directory already
# exists, preventing a previous run from being silently overwritten.
mkdir "${job_name}"
cd "${job_name}"

# generate job file. Shebang is bash because the body uses `&>` redirection.
cat << JOB_EOF > job.sh
#!/bin/bash
#SBATCH --job-name=${job_name}
#SBATCH --constraint=${arch}
#SBATCH --partition=${job_queue}
#SBATCH --nodes=${job_nodes}
#SBATCH --time=${job_time}
#SBATCH --ntasks-per-node=${job_nprocs_per_node}
#SBATCH --cpus-per-task=${job_threads_per_proc}
#SBATCH --hint=nomultithread
#SBATCH --output=output.txt
#SBATCH --error=error.txt
device=daint
source ${source_dir}/scripts/env.sh
# MPI_THREAD_MULTIPLE for Cray
export MPICH_MAX_THREAD_SAFETY=multiple
# modules snapshot
module list &> modules.txt
# environment snapshot
printenv > env.txt
# libraries snapshot
ldd ${exe_path} > libs.txt
# run code
srun ./cmd.sh
JOB_EOF
chmod +x job.sh

# generate command file. Shebang is bash because the body uses `[[ ]]`;
# \${SLURM_PROCID} is escaped so it expands at run time, per rank.
cat << CMD_EOF > cmd.sh
#!/bin/bash
if [[ \${SLURM_PROCID} == 0 ]]; then
export APEX_OTF2=${apex}
#export APEX_PROFILE=1
#export APEX_SCREEN_OUTPUT=1
fi
${exe_path} ${params}
CMD_EOF
chmod +x cmd.sh
|
public static void copyStrings(String s1, String s2){
char[] ch1 = s1.toCharArray();
char[] ch2 = s2.toCharArray();
for(int i = 0; i < ch1.length; i++){
ch2[i] = ch1[i];
}
s2 = String.valueOf(ch2);
System.out.println(s2);
} |
import {
genConfig, ReducerGenerator, genActions, ONE, MANY,
} from '~/api/apiResultActionReducerGenerator';
// API resource configuration for account events. Supports fetching a single
// event and paginated lists; results are kept sorted newest-first.
export const config = genConfig({
  plural: 'events',
  endpoint: id => `/account/events/${id}`,
  supports: [ONE, MANY],
  // sort desc by created
  sortFn: (ids, state) =>
    ids.sort((a, b) => {
      const aDate = new Date(state[a].created);
      const bDate = new Date(state[b].created);
      if (aDate > bDate) return -1;
      if (aDate < bDate) return 1;
      return 0;
    }),
});

export const actions = genActions(config);
export const { reducer } = new ReducerGenerator(config);
|
<filename>2021-05-09/企业版商城前端/pages/user/user.js
// pages/user/user.js
// WeChat mini-program "user center" page: shows the user's profile, an order
// status summary, and a static menu of account-related entries.
var app = getApp()
Page( {
data: {
userInfo: {},
orderInfo:{},
projectSource: 'https://github.com/liuxuanqiang/wechat-weapp-mall',
// Static menu entries; `isunread`/`unreadNum` drive the badge display.
// NOTE(review): the unread counts are hard-coded placeholders — confirm.
userListInfo: [ {
icon: '../../images/iconfont-dingdan.png',
text: '我的订单',
isunread: true,
unreadNum: 2
}, {
icon: '../../images/iconfont-card.png',
text: '我的代金券',
isunread: false,
unreadNum: 2
}, {
icon: '../../images/iconfont-icontuan.png',
text: '我的拼团',
isunread: true,
unreadNum: 1
}, {
icon: '../../images/iconfont-shouhuodizhi.png',
text: '收货地址管理'
}, {
icon: '../../images/iconfont-kefu.png',
text: '联系客服'
}, {
icon: '../../images/iconfont-help.png',
text: '常见问题'
}],
loadingText: '加载中...',
loadingHidden: false,
},onLoad: function () {
var that = this
// Fetch the global user info via the app instance (async callback).
app.getUserInfo(function(userInfo){
// Update page data and hide the loading indicator.
that.setData({
userInfo:userInfo,
loadingHidden: true
})
});
this.loadOrderStatus();
},
// Refresh the order summary every time the page becomes visible again.
onShow:function(){
this.loadOrderStatus();
},
// Requests the user's order status summary from the backend.
loadOrderStatus:function(){
var that = this;
console.log(this.data);
wx.request({
url: app.d.hostUrl + '/ztb/orderZBT/GetOrderSumZBT',
method:'post',
data: {
userId:app.d.userId,
},
header: {
'Content-Type': 'application/x-www-form-urlencoded'
},
success: function (res) {
//--init data
// NOTE(review): assumes res.data.data is a non-empty array — no guard for
// an error payload or empty result; verify against the backend contract.
var data = res.data.data;
console.log(data);
that.setData({
orderInfo:data[0],
});
//endInitData
},
});
},
})
/*
* Copyright 2017 The Mifos Initiative.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.mifos.identity.internal.repository;
import com.datastax.driver.mapping.annotations.Field;
import com.datastax.driver.mapping.annotations.UDT;
import java.util.Objects;
import java.util.Set;
/**
* @author <NAME>
*/
@SuppressWarnings({"unused", "WeakerAccess"})
@UDT(name = Permissions.TYPE_NAME)
public class PermissionType {
// Identifier of the permittable group this permission applies to.
@Field(name = Permissions.PERMITTABLE_GROUP_IDENTIFIER_FIELD)
private String permittableGroupIdentifier;
// Operations granted on that permittable group.
@Field(name = Permissions.ALLOWED_OPERATIONS_FIELD)
private Set<AllowedOperationType> allowedOperations;
/** No-arg constructor required by the DataStax object mapper. */
public PermissionType() {
}
/**
 * @param permittableGroupIdentifier identifier of the permittable group
 * @param allowedOperations          operations granted on that group
 */
public PermissionType(String permittableGroupIdentifier, Set<AllowedOperationType> allowedOperations) {
this.permittableGroupIdentifier = permittableGroupIdentifier;
this.allowedOperations = allowedOperations;
}
public String getPermittableGroupIdentifier() {
return permittableGroupIdentifier;
}
public void setPermittableGroupIdentifier(String permittableGroupIdentifier) {
this.permittableGroupIdentifier = permittableGroupIdentifier;
}
public Set<AllowedOperationType> getAllowedOperations() {
return allowedOperations;
}
public void setAllowedOperations(Set<AllowedOperationType> allowedOperations) {
this.allowedOperations = allowedOperations;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PermissionType that = (PermissionType) o;
return Objects.equals(permittableGroupIdentifier, that.permittableGroupIdentifier) &&
Objects.equals(allowedOperations, that.allowedOperations);
}
@Override
public int hashCode() {
return Objects.hash(permittableGroupIdentifier, allowedOperations);
}
@Override
public String toString() {
return "PermissionType{" +
"permittableGroupIdentifier='" + permittableGroupIdentifier + '\'' +
", allowedOperations=" + allowedOperations +
'}';
}
}
|
<gh_stars>0
package com.huli.jformatter.intellij.process;
import com.huli.jformatter.intellij.entity.FieldEntity;
import com.intellij.psi.*;
import org.apache.http.util.TextUtils;
import com.huli.jformatter.intellij.common.FieldHelper;
import com.huli.jformatter.intellij.common.Try;
import com.huli.jformatter.intellij.config.Config;
import com.huli.jformatter.intellij.config.Constant;
import com.huli.jformatter.intellij.entity.ClassEntity;
import java.util.regex.Pattern;
/**
* Created by dim on 16/11/7.
*/
// Processor variant that emits Google AutoValue-style classes: abstract
// accessor methods instead of fields with getters/setters, plus the
// @AutoValue annotation and the `abstract` modifier on generated classes.
class AutoValueProcessor extends Processor {
@Override
public void onStarProcess(ClassEntity classEntity, PsiElementFactory factory, PsiClass cls, IProcessor visitor) {
super.onStarProcess(classEntity, factory, cls, visitor);
// Annotate the root class with @AutoValue before members are generated.
injectAutoAnnotation(factory, cls);
}
@Override
public void generateField(PsiElementFactory factory, FieldEntity fieldEntity, PsiClass cls, ClassEntity classEntity) {
if (fieldEntity.isGenerate()) {
// Try.run retries once with a "lucky" (deduplicated) field name if the
// first insertion throws, and records a FIXME comment on total failure.
Try.run(new Try.TryListener() {
@Override
public void run() {
cls.add(factory.createMethodFromText(generateFieldText(classEntity, fieldEntity, null), cls));
}
@Override
public void runAgain() {
fieldEntity.setFieldName(FieldHelper.generateLuckyFieldName(fieldEntity.getFieldName()));
cls.add(factory.createMethodFromText(generateFieldText(classEntity, fieldEntity, Constant.FIXME), cls));
}
@Override
public void error() {
cls.addBefore(factory.createCommentFromText("// FIXME generate failure field " + fieldEntity.getFieldName(), cls), cls.getChildren()[0]);
}
});
}
}
@Override
public void generateGetterAndSetter(PsiElementFactory factory, PsiClass cls, ClassEntity classEntity) {
// Intentionally empty: AutoValue exposes data through the abstract
// accessor methods emitted by generateField, not getter/setter pairs.
}
@Override
public void generateConvertMethod(PsiElementFactory factory, PsiClass cls, ClassEntity classEntity) {
super.generateConvertMethod(factory, cls, classEntity);
// if (PsiClassUtil.isClassAvailableForProject(cls.getProject(), "com.ryanharter.auto.value.gson.AutoValueGsonAdapterFactoryProcessor")) {
// String qualifiedName = cls.getQualifiedName();
// String autoAdapter = qualifiedName.substring(mainPackage.length()+1, qualifiedName.length());
// createMethod(factory, Constant.autoValueMethodTemplate.replace("$className$", classEntity.getClassName()).replace("$AdapterClassName$", getAutoAdpaterClass(autoAdapter)).trim(), cls);
// }
}
// Converts a dotted class name into an underscore-joined adapter class name
// (e.g. "a.B" -> "a_B"). NOTE(review): "Adpater" typo is part of the public
// API; renaming would break callers.
public static String getAutoAdpaterClass(String className) {
return String.join("_", className.split("\\."));
}
@Override
protected void onEndGenerateClass(PsiElementFactory factory, ClassEntity classEntity, PsiClass parentClass, PsiClass generateClass, IProcessor visitor) {
super.onEndGenerateClass(factory, classEntity, parentClass, generateClass, visitor);
injectAutoAnnotation(factory, generateClass);
}
// Adds @com.google.auto.value.AutoValue (unless an @...AutoValue annotation
// is already present) and forces the class to be abstract.
private void injectAutoAnnotation(PsiElementFactory factory, PsiClass cls) {
PsiModifierList modifierList = cls.getModifierList();
if (modifierList != null) {
PsiElement firstChild = modifierList.getFirstChild();
Pattern pattern = Pattern.compile("@.*?AutoValue");
if (firstChild != null && !pattern.matcher(firstChild.getText()).find()) {
PsiAnnotation annotationFromText = factory.createAnnotationFromText("@com.google.auto.value.AutoValue", cls);
modifierList.addBefore(annotationFromText, firstChild);
}
if (!modifierList.hasModifierProperty(PsiModifier.ABSTRACT)) {
modifierList.setModifierProperty(PsiModifier.ABSTRACT, true);
}
}
}
// Builds the source text of one abstract accessor method, prepending any
// pending "extra" text and a @SerializedName annotation when the JSON key
// differs from the field name (or when the config forces it).
private String generateFieldText(ClassEntity classEntity, FieldEntity fieldEntity, String fixme) {
fixme = fixme == null ? "" : fixme;
StringBuilder fieldSb = new StringBuilder();
String fieldName = fieldEntity.getGenerateFieldName();
if (!TextUtils.isEmpty(classEntity.getExtra())) {
fieldSb.append(classEntity.getExtra()).append("\n");
classEntity.setExtra(null);
}
if (!fieldName.equals(fieldEntity.getKey()) || Config.getInstant().isUseSerializedName()) {
fieldSb.append(Constant.gsonFullNameAnnotation.replaceAll("\\{filed\\}", fieldEntity.getKey()));
}
if (fieldEntity.getTargetClass() != null) {
fieldEntity.getTargetClass().setGenerate(true);
}
return fieldSb.append(String.format("public abstract %s %s(); " + fixme, fieldEntity.getFullNameType(), fieldName)).toString();
}
}
|
#!/bin/bash
# Builds the oot-explorer web bundle and stages the build output plus static
# assets under www/. Aborts on the first failing command.
set -e

# Build inside a subshell so the working directory is restored afterwards.
(
  cd oot-explorer-web
  npm install
  npm run build
)

mkdir -p -v www
cp -v oot-explorer-web/dist/* www/
cp -v oot-explorer-web/static/* www/
|
#!/bin/sh
# Builds the retro-esp32 launcher and packages it, together with prebuilt
# emulator binaries, into a single retro-esp32.fw image via mkfw.

#edit your ESP-IDF path here, tested with release/v3.3 branch
export IDF_PATH=~/esp3/esp-idf
#tune this to match yours
export ESPLAY_SDK=~/esp3/esplay-retro-emulation/esplay-sdk

# Abort if the source directory is missing instead of running make elsewhere.
cd retro-esp32 || exit 1
make -j4
#cd ../esplay-gnuboy
#make -j4
#cd ../esplay-nofrendo
#make -j4
#cd ../esplay-smsplusgx
#make -j4
cd ..

# Asset conversion reference:
#ffmpeg -i Tile.png -f rawvideo -pix_fmt rgb565 tile.raw

# mkfw location is overridable via $MKFW; the default keeps the original
# hard-coded path. NOTE(review): that path is machine-specific — confirm.
MKFW="${MKFW:-/home/spring/espold/esplay-base-firmware/tools/mkfw/mkfw}"
"$MKFW" Retro-ESP32 assets/retro-esp32.raw 0 16 1048576 retro-esp32 retro-esp32/build/retro-esp32.bin 0 17 655360 esplay-nofrendo esplay-nofrendo/build/esplay-nofrendo.bin 0 18 655360 esplay-gnuboy esplay-gnuboy/build/esplay-gnuboy.bin 0 19 1310720 esplay-smsplusgx esplay-smsplusgx/build/esplay-smsplusgx.bin

# Fixed: -f so the first run (when no previous firmware exists) does not
# print a spurious "rm: cannot remove" error.
rm -f retro-esp32.fw
mv firmware.fw retro-esp32.fw
|
#!/bin/bash
# Copyright 2018 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This initialization action installs Apache HBase on Dataproc Cluster.
# Strict mode: -E propagates the ERR disposition into functions, -x traces
# every command into the Dataproc init-action log.
set -Eeuxo pipefail
readonly NOT_SUPPORTED_MESSAGE="HBase initialization action is not supported on Dataproc 2.0+.
Use HBase Component instead: https://cloud.google.com/dataproc/docs/concepts/components/hbase"
# Abort early on 2.x images, where the managed HBase component replaces this.
[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
readonly HBASE_HOME='/etc/hbase'
# Cluster topology and Kerberos settings are read from instance metadata.
readonly CLUSTER_NAME=$(/usr/share/google/get_metadata_value attributes/dataproc-cluster-name)
readonly WORKER_COUNT=$(/usr/share/google/get_metadata_value attributes/dataproc-worker-count)
readonly DATAPROC_MASTER=$(/usr/share/google/get_metadata_value attributes/dataproc-master)
readonly MASTER_ADDITIONAL=$(/usr/share/google/get_metadata_value attributes/dataproc-master-additional || true)
# All master hostnames: the first master plus any comma-separated additional
# masters (present on HA clusters).
IFS=' ' read -r -a MASTER_HOSTNAMES <<<"${DATAPROC_MASTER} ${MASTER_ADDITIONAL//,/ }"
readonly ENABLE_KERBEROS=$(/usr/share/google/get_metadata_value attributes/enable-kerberos)
readonly KEYTAB_BUCKET=$(/usr/share/google/get_metadata_value attributes/keytab-bucket)
readonly DOMAIN=$(dnsdomainname)
# Kerberos realm convention: the DNS domain, upper-cased.
readonly REALM=$(echo "${DOMAIN}" | awk '{print toupper($0)}')
readonly ROLE=$(/usr/share/google/get_metadata_value attributes/dataproc-role)
readonly FQDN=$(hostname -f)
# Retries a command string until it succeeds.
# $1 - command to run (evaluated via eval; pass trusted input only)
# $2 - optional number of attempts (default 10, preserving old behavior)
# $3 - optional seconds to sleep between attempts (default 5)
# Returns 0 on the first success, 1 if every attempt failed.
function retry_command() {
  local cmd="$1"
  local attempts="${2:-10}"
  local delay="${3:-5}"
  local i
  for ((i = 0; i < attempts; i++)); do
    if eval "$cmd"; then
      return 0
    fi
    sleep "$delay"
  done
  return 1
}
# Refreshes the apt package index, retrying transient failures.
function update_apt_get() {
retry_command "apt-get update"
}
# Installs the given apt packages non-interactively, retrying on failure.
# Arguments: package names, joined and passed to `apt-get install -y`.
function install_apt_get() {
pkgs="$*"
retry_command "apt-get install -y $pkgs"
}
# Writes hbase-site.xml properties, installs systemd units for the HBase
# master and regionserver, and — when Kerberos is enabled — provisions
# principals, keytabs and JAAS configs. Runs on every cluster node; the
# keytab-generation steps are gated to the primary master node.
function configure_hbase() {
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.cluster.distributed' --value 'true' \
--clobber
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.zookeeper.property.initLimit' --value '20' \
--clobber
# systemd unit for the HBase master (started on master nodes by main()).
cat <<EOF >/etc/systemd/system/hbase-master.service
[Unit]
Description=HBase Master
Wants=network-online.target
After=network-online.target hadoop-hdfs-namenode.service
[Service]
User=root
Group=root
Type=simple
EnvironmentFile=/etc/environment
Environment=HBASE_HOME=/etc/hbase
ExecStart=/usr/bin/hbase \
--config ${HBASE_HOME}/conf/ \
master start
[Install]
WantedBy=multi-user.target
EOF
# systemd unit for the HBase regionserver (started on workers by main()).
cat <<EOF >/etc/systemd/system/hbase-regionserver.service
[Unit]
Description=HBase Regionserver
Wants=network-online.target
After=network-online.target hadoop-hdfs-datanode.service
[Service]
User=root
Group=root
Type=simple
EnvironmentFile=/etc/environment
Environment=HBASE_HOME=/etc/hbase
ExecStart=/usr/bin/hbase \
--config ${HBASE_HOME}/conf/ \
regionserver start
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
# HDFS root: nameservice URI on HA clusters, explicit master:8020 otherwise.
local hdfs_root="hdfs://${CLUSTER_NAME}"
if [[ -z "${MASTER_ADDITIONAL}" ]]; then
hdfs_root="hdfs://${CLUSTER_NAME}-m:8020"
fi
# Prepare and merge configuration values:
# hbase.rootdir
local hbase_root_dir
hbase_root_dir="$(/usr/share/google/get_metadata_value attributes/hbase-root-dir ||
echo "${hdfs_root}/hbase")"
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.rootdir' --value "${hbase_root_dir}" \
--clobber
# Optional separate WAL directory, only set when provided via metadata.
local hbase_wal_dir
hbase_wal_dir=$(/usr/share/google/get_metadata_value attributes/hbase-wal-dir || true)
if [[ -n ${hbase_wal_dir} ]]; then
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.wal.dir' --value "${hbase_wal_dir}" \
--clobber
fi
# zookeeper.quorum
# Derived from zoo.cfg server entries, joined into a comma-separated list.
local zookeeper_nodes
zookeeper_nodes="$(grep '^server\.' /etc/zookeeper/conf/zoo.cfg |
sort | uniq | cut -d '=' -f 2 | cut -d ':' -f 1 | xargs echo | sed "s/ /,/g")"
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.zookeeper.quorum' --value "${zookeeper_nodes}" \
--clobber
# Prepare kerberos specific config values for hbase-site.xml
if [ "${ENABLE_KERBEROS}" = true ]; then
# Kerberos authentication
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.security.authentication' --value "kerberos" \
--clobber
# Security authorization
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.security.authorization' --value "true" \
--clobber
# Kerberos master principal
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.master.kerberos.principal' --value "hbase/_HOST@${REALM}" \
--clobber
# Kerberos region server principal
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.regionserver.kerberos.principal' --value "hbase/_HOST@${REALM}" \
--clobber
# Kerberos master server keytab file path
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.master.keytab.file' --value "/etc/hbase/conf/hbase-master.keytab" \
--clobber
# Kerberos region server keytab file path
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.regionserver.keytab.file' --value "/etc/hbase/conf/hbase-region.keytab" \
--clobber
# Zookeeper authentication provider
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.zookeeper.property.authProvider.1' --value "org.apache.zookeeper.server.auth.SASLAuthenticationProvider" \
--clobber
# HBase coprocessor region classes
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.coprocessor.region.classes' --value "org.apache.hadoop.hbase.security.token.TokenProvider" \
--clobber
# Zookeeper remove host from principal
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.zookeeper.property.kerberos.removeHostFromPrincipal' --value "true" \
--clobber
# Zookeeper remove realm from principal
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.zookeeper.property.kerberos.removeRealmFromPrincipal' --value "true" \
--clobber
# Zookeeper znode
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'zookeeper.znode.parent' --value "/hbase-secure" \
--clobber
# HBase RPC protection
bdconfig set_property \
--configuration_file "${HBASE_HOME}/conf/hbase-site.xml" \
--name 'hbase.rpc.protection' --value "privacy" \
--clobber
fi
if [ "${ENABLE_KERBEROS}" = true ]; then
# Only the primary master creates principals/keytabs and uploads them.
if [[ "${HOSTNAME}" == "${DATAPROC_MASTER}" ]]; then
# Master
for m in "${MASTER_HOSTNAMES[@]}"; do
kadmin.local -q "addprinc -randkey hbase/${m}.${DOMAIN}@${REALM}"
echo "Generating hbase keytab..."
kadmin.local -q "xst -k ${HBASE_HOME}/conf/hbase-${m}.keytab hbase/${m}.${DOMAIN}"
gsutil cp "${HBASE_HOME}/conf/hbase-${m}.keytab" \
"${KEYTAB_BUCKET}/keytabs/${CLUSTER_NAME}/hbase-${m}.keytab"
done
# Worker
for ((c = 0; c < WORKER_COUNT; c++)); do
kadmin.local -q "addprinc -randkey hbase/${CLUSTER_NAME}-w-${c}.${DOMAIN}"
echo "Generating hbase keytab..."
kadmin.local -q "xst -k ${HBASE_HOME}/conf/hbase-${CLUSTER_NAME}-w-${c}.keytab hbase/${CLUSTER_NAME}-w-${c}.${DOMAIN}"
gsutil cp "${HBASE_HOME}/conf/hbase-${CLUSTER_NAME}-w-${c}.keytab" \
"${KEYTAB_BUCKET}/keytabs/${CLUSTER_NAME}/hbase-${CLUSTER_NAME}-w-${c}.keytab"
done
# The _success marker tells every other node that all keytabs are uploaded.
touch /tmp/_success
gsutil cp /tmp/_success "${KEYTAB_BUCKET}/keytabs/${CLUSTER_NAME}/_success"
fi
# Poll until the master has uploaded the _success marker (gsutil stat exits 0
# once the object exists). NOTE(review): there is no timeout — if the master
# fails before uploading, this loop spins forever; confirm that is acceptable.
success=1
while [[ $success == "1" ]]; do
sleep 1
success=$(
gsutil -q stat "${KEYTAB_BUCKET}/keytabs/${CLUSTER_NAME}/_success"
echo $?
)
done
# Define keytab path based on role
if [[ "${ROLE}" == 'Master' ]]; then
hbase_keytab_path=${HBASE_HOME}/conf/hbase-master.keytab
else
hbase_keytab_path=${HBASE_HOME}/conf/hbase-region.keytab
fi
# Copy keytab to machine
gsutil cp "${KEYTAB_BUCKET}/keytabs/${CLUSTER_NAME}/hbase-${HOSTNAME}.keytab" $hbase_keytab_path
# Change owner of keytab to hbase with read only permissions
if [ -f $hbase_keytab_path ]; then
chown hbase:hbase $hbase_keytab_path
chmod 0400 $hbase_keytab_path
fi
# Change regionserver information
for ((c = 0; c < WORKER_COUNT; c++)); do
echo "${CLUSTER_NAME}-w-${c}.${DOMAIN}" >>/tmp/regionservers
done
mv /tmp/regionservers ${HBASE_HOME}/conf/regionservers
# Add server JAAS
cat >/tmp/hbase-server.jaas <<EOF
Client {
com.sun.security.auth.module.Krb5LoginModule required
useKeyTab=true
storeKey=true
useTicketCache=false
keyTab="${hbase_keytab_path}"
principal="hbase/${FQDN}";
};
EOF
# Copy JAAS file to hbase conf directory
mv /tmp/hbase-server.jaas ${HBASE_HOME}/conf/hbase-server.jaas
# Add client JAAS
cat >/tmp/hbase-client.jaas <<EOF
Client {
com.sun.security.auth.module.Krb5LoginModule required
useKeyTab=false
useTicketCache=true;
};
EOF
# Copy JAAS file to hbase conf directory
mv /tmp/hbase-client.jaas ${HBASE_HOME}/conf/hbase-client.jaas
# Extend hbase enviroment variable script
cat ${HBASE_HOME}/conf/hbase-env.sh >/tmp/hbase-env.sh
cat >>/tmp/hbase-env.sh <<EOF
export HBASE_MANAGES_ZK=false
export HBASE_OPTS="\$HBASE_OPTS -Djava.security.auth.login.config=/etc/hbase/conf/hbase-client.jaas"
export HBASE_MASTER_OPTS="\$HBASE_MASTER_OPTS -Djava.security.auth.login.config=/etc/hbase/conf/hbase-server.jaas"
export HBASE_REGIONSERVER_OPTS="\$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config=/etc/hbase/conf/hbase-server.jaas"
EOF
# Copy script to hbase conf directory
mv /tmp/hbase-env.sh ${HBASE_HOME}/conf/hbase-env.sh
fi
# On single node clusters we must also start regionserver on it.
if [[ "${WORKER_COUNT}" -eq 0 ]]; then
systemctl start hbase-regionserver
fi
}
# Entry point: install HBase, write its configuration, then start the daemon
# matching this node's Dataproc role (master vs. regionserver/worker).
function main() {
update_apt_get
install_apt_get hbase
configure_hbase
if [[ "${ROLE}" == 'Master' ]]; then
systemctl start hbase-master
else
systemctl start hbase-regionserver
fi
}
main
|
import torch

# Build a (2, 3) tensor, insert a middle axis, and tile it twice along that
# axis, giving shape (2, 2, 3); then print it flattened back to (4, 3) rows.
base = torch.tensor([[1, 2, 3],
                     [-1, -2, -3]])
x = base.unsqueeze(1).repeat(1, 2, 1)
print(x.view(4, 3))
from typing import List


def caseCombinations(S: str) -> List[str]:
    """Return every string obtainable from S by changing letter casing.

    Non-alphabetic characters are kept as-is; for k letters the result has
    2**k entries, ordered with the lowercase branch before the uppercase one
    (matching a depth-first expansion).
    """
    # Iteratively extend a list of prefixes one character at a time. Expanding
    # each prefix with the lowercase variant before the uppercase one yields
    # exactly the DFS ordering of the recursive formulation.
    prefixes = [""]
    for ch in S:
        if ch.isalpha():
            prefixes = [p + variant
                        for p in prefixes
                        for variant in (ch.lower(), ch.upper())]
        else:
            prefixes = [p + ch for p in prefixes]
    return prefixes
import React from 'react';
import SvgIcon from './svgIcon';
// Props of the underlying SvgIcon, extended with an optional circled variant.
type Props = React.ComponentProps<typeof SvgIcon> & {
// When true, draws the checkmark inside a circle outline.
isCircled?: boolean;
};
// Checkmark icon. Forwards its ref to the underlying SvgIcon and switches
// between the circled and plain path data based on `isCircled`.
const IconCheckmark = React.forwardRef(function IconCheckmark(
{isCircled = false, ...props}: Props,
ref: React.Ref<SVGSVGElement>
) {
return (
<SvgIcon {...props} ref={ref} data-test-id="icon-check-mark">
{isCircled ? (
<React.Fragment>
<path d="M7,12a.78.78,0,0,1-.57-.26L4,9.05A.76.76,0,0,1,4.07,8a.75.75,0,0,1,1.06.07l1.75,2L10.77,4.3A.75.75,0,0,1,12,5.14L7.58,11.7A.77.77,0,0,1,7,12Z" />
<path d="M8,16a8,8,0,1,1,8-8A8,8,0,0,1,8,16ZM8,1.53A6.47,6.47,0,1,0,14.47,8,6.47,6.47,0,0,0,8,1.53Z" />
</React.Fragment>
) : (
<path d="M6.19,14.51a.77.77,0,0,1-.57-.25l-4.2-4.8a.75.75,0,0,1,1.13-1l3.56,4.06L13.36,1.82a.75.75,0,0,1,1-.21.76.76,0,0,1,.21,1.05L6.81,14.18a.73.73,0,0,1-.58.33Z" />
)}
</SvgIcon>
);
});
IconCheckmark.displayName = 'IconCheckmark';
export {IconCheckmark};
|
import { LOGIN, LOGOUT } from '../actionTypes/userActionTypes';
// Initial shape of the login slice: nobody is logged in.
const initialLoginState = {
  userLoggedIn: false,
};

/**
 * Tracks whether a user is currently logged in.
 *
 * Fixed: the default branch previously returned a fresh copy of
 * `initialLoginState`, which silently reset the login flag whenever ANY
 * unrelated action passed through the store. A reducer must return the
 * current state unchanged for actions it does not handle. The handled
 * branches now spread `state` so any future fields are preserved too.
 */
export const loginReducer = (state = initialLoginState, action) => {
  switch (action.type) {
    case LOGIN: {
      return { ...state, userLoggedIn: true };
    }
    case LOGOUT: {
      return { ...state, userLoggedIn: false };
    }
    default:
      return state;
  }
};
|
import React from 'react';
import Likert from './likert.js';
// Demo page rendering several Likert scales in each supported layout:
// auto (flexible), fixed-minimum width, and stacked.
export default function DemoMultiple() {
  return (
    <>
      <h1>Auto-layout</h1>
      {/* Sparse labels: only the endpoints carry text. */}
      <Likert
        question='How was your service today?'
        responses={[
          { value: 1, text: 'Poor' },
          { value: 2, text: '' },
          { value: 5, text: '' },
          { value: 6, text: '' },
          { value: 7, text: 'Excellent' },
        ]}
      />
      <Likert
        question='How many minutes did it take for your food arrive?'
        responses={[
          { value: 5, text: '5' },
          { value: 10, text: '10' },
          { value: 15, text: '15' },
          { value: 20, text: '20' },
          { value: 25, text: '25' },
          { value: 30, text: '30+' },
        ]}
      />
      <Likert
        question='How did your meal taste?'
        responses={[
          { value: 3, text: 'Horrible' },
          { value: 4, text: 'Meh' },
          { value: 5, text: 'OK' },
          { value: 6, text: 'Pretty good' },
          { value: 7, text: 'Amazing' },
        ]}
      />
      <Likert
        question='How likely are you to return again?'
        responses={[
          { value: 5, text: 'Never' },
          { value: 10, text: 'Maybe' },
          { value: 15, text: 'Probably' },
          { value: 20, text: 'Definitely' },
        ]}
      />
      <h2>Minimum sized likert scale layout</h2>
      {/* flexible={false} keeps the scale at its minimum width. */}
      <Likert
        flexible={false}
        question='How happy are you?'
        responses={[
          { value: 5, text: '😡' },
          { value: 10, text: '☹️' },
          { value: 15, text: '😐' },
          { value: 20, text: '😃' },
        ]}
      />
      <Likert
        flexible={false}
        question='What is your mood?'
        responses={[
          { value: 5, text: '😡' },
          { value: 10, text: '☹️' },
          { value: 15, text: '😐' },
          { value: 20, text: '😃' },
        ]}
      />
      <h2>Stacked layout</h2>
      {/* Stacked instances get explicit ids to keep their inputs distinct. */}
      <Likert
        id='stacked1'
        question='How was your service today?'
        responses={[
          { value: 1, text: 'Poor' },
          { value: 2, text: '' },
          { value: 5, text: '' },
          { value: 6, text: '' },
          { value: 7, text: 'Excellent' },
        ]}
        layout='stacked'
      />
      <Likert
        id='stacked2'
        question='How many minutes did it take for your food arrive?'
        responses={[
          { value: 5, text: '5' },
          { value: 10, text: '10' },
          { value: 15, text: '15' },
          { value: 20, text: '20' },
          { value: 25, text: '25' },
          { value: 30, text: '30+' },
        ]}
        layout='stacked'
      />
      <Likert
        id='stacked3'
        question='How did your meal taste?'
        responses={[
          { value: 3, text: 'Horrible' },
          { value: 4, text: 'Meh' },
          { value: 5, text: 'OK' },
          { value: 6, text: 'Pretty good' },
          { value: 7, text: 'Amazing' },
        ]}
        layout='stacked'
      />
    </>
  );
}
|
package gen
// Gen dispatches a code-generation sub-command.
//
// args[0] selects the generator ("graphql" or "rest"); further elements
// are generator-specific operands. Previously an empty args slice, or
// "rest" without an operand, panicked with an index-out-of-range error;
// those cases are now no-ops.
func Gen(args ...string) {
	if len(args) == 0 {
		return
	}
	switch args[0] {
	case "graphql":
		//graphql()
	case "rest":
		if len(args) > 1 {
			rest(args[1])
		}
	}
}
|
<gh_stars>1-10
# File: D (Python 2.4)
from pandac.PandaModules import *
from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from pirates.distributed import DistributedInteractive
from pirates.piratesbase import PiratesGlobals
from pirates.piratesbase import PLocalizer
from pirates.world.LocationConstants import LocationIds
from pirates.piratesbase import Freebooter
import string
from direct.showbase.PythonUtil import quickProfile
class DistributedDoor(DistributedInteractive.DistributedInteractive):
    """Client-side distributed door: plays open/close animations, sounds,
    and fades the local avatar through to the other side.

    NOTE(review): Python 2 source (print statements). Depends on Panda3D /
    Pirates Online client classes not visible here.
    """
    notify = directNotify.newCategory('DistributedDoor')
    notify.setDebug(0)
    # Class-level sound caches, shared by every door instance.
    openSfxDict = { }
    closeSfx = None

    def __init__(self, cr, name = 'DistributedDoor'):
        DistributedInteractive.DistributedInteractive.__init__(self, cr)
        # Doors are also NodePaths so they can be parented into the scene graph.
        NodePath.__init__(self, name)
        self.otherDoorId = 0
        self.doorState = PiratesGlobals.DOOR_CLOSED
        self.doorTrack = None
        self.fadeInTrack = None
        self.fadeOutTrack = None
        self.openOtherDoorIval = None
        self.closeOtherDoorIval = None
        self.soundNode = render
        self.locked = False
        self.hasDoors = 0
        # Seconds for a door leaf to swing open/closed.
        self.tOpen = 0.5
        # Lazily load the shared sound effects once, on first construction.
        if not self.closeSfx:
            DistributedDoor.openSfxDict['english'] = base.loadSfx('audio/sfx_door_english_open.mp3')
            DistributedDoor.openSfxDict['shanty'] = base.loadSfx('audio/sfx_door_shanty_open.mp3')
            DistributedDoor.openSfxDict['spanish'] = base.loadSfx('audio/sfx_door_spanish_open.mp3')
            DistributedDoor.closeSfx = base.loadSfx('audio/sfx_door_shanty_slam.mp3')
            DistributedDoor.openSfx = DistributedDoor.openSfxDict['english']
        self.questNeeded = 1

    def disable(self):
        # Stop listening and halt every interval before the object goes away.
        self.ignoreAll()
        if self.doorTrack:
            self.doorTrack.pause()
            self.doorTrack = None
        # openDoorIval/closeDoorIval are created in setupDoors (announceGenerate).
        self.openDoorIval.pause()
        del self.openDoorIval
        self.closeDoorIval.pause()
        del self.closeDoorIval
        if self.closeOtherDoorIval:
            self.closeOtherDoorIval.pause()
            self.closeOtherDoorIval = None
        if self.openOtherDoorIval:
            self.openOtherDoorIval.pause()
            self.openOtherDoorIval = None
        self.fadeOutTrack = None
        self.fadeInTrack = None
        DistributedInteractive.DistributedInteractive.disable(self)

    def announceGenerate(self):
        DistributedInteractive.DistributedInteractive.announceGenerate(self)
        # Tutorial gating: the Fort Charles door stays locked mid-tutorial.
        if hasattr(self, 'buildingUid') and self.buildingUid == LocationIds.PORT_ROYAL_FORT_CHARLES:
            tutFlag = localAvatar.style.getTutorial()
            if tutFlag > PiratesGlobals.TUT_GOT_CUTLASS and tutFlag < PiratesGlobals.TUT_GOT_COMPASS:
                self.setLocked(True)
            else:
                self.setLocked(False)
        self.setupDoors()
        self.setInteractOptions(proximityText = self.getPrompt(), diskRadius = 12.0, sphereScale = 7.0, endInteract = 0, allowInteract = self.allowInteract)

    def getPrompt(self):
        # Text shown when the avatar is close enough to interact.
        return PLocalizer.InteractOpenDoor

    def setDoorIndex(self, doorIndex):
        # Door 0 uses unsuffixed node names; door N uses the "_N+1" suffix.
        self.doorIndex = doorIndex
        doorIndexStr = ''
        if doorIndex > 0:
            doorIndexStr = '_' + str(doorIndex + 1)
        # ';+s' search flags: presumably include stashed nodes — TODO confirm.
        self.doorLeftStr = '**/door_left' + doorIndexStr + ';+s'
        self.doorRightStr = '**/door_right' + doorIndexStr + ';+s'
        self.doorLocatorStr = '**/door_locator' + doorIndexStr + ';+s'

    def setBuildingUid(self, buildingUid):
        self.buildingUid = buildingUid

    def getBuildingUid(self):
        return self.buildingUid

    def setMovie(self, mode, avId, timestamp):
        # Server broadcast of door activity. The local avatar instead
        # loads the destination; everyone else just sees the animation.
        if avId == localAvatar.doId:
            self.loadOtherSide()
            return None
        if timestamp is None:
            ts = 0.0
        else:
            ts = globalClockDelta.localElapsedTime(timestamp)
        if mode == PiratesGlobals.DOOR_OPEN and self.hasDoors:
            if self.doorTrack:
                self.doorTrack.pause()
                self.doorTrack = None
            self.doorTrack = Sequence(Func(base.playSfx, self.openSfx, node = self.soundNode, volume = 0.69999999999999996, cutoff = 100), self.openDoorIval, Wait(2.0), self.closeDoorIval, Func(base.playSfx, self.closeSfx, node = self.soundNode, volume = 0.69999999999999996, cutoff = 100))
            # Start partway through so late joiners stay in sync.
            self.doorTrack.start(ts)

    def getParentModel(self):
        # Overridden by subclasses to supply the model holding the door nodes.
        pass

    def setupDoors(self):
        # Build the open/close intervals from this building's door geometry
        # and position this interactive node at the doorway.
        self.openDoorIval = Parallel()
        self.closeDoorIval = Parallel()
        doors = self.getDoorInfo()
        if doors:
            self.noDoors = False
        else:
            self.noDoors = True
            return None
        if self.doorIndex < len(doors['left']):
            doorLeft = doors['left'][self.doorIndex]
            self.openDoorIval.append(LerpHprInterval(doorLeft, self.tOpen, Vec3(-90, 0, 0)))
            self.closeDoorIval.append(LerpHprInterval(doorLeft, self.tOpen, Vec3(0, 0, 0)))
        else:
            doorLeft = None
        if self.doorIndex < len(doors['right']):
            doorRight = doors['right'][self.doorIndex]
            self.openDoorIval.append(LerpHprInterval(doorRight, self.tOpen, Vec3(90, 0, 0)))
            self.closeDoorIval.append(LerpHprInterval(doorRight, self.tOpen, Vec3(0, 0, 0)))
        else:
            doorRight = None
        modelName = ''
        # Prefer an explicit locator node; otherwise derive a position from
        # whichever door leaves exist.
        if self.doorIndex < len(doors['locator']):
            doorLocator = doors['locator'][self.doorIndex]
            self.soundNode = doorLocator
            self.reparentTo(doorLocator)
            self.setPos(0, 0, 0)
            self.wrtReparentTo(self.getParentObj())
            modelName = doorLocator.getNetTag('ModelName')
        elif doorLeft and doorRight:
            doorLocator = None
            doorPos = doorRight.getPos(doorLeft) * 0.5
            self.reparentTo(doorLeft)
            self.setPos(doorPos)
            self.wrtReparentTo(self.getParentObj())
            modelName = doorLeft.getNetTag('ModelName')
            self.soundNode = doorRight
        elif doorLeft:
            self.reparentTo(doorLeft)
            self.setPos(4, 0, 0)
            self.wrtReparentTo(self.getParentObj())
            modelName = doorLeft.getNetTag('ModelName')
            self.soundNode = doorLeft
        elif doorRight:
            self.reparentTo(doorRight)
            self.setPos(-4, 0, 0)
            self.wrtReparentTo(self.getParentObj())
            modelName = doorRight.getNetTag('ModelName')
            self.soundNode = doorRight
        if not doorLeft:
            pass
        # NOTE(review): assigns the right door NodePath (or None), not a
        # bool; truthiness is what setMovie relies on — confirm intent.
        self.hasDoors = doorRight
        # Pick an open sound matching the building's model name.
        for name in self.openSfxDict:
            if name in modelName:
                self.openSfx = self.openSfxDict[name]
                continue
        self.openDoorIval.append(Func(base.playSfx, self.openSfx, node = self.soundNode, volume = 0.69999999999999996, cutoff = 100))

    def getOtherSideParentModel(self):
        # Overridden by subclasses: model containing the far-side door nodes.
        pass

    def setupOtherSideDoors(self):
        # Build open/close intervals for the destination side of the door.
        otherParent = self.getOtherSideParentModel()
        doorLeft = otherParent.find(self.doorLeftStr)
        doorRight = otherParent.find(self.doorRightStr)
        self.openOtherDoorIval = Parallel()
        self.closeOtherDoorIval = Parallel()
        if not doorLeft.isEmpty():
            self.openOtherDoorIval.append(LerpHprInterval(doorLeft, self.tOpen, Vec3(-90, 0, 0)))
            self.closeOtherDoorIval.append(LerpHprInterval(doorLeft, self.tOpen, Vec3(0, 0, 0)))
        if not doorRight.isEmpty():
            self.openOtherDoorIval.append(LerpHprInterval(doorRight, self.tOpen, Vec3(90, 0, 0)))
            self.closeOtherDoorIval.append(LerpHprInterval(doorRight, self.tOpen, Vec3(0, 0, 0)))
        if self.closeSfx is not None:
            self.closeOtherDoorIval.append(Sequence(Wait(0.25), Func(base.playSfx, self.closeSfx, node = self.soundNode, volume = 0.69999999999999996, cutoff = 100)))

    def requestInteraction(self, avId, interactType = 0):
        # Gate entry on paid status and (optionally) quest progress before
        # forwarding the request to the base class.
        if self.buildingUid == LocationIds.KINGSHEAD_OUTER_DOOR and not Freebooter.getPaidStatus(localAvatar.getDoId()):
            localAvatar.guiMgr.showNonPayer(quest = 'Restricted_Location', focus = 0)
            return None
        print 'requestinteraction', 0
        if avId == localAvatar.doId:
            self.fadeOut()
            return None
        if self.questNeeded:
            questHistory = localAvatar.getQuestLadderHistory()
            currentQuests = localAvatar.getQuests()
            # NOTE(review): QuestLadderDB and PiratesGuiGlobals are not
            # imported in this file — this branch would raise NameError.
            container = QuestLadderDB.getContainer(self.questNeeded)
            canEnter = False
            for quest in currentQuests:
                if container.getQuestId() == quest.getQuestId() or container.hasQuest(quest.getQuestId()):
                    canEnter = True
                    continue
            if not canEnter:
                if self.buildingUid == LocationIds.UNDEAD_POKER_SHACK:
                    localAvatar.guiMgr.createWarning(PLocalizer.ClubheartsQuestWarning, PiratesGuiGlobals.TextFG6)
                return None
        print 'requestinteraction', 2
        DistributedInteractive.DistributedInteractive.requestInteraction(self, avId, interactType)

    def rejectInteraction(self):
        # Server refused entry: fade the screen back in and resume roaming.
        if self.fadeOutTrack:
            self.fadeOutTrack.finish()
        si = Sequence(Func(base.transitions.fadeIn, self.tOpen), Func(localAvatar.gameFSM.request, 'LandRoam'))
        self.fadeInTrack = si
        self.fadeInTrack.start()
        DistributedInteractive.DistributedInteractive.rejectInteraction(self)

    def fadeOut(self):
        # Local avatar walks through: fade to black while the door animates,
        # then actually request the interaction from the server.
        base.transitions.setFadeColor(0, 0, 0)

        def doFadeOut():
            base.transitions.fadeOut(self.tOpen)

        if localAvatar.gameFSM is None:
            return None
        si = Sequence(Func(localAvatar.b_setGameState, 'DoorInteract'), Func(doFadeOut), self.openDoorIval, self.closeDoorIval, Func(DistributedInteractive.DistributedInteractive.requestInteraction, self, base.localAvatar.doId, 0))
        self.fadeOutTrack = si
        self.fadeOutTrack.start()

    def fadeIn(self):
        # Arriving on the far side: fade in while the far door opens/closes.
        sf = Sequence(Func(localAvatar.gameFSM.request, 'DoorInteract'), Func(base.transitions.fadeIn, self.tOpen), self.openOtherDoorIval, self.closeOtherDoorIval, Func(localAvatar.gameFSM.request, 'LandRoam'))
        self.fadeInTrack = sf
        self.fadeInTrack.start()

    def setLocked(self, locked):
        # Locked doors simply stop being interactable.
        self.setAllowInteract(not locked)
        self.locked = locked

    def showProximityStuff(self):
        DistributedInteractive.DistributedInteractive.showProximityStuff(self)
        base.cr.interactionMgr.useLifter(self.disk)

    def getDoorInfo(self):
        # Overridden by subclasses: dict with 'left'/'right'/'locator' lists.
        pass

    def setOtherDoorId(self, doorDoId):
        self.otherDoorId = doorDoId

    def getOtherDoorId(self):
        return self.otherDoorId

    def getDoorLocator(self):
        return self.getDoorInfo()['locator'][self.doorIndex]

    def getDoorLeft(self):
        return self.getDoorInfo()['left'][self.doorIndex]

    def getDoorRight(self):
        return self.getDoorInfo()['right'][self.doorIndex]

    def goOnStage(self):
        pass

    def goOffStage(self):
        pass

    def setQuestNeeded(self, questNeeded):
        self.questNeeded = questNeeded
|
#!/bin/bash
# Submit to queue.
# Creates a HOMER "Tag directory" per sample under /data/emarquez/tagdir.
# Tag directories are used to run various HOMER procedures; here they are
# used only to create bedGraph files, written to the bedGraph directory.
# Requires HOMER (makeTagDirectory, makeUCSCfile).
set -eu

if [[ "$#" -lt 2 ]]; then
  echo "$(basename "$0") [dirName] [groupName]" 1>&2
  echo " [dirName]: Name of subdirectory where the BAM files are located" 1>&2
  echo " [groupName]: Valid prefix of BAM file, depends on naming convention" 1>&2
  exit 1
fi

# Strip any trailing slash from the two arguments.
d=$(printf '%s' "$1" | sed 's:/*$::')
g=$(printf '%s' "$2" | sed 's:/*$::')

cd /data/emarquez/tagdir || exit 1

# Iterate NUL-free sorted BAM paths; quoting protects paths with spaces.
while IFS= read -r f; do
  # Sample id = first two underscore-separated fields of the file name.
  samp=$(basename "$f" | awk -F_ '{print $1 "_" $2}')
  sampname=$(basename "$f" .bam)
  mkdir -p "/data/emarquez/tagdir/${d}/${samp}"
  makeTagDirectory "/data/emarquez/tagdir/${d}/${samp}" "$f" -genome hg19 -single -fragLength 150
  makeUCSCfile "/data/emarquez/tagdir/${d}/${samp}" -fragLength 150 > "/data/emarquez/bedGraph/${d}/${sampname}.bedGraph"
done < <(find "/data/emarquez/bamfiles/${d}/" -maxdepth 1 -name "${g}*.bam" | sort)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = '<NAME>'
import unittest
from pyes.tests import ESTestCase
from pyes.es import DotDict
from copy import deepcopy
"""
Unit tests for pyes. These require an es server with thrift plugin running on the default port (localhost:9500).
"""
class ElasticSearchModelTestCase(ESTestCase):
    """Tests for pyes model objects and the DotDict helper.

    Requires a live ES server with the thrift plugin (see module docstring);
    ESTestCase supplies self.conn, self.index_name and self.document_type.
    """

    def setUp(self):
        super(ElasticSearchModelTestCase, self).setUp()
        self.init_default_index()

    def test_ElasticSearchModel_init(self):
        # factory_object builds a model bound to an index/type with attribute access.
        obj = self.conn.factory_object(self.index_name, self.document_type, {"name":"test", "val":1})
        self.assertEqual(obj.name, "test")
        obj.name = "aaa"
        self.assertEqual(obj.name, "aaa")
        self.assertEqual(obj.val, 1)
        # Unsaved objects have no id yet; it can be assigned manually.
        self.assertEqual(obj.meta.id, None)
        obj.meta.id = "dasdas"
        self.assertEqual(obj.meta.id, "dasdas")
        self.assertEqual(sorted(obj.keys()), ["meta", "name", "val"])
        # Save twice, then reload from ES to verify the update persisted.
        obj.save()
        obj.name = "test2"
        obj.save()
        reloaded = self.conn.get(self.index_name, self.document_type, obj.meta.id)
        self.assertEqual(reloaded.name, "test2")

    def test_DotDict(self):
        # deepcopy must produce an independent DotDict, not share state.
        dotdict = DotDict(foo="bar")
        dotdict2 = deepcopy(dotdict)
        dotdict2["foo"] = "baz"
        self.assertEqual(dotdict["foo"], "bar")
        self.assertEqual(dotdict2["foo"], "baz")
        self.assertEqual(type(dotdict2), DotDict)
        # Nested DotDicts must be deep-copied as well.
        dotdict = DotDict(foo="bar", bar=DotDict(baz="qux"))
        dotdict2 = deepcopy(dotdict)
        dotdict2["bar"]["baz"] = "foo"
        self.assertEqual(dotdict["bar"]["baz"], "qux")
        self.assertEqual(dotdict2["bar"]["baz"], "foo")
        self.assertEqual(type(dotdict2), DotDict)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
<gh_stars>0
import {lodash} from '@roots/bud-support'
import {Box, Text} from 'ink'
import React from 'react'
import {Bar} from './bar'
const {isNumber, isString} = lodash
/**
* Progress component
*
* @public
*/
export const Progress = ({progress, theme}) => {
if (!Array.isArray(progress)) return null
const [value, message] = progress
const hasMessage = message && isString(message) && message !== '[0] '
const hasValue = value && isNumber(value)
return (
<Box flexDirection={`column`}>
<Text wrap={`truncate-end`}>
{hasMessage ? message.replace('[0] ', '') : ''}
</Text>
<Bar
character={`▇`}
maxWidth={theme.bounds.width}
colors={[theme.colors.primary, theme.colors.accent]}
width={hasValue ? value : 0}
/>
</Box>
)
}
|
#!/bin/sh
# Stop the process whose PID is recorded in stealer.pid.
# Previously used unquoted backticks and failed confusingly when the
# pid file was missing; now checks first and quotes the substitution.
if [ -f stealer.pid ]; then
  kill "$(cat stealer.pid)"
else
  echo "stealer.pid not found" >&2
  exit 1
fi
<filename>scripts/helpers/deploy-contracts.js
const spawn = require('cross-spawn')
const minifyContracts = require('./minify-contracts')
// Runs `truffle migrate --reset --compile-all` in ./contracts, then minifies
// the compiled contract artifacts.
//
// Fixes two defects in the original:
//  1. Any stderr chunk rejected the promise — but truffle writes warnings
//     and progress to stderr, so successful runs could spuriously fail.
//  2. A non-zero exit code still minified the contracts and resolved,
//     silently hiding failed deployments. Now only exit code 0 resolves.
const deployContracts = () => {
  return new Promise((resolve, reject) => {
    const truffleMigrate = spawn('../node_modules/.bin/truffle', ['migrate', '--reset', '--compile-all'], { cwd: './contracts' })
    truffleMigrate.stdout.pipe(process.stdout)
    // Collect stderr for the error message, but keep streaming it through.
    let stderrOutput = ''
    truffleMigrate.stderr.on('data', data => {
      stderrOutput += String(data)
      process.stderr.write(data)
    })
    truffleMigrate.on('exit', code => {
      if (code === 0) {
        console.log('Truffle migrate finished OK.')
        minifyContracts()
        resolve()
      } else {
        reject(new Error(`truffle migrate exited with code ${code}\n${stderrOutput}`))
      }
    })
  })
}

module.exports = deployContracts
|
<reponame>opendroid/hk
package server
import (
"context"
"fmt"
"github.com/google/uuid"
"github.com/opendroid/hk/logger"
"go.uber.org/zap"
"net/http"
"strings"
"time"
)
// sessionHandler wraps another http.Handler, adding per-request "user"
// cookie management and structured request logging (see ServeHTTP).
type sessionHandler struct {
	handler http.Handler // the wrapped downstream handler
}

// Compile-time check that sessionHandler implements http.Handler.
var _ http.Handler = &sessionHandler{}
// NewSessionHandler wraps h in a sessionHandler so each request passes
// through session-cookie bookkeeping and logging before reaching h.
func NewSessionHandler(h http.Handler) *sessionHandler {
	sh := sessionHandler{handler: h}
	return &sh
}
// ServeHTTP ensures the request carries a "user" identity cookie (minting a
// new UUID cookie when absent), stores the user id in the request context,
// delegates to the wrapped handler, and logs timing/metadata for the request.
func (s *sessionHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	start := time.Now()
	var user string
	newSession := false
	cookie, err := r.Cookie("user")
	if err != nil {
		// No cookie yet: mint one and attach it to both response and request.
		n := newUserCookie()
		http.SetCookie(w, n)
		r.AddCookie(n) // Add it in request as well
		user = n.Value // n.Value is same as sanitized by AddCookie
		newSession = true
	} else {
		user = cookie.Value
	}
	// If path contains "/images/" set cache headers before delegating.
	enableImageCache(w, r.URL.Path)
	// Expose the user id to downstream handlers via the request context.
	ctx := context.WithValue(r.Context(), contextKeyUserID, user)
	rc := r.WithContext(ctx)
	s.handler.ServeHTTP(w, rc)
	end := time.Since(start)
	logger.Info("Request",
		zap.String("method", r.Method),
		zap.String("path", r.URL.Path),
		zap.String("user", user),
		zap.String("host", r.Host),
		zap.String("accept", r.Header.Get("Accept-Encoding")),
		zap.String("IP", r.RemoteAddr),
		zap.Bool("new", newSession),
		zap.Int64("ms", end.Milliseconds()))
}
// newUserCookie builds an HttpOnly "user" cookie carrying a fresh random
// UUID, expiring after one day (dayHrs / daySeconds are package constants).
// To encrypt or decrypt read
// https://www.thepolyglotdeveloper.com/2018/02/encrypt-decrypt-data-golang-application-crypto-packages/
func newUserCookie() *http.Cookie {
	return &http.Cookie{
		Name:     userCookieKey,
		Value:    uuid.New().String(),
		Expires:  time.Now().Add(dayHrs), // One day
		MaxAge:   daySeconds,
		HttpOnly: true, // not readable from client-side JS
	}
}
// enableImageCache sets one-day client caching headers on responses whose
// path is under an /images/ directory.
//
// Fix: the Cache-Control value was previously formatted as "max-age:%d"
// (colon), which is not valid header syntax — directives use '='
// ("max-age=86400"), so browsers ignored the directive entirely.
func enableImageCache(w http.ResponseWriter, path string) {
	if path != "" && strings.Contains(path, "/images/") {
		age := fmt.Sprintf("max-age=%d, public", daySeconds)
		w.Header().Set("Cache-Control", age)
		now := time.Now().Format(http.TimeFormat)
		w.Header().Set("Last-Modified", now)
		expire := time.Now().Add(dayHrs).Format(http.TimeFormat)
		w.Header().Set("Expires", expire)
		logger.Debug("Cache-Control Set", zap.String("path", path))
	}
}
|
<reponame>varajala/templateman
"""
The command line interface implementation.
Author: <NAME>
"""
import sys
import os
import shutil
import runpy
import contextlib
import typing as types
import templateman
# Package version reported by the 'help' command.
VERSION = '1.0.0'
# Environment variable that overrides the default template directory.
TEMPLATE_DIRECTORY_ENV_VAR = 'PY_TEMPLATES_DIR'
# Registry mapping CLI command names to handlers; filled by @register_command.
commands: types.Dict[str, types.Callable[[types.List[str],], None]] = dict()
def mkdir_exc_safe(path: str) -> types.Optional[str]:
    """Create a directory; return a formatted error string instead of raising.

    Returns None on success, otherwise "ExceptionName: message".
    """
    try:
        os.mkdir(path)
    except Exception as err:
        return f"{err.__class__.__name__}: {str(err)}"
    return None
def copy_file_exc_safe(src: str, dst: str) -> types.Optional[str]:
    """Copy src to dst; return a formatted error string instead of raising.

    Returns None on success, otherwise "ExceptionName: message".
    """
    try:
        shutil.copy(src, dst)
    except Exception as err:
        return f"{err.__class__.__name__}: {str(err)}"
    return None
def list_directory_exc_safe(path: str) -> types.Tuple[types.List[str], types.Optional[str]]:
    """List directory entries without raising.

    Returns (entries, None) on success, or ([], "ExceptionName: message")
    on failure.
    """
    try:
        return os.listdir(path), None
    except Exception as err:
        return [], f"{err.__class__.__name__}: {str(err)}"
def open_file_exc_safe(path: str, mode = 'r', *args) -> types.Tuple[object, types.Optional[str]]:
    """Open a file without raising.

    Returns (file, None) on success, or (None, "ExceptionName: message")
    on failure.  The caller is responsible for closing the file.
    """
    try:
        handle = open(path, mode, *args)
    except Exception as err:
        return None, f"{err.__class__.__name__}: {str(err)}"
    return handle, None
def remove_file_exc_safe(path: str) -> types.Optional[str]:
    """Delete a file; return a formatted error string instead of raising.

    Returns None on success, otherwise "ExceptionName: message".

    Fix for consistency with the other *_exc_safe helpers: previously this
    caught only (OSError, PermissionError) — PermissionError is already an
    OSError subclass — and returned a bare str(err) without the exception
    class name, unlike every sibling helper.
    """
    try:
        os.remove(path)
    except Exception as err:
        return f"{err.__class__.__name__}: {str(err)}"
    return None
def register_command(alias: str):
    """Decorator: register the wrapped function in the module-level
    ``commands`` table under ``alias``."""
    def decorator(func: types.Callable[[types.List[str],], None]):
        commands[alias] = func
        return func
    return decorator
def exec_command(command: str, args: types.List[str]):
    """Look up ``command`` in the registry and invoke it with ``args``.

    Raises:
        RuntimeError: if no command is registered under that name.
    """
    if command not in commands:
        # Fixed typo in the error message: "comman" -> "command".
        raise RuntimeError(f"Unknown command '{command}'")
    commands[command](args)
def parse_args(args: types.List[str], options: dict):
    """Dispatch-table option parser.

    ``options`` maps option strings (e.g. '-n') to a tuple
    (number_of_operands, handler). Each handler receives the slice of
    operands that follows its option. Unknown options or missing operands
    print an error and abort.
    """
    # Index of the first position NOT yet consumed; positions before it
    # were operands of an earlier option and are skipped.
    skip = -1
    for i in range(0, len(args)):
        if skip > i:
            continue
        arg = args[i]
        if arg not in options:
            templateman.print_error(f"Unknown option '{arg}'")
            templateman.abort()
            return
        args_consumed, handle_options = options[arg]
        start = i + 1
        end = start + args_consumed
        if len(args) < end:
            templateman.print_error(f"Option '{arg}' expected additional arguments")
            templateman.abort()
            return
        handle_options(args[start:end])
        skip = end
def resolve_template_directory() -> types.Optional[str]:
    """Return the template-script directory.

    Uses the PY_TEMPLATES_DIR environment variable when set, otherwise
    ``~/.py-templates``. Returns None if the home directory cannot be
    resolved (any exception is suppressed).
    """
    try:
        fallback = os.path.join(os.path.expanduser('~'), '.py-templates')
        return os.environ.get(TEMPLATE_DIRECTORY_ENV_VAR, fallback)
    except Exception:
        return None
@register_command('help')
def print_help(args: types.List[str]):
    """Show this help information."""
    def format_command_info(name: str, docstring: str = None) -> str:
        # Commands without a docstring get a placeholder description.
        if docstring is None:
            return ' > ' + name + ': ' + 'No description available...\n'
        return ' > ' + name + ': ' + docstring.strip() + '\n'
    commands_help = [format_command_info(name, func.__doc__) for name, func in commands.items()]
    help_text = [
        f'TemplateManager, version: {VERSION}',
        '',
        'A simple utility to manage and run Python scripts for automating',
        # Fix: a missing trailing comma here previously fused this entry
        # with the following '' entry, dropping the intended blank line.
        'project creation, boilerplate code and much more.',
        '',
        'Usage: ',
        '',
        ' $ python -m templateman [command] [arguments]',
        '',
        '',
        'Implemented commands:',
        '',
        *commands_help,
    ]
    print('\n'.join(help_text))
@register_command('remove')
def remove_installed_template(args: types.List[str]):
    """
    Remove installed template. This will remove the file from the
    template directory permanently.
    USAGE:
        $ python -m templateman remove [template-name]
    """
    template_dir = resolve_template_directory()
    if template_dir is None:
        error_message = 'Can\'t resolve users home directory for storing template scripts'
        templateman.print_error(error_message)
        templateman.abort()
        return
    if len(args) == 0:
        # NOTE(review): "atleast" typo in this user-facing message.
        templateman.print_error("Command 'remove' expected atleast one argument")
        templateman.abort()
        return
    template_path = os.path.join(template_dir, args[0])
    if not os.path.exists(template_path):
        templateman.print_error(f"No template installed with name '{args[0]}'")
        templateman.abort()
        return
    # Interactive confirmation before the permanent delete.
    # NOTE(review): "premanently" typo in this user-facing prompt.
    print(f"Are you sure you want to remove file '{template_path}' premanently?")
    ans = input('Input Y/y to remove this file: ')
    if ans.lower() == 'y':
        error = remove_file_exc_safe(template_path)
        if error:
            error_message = 'Unexpected error when removing file:\n'
            error_message += error
            templateman.print_error(error_message)
            templateman.abort()
            return
@register_command('list')
def list_installed_templates(args: types.List[str]):
    """List all installed templates."""
    template_dir = resolve_template_directory()
    if template_dir is None:
        error_message = 'Can\'t resolve users home directory for storing template scripts'
        templateman.print_error(error_message)
        templateman.abort()
        return
    # Create the template directory on first use.
    if not os.path.exists(template_dir):
        error = mkdir_exc_safe(template_dir)
        if error:
            error_message = 'Failed to create directory for storing template scripts:\n'
            error_message += error
            templateman.print_error(error_message)
            templateman.abort()
            return
    installed_templates, error = list_directory_exc_safe(template_dir)
    if error:
        error_message = 'Unexpected error when listing installed templates:\n'
        error_message += error
        templateman.print_error(error_message)
        templateman.abort()
        return
    print('Templates stored in directory:')
    print(template_dir)
    print()
    for template_name in installed_templates:
        print('> ', template_name)
    # Trailing blank line only when at least one template was listed.
    if installed_templates:
        print()
@register_command('install')
def install_template(args: types.List[str]):
    """
    Install a template script. This creates a copy of the provided file
    into the template directory.
    USAGE:
        $ python -m templateman install [filepath]
    """
    template_dir = resolve_template_directory()
    if template_dir is None:
        error_message = 'Can\'t resolve users home directory for storing template scripts'
        templateman.print_error(error_message)
        templateman.abort()
        return
    # Create the template directory on first use.
    if not os.path.exists(template_dir):
        error = mkdir_exc_safe(template_dir)
        if error:
            error_message = 'Failed to create directory for storing template scripts:\n'
            error_message += error
            templateman.print_error(error_message)
            templateman.abort()
            return
    if len(args) < 1:
        # NOTE(review): "atleast" typo in this user-facing message.
        templateman.print_error("Command 'install' expected atleast one argument")
        templateman.abort()
        return
    filepath = args[0]
    # Relative paths are resolved against the invoker's working directory.
    if not os.path.isabs(filepath):
        filepath = os.path.join(templateman.working_dir, filepath)
    if not os.path.exists(filepath):
        templateman.print_error("Given path does not exist")
        templateman.abort()
        return
    _, filename = os.path.split(filepath)
    # The file extension is intentionally stripped: 'run' looks up
    # installed templates by suffix-less name.
    filename, _ = os.path.splitext(filename)
    error = copy_file_exc_safe(filepath, os.path.join(template_dir, filename))
    if error:
        error_message = 'Install failed:\n'
        error_message += error
        templateman.print_error(error_message)
        templateman.abort()
        return
@register_command('run')
def run_template(args: types.List[str]):
    """
    Execute a template script. If the given filepath has no suffix, an installed
    template is searched. If file has any suffix, the current working directory is
    searched.
    USAGE:
        $ python -m templateman run [template-name] [arguments]
    ARGUMENTS:
        -o / --output-directory: Provide a output directory path for the script.
            Default value is the current working directory.
        -n / --name: Provide a name for the script. This should be used to name
            the generated file, project or other assets.
            Default value is "UNKNOWN".
        -a / --author: Provide author name for the script. This should be used
            in config/setup files in place of project's author name.
            Default value is "UNKNOWN".
    """
    if len(args) < 1:
        # NOTE(review): "atleast" typo in this user-facing message.
        templateman.print_error("Command 'run' expected atleast one argument")
        templateman.abort()
        return
    filename, suffix = os.path.splitext(args[0])
    # Default: resolve relative to the invoker's working directory.
    filepath = os.path.join(templateman.working_dir, args[0])
    if suffix == '':
        # No suffix: prefer an installed template of that name, if any.
        template_dir = resolve_template_directory()
        if template_dir is None:
            error_message = 'Can\'t resolve users home directory for storing template scripts'
            templateman.print_error(error_message)
            templateman.abort()
            return
        installed_templates, error = list_directory_exc_safe(template_dir)
        if not error and filename in installed_templates:
            filepath = os.path.join(template_dir, filename)
    # Option handlers write into the shared template_info mapping that
    # template scripts read during execution.
    def set_name(args: types.List[str]):
        templateman.template_info['name'] = args[0]
    def set_author(args: types.List[str]):
        templateman.template_info['author'] = args[0]
    def set_output_directory(args: types.List[str]):
        templateman.template_info['output_directory'] = args[0]
    all_options = {
        '--name': (1, set_name),
        '-n': (1, set_name),
        '--author': (1, set_author),
        '-a': (1, set_author),
        '--output-directory': (1, set_output_directory),
        '-o': (1, set_output_directory),
    }
    parse_args(args[1:], all_options)
    if not os.path.exists(filepath):
        error_message = f"Can't find file '{filepath}'"
        templateman.print_error(error_message)
        templateman.abort()
        return
    file, error = open_file_exc_safe(filepath, 'r')
    if error:
        error_message = f"Can't open file '{filepath}'"
        templateman.print_error(error_message)
        templateman.abort()
        return
    try:
        # Template scripts are arbitrary Python executed in-process.
        # NOTE(review): exec of user-provided files is inherently trusting;
        # only install templates from trusted sources.
        code = file.read()  # type: ignore
        exec(compile(code, filename, 'exec'))
    except Exception as err:
        error_message = 'There were errors during the execution of the script:'
        error_message += f'\n{err.__class__.__name__}: {str(err)}'
        templateman.print_error(error_message)
        templateman.abort()
    finally:
        file.close()  # type: ignore
|
import Parse from "parse";
import React, { useEffect, useState } from "react";
import useHeading from "../../../hooks/useHeading";
import { useForm } from "react-hook-form";
import { addError } from "../../../classes/Notifications/Notifications";
import { LoadingSpinner } from "../../LoadingSpinner/LoadingSpinner";
import { Link } from "react-router-dom";
import "./Register.scss";
import { makeCancelable } from "@clowdr-app/clowdr-db-schema/build/Util";
import { Conference } from "@clowdr-app/clowdr-db-schema";
// Props for the Register page: which conference/registration this link is
// for, and the pre-filled email address from the invitation.
interface Props {
    conferenceId: string;
    registrationId: string;
    email: string;
}
type FormData = {
password: <PASSWORD>;
fullName: string;
}
type Status = { state: "notwaiting" } | { state: "waiting" } | { state: "registered" };
export default function Register(props: Props) {
useHeading("Register");
const { register, handleSubmit, watch, errors } = useForm<FormData>();
const [status, setStatus] = useState<Status>({ state: "notwaiting" });
const [conference, setConference] = useState<Conference | null>(null);
const [loadFailed, setLoadFailed] = useState<boolean>(false);
useEffect(() => {
let cancelConferencePromise: () => void = () => { };
async function getConference() {
try {
const { promise, cancel } = makeCancelable(Conference.getAll()
.then(xs => xs.find(x => x.id === props.conferenceId))
.catch(async (reason) => {
setLoadFailed(true);
if (reason.toString().toLowerCase().includes("invalid session token")) {
try {
await Parse.User.logOut();
}
catch {
}
window.localStorage.clear();
addError("An error has occurred and the website cannot recover on its own. Please refresh the page. If you see this error again, please go to the Clowdr home page, delete your site data and then try again. If you continue to see this error or the loading message, please close your tab and contact your conference organiser for support.");
}
return null;
}));
cancelConferencePromise = cancel;
const _conference = await promise;
if (!_conference) {
setLoadFailed(true);
}
setConference(_conference ?? null);
cancelConferencePromise = () => { };
}
catch (e) {
if (!e || !e.isCanceled) {
throw e;
}
}
}
getConference();
return function cleanupGetConference() {
cancelConferencePromise();
}
}, [props.conferenceId])
async function doRegister(data: FormData): Promise<boolean | string> {
const ok = await Parse.Cloud.run("user-register", {
registrationId: props.registrationId,
conferenceId: props.conferenceId,
password: <PASSWORD>,
fullName: data.fullName,
}) as boolean;
return ok;
}
async function onSubmit(data: FormData) {
try {
setStatus({ state: "waiting" });
const p = makeCancelable(doRegister(data));
let ok: boolean | string = false;
try {
ok = await p.promise;
}
catch (e) {
ok = false;
console.error("Failed to register", e);
}
setStatus(ok && ok !== "Use existing password" ? { state: "registered" } : { state: "notwaiting" });
if (!ok) {
addError("Registration failed.");
}
else if (ok === "Use existing password") {
addError("You already have a Clowdr account, but you need to register a new profile for this conference. Please use your existing Clowdr password.");
}
}
catch (e) {
if (!e.isCanceled) {
throw e;
}
}
}
const goToSignInButton =
<Link
className="button sign-in"
aria-label="Go to sign in"
to="/">
Go to sign in
</Link>;
function registrationForm(_conference: Conference) {
return <>
<form onSubmit={handleSubmit(onSubmit)}>
<p>Welcome to Clowdr. Please choose a password to complete your registration for {_conference.name}.</p>
{/* TODO: Make this Code of Conduct link a public ConferenceConfiguration*/}
<p>
This conference has a <a href="https://cscw.acm.org/2020/index.php/code-of-conduct/">Code of Conduct</a>.
By registering for this conference you are agreeing to the <a href="https://cscw.acm.org/2020/index.php/code-of-conduct/">Code of Conduct</a>. The conference organisers may
revoke your access to the conference at any time if they determine you have breached the <a href="https://cscw.acm.org/2020/index.php/code-of-conduct/">Code of Conduct</a>.
</p>
<label htmlFor="email">Email</label>
<input name="email" type="email" value={props.email} disabled />
<label htmlFor="email">Repeat your email</label>
<input name="email-confirm" type="email" ref={register({
validate: (value) => value === props.email
})} />
{errors["email-confirm"] && "Email addresses do not match. Please ensure you use your registration's email address listed above."}
<label htmlFor="fullName">Full name</label>
<input name="fullName" ref={register({
required: true
})} />
{errors.fullName && "Full name required."}
{/* TODO: Enable this message for future conferences: <p>If you have previously used Clowdr with the email address above, please enter your existing password.</p> */}
<label htmlFor="password">Choose a password</label>
<input name="password" type="password" ref={register({
required: "Password required",
minLength: {
value: 10,
message: "Minimum length of 10 characters"
},
maxLength: {
value: 100,
message: "Maximum length of 100 characters"
},
})} />
{errors.password && "Must be at least 10 characters (maximum 100 characters)."}
<label htmlFor="password-repeat">Repeat your password</label>
<input name="password-repeat" type="password" ref={register({
validate: (value) => value === watch("password")
})} />
{errors["password-repeat"] && "Must match your chosen password."}
<p>
Clowdr is free and open-source software and not a
legal entity. Your data is held by the conference and
processed by the Clowdr software. You can
<Link to="/legal">find out more on this page.</Link>
The Clowdr development team are currently unpaid volunteers
and will do their best to make the conference run smoothly
for you.
</p>
<p>
Clowdr requires the use of the Firefox, Chrome or Edge
browsers. Safari and Opera are not supported at this time.
Most features will also work in Firefox and Chrome mobile.
</p>
<div className="submit-container">
<input type="submit" value="Register" />
</div>
</form>
</>;
}
function contents(_status: Status, _conference: Conference) {
switch (_status.state) {
case "notwaiting":
return registrationForm(_conference);
case "waiting":
return <LoadingSpinner message="Registering, please wait" />
case "registered":
return <>
<p>
You have successfully registered. Please return to the login
page to continue.
</p>
{goToSignInButton}
</>;
}
}
return <section aria-labelledby="page-title" tabIndex={0} className="register-form">
{loadFailed
? <p> Failed to load conference. </p>
: conference === null
? <LoadingSpinner message="Loading, please wait" />
: <>
<div className="register-section-header">
<h1>{conference.name}</h1>
</div>
{contents(status, conference)}
</>
}
</section>;
}
|
// Norwegian (Bokmål) locale strings for the Story Map builder, packaged as an
// AMD resource module. Placeholders such as %LBL_LAYOUT%, %TPL_NAME%, %NB% and
// %LAYOUT_TITLE% are substituted at runtime by the consuming application.
define({
    "builder": {
        "layouts": {
            "tabTitle": "Fanebasert",
            "tabDescr": "Viser kart og annet innhold ved hjelp av faner med et valgfritt panel med beskrivende tekst.",
            "tabItem": "Fane",
            "tabItems": "Faner",
            "sideTitle": "Trekkspill på siden",
            "sideDescr": "Viser kart og annet innhold ved hjelp av en utvidbar kontroll som inneholder beskrivende tekst.",
            "sideItem": "Oppføring",
            "sideItems": "Oppføringer",
            "bulletTitle": "Punktmerket",
            "bulletDescr": "Viser kart og annet innhold ved hjelp av punkter med et valgfritt panel med beskrivende tekst.",
            "bulletItem": "Punkt",
            "bulletItems": "Punktmerking"
        },
        "common": {
            "lblStatus1": "Publisert",
            "lblStatus3": "Skjult"
        },
        "settingsLayoutOptions": {
            "title": "Oppsettalternativer",
            "lblDescription": "Beskrivelse",
            "lblLegend": "Plassering av tegnforklaring",
            "tooltipLegend": "Velg hvor du vil at tegnforklaringen for kartet skal vises. Du kan aktivere tegnforklaringen for et kart når du legger til eller konfigurerer kartet.",
            "lblDropdown": "Rullegardinliste",
            "lblBelowDesc": "Under beskrivelsen",
            "lblOnPanel": "Som et panel",
            // NOTE(review): fixed typo — was "tegnforkaringspanel"; "tegnforklaring"
            // is the spelling used everywhere else in this resource.
            "lblPanelDescAndLegend": "Beskrivelse og tegnforklaringspanel",
            "lblPanelDescAndOrLegend": "Beskrivelse og/eller tegnforklaringspanel",
            "lblPanelDesc": "Beskrivelsespanel",
            "lblPanelLegend": "Tegnforklaringspanel",
            "lblPanelAccordion": "Trekkspillpanel",
            "cfgLeft": "Venstre",
            "cfgRight": "Høyre",
            "cfgSmall": "Liten",
            "cfgMedium": "Middels",
            "cfgLarge": "Stor",
            "lblNumbering": "Vis numre",
            "lblReverse": "Omvendt nummerering",
            "canOverlapMap": "kan overlappe kartlokasjon"
        },
        "settingsMapOptions": {
            "title": "Kartalternativer",
            "lblOverview": "Oversiktskart",
            "tooltipOverview": "Vis et lite oversiktskart sammen med hovedkartet.",
            "lblLocate": "Finn-knapp",
            "tooltipLocate": "Denne funksjonaliteten støttes av webleserne på de fleste mobile enheter og stasjonære datamaskiner (Internet Explorer 9).",
            "lblGeocoder": "Adresse- eller stedssøk",
            "tooltipGeocoder": "Brukes til å finne adresser og steder i kartene dine.",
            // NOTE(review): aligned with the setting name referenced in
            // help.content6AltDiv4 ("Synkroniser kartlokasjoner"); the label
            // previously read "Synkronisert kartlokasjoner".
            "lblSync": "Synkroniser kartlokasjoner",
            "tooltipSync": "Når dette er aktivert, brukes den opprinnelige lokasjonen i det første kartet i serien på alle kartene. Når brukerne navigerer i et kart, vises dette i alle kartene. Deaktiver alternativet for lokasjonen i hvert kart som skal være uavhengig."
        },
        "initPopup": {
            "title": "Velkommen til"
        },
        "addEditPopup": {
            "lblAdd": "Legg til",
            "lblEdit": "Rediger",
            "disabled": "Legg til er deaktivert fordi det maksimale antallet tillatte %LBL_LAYOUT% er nådd.",
            "titleAdd": "Legg til",
            "titleEdit": "Rediger",
            "stepMainStageNextTooltip": "Skriv inn tittel og innhold for %LBL_LAYOUT%",
            "titlePlaceholder": "%LBL_LAYOUT%-tittel..."
        },
        "textEditor": {
            "placeholder1": "Legg til tekst, koblinger og små grafikkelementer her.",
            "placeholder2": "Hvis denne er tom, skjules panelet."
        },
        "organizePopup": {
            "title": "Organiser",
            "lblHeader": "Dra og slipp %LBL_LAYOUT% for å organisere historien.",
            "lblColTitle": "Tittel",
            "lblColStatus": "Status",
            "btnApplyWarning": "Bekreft at %NB% %LBL_LAYOUT% skal slettes",
            "deleteTooltip": "Slett",
            "firstSectionExplain": "(Startseksjonen kan ikke flyttes)"
        },
        "help": {
            "lblHelp": "Hjelp",
            "lblAdd": "Legg til",
            "lblSettings": "Innstillinger",
            "lblOrga": "Organiser",
            "lblEdit": "Redigeringer",
            "lblPublish": "Del",
            "lblTips": "Tips",
            "lblMore": "Vil du ha mer?",
            "lblLink": "Gå til Esri-webområdet for historiekart.",
            "content1Div1": "Du oppretter %TPL_NAME% ved å bruke Legg til-knappen til å legge til hvert kart og annet innhold i oppsettet. Annet innhold kan være bilder, videoer eller innebygde websider eller kode. Det kan for eksempel være du vil at brukerne skal se et introduksjonsbilde eller en introduksjonsvideo første gang de starter %TPL_NAME%, før de går videre og utforsker kartene.",
            "content1Div2": "Når du klikker på Legg til-knappen, vises en dialogboks der du kan velge og konfigurere kartet og annet innhold du vil legge til. Du kan for eksempel angi lokasjonen kartet skal vise, aktivere tegnforklaringen for kartet osv.",
            "content2Div1": "I dialogboksen Innstillinger kan du endre utseendet på %TPL_NAME%. Du kan blant annet endre oppsettet, velge et annet fargeoppsett og angi hvor tegnforklaringen for kartet skal vises.",
            "content2Div2": "Du kan også bytte ut Esri-logoen i toppteksten i %TPL_NAME% med din egen logo for ditt varemerke. I tillegg kan du angi hvilken webside som skal åpnes når leserne klikker på logoen din, slik at de kan få mer informasjon.",
            "content3Div1": "I dialogboksen Organiser kan du administrere %TPL_NAME%. I denne dialogboksen kan du endre rekkefølgen på serien ved å dra og slippe.",
            "content3Div2": "Du kan også slette innhold eller skjule det. Muligheten til å skjule innhold er nyttig når du forbereder nytt innhold som ikke er klart til å tas med i fortellingskartet ennå.",
            "content4Div1": "Har du funnet en feil eller ønsker å endre materialet? Ikke noe problem. Se etter redigeringsikonet i programmet for å gjøre endringer av innholdet. Du kommer til å bruke redigeringsfunksjonene mange ganger etter hvert som du utvikler %TPL_NAME%!",
            "content5Div1": "Når du lagrer din %TPL_NAME%, er den som standard privat. Du kan bruke delingsknappen for å dele den med andre. Du kan dele din %TPL_NAME% offentlig, slik at den er tilgjengelig for alle.",
            "content5Div2": "Avhengig av hvilken konto du har, kan det hende du også har muligheten til å dele din %TPL_NAME% bare med personer i organisasjonen din, slik at andre ikke har tilgang til den.",
            "content6Div1": "Kartene i en serie synkroniseres som standard til å vise samme lokasjon. Det betyr at lokasjonen som vises i det første kartet, automatisk brukes i alle de andre kartene, og hvis en bruker zoomer inn eller panorerer til en annen lokasjon i kartet han eller hun ser på, så brukes denne lokasjonen i de andre kartene også.",
            "content6Div2": "Hvis du for eksempel har en serie som viser forskjellige tematiske data for en by, kan brukerne zoom til sitt nabolag og deretter veksle mellom fanene for å se på kartene for det aktuelle området.",
            "content6Div3": "Hvis du vil deaktivere lokasjonssynkroniseringen, går du til dialogboksen Innstillinger og fjerner avmerkingen for innstillingen i kategorien Kartalternativer.",
            "content6AltDiv1": "Kartlokasjonen synkroniseres som standard. Når synkroniseringen er deaktivert, kan hvert enkelt kart vise ulike lokasjoner.",
            "content6AltDiv2": "Synkronisering PÅ",
            "content6AltDiv3": "Synkronisering AV",
            "content6AltDiv4": "Hvis du vil deaktivere synkronisering, velger du Innstilinger > Kartalternativer og fjerner avmerkingen for Synkroniser kartlokasjoner."
        },
        "landing": {
            "lblAdd": "Hva vil du kalle %LAYOUT_TITLE%-kartserien?",
            "phAdd": "Skriv inn tittelen...",
            "lblOR": "Eller",
            "lblHelp": "Få en omvisning"
        }
    }
});
/*
* Copyright (c) 2017 Uber Technologies, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.uber.nullaway.dataflow;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.errorprone.VisitorState;
import com.google.errorprone.util.ASTHelpers;
import com.sun.source.tree.LiteralTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.source.tree.Tree;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Type;
import com.uber.nullaway.NullabilityUtil;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.VariableElement;
import org.checkerframework.nullaway.dataflow.cfg.node.FieldAccessNode;
import org.checkerframework.nullaway.dataflow.cfg.node.IntegerLiteralNode;
import org.checkerframework.nullaway.dataflow.cfg.node.LocalVariableNode;
import org.checkerframework.nullaway.dataflow.cfg.node.LongLiteralNode;
import org.checkerframework.nullaway.dataflow.cfg.node.MethodAccessNode;
import org.checkerframework.nullaway.dataflow.cfg.node.MethodInvocationNode;
import org.checkerframework.nullaway.dataflow.cfg.node.Node;
import org.checkerframework.nullaway.dataflow.cfg.node.StringLiteralNode;
import org.checkerframework.nullaway.dataflow.cfg.node.SuperNode;
import org.checkerframework.nullaway.dataflow.cfg.node.ThisNode;
import org.checkerframework.nullaway.dataflow.cfg.node.TypeCastNode;
import org.checkerframework.nullaway.dataflow.cfg.node.VariableDeclarationNode;
import org.checkerframework.nullaway.dataflow.cfg.node.WideningConversionNode;
import org.checkerframework.nullaway.javacutil.TreeUtils;
/**
* Represents an extended notion of an access path, which we track for nullness.
*
* <p>Typically, access paths are of the form x.f.g.h, where x is a variable and f, g, and h are
* field names. Here, we also allow no-argument methods to appear in the access path, as well as
* method calls passed only statically constant parameters, so an AP can be of the form
* x.f().g.h([int_expr|string_expr]) in general.
*
* <p>We do not allow array accesses in access paths for the moment.
*/
public final class AccessPath implements MapKey {
  /**
   * A prefix added for elements appearing in method invocation APs which represent fields that can
   * be proven to be class-initialization time constants (i.e. static final fields of a type known
   * to be structurally immutable, such as io.grpc.Metadata.Key).
   *
   * <p>This prefix helps avoid collisions between common field names and common strings, e.g.
   * "KEY_1" and the field KEY_1.
   */
  private static final String IMMUTABLE_FIELD_PREFIX = "static final [immutable] field: ";
  /**
   * Encode a static final field as a constant argument on a method's AccessPathElement
   *
   * <p>The field must be of a type known to be structurally immutable, in addition to being
   * declared static and final for this encoding to make any sense. We do not verify this here, and
   * rather operate only on the field's fully qualified name, as this is intended to be a quick
   * utility method.
   *
   * @param fieldFQN the field's Fully Qualified Name
   * @return a string suitable to be included as part of the constant arguments of an
   *     AccessPathElement, assuming the field is indeed static final and of an structurally
   *     immutable type
   */
  public static String immutableFieldNameAsConstantArgument(String fieldFQN) {
    return IMMUTABLE_FIELD_PREFIX + fieldFQN;
  }
  private final Root root;
  private final ImmutableList<AccessPathElement> elements;
  /**
   * if present, the argument to the map get() method call that is the final element of this path
   */
  @Nullable private final MapKey mapGetArg;
  AccessPath(Root root, List<AccessPathElement> elements) {
    this.root = root;
    this.elements = ImmutableList.copyOf(elements);
    this.mapGetArg = null;
  }
  private AccessPath(Root root, List<AccessPathElement> elements, MapKey mapGetArg) {
    this.root = root;
    this.elements = ImmutableList.copyOf(elements);
    this.mapGetArg = mapGetArg;
  }
  /**
   * Construct the access path of a local.
   *
   * @param node the local
   * @return access path representing the local
   */
  public static AccessPath fromLocal(LocalVariableNode node) {
    return new AccessPath(new Root(node.getElement()), ImmutableList.of());
  }
  /**
   * Construct the access path of a variable declaration.
   *
   * @param node the variable declaration
   * @return access path representing the variable declaration
   */
  static AccessPath fromVarDecl(VariableDeclarationNode node) {
    Element elem = TreeUtils.elementFromDeclaration(node.getTree());
    return new AccessPath(new Root(elem), ImmutableList.of());
  }
  /**
   * Construct the access path of a field access.
   *
   * @param node the field access
   * @param apContext the current access path context information (see {@link
   *     AccessPath.AccessPathContext}).
   * @return access path for the field access, or <code>null</code> if it cannot be represented
   */
  @Nullable
  static AccessPath fromFieldAccess(FieldAccessNode node, AccessPathContext apContext) {
    List<AccessPathElement> elements = new ArrayList<>();
    Root root = populateElementsRec(node, elements, apContext);
    return (root != null) ? new AccessPath(root, elements) : null;
  }
  /**
   * Construct the access path of a method call.
   *
   * @param node the method call
   * @param apContext the current access path context information (see {@link
   *     AccessPath.AccessPathContext}).
   * @return access path for the method call, or <code>null</code> if it cannot be represented
   */
  @Nullable
  static AccessPath fromMethodCall(
      MethodInvocationNode node, @Nullable VisitorState state, AccessPathContext apContext) {
    if (state != null && isMapGet(ASTHelpers.getSymbol(node.getTree()), state)) {
      return fromMapGetCall(node, apContext);
    }
    return fromVanillaMethodCall(node, apContext);
  }
  @Nullable
  private static AccessPath fromVanillaMethodCall(
      MethodInvocationNode node, AccessPathContext apContext) {
    List<AccessPathElement> elements = new ArrayList<>();
    Root root = populateElementsRec(node, elements, apContext);
    return (root != null) ? new AccessPath(root, elements) : null;
  }
  /**
   * Construct the access path given a {@code base.element} structure.
   *
   * @param base the base expression for the access path
   * @param element the final element of the access path (a field or method)
   * @param apContext the current access path context information (see {@link
   *     AccessPath.AccessPathContext}).
   * @return the {@link AccessPath} {@code base.element}
   */
  @Nullable
  public static AccessPath fromBaseAndElement(
      Node base, Element element, AccessPathContext apContext) {
    List<AccessPathElement> elements = new ArrayList<>();
    Root root = populateElementsRec(base, elements, apContext);
    if (root == null) {
      return null;
    }
    elements.add(new AccessPathElement(element));
    return new AccessPath(root, elements);
  }
  /**
   * Construct the access path given a {@code base.method(CONS)} structure.
   *
   * <p>IMPORTANT: Be careful with this method, the argument list is not the variable names of the
   * method arguments, but rather the string representation of primitive-type compile-time constants
   * or the name of static final fields of structurally immutable types (see {@link
   * #populateElementsRec(Node, List, AccessPathContext)}).
   *
   * <p>This is used by a few specialized Handlers to set nullability around particular paths
   * involving constants.
   *
   * @param base the base expression for the access path
   * @param method the last method call in the access path
   * @param constantArguments a list of <b>constant</b> arguments passed to the method call
   * @param apContext the current access path context information (see {@link
   *     AccessPath.AccessPathContext}).
   * @return the {@link AccessPath} {@code base.method(CONS)}
   */
  @Nullable
  public static AccessPath fromBaseMethodAndConstantArgs(
      Node base, Element method, List<String> constantArguments, AccessPathContext apContext) {
    List<AccessPathElement> elements = new ArrayList<>();
    Root root = populateElementsRec(base, elements, apContext);
    if (root == null) {
      return null;
    }
    elements.add(new AccessPathElement(method, constantArguments));
    return new AccessPath(root, elements);
  }
  /**
   * Construct the access path for <code>map.get(x)</code> from an invocation of <code>put(x)</code>
   * or <code>containsKey(x)</code>.
   *
   * @param node a node invoking containsKey() or put() on a map
   * @param apContext the current access path context information (see {@link
   *     AccessPath.AccessPathContext}).
   * @return an AccessPath representing invoking get() on the same type of map as from node, passing
   *     the same first argument as is passed in node
   */
  @Nullable
  public static AccessPath getForMapInvocation(
      MethodInvocationNode node, AccessPathContext apContext) {
    // For the receiver type for get, use the declared type of the receiver of the containsKey()
    // call.
    // Note that this may differ from the containing class of the resolved containsKey() method,
    // which
    // can be in a superclass (e.g., LinkedHashMap does not override containsKey())
    // assumption: map type will not both override containsKey() and inherit get()
    return fromMapGetCall(node, apContext);
  }
  // Unwraps nested casts, e.g. ((Map) m) -> m, so casts don't break AP matching.
  private static Node stripCasts(Node node) {
    while (node instanceof TypeCastNode) {
      node = ((TypeCastNode) node).getOperand();
    }
    return node;
  }
  @Nullable
  private static MapKey argumentToMapKeySpecifier(Node argument, AccessPathContext apContext) {
    // Required to have Node type match Tree type in some instances.
    if (argument instanceof WideningConversionNode) {
      argument = ((WideningConversionNode) argument).getOperand();
    }
    // A switch at the Tree level should be faster than multiple if checks at the Node level.
    switch (argument.getTree().getKind()) {
      case STRING_LITERAL:
        return new StringMapKey(((StringLiteralNode) argument).getValue());
      case INT_LITERAL:
        return new NumericMapKey(((IntegerLiteralNode) argument).getValue());
      case LONG_LITERAL:
        return new NumericMapKey(((LongLiteralNode) argument).getValue());
      case METHOD_INVOCATION:
        MethodAccessNode target = ((MethodInvocationNode) argument).getTarget();
        Node receiver = stripCasts(target.getReceiver());
        List<Node> arguments = ((MethodInvocationNode) argument).getArguments();
        // Check for int/long boxing.
        if (target.getMethod().getSimpleName().toString().equals("valueOf")
            && arguments.size() == 1
            && receiver.getTree().getKind().equals(Tree.Kind.IDENTIFIER)
            && (receiver.toString().equals("Integer") || receiver.toString().equals("Long"))) {
          return argumentToMapKeySpecifier(arguments.get(0), apContext);
        }
        // Fine to fallthrough:
      default:
        // Every other type of expression, including variables, field accesses, new A(...), etc.
        return getAccessPathForNodeNoMapGet(argument, apContext); // Every AP is a MapKey too
    }
  }
  @Nullable
  private static AccessPath fromMapGetCall(MethodInvocationNode node, AccessPathContext apContext) {
    Node argument = node.getArgument(0);
    MapKey mapKey = argumentToMapKeySpecifier(argument, apContext);
    if (mapKey == null) {
      return null;
    }
    MethodAccessNode target = node.getTarget();
    Node receiver = stripCasts(target.getReceiver());
    List<AccessPathElement> elements = new ArrayList<>();
    Root root = populateElementsRec(receiver, elements, apContext);
    if (root == null) {
      return null;
    }
    return new AccessPath(root, elements, mapKey);
  }
  /**
   * Gets corresponding AccessPath for node, if it exists. Does <em>not</em> handle calls to
   * <code>Map.get()</code>
   *
   * @param node AST node
   * @param apContext the current access path context information (see {@link
   *     AccessPath.AccessPathContext}).
   * @return corresponding AccessPath if it exists; <code>null</code> otherwise
   */
  @Nullable
  public static AccessPath getAccessPathForNodeNoMapGet(Node node, AccessPathContext apContext) {
    return getAccessPathForNodeWithMapGet(node, null, apContext);
  }
  /**
   * Gets corresponding AccessPath for node, if it exists. Handles calls to <code>Map.get()
   * </code>
   *
   * @param node AST node
   * @param state the visitor state
   * @param apContext the current access path context information (see {@link
   *     AccessPath.AccessPathContext}).
   * @return corresponding AccessPath if it exists; <code>null</code> otherwise
   */
  @Nullable
  public static AccessPath getAccessPathForNodeWithMapGet(
      Node node, @Nullable VisitorState state, AccessPathContext apContext) {
    if (node instanceof LocalVariableNode) {
      return fromLocal((LocalVariableNode) node);
    } else if (node instanceof FieldAccessNode) {
      return fromFieldAccess((FieldAccessNode) node, apContext);
    } else if (node instanceof MethodInvocationNode) {
      return fromMethodCall((MethodInvocationNode) node, state, apContext);
    } else {
      return null;
    }
  }
  /**
   * Constructs an access path ending with the class field element in the argument. The receiver is
   * the method receiver itself.
   *
   * @param element the receiver element.
   * @return access path representing the class field
   */
  public static AccessPath fromFieldElement(VariableElement element) {
    Preconditions.checkArgument(
        element.getKind().isField(),
        "element must be of type: FIELD but received: " + element.getKind());
    Root root = new Root();
    return new AccessPath(root, Collections.singletonList(new AccessPathElement(element)));
  }
  // True for java.lang autoboxing methods like Integer.valueOf / Long.valueOf.
  private static boolean isBoxingMethod(Symbol.MethodSymbol methodSymbol) {
    return methodSymbol.isStatic()
        && methodSymbol.getSimpleName().contentEquals("valueOf")
        && methodSymbol.enclClass().packge().fullname.contentEquals("java.lang");
  }
  @Nullable
  private static Root populateElementsRec(
      Node node, List<AccessPathElement> elements, AccessPathContext apContext) {
    Root result;
    if (node instanceof FieldAccessNode) {
      FieldAccessNode fieldAccess = (FieldAccessNode) node;
      if (fieldAccess.isStatic()) {
        // this is the root
        result = new Root(fieldAccess.getElement());
      } else {
        // instance field access
        result = populateElementsRec(stripCasts(fieldAccess.getReceiver()), elements, apContext);
        elements.add(new AccessPathElement(fieldAccess.getElement()));
      }
    } else if (node instanceof MethodInvocationNode) {
      MethodInvocationNode invocation = (MethodInvocationNode) node;
      AccessPathElement accessPathElement;
      MethodAccessNode accessNode = invocation.getTarget();
      if (invocation.getArguments().size() == 0) {
        accessPathElement = new AccessPathElement(accessNode.getMethod());
      } else {
        List<String> constantArgumentValues = new ArrayList<>();
        for (Node argumentNode : invocation.getArguments()) {
          Tree tree = argumentNode.getTree();
          if (tree == null) {
            return null; // Not an AP
          } else if (tree.getKind().equals(Tree.Kind.METHOD_INVOCATION)) {
            // Check for boxing call
            MethodInvocationTree methodInvocationTree = (MethodInvocationTree) tree;
            if (methodInvocationTree.getArguments().size() == 1
                && isBoxingMethod(ASTHelpers.getSymbol(methodInvocationTree))) {
              tree = methodInvocationTree.getArguments().get(0);
            }
          }
          switch (tree.getKind()) {
            case BOOLEAN_LITERAL:
            case CHAR_LITERAL:
            case DOUBLE_LITERAL:
            case FLOAT_LITERAL:
            case INT_LITERAL:
            case LONG_LITERAL:
            case STRING_LITERAL:
              constantArgumentValues.add(((LiteralTree) tree).getValue().toString());
              break;
            case NULL_LITERAL:
              // Um, probably not? Return null for now.
              return null; // Not an AP
            case MEMBER_SELECT: // check for Foo.CONST
            case IDENTIFIER: // check for CONST
              // Check for a constant field (static final)
              Symbol symbol = ASTHelpers.getSymbol(tree);
              if (symbol.getKind().equals(ElementKind.FIELD)) {
                Symbol.VarSymbol varSymbol = (Symbol.VarSymbol) symbol;
                // From docs: getConstantValue() returns the value of this variable if this is a
                // static final field initialized to a compile-time constant. Returns null
                // otherwise.
                // This means that foo(FOUR) will match foo(4) iff FOUR=4 is a compile time
                // constant :)
                Object constantValue = varSymbol.getConstantValue();
                if (constantValue != null) {
                  constantArgumentValues.add(constantValue.toString());
                  break;
                }
                // The above will not work for static final fields of reference type, since they are
                // initialized at class-initialization time, not compile time. Properly handling
                // such fields would further require proving deep immutability for the object type
                // itself. We use a handler-augment list of safe types:
                Set<Modifier> modifiersSet = varSymbol.getModifiers();
                if (modifiersSet.contains(Modifier.STATIC)
                    && modifiersSet.contains(Modifier.FINAL)
                    && apContext.isStructurallyImmutableType(varSymbol.type)) {
                  String immutableFieldFQN =
                      varSymbol.enclClass().flatName().toString()
                          + "."
                          + varSymbol.flatName().toString();
                  constantArgumentValues.add(
                      immutableFieldNameAsConstantArgument(immutableFieldFQN));
                  break;
                }
              }
              // Cascade to default, symbol is not a constant field
              // fall through
            default:
              return null; // Not an AP
          }
        }
        accessPathElement = new AccessPathElement(accessNode.getMethod(), constantArgumentValues);
      }
      result = populateElementsRec(stripCasts(accessNode.getReceiver()), elements, apContext);
      elements.add(accessPathElement);
    } else if (node instanceof LocalVariableNode) {
      result = new Root(((LocalVariableNode) node).getElement());
    } else if (node instanceof ThisNode) {
      result = new Root();
    } else if (node instanceof SuperNode) {
      result = new Root();
    } else {
      // don't handle any other cases
      result = null;
    }
    return result;
  }
  /**
   * Creates an access path representing a Map get call, where the key is obtained by calling {@code
   * next()} on some {@code Iterator}. Used to support reasoning about iteration over a map's key
   * set using an enhanced-for loop.
   *
   * @param mapNode Node representing the map
   * @param iterVar local variable holding the iterator
   * @param apContext access path context
   * @return access path representing the get call, or {@code null} if the map node cannot be
   *     represented with an access path
   */
  @Nullable
  public static AccessPath mapWithIteratorContentsKey(
      Node mapNode, LocalVariableNode iterVar, AccessPathContext apContext) {
    List<AccessPathElement> elems = new ArrayList<>();
    Root root = populateElementsRec(mapNode, elems, apContext);
    if (root != null) {
      return new AccessPath(
          root, elems, new IteratorContentsKey((VariableElement) iterVar.getElement()));
    }
    return null;
  }
  /**
   * Creates an access path identical to {@code accessPath} (which must represent a map get), but
   * replacing its map {@code get()} argument with {@code mapKey}
   */
  public static AccessPath replaceMapKey(AccessPath accessPath, MapKey mapKey) {
    return new AccessPath(accessPath.getRoot(), accessPath.getElements(), mapKey);
  }
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof AccessPath)) {
      return false;
    }
    AccessPath that = (AccessPath) o;
    if (!root.equals(that.root)) {
      return false;
    }
    if (!elements.equals(that.elements)) {
      return false;
    }
    return mapGetArg != null
        ? (that.mapGetArg != null && mapGetArg.equals(that.mapGetArg))
        : that.mapGetArg == null;
  }
  @Override
  public int hashCode() {
    int result = root.hashCode();
    result = 31 * result + elements.hashCode();
    result = 31 * result + (mapGetArg != null ? mapGetArg.hashCode() : 0);
    return result;
  }
  public Root getRoot() {
    return root;
  }
  public ImmutableList<AccessPathElement> getElements() {
    return elements;
  }
  @Nullable
  public MapKey getMapGetArg() {
    return mapGetArg;
  }
  @Override
  public String toString() {
    return "AccessPath{" + "root=" + root + ", elements=" + elements + '}';
  }
  private static boolean isMapGet(Symbol.MethodSymbol symbol, VisitorState state) {
    return NullabilityUtil.isMapMethod(symbol, state, "get", 1);
  }
  public static boolean isContainsKey(Symbol.MethodSymbol symbol, VisitorState state) {
    return NullabilityUtil.isMapMethod(symbol, state, "containsKey", 1);
  }
  public static boolean isMapPut(Symbol.MethodSymbol symbol, VisitorState state) {
    return NullabilityUtil.isMapMethod(symbol, state, "put", 2)
        || NullabilityUtil.isMapMethod(symbol, state, "putIfAbsent", 2);
  }
  /**
   * root of an access path; either a variable {@link javax.lang.model.element.Element} or <code>
   * this</code> (enclosing method receiver)
   */
  public static final class Root {
    /** does this represent the receiver? */
    private final boolean isMethodReceiver;
    @Nullable private final Element varElement;
    Root(Element varElement) {
      this.isMethodReceiver = false;
      this.varElement = Preconditions.checkNotNull(varElement);
    }
    /** for case when it represents the receiver */
    Root() {
      this.isMethodReceiver = true;
      this.varElement = null;
    }
    /**
     * Get the variable element of this access path root, if not representing <code>this</code>.
     *
     * @return the variable, if not representing 'this'
     */
    public Element getVarElement() {
      return Preconditions.checkNotNull(varElement);
    }
    /**
     * Check whether this access path root represents the receiver (i.e. <code>this</code>).
     *
     * @return <code>true</code> if representing 'this', <code>false</code> otherwise
     */
    public boolean isReceiver() {
      return isMethodReceiver;
    }
    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      Root root = (Root) o;
      if (isMethodReceiver != root.isMethodReceiver) {
        return false;
      }
      return varElement != null ? varElement.equals(root.varElement) : root.varElement == null;
    }
    @Override
    public int hashCode() {
      int result = (isMethodReceiver ? 1 : 0);
      result = 31 * result + (varElement != null ? varElement.hashCode() : 0);
      return result;
    }
    @Override
    public String toString() {
      return "Root{" + "isMethodReceiver=" + isMethodReceiver + ", varElement=" + varElement + '}';
    }
  }
  private static final class StringMapKey implements MapKey {
    private final String key;
    public StringMapKey(String key) {
      this.key = key;
    }
    @Override
    public int hashCode() {
      return this.key.hashCode();
    }
    @Override
    public boolean equals(Object obj) {
      if (obj instanceof StringMapKey) {
        return this.key.equals(((StringMapKey) obj).key);
      }
      return false;
    }
  }
  private static final class NumericMapKey implements MapKey {
    private final long key;
    public NumericMapKey(long key) {
      this.key = key;
    }
    @Override
    public int hashCode() {
      return Long.hashCode(this.key);
    }
    @Override
    public boolean equals(Object obj) {
      if (obj instanceof NumericMapKey) {
        return this.key == ((NumericMapKey) obj).key;
      }
      return false;
    }
  }
  /**
   * Represents all possible values that could be returned by calling {@code next()} on an {@code
   * Iterator} variable
   */
  public static final class IteratorContentsKey implements MapKey {
    /**
     * Element for the local variable holding the {@code Iterator}. We only support locals for now,
     * as this class is designed specifically for reasoning about iterating over map keys using an
     * enhanced-for loop over a {@code keySet()}, and for such cases the iterator is always stored
     * locally
     */
    private final VariableElement iteratorVarElement;
    IteratorContentsKey(VariableElement iteratorVarElement) {
      this.iteratorVarElement = iteratorVarElement;
    }
    public VariableElement getIteratorVarElement() {
      return iteratorVarElement;
    }
    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      IteratorContentsKey that = (IteratorContentsKey) o;
      return iteratorVarElement.equals(that.iteratorVarElement);
    }
    @Override
    public int hashCode() {
      return iteratorVarElement.hashCode();
    }
  }
  /**
   * Represents a per-javac instance of an AccessPath context options.
   *
   * <p>This includes, for example, data on known structurally immutable types.
   */
  public static final class AccessPathContext {
    private final ImmutableSet<String> immutableTypes;
    private AccessPathContext(ImmutableSet<String> immutableTypes) {
      this.immutableTypes = immutableTypes;
    }
    public boolean isStructurallyImmutableType(Type type) {
      return immutableTypes.contains(type.tsym.toString());
    }
    public static Builder builder() {
      return new AccessPathContext.Builder();
    }
    /** class for building up instances of the AccessPathContext. */
    public static final class Builder {
      @Nullable private ImmutableSet<String> immutableTypes;
      Builder() {}
      /**
       * Passes the set of structurally immutable types registered into this AccessPathContext.
       *
       * <p>See {@link com.uber.nullaway.handlers.Handler.onRegisterImmutableTypes} for more info.
       *
       * @param immutableTypes the immutable types known to our dataflow analysis.
       */
      public Builder setImmutableTypes(ImmutableSet<String> immutableTypes) {
        this.immutableTypes = immutableTypes;
        return this;
      }
      /**
       * Construct the immutable AccessPathContext instance.
       *
       * @return an access path context constructed from everything added to the builder
       * @throws IllegalStateException if {@link #setImmutableTypes(ImmutableSet)} was never called
       */
      public AccessPathContext build() {
        // Fail fast here rather than deferring a NullPointerException to some arbitrary
        // later isStructurallyImmutableType() lookup.
        if (immutableTypes == null) {
          throw new IllegalStateException("must call setImmutableTypes before building");
        }
        return new AccessPathContext(immutableTypes);
      }
    }
  }
}
|
from numpy import array
def array_statistics(mu5):
    """Return sum, mean, max and min of an array-like as a dict.

    Parameters
    ----------
    mu5 : array-like
        Any input accepted by ``numpy.array``.

    Returns
    -------
    dict
        Keys ``'sum'``, ``'mean'``, ``'max'``, ``'min'`` (numpy scalars).
    """
    data = array(mu5)
    return {
        'sum': data.sum(),
        'mean': data.mean(),
        'max': data.max(),
        'min': data.min(),
    }
# Quick smoke test: a 1x2 array exercising all four statistics.
mu5 = array([[9, 0.0]])
print(array_statistics(mu5))  # Output: {'sum': 9.0, 'mean': 4.5, 'max': 9.0, 'min': 0.0}
#!/usr/bin/bash
# Copyright (c) 2021. Huawei Technologies Co.,Ltd.ALL rights reserved.
# This program is licensed under Mulan PSL v2.
# You can use it according to the terms and conditions of the Mulan PSL v2.
# http://license.coscl.org.cn/MulanPSL2
# THIS PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.
# #############################################
# @Author : Classicriver_jia
# @Contact : classicriver_jia@foxmail.com
# @Date : 2020-4-9
# @License : Mulan PSL v2
# @Desc : Generating HTML output
# #############################################
source ${OET_PATH}/libs/locallibs/common_lib.sh
# Prepare the environment: install the powertop package via the
# framework's DNF_INSTALL helper (tracks it for later DNF_REMOVE).
function pre_test() {
    LOG_INFO "Start to prepare the test environment."
    DNF_INSTALL powertop
    LOG_INFO "End to prepare the test environment."
}
# Run the test: generate an HTML powertop report and verify both the
# command's exit status and that the report actually mentions powertop.
function run_test() {
    LOG_INFO "Start to run test."
    # Export one measurement round to htmlfile.html.
    powertop --html=htmlfile.html
    CHECK_RESULT $?
    # Sanity-check the report content.
    grep powertop htmlfile.html
    CHECK_RESULT $?
    LOG_INFO "End to run test."
}
# Restore the environment: delete the generated report and remove the
# packages installed by DNF_INSTALL in pre_test.
function post_test() {
    LOG_INFO "Start to restore the test environment."
    rm -rf htmlfile.html
    DNF_REMOVE
    LOG_INFO "End to restore the test environment."
}
main $@
|
import random
from multiprocessing import Pool
def multiprocessing_task(n):
    """Build a list of ``n`` random integers drawn uniformly from [1, 100]."""
    return [random.randint(1, 100) for _ in range(n)]
def main():
    """Generate one random list of 10 numbers using a 3-worker pool."""
    # The context manager terminates and reaps the worker processes even
    # if map() raises; the original never closed/joined the pool, leaking
    # the workers.
    with Pool(3) as pool:
        random_list = pool.map(multiprocessing_task, [10])
    print("The random list of length 10 is:", random_list[0])

if __name__ == "__main__":
    main()
<filename>packages/webclient/webpack.vendors.dll.config.js
'use strict';

// Webpack 1.x configuration that builds a DLL bundle of shared vendor
// libraries; app builds link against it via the emitted manifest.
var isProduction = process.env.NODE_ENV === 'production';
var PORT = 8080; // NOTE(review): unused in this config — presumably kept for parity with sibling configs; confirm before removing.

console.log('..running ' + process.env.NODE_ENV + ' build');
console.log(process.argv);

var path = require('path'),
    webpack = require('webpack'),
    // NOTE(review): ExtractTextPlugin is required but never used below.
    ExtractTextPlugin = require('extract-text-webpack-plugin'),
    autoprefixer = require('autoprefixer');

var config = {
    entry: {
        // Everything listed here ends up in vendors.dll.js.
        vendors: [
            'react',
            'react-dom',
            'react-router',
            'redux',
            'react-redux',
            'styled-components'
        ]
    },
    output: {
        path: path.join(__dirname, 'dist'),
        filename: '[name].dll.js',
        // Expose the bundle under a global the DLL manifest refers to.
        library: '[name]'
    },
    module: {
        // Lint TypeScript sources before compilation.
        preLoaders: [{
            test: /\.tsx?$/,
            loader: 'tslint',
            include: [
                path.join(__dirname),
            ],
        }],
        loaders: [{
            test: /\.tsx?$/,
            include: path.resolve(__dirname),
            exclude: /node_modules/,
            loaders: ['ts']
        },
        {
            test: /\.json$/,
            loaders: ['json'],
            include: [
                path.join(__dirname),
            ]
        },
        // Fonts/images: inline small assets as data URIs, emit the rest as files.
        { test: /\.woff2(\?v=\d+\.\d+\.\d+)?$/, loader: "url?limit=10000&mimetype=application/font-woff" },
        { test: /\.woff(\?v=\d+\.\d+\.\d+)?$/, loader: "url?limit=10000&mimetype=application/font-woff" },
        { test: /\.ttf(\?v=\d+\.\d+\.\d+)?$/, loader: "url?limit=10000&mimetype=application/octet-stream" },
        { test: /\.eot(\?v=\d+\.\d+\.\d+)?$/, loader: "file" },
        { test: /\.svg(\?v=\d+\.\d+\.\d+)?$/, loader: "url?limit=10000&mimetype=image/svg+xml" },
        // The production and development branches of the original
        // isProduction ternary were byte-for-byte identical, so the
        // .less rule is now appended unconditionally.
        {
            test: /\.less$/,
            loader: 'style-loader!css-loader!postcss-loader!less-loader',
            include: [
                path.join(__dirname),
            ]
        }]
    },
    ts: {
        compiler: 'typescript',
        configFileName: path.resolve(__dirname, './tsconfig.json')
        // do not emit declarations since we are bundling
        // compilerOptions: { declaration: false },
    },
    resolve: {
        root: [
            path.resolve(__dirname),
        ],
        extensions: ['', '.js', '.ts', '.tsx']
    },
    postcss: function() {
        return [autoprefixer({ browsers: ['last 2 versions'] })];
    },
    plugins: [
        new webpack.HotModuleReplacementPlugin(),
        //https://github.com/moment/moment/issues/1435#issuecomment-232687733
        new webpack.IgnorePlugin(/^\.\/locale$/, /moment$/),
        // Emit the manifest that application builds use to reference this DLL.
        new webpack.DllPlugin({
            path: path.join(__dirname, "dist", "[name].dll-manifest.json"),
            name: "[name]"
        })
    ].concat(isProduction ? [
        new webpack.DefinePlugin({
            'process.env': {
                'NODE_ENV': JSON.stringify('production')
            }
        }),
        new webpack.optimize.UglifyJsPlugin({
            compress: {
                warnings: true
            }
        })
    ] : [])
};

// Development-only conveniences: source maps, no bail, caching, verbose debug.
if (!isProduction) {
    config.devtool = '#source-map';
    config.bail = false;
    config.cache = true;
    config.debug = true;
}

module.exports = config;
#!/usr/bin/env bash
# Run the test suite under both Python 2 and Python 3.
# Assumes py.test (with pytest-cov) is installed for both interpreters.
# Fixes: command substitutions are now quoted (paths with spaces), and
# 'command -v' replaces the non-portable 'command which'.
p2dir=$(dirname "$(command -v python2)")
"${p2dir}/py.test" --cov=ldtable tests.py

p3dir=$(dirname "$(command -v python3)")
"${p3dir}/py.test" --cov=ldtable tests.py
|
// Editable title field and its read-only mirror (shows title + extension).
var title = document.querySelector('#title')
var titleDisabled = document.querySelector('#title-disabled');

(function() {
    // The editor instance is published globally by the editor module.
    var doc = window.editor;

    // Minimal localStorage-backed "file system" for the editor: keeps an
    // array of {title, extension, content} objects under the 'docsData'
    // key and wires up the open/save/new/delete/download UI.
    function FileSystem(doc) {
        var savestatus = document.querySelector('#savestatus');
        var dataString = localStorage.getItem('docsData');
        if(dataString) {
            this.docsData = JSON.parse(dataString);
        } else {
            this.docsData = [];
        }
        // Stable reference for the callbacks below (their `this` differs).
        var fs = this;

        // Load a file into the editor and remember it as the open file.
        this.open = function(file) {
            title.value = file.title
            titleDisabled.value = file.title + file.extension;
            doc.setText(file.content);
            fs.openFile = file;
        }

        // Persist the open file to localStorage. Skipped when neither the
        // content nor the title changed, unless `force` is truthy. Always
        // refreshes the "Saved at HH:MM" status line (12-hour clock).
        this.save = function(force) {
            if(force || fs.openFile.content != doc.getDocumentAsPlainText()
                || fs.openFile.title != title.value) {
                fs.openFile.content = doc.getDocumentAsPlainText();
                fs.openFile.title = title.value;
                localStorage.setItem('docsData', JSON.stringify(fs.docsData));
                console.log('Document saved to localStorage');
            } else {
                console.log('Document not saved to localStorage since nothing changed');
            }
            // Build a zero-padded 12-hour timestamp; `|| 12` maps hour 0 to 12.
            var date = new Date();
            var hour = (((date.getHours() > 12 ? date.getHours() - 12 : date.getHours())) || 12).toString();
            if(hour.length == 1) hour = '0' + hour;
            var min = (date.getMinutes()).toString();
            if(min.length == 1) min = '0' + min;
            savestatus.innerHTML = `Saved at ${hour}:${min} ${(date.getHours() > 12) ? 'PM' : 'AM'}`;
        }

        // Create a new untitled .txt document, open it, and force-save.
        this.new = function(text) {
            var newfile = {
                'title': 'untitled',
                'extension': '.txt',
                'content': text || ''
            };
            this.docsData.push(newfile);
            this.open(newfile);
            this.save(true);
        }

        // On startup: open the first stored document, or seed a demo one.
        if(this.docsData[0]) {
            this.open(this.docsData[0]);
        } else {
            this.new('document.addEventListener(\'copy\', function(e) {\n e.clipboardData.setData(\'text/plain\', \'Hello, world!\');\n e.clipboardData.setData(\'text/html\', \'<b>Hello, world!</b>\');\n e.preventDefault(); // We want our data, not data from any selection, to be written to the clipboard\n});');
        }

        // Debounced autosave: reset() (re)arms a timer that fires fs.save
        // after `interval` ms of inactivity.
        this.saveTimer = {
            interval: 3000,
            timer: null,
            reset: function() {
                if(fs.saveTimer.timer) fs.saveTimer.timer = clearTimeout(fs.saveTimer.timer);
                fs.saveTimer.timer = setTimeout(fs.save, fs.saveTimer.interval);
                console.log(`Save timer reset to ${fs.saveTimer.interval}ms`);
                savestatus.innerHTML = 'Waiting to save';
            }
        }

        // Keep the read-only mirror horizontally scrolled in sync with the
        // editable field (deferred so the scroll position has settled).
        var scrollTitle = function() {
            setTimeout(function() {
                titleDisabled.scrollLeft = title.scrollLeft;
            }, 0);
        }
        title.addEventListener('input', function(e) {
            titleDisabled.value = title.value + fs.openFile.extension;
            fs.save();
        });
        title.addEventListener('keyup', scrollTitle);
        title.addEventListener('scroll', scrollTitle);
        title.addEventListener('mousewheel', scrollTitle);

        // File menu: toggles open/closed; when opening, rebuild the list of
        // documents plus a trailing "New" entry.
        var file = document.querySelector('#file');
        file.querySelector('.button').addEventListener('click', function(e) {
            if(file.classList.contains('open')) {
                file.classList.remove('open');
            } else {
                let menu = file.querySelector('.menu');
                // Clear any previously rendered entries.
                while(menu.firstChild) {
                    menu.removeChild(menu.firstChild);
                }
                for(let i = 0; i < fs.docsData.length; i++) {
                    let li = document.createElement('li');
                    let icon = document.createElement('img');
                    icon.setAttribute('src', 'icons/file.svg')
                    li.appendChild(icon);
                    li.appendChild(document.createTextNode(fs.docsData[i].title));
                    let ext = document.createElement('span');
                    ext.appendChild(document.createTextNode(`${fs.docsData[i].extension} \u2014 ${fs.docsData[i].content.length} bytes`));
                    li.appendChild(ext);
                    menu.appendChild(li);
                    // Save the current document before switching.
                    li.addEventListener('click', function(e) {
                        fs.save();
                        fs.open(fs.docsData[i]);
                        file.classList.remove('open');
                    });
                    if(fs.docsData[i] == fs.openFile) {
                        li.classList.add('openFile');
                    }
                }
                let li = document.createElement('li');
                let icon = document.createElement('img');
                icon.setAttribute('src', 'icons/plus.svg')
                li.appendChild(icon);
                li.appendChild(document.createTextNode('New'));
                menu.appendChild(li);
                li.addEventListener('click', function(e) {
                    fs.new()
                    file.classList.remove('open');
                });
                file.classList.add('open');
            }
        });

        // Delete the open document and fall back to the first remaining one
        // (or a fresh document when none are left).
        document.querySelector('#delete').addEventListener('click', function(e) {
            var index = fs.docsData.indexOf(fs.openFile);
            fs.docsData.splice(index, 1);
            fs.save(true);
            if(fs.docsData.length) {
                fs.open(fs.docsData[0]);
            } else {
                fs.new();
            }
        });

        // Download the open document via a temporary object-URL anchor.
        document.querySelector('#download').addEventListener('click', function(e) {
            // from http://stackoverflow.com/a/30832210
            var a = document.createElement("a"),
                file = new Blob([doc.getDocumentAsPlainText()], {type: 'text/plain'}),
                url = URL.createObjectURL(file);
            a.href = url;
            a.download = fs.openFile.title + fs.openFile.extension;
            document.body.appendChild(a);
            a.click();
            // Deferred cleanup so the click has been processed first.
            setTimeout(function() {
                document.body.removeChild(a);
                window.URL.revokeObjectURL(url);
            }, 0);
        });
    }
    doc.fileSystem = new FileSystem(doc);
})();
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from math import fabs
from compas_plotters.core import draw_xpoints_xy
from compas_plotters.core import draw_xlines_xy
from compas_plotters.core import draw_xarrows_xy
from compas_plotters.core import draw_xpolygons_xy
from compas.utilities import color_to_colordict
from compas.utilities import is_color_light
from compas_plotters.core import size_to_sizedict
import matplotlib.pyplot as plt
__author__ = ['<NAME>']
__email__ = '<EMAIL>'
__all__ = ['Viewer']
class Viewer(object):
""""""
def __init__(self, form, force, delay_setup=True, figsize=(16, 9)):
    """Create a side-by-side viewer for a form and a force diagram.

    Parameters
    ----------
    form : diagram
        The form diagram, drawn in the left subplot (``ax1``).
    force : diagram
        The force diagram, drawn in the right subplot (``ax2``).
    delay_setup : bool
        When True (default), the matplotlib figure is only created once
        :meth:`setup` is called explicitly.
    figsize : tuple
        Figure size in inches.
    """
    self.form = form
    self.force = force
    # Figure and axes are populated by setup().
    self.fig = None
    self.ax1 = None
    self.ax2 = None
    # Default drawing attributes; draw_form/draw_force arguments override
    # these per call.
    self.default_facecolor = '#ffffff'
    self.default_edgecolor = '#000000'
    self.default_vertexcolor = '#ffffff'
    self.default_vertexsize = 0.1
    self.default_edgewidth = 1.0
    self.default_compressioncolor = '#0000ff'
    self.default_tensioncolor = '#ff0000'
    self.default_externalforcecolor = '#00ff00'
    self.default_externalforcewidth = 2.0
    self.default_textcolor = '#000000'
    self.default_fontsize = 8
    self.default_pointsize = 0.1
    self.default_linewidth = 1.0
    self.default_pointcolor = '#ffffff'
    self.default_linecolor = '#000000'
    self.default_linestyle = '-'
    self.figsize = figsize
    if not delay_setup:
        self.setup()
def setup(self):
    """Create the figure with two equal-aspect, axis-less subplots.

    ``ax1`` (left) holds the form diagram, ``ax2`` (right) the force
    diagram. Both get fixed limits so the scaled drawings fit.
    """
    self.fig = plt.figure(figsize=self.figsize, tight_layout=True, dpi=96)
    # The string form add_subplot('121') was deprecated in matplotlib 3.3
    # and removed in 3.7 — pass the geometry as integers instead.
    self.ax1 = self.fig.add_subplot(1, 2, 1)
    self.ax2 = self.fig.add_subplot(1, 2, 2)
    self.ax1.set_aspect('equal')
    self.ax2.set_aspect('equal')
    self.ax1.set_xticks([])
    self.ax1.set_yticks([])
    self.ax1.set_xmargin(1.0)
    self.ax1.set_ymargin(1.0)
    self.ax1.set_xlim(-1.0, 11.0)
    self.ax1.set_ylim(-1.0, 11.0)
    self.ax2.set_xticks([])
    self.ax2.set_yticks([])
    self.ax2.set_xmargin(1.0)
    self.ax2.set_ymargin(1.0)
    self.ax2.set_xlim(-1.0, 11.0)
    self.ax2.set_ylim(-1.0, 11.0)
    self.fig.patch.set_facecolor('white')
    self.ax1.axis('off')
    self.ax2.axis('off')
def draw_form(self,
              vertices_on=True,
              edges_on=True,
              faces_on=False,
              forces_on=True,
              external_on=True,
              arrows_on=False,
              vertexcolor=None,
              edgecolor=None,
              facecolor=None,
              edgelabel=None,
              vertexlabel=None,
              facelabel=None,
              vertexsize=None,
              forcescale=1.0,
              lines=None,
              points=None):
    """Draw the form diagram in the left subplot (``ax1``).

    The diagram is translated and scaled so it fits a 10x10 region.
    Edges attached to leaf vertices are treated as external forces when
    ``external_on`` is True. ``lines``/``points`` are extra annotation
    geometry given as dicts (same keys as used below).

    NOTE(review): edge widths drawn when ``forces_on`` is True scale with
    ``forcescale * |attr['f']|`` — presumably ``'f'`` is the axial force
    from the equilibrium solver; confirm against the form diagram schema.
    """
    # preprocess: normalize label/size/color arguments to per-key dicts
    vertexlabel = vertexlabel or {}
    edgelabel = edgelabel or {}
    facelabel = facelabel or {}
    vertexsize = size_to_sizedict(vertexsize, self.form.vertices(), self.default_vertexsize)
    vertexcolor = color_to_colordict(vertexcolor, self.form.vertices(), self.default_vertexcolor)
    edgecolor = color_to_colordict(edgecolor, self.form.edges(), self.default_edgecolor)
    facecolor = color_to_colordict(facecolor, self.form.faces(), self.default_facecolor)
    # scale and position: include annotation lines in the bounding box
    x = self.form.vertices_attribute('x')
    y = self.form.vertices_attribute('y')
    if lines:
        x += [line['start'][0] for line in lines]
        x += [line['end'][0] for line in lines]
        y += [line['start'][1] for line in lines]
        y += [line['end'][1] for line in lines]
    xmin, ymin = min(x), min(y)
    xmax, ymax = max(x), max(y)
    dx, dy = -xmin, -ymin
    # Uniform scale so the larger extent maps onto 10 drawing units.
    scale = max(fabs(xmax - xmin) / 10.0, fabs(ymax - ymin) / 10.0)
    # vertices
    if vertices_on:
        _points = []
        for key, attr in self.form.vertices(True):
            bgcolor = vertexcolor[key]
            _points.append({
                'pos': [(attr['x'] + dx) / scale, (attr['y'] + dy) / scale],
                'radius': vertexsize[key],
                'facecolor': vertexcolor[key],
                'edgecolor': self.default_edgecolor,
                'linewidth': self.default_edgewidth * 0.5,
                'text': None if key not in vertexlabel else str(vertexlabel[key]),
                # Pick a label color that contrasts with the vertex fill.
                'textcolor': '#000000' if is_color_light(bgcolor) else '#ffffff',
                'fontsize': self.default_fontsize,
            })
        draw_xpoints_xy(_points, self.ax1)
    # edges
    if edges_on:
        leaves = set(self.form.leaves())
        _lines = []
        _arrows = []
        for (u, v), attr in self.form.edges(True):
            sp, ep = self.form.edge_coordinates(u, v, 'xy')
            sp = ((sp[0] + dx) / scale, (sp[1] + dy) / scale)
            ep = ((ep[0] + dx) / scale, (ep[1] + dy) / scale)
            if external_on:
                # Leaf edges represent external forces (or independent
                # edges, drawn black and extra wide).
                if u in leaves or v in leaves:
                    text = None if (u, v) not in edgelabel else str(edgelabel[(u, v)])
                    _arrows.append({
                        'start': sp,
                        'end': ep,
                        'width': self.default_externalforcewidth if not attr['is_ind'] else self.default_edgewidth * 3,
                        'color': self.default_externalforcecolor if not attr['is_ind'] else '#000000',
                        'text': text,
                        'fontsize': self.default_fontsize
                    })
                else:
                    if forces_on:
                        # Overlay a wide translucent band whose width is
                        # proportional to the force magnitude; red for
                        # tension, blue for compression.
                        width = forcescale * fabs(attr['f'])
                        color = self.default_tensioncolor if attr['f'] > 0 else self.default_compressioncolor
                        text = None if (u, v) not in edgelabel else str(edgelabel[(u, v)])
                        _lines.append({
                            'start': sp,
                            'end': ep,
                            'width': width,
                            'color': color,
                            'text': text,
                            'fontsize': self.default_fontsize
                        })
                    _arrows.append({
                        'start': sp,
                        'end': ep,
                        'width': self.default_edgewidth
                    })
            else:
                if forces_on:
                    width = forcescale * fabs(attr['f'])
                    color = self.default_tensioncolor if attr['f'] > 0 else self.default_compressioncolor
                    text = None if (u, v) not in edgelabel else str(edgelabel[(u, v)])
                    _lines.append({
                        'start': sp,
                        'end': ep,
                        'width': width,
                        'color': color,
                        'text': text,
                        'fontsize': self.default_fontsize
                    })
                _arrows.append({
                    'start': sp,
                    'end': ep,
                    'width': self.default_edgewidth if not attr['is_ind'] else self.default_edgewidth * 3
                })
        if _arrows:
            # Without arrows_on the "arrows" are drawn as plain lines.
            if arrows_on:
                draw_xarrows_xy(_arrows, self.ax1)
            else:
                draw_xlines_xy(_arrows, self.ax1)
        if _lines:
            draw_xlines_xy(_lines, self.ax1, alpha=0.5)
    # faces
    if faces_on:
        _face_polygons = []
        for fkey in self.form.faces():
            vkeys = [vkey for vkey in self.form.face_vertices(fkey)]
            polygon_vertices = [self.form.vertex_coordinates(vkey, axes='xy') for vkey in vkeys]
            polygon_vertices = [[(x + dx) / scale, (y + dy) / scale] for (x, y) in polygon_vertices]  # scale the polygon
            _face_polygons.append({
                'points': polygon_vertices,
                'facecolor': '#e5e5e5',
                'edgecolor': '#ffffff',
                'edgewidth': 10.0,
                'text': str(fkey) if fkey not in facelabel else str(facelabel[fkey]),
                'fontsize': self.default_fontsize * 2,  # TEMP! TO DIFFER FROM OTHER LABELS
                'textcolor': self.default_textcolor,
            })
        if _face_polygons:
            draw_xpolygons_xy(_face_polygons, self.ax1)
    # points (extra annotation markers)
    if points:
        _points = []
        for point in points:
            # NOTE: this rebinds x/y (previously the coordinate lists);
            # they are not used again afterwards.
            x, y, _ = point['pos']
            _points.append({
                'pos': [(x + dx) / scale, (y + dy) / scale],
                'text': point.get('text', ''),
                'radius': point.get('size', self.default_pointsize),
                'textcolor': point.get('textcolor', self.default_textcolor),
                'facecolor': point.get('facecolor', self.default_pointcolor),
                'edgecolor': point.get('edgecolor', self.default_linecolor),
                'fontsize': self.default_fontsize
            })
        draw_xpoints_xy(_points, self.ax1)
    # lines (extra annotation segments, grouped by linestyle)
    if lines:
        _lines = {}
        style = lines[0].get('style', self.default_linestyle)
        for line in lines:
            temp = line.get('style', self.default_linestyle)
            if temp == style:
                if temp not in _lines:
                    _lines[temp] = []
            else:
                # NOTE(review): re-encountering a style after a switch
                # resets its list, dropping earlier segments of that
                # style — looks unintended; confirm with callers.
                _lines[temp] = []
                style = temp
            _lines[temp].append({
                'start': [(line['start'][0] + dx) / scale, (line['start'][1] + dy) / scale],
                'end': [(line['end'][0] + dx) / scale, (line['end'][1] + dy) / scale],
                'width': line.get('width', self.default_linewidth),
                'color': line.get('color', self.default_linecolor),
                'text': line.get('text', ''),
                'textcolor': line.get('textcolor', self.default_textcolor),
                'fontsize': self.default_fontsize
            })
        for style in _lines:
            draw_xlines_xy(_lines[style], self.ax1, linestyle=style)
def draw_force(self,
               vertices_on=True,
               edges_on=True,
               faces_on=False,
               forces_on=True,
               arrows_on=False,
               vertexcolor=None,
               edgecolor=None,
               facecolor=None,
               edgelabel=None,
               facelabel=None,
               vertexlabel=None,
               vertexsize=None,
               lines=None,
               points=None):
    """Draw the force diagram in the right subplot (``ax2``).

    Scaling/translation mirrors :meth:`draw_form`. Force-diagram edges
    whose dual form edge touches a leaf vertex are highlighted as
    external forces (or black/wide when the dual edge is independent).

    NOTE(review): ``points`` and ``forces_on`` are accepted but never
    used in this body — confirm whether that is intentional.
    """
    # preprocess: normalize label/size/color arguments to per-key dicts
    vertexlabel = vertexlabel or {}
    edgelabel = edgelabel or {}
    facelabel = facelabel or {}
    vertexsize = size_to_sizedict(vertexsize, self.force.vertices(), self.default_vertexsize)
    vertexcolor = color_to_colordict(vertexcolor, self.force.vertices(), self.default_vertexcolor)
    edgecolor = color_to_colordict(edgecolor, self.force.edges(), self.default_edgecolor)
    # scale and position: include annotation lines in the bounding box
    x = self.force.vertices_attribute('x')
    y = self.force.vertices_attribute('y')
    if lines:
        x += [line['start'][0] for line in lines]
        x += [line['end'][0] for line in lines]
        y += [line['start'][1] for line in lines]
        y += [line['end'][1] for line in lines]
    xmin, ymin = min(x), min(y)
    xmax, ymax = max(x), max(y)
    dx, dy = -xmin, -ymin
    scale = max(fabs(xmax - xmin) / 10.0, fabs(ymax - ymin) / 10.0)
    # vertices
    if vertices_on:
        _points = []
        for key, attr in self.force.vertices(True):
            bgcolor = vertexcolor[key]
            _points.append({
                'pos': ((attr['x'] + dx) / scale, (attr['y'] + dy) / scale),
                'radius': vertexsize[key],
                'facecolor': bgcolor,
                'edgecolor': self.default_edgecolor,
                'linewidth': self.default_edgewidth * 0.5,
                'text': None if key not in vertexlabel else str(vertexlabel[key]),
                # Label color contrasting with the vertex fill.
                'textcolor': '#000000' if is_color_light(bgcolor) else '#ffffff',
                'fontsize': self.default_fontsize
            })
        draw_xpoints_xy(_points, self.ax2)
    # edges
    if edges_on:
        # Leaves are taken from the FORM diagram; each force edge is
        # classified via its dual form edge.
        leaves = set(self.form.leaves())
        _arrows = []
        for (u, v), attr in self.force.edges(True):
            sp, ep = self.force.edge_coordinates(u, v, 'xy')
            sp = ((sp[0] + dx) / scale, (sp[1] + dy) / scale)
            ep = ((ep[0] + dx) / scale, (ep[1] + dy) / scale)
            form_u, form_v = self.force.dual_edge((u, v))
            text = None if (u, v) not in edgelabel else str(edgelabel[(u, v)])
            if form_u in leaves or form_v in leaves:
                _arrows.append({
                    'start': sp,
                    'end': ep,
                    'color': self.default_externalforcecolor if not self.form.edge_attribute((form_u, form_v), 'is_ind') else '#000000',
                    'width': self.default_externalforcewidth if not self.form.edge_attribute((form_u, form_v), 'is_ind') else self.default_edgewidth * 3,
                    'text': text,
                    'fontsize': self.default_fontsize,
                })
            else:
                _arrows.append({
                    'start': sp,
                    'end': ep,
                    'color': self.default_edgecolor,
                    'width': self.default_edgewidth,
                    'text': text,
                    'fontsize': self.default_fontsize,
                })
        # Without arrows_on the "arrows" are drawn as plain lines.
        if arrows_on:
            draw_xarrows_xy(_arrows, self.ax2)
        else:
            draw_xlines_xy(_arrows, self.ax2)
    # lines (extra annotation segments, grouped by linestyle)
    if lines:
        _lines = {}
        style = lines[0].get('style', self.default_linestyle)
        for line in lines:
            temp = line.get('style', self.default_linestyle)
            if temp == style:
                if temp not in _lines:
                    _lines[temp] = []
            else:
                # NOTE(review): same style-grouping reset quirk as in
                # draw_form — re-encountering a style clears its list.
                _lines[temp] = []
                style = temp
            _lines[temp].append({
                'start': [(line['start'][0] + dx) / scale, (line['start'][1] + dy) / scale],
                'end': [(line['end'][0] + dx) / scale, (line['end'][1] + dy) / scale],
                'width': line.get('width', self.default_linewidth),
                'color': line.get('color', self.default_linecolor),
                'text': line.get('text', ''),
                'textcolor': line.get('textcolor', self.default_textcolor),
                'fontsize': self.default_fontsize
            })
        for style in _lines:
            draw_xlines_xy(_lines[style], self.ax2, linestyle=style)
    # faces
    if faces_on:
        _face_polygons = []
        for fkey in self.force.faces():
            vkeys = [vkey for vkey in self.force.face_vertices(fkey)]
            polygon_vertices = [self.force.vertex_coordinates(vkey, axes='xy') for vkey in vkeys]
            polygon_vertices = [[(x + dx) / scale, (y + dy) / scale] for (x, y) in polygon_vertices]  # scale the polygon
            _face_polygons.append({
                'points': polygon_vertices,
                'facecolor': '#e5e5e5',
                'edgecolor': '#ffffff',
                'edgewidth': 10.0,
                'text': str(fkey) if fkey not in facelabel else str(facelabel[fkey]),
                'fontsize': self.default_fontsize * 2,  # TEMP! TO DIFFER FROM OTHER LABELS
                'textcolor': self.default_textcolor,
            })
        if _face_polygons:
            draw_xpolygons_xy(_face_polygons, self.ax2)
def show(self):
    """Display the figure in an interactive matplotlib window (blocking)."""
    plt.show()
def save(self, filepath, **kwargs):
    """Saves the plot to a file.

    Parameters
    ----------
    filepath : str
        Full path of the file.
    **kwargs
        Forwarded verbatim to ``matplotlib.pyplot.savefig``
        (e.g. ``dpi``, ``bbox_inches``).

    Notes
    -----
    For an overview of all configuration options, see [1]_.

    References
    ----------
    .. [1] https://matplotlib.org/2.0.2/api/pyplot_api.html#matplotlib.pyplot.savefig
    """
    plt.savefig(filepath, **kwargs)
# ==============================================================================
# Main
# ==============================================================================
if __name__ == '__main__':
pass
|
#!/bin/bash
#
# Description : Autologin
# Author : Jose Cerrejon Gonzalez (ulysess@gmail_dot._com)
# Version : 1.3 (16/Mar/15)
# Compatible : Raspberry Pi 1 & 2 (tested), ODROID-C1 (tested)
#
clear

# Load the helper library from one of the known relative locations. If
# none is present, download it and source the downloaded copy — the
# original fetched helper.sh with wget but never sourced it, so
# check_board was still undefined on a fresh system.
if ! . ../helper.sh 2>/dev/null && ! . ./scripts/helper.sh 2>/dev/null && ! . ./helper.sh 2>/dev/null; then
    wget -q 'https://github.com/jmcerrejon/PiKISS/raw/master/scripts/helper.sh' && . ./helper.sh
fi
check_board || { echo "Missing file helper.sh. I've tried to download it for you. Try to run the script again." && exit 1; }
# Enable console autologin on Raspbian (sysvinit): disable the stock
# getty on tty1 and respawn a login shell for user 'pi' instead.
fn_autologin_RPi(){
    # Add comment to 1:2345:respawn:/sbin/getty...
    sudo sed -i '/1:2345/s/^/#/' /etc/inittab
    # Insert new file on pattern position
    sudo sed -i 's/.*tty1.*/&\n1:2345:respawn:\/bin\/login -f pi tty1 <\/dev\/tty1> \/dev\/tty1 2>\&1/' /etc/inittab
}
# Enable console autologin on ODROID-C1 (upstart): comment out the
# stock 38400-baud getty line in tty1.conf and exec a login shell for
# the current user instead.
fn_autologin_ODROID(){
    sudo sed -i '/38400/s/^/#/' /etc/init/tty1.conf
    sudo sed -i 's/.*38400.*/&\nexec \/bin\/login -f '$USER' < \/dev\/tty1 > \/dev\/tty1 2>\&1/' /etc/init/tty1.conf
}
# MODEL is exported by helper.sh's check_board; dispatch on the board type.
if [[ ${MODEL} == 'Raspberry Pi' ]]; then
    fn_autologin_RPi
elif [[ ${MODEL} == 'ODROID-C1' ]]; then
    fn_autologin_ODROID
fi

read -p "Done!. Warning: Your distro have free access and no need to login on boot now!. Press [Enter] to continue..."
|
import random
import string
def generate_random_password(length=8):
    """Generate a random alphanumeric password and print it.

    Uses the ``secrets`` module (OS CSPRNG): the original drew from
    ``random``, whose Mersenne Twister output is predictable and
    unsuitable for credentials.

    Args:
        length: number of characters; defaults to 8, matching the
            original behaviour.

    Returns:
        The generated password string.
    """
    import secrets  # local import keeps the module surface unchanged
    alphabet = string.ascii_letters + string.digits
    password = ''.join(secrets.choice(alphabet) for _ in range(length))
    print("Generated Password:", password)
    return password

generate_random_password()
/// <summary>
/// Retrieves every release ("lancamento") that belongs to the given category.
/// </summary>
/// <param name="categoria">Identifier of the category to filter by.</param>
/// <returns>The matching releases; an empty list when none exist.</returns>
public List<Lancamentos> BuscarPorCategoria(int categoria)
{
    using (OpFlixContext ctx = new OpFlixContext())
    {
        // Filter server-side and materialize in a single statement.
        return ctx.Lancamentos
            .Where(l => l.CategoryId == categoria)
            .ToList();
    }
}
public class Test {
  public static void main(String[] args) {
    // Print every multiple of n in [1, 100], one per line.
    int n = 15;
    // Step directly through the multiples instead of testing each i % n.
    for (int multiple = n; multiple <= 100; multiple += n) {
      System.out.println(multiple);
    }
  }
}
# Reference build step (disabled): compile erg.cpp to Intel-syntax assembly.
#clang++ -S -c erg.cpp -std=c++14 -masm=intel -o bin/erg.s
# Disassemble the linked binary: -w wide output, -l source line info,
# -Mintel-mnemonic Intel syntax; result goes to bin/erg.asm.
objdump --disassemble bin/erg -w -l -Mintel-mnemonic > bin/erg.asm
// GET /items/:userId — return all items belonging to the given user.
app.get('/items/:userId', (req, res) => {
  const userId = req.params.userId;
  Items.find({ userId: userId }, (err, items) => {
    if (err) {
      console.log(err);
      // `return` stops the handler here; the original fell through and
      // also called res.json(), sending a second response after the 500
      // ("Cannot set headers after they are sent").
      return res.status(500).send("Error retrieving list of items");
    }
    res.json(items);
  });
});
<gh_stars>0
// Read both operand fields as base-10 integers. Shared by every
// operation below — the original duplicated these lines in each
// function, and parseInt without a radix can misparse some inputs.
function readOperands() {
  return [
    parseInt(document.getElementById("firstInput").value, 10),
    parseInt(document.getElementById("secondInput").value, 10),
  ];
}

// Write an operation result into the output element.
function showResult(value) {
  document.getElementById("result").innerText = value;
}

function add() {
  const [a, b] = readOperands();
  showResult(a + b);
}

// NOTE(review): name kept as "subctract" (typo) because the HTML wires
// its onclick handler to this identifier; fix both together if renaming.
function subctract() {
  const [a, b] = readOperands();
  showResult(a - b);
}

function multiply() {
  const [a, b] = readOperands();
  showResult(a * b);
}

function divide() {
  const [a, b] = readOperands();
  if (b === 0) {
    alert("Cannot Divide By Zero!");
  } else {
    showResult(a / b);
  }
}

// Clear both operand fields (the result display is intentionally kept).
function reset() {
  document.getElementById("firstInput").value = '';
  document.getElementById("secondInput").value = '';
}
// Mock "proffy" (teacher) records used to seed the listing UI.
// Fields: name/avatar/whatsapp/bio — display data; subject — taught
// subject; cost — hourly price; weekday — day-of-week index;
// time_from/time_to — presumably minutes since midnight (720 = 12:00);
// TODO confirm against the schedule-parsing code.
const proffys = [
    {
        name: '<NAME>',
        avatar: "https://avatars.githubusercontent.com/u/63321040?v=4",
        whatsapp: '11996324674',
        bio: 'Entusiasta das melhores tecnologias avançadas. Apaixonado pelas tecnologias JavaScript, React JS, React Native e mudar a vida das pessoas através da tecnologia. Mais de 200 pessoas ja passaram pela experiência técnológica.',
        subject: 'Desenvolvedor',
        cost: '20',
        weekday: [0],
        time_from: [720],
        time_to: [1120]
    },
    {
        name: '<NAME>',
        avatar: "https://static1.purepeople.com.br/articles/5/29/58/85/@/3349900-angelina-jolie-mantem-o-ambiente-saudave-amp_fixed_height_big-2.jpg",
        whatsapp: '11996324674',
        bio: 'I trust in the web and work for make it better! Software Engineer Personal computer | GitHub Star ⭐ | Microsoft MVP 🏆 | Creator 🎥 .',
        subject: 'Professora de Inglês',
        cost: '60',
        weekday: [1],
        time_from: [720],
        time_to: [1120]
    },
    {
        name: '<NAME>',
        avatar: "https://avatars.githubusercontent.com/u/6643122?v=4",
        whatsapp: '11996324674',
        bio: 'Sênior Web Dev & Instructor focused on helping people start programming for web - #html #css #javascript #sql #react #nodejs #fullstack.',
        subject: 'Sênior Web Dev',
        cost: '100',
        weekday: [1],
        time_from: [320],
        time_to: [1580]
    }
];
package model
import (
"database/sql"
"time"
_ "github.com/mattn/go-sqlite3"
)
// sqliteHandler is the sqlite3-backed implementation of DBHandler.
type sqliteHandler struct {
	db *sql.DB
}
// GetTodos returns every todo stored for the given session.
// Panics on query failure (consistent with the other handler methods).
func (s *sqliteHandler) GetTodos(sessionId string) []*Todo {
	todos := []*Todo{}
	// The original SQL was malformed: it lacked the WHERE keyword and
	// embedded ", sessionId" inside the query string instead of binding
	// it as a parameter, so the query could never compile.
	rows, err := s.db.Query("SELECT id, name, completed, createdAt FROM todos WHERE sessionId=?", sessionId)
	if err != nil {
		panic(err)
	}
	defer rows.Close()
	for rows.Next() {
		var todo Todo
		rows.Scan(&todo.ID, &todo.Name, &todo.Completed, &todo.CreatedAt)
		todos = append(todos, &todo)
	}
	return todos
}
// AddTodo inserts a new, uncompleted todo for the session and returns
// an in-memory representation of it. Panics on database failure.
func (s *sqliteHandler) AddTodo(name string, sessionId string) *Todo {
	stmt, err := s.db.Prepare("INSERT INTO todos (sessionId, name, completed, createdAt) VALUES (?, ?, ?, datetime('now'))")
	if err != nil {
		panic(err)
	}
	// Release the prepared statement's resources when done.
	defer stmt.Close()
	// The statement has three placeholders (sessionId, name, completed);
	// the original passed only (name, false), so sessionId was never
	// bound and the insert failed / stored the wrong columns.
	rst, err := stmt.Exec(sessionId, name, false)
	if err != nil {
		panic(err)
	}
	id, _ := rst.LastInsertId()
	var todo Todo
	todo.ID = int(id)
	todo.Name = name
	todo.Completed = false
	// Approximates the DB-side datetime('now'); not read back from the row.
	todo.CreatedAt = time.Now()
	return &todo
}
// RemoveTodo deletes the todo with the given id and reports whether a
// row was actually removed. Panics on database failure.
func (s *sqliteHandler) RemoveTodo(id int) bool {
	// db.Exec prepares and executes in one call for this one-shot statement.
	result, err := s.db.Exec("DELETE FROM todos WHERE id=?", id)
	if err != nil {
		panic(err)
	}
	affected, _ := result.RowsAffected()
	return affected > 0
}
// CompleteTodo sets the completed flag of the todo with the given id and
// reports whether a row was updated. Panics on database failure.
func (s *sqliteHandler) CompleteTodo(id int, complete bool) bool {
	// db.Exec prepares and executes in one call for this one-shot statement.
	result, err := s.db.Exec("UPDATE todos SET completed=? WHERE id=?", complete, id)
	if err != nil {
		panic(err)
	}
	affected, _ := result.RowsAffected()
	return affected > 0
}
// Close releases the underlying database connection pool.
func (s *sqliteHandler) Close() {
	s.db.Close()
}
// newSqliteHandler opens (or creates) the sqlite database at filepath,
// ensures the todos schema and its sessionId index exist, and returns a
// handler over it. Panics if the database cannot be opened or migrated.
func newSqliteHandler(filepath string) DBHandler {
	database, err := sql.Open("sqlite3", filepath)
	if err != nil {
		panic(err)
	}
	// Use Exec rather than Prepare: Prepare compiles only the FIRST
	// statement of a multi-statement script, so the original never
	// created the sessionId index — and its error was discarded.
	_, err = database.Exec(
		`CREATE TABLE IF NOT EXISTS todos (
			id INTEGER PRIMARY KEY AUTOINCREMENT,
			sessionId STRING,
			name TEXT,
			completed BOOLEAN,
			createdAt DATETIME
		);
		CREATE INDEX IF NOT EXISTS sessionIdIndexOnTodos ON todos (
			sessionId ASC
		);`)
	if err != nil {
		panic(err)
	}
	return &sqliteHandler{db: database}
}
|
#!/usr/bin/env bash
# Launch token-shout with debug logging to /tmp:
#  - serves its HTTP notification endpoint on 0.0.0.0:8081
#  - connects to an Ethereum node via websocket RPC
#  - watches eth/usdt/dusd activity for the wallets in /tmp/wallets,
#    polling every 20s; ERC-20 contract ABIs come from /tmp/contracts
./bin/token-shout --log-level debug \
    --log-dir /tmp \
    start \
    --http-listen-addr 0.0.0.0:8081 \
    --rpc-addr ws://192.168.0.101:9546 \
    --receivers-conf-path /tmp/receivers \
    --watch-list eth,usdt,dusd \
    --eth-wallet-dir /tmp/wallets \
    --eth-watch-interval 20s \
    --erc20-contracts-dir /tmp/contracts
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { rgbDef } from './color_types';
/**
 * Relative luminance of an 8-bit sRGB color per WCAG 2.x: each channel
 * is gamma-expanded to linear light, then combined with the standard
 * red/green/blue weights.
 */
export function calculateLuminance(r: number, g: number, b: number): number {
  const linearize = (channel: number): number => {
    const c = channel / 255;
    return c <= 0.03928 ? c / 12.92 : Math.pow((c + 0.055) / 1.055, 2.4);
  };
  return 0.2126 * linearize(r) + 0.7152 * linearize(g) + 0.0722 * linearize(b);
}

/**
 * WCAG contrast ratio between two RGB triples; always >= 1 regardless
 * of argument order.
 */
export function calculateContrast(rgb1: rgbDef, rgb2: rgbDef): number {
  const ratio =
    (calculateLuminance(rgb1[0], rgb1[1], rgb1[2]) + 0.05) /
    (calculateLuminance(rgb2[0], rgb2[1], rgb2[2]) + 0.05);
  return ratio < 1 ? 1 / ratio : ratio;
}
|
#!/bin/bash -e
# Copyright 2021 WSO2 Inc. (http://wso2.org)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ----------------------------------------------------------------------------
# Execution script for ballerina performance tests
# ----------------------------------------------------------------------------
set -e
# base-scenario.sh provisions the deployment and exports scriptsDir,
# resultsDir and payload_flags used below.
source base-scenario.sh
# 10-user, 20-minute (1200 s) load test against the HTTP POST service.
# NOTE: $payload_flags is intentionally left unquoted so it word-splits
# into separate JMeter -J flags.
jmeter -n -t "$scriptsDir/"http-post-request.jmx -l "$resultsDir/"original.jtl -Jusers=10 -Jduration=1200 -Jhost=bal.perf.test -Jport=80 -Jprotocol=http -Jpath=serv $payload_flags
|
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
# Expects PLACEHOLDER, WIDTH, HEIGHT and PREFIX to be set in the
# environment; the unquoted EOM delimiter deliberately lets them expand
# inside the here-document before it is written to the web root.
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">
<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Welcome to ${PREFIX}'s app. THIS IS THE BEST COMPANY EVER.
<!-- END -->
</div>
</body>
</html>
EOM
echo "Script complete."
require 'test_helper'

# Integration tests for Spree::Page rendering: homepage contents and
# slideshow, meta tags on arbitrary pages, content with/without images,
# and path-based routing (including nested paths and 404s).
class Spree::PagesIntegrationTest < ActiveSupport::IntegrationCase
# Start from a clean slate; collect the bundled sample JPEGs used as
# slideshow/image fixtures.
setup do
Spree::Page.destroy_all
@images = Dir[File.expand_path("../../../../lib/tasks/sample", __FILE__) + "/*.jpg"]
end
context "the homepage" do
setup do
# Build the root page with a "main" and an "intro" content block, plus
# one slideshow image per sample file, then load it.
@home = Spree::Page.create(:title => "Home", :meta_title => "Welcome to our homepage!", :path => "/")
@home.contents.first.update_attributes(:body => "This is a test", :context => "main")
@home.contents.create(:title => "Some might say...", :body => "This is another test", :context => "intro")
@images.each { |image|
image = File.open(image)
@home.images.create(:attachment => image, :alt => "Sailing", :viewable => @home)
image.close
}
visit "/"
end
should "have proper page title" do
# meta_title takes precedence over title in the <title> tag.
assert_title "Spree Demo Site - Welcome to our homepage!"
end
should "have proper contents" do
within ".left .content-main" do
assert_seen "Home", :within => "h1.title"
assert_seen "This is a test", :within => "p"
end
within ".intro .content-main" do
assert_seen "Some might say...", :within => "h1.title"
assert_seen "This is another test", :within => "p"
end
end
should "have a images in slideshow" do
# Every attached image must appear in the slideshow at its :slide size.
within "#content .slideshow" do
@home.images.each do |img|
assert has_xpath?("//img[@src='#{img.attachment.url(:slide)}']")
end
end
end
end
context "any other page" do
setup do
@page = Spree::Page.create(:title => "Some Page", :meta_description => "This is the description", :meta_keywords => "just, a, keyword", :path => "/some-page")
end
should "have proper meta tags" do
visit @page.path
assert_title "Spree Demo Site - Some Page"
assert_meta :description, "This is the description"
assert_meta :keywords, "just, a, keyword"
end
context "with content that doesn't have an image" do
setup do
@page.contents.first.update_attributes(:body => "OMG it really is a page")
end
should "have proper content" do
visit @page.path
within ".content-main" do
assert_seen "Some Page", :within => "h1.title"
assert_seen "OMG it really is a page", :within => "p"
end
end
should "hide title is specified" do
# With :hide_title the h1 must be absent but the body still renders.
@page.contents.first.update_attributes(:hide_title => true)
visit @page.path
within ".content-main" do
assert !has_selector?("h1.title")
assert_seen "OMG it really is a page", :within => "p"
end
end
end
context "with content that has an image" do
setup do
@content = @page.contents.first
@content.update_attributes(:body => "OMG it really is a page", :attachment => File.open(@images.first))
end
should "have proper content" do
# Image content splits the layout: picture left, text right.
visit @page.path
within ".content-left" do
assert has_xpath?("//img[@src='#{@content.attachment.url(:medium)}']")
end
within ".content-right" do
assert_seen "Some Page", :within => "h1.title"
assert_seen "OMG it really is a page", :within => "p"
end
end
end
context "with several existing pages" do
setup do
# Routing fixtures: "/" and "/home" must resolve to different pages.
Spree::Page.destroy_all
@home_page = Factory.create(:spree_page, :title => "Home", :path => "/")
@another_home = Factory.create(:spree_page, :title => "Another Home", :path => "/home")
@about_page = Factory.create(:spree_page, :title => "About", :path => "/about-us")
@nested_page = Factory.create(:spree_page, :title => "Our Services", :path => "/about-us/services")
end
should "get the homepage" do
visit "/"
assert_title @home_page.title
end
should "get the page called home" do
visit "/home"
assert_title @another_home.title
end
should "get the about page" do
visit "/about-us"
assert_title @about_page.title
end
should "get a nested page" do
visit "/about-us/services"
assert_title @nested_page.title
end
should "render 404" do
visit "/a/page/that/doesnt/exist"
assert_seen "Error"
end
end
end
end
|
<reponame>jfhr/minesweeper<filename>src/app/game-options-component/game-options.component.ts<gh_stars>0
import {Component, EventEmitter, Input, Output} from '@angular/core';
import {GameState, MinesweeperOptions} from '../minesweeper-service/minesweeper.service';
@Component({
  selector: 'app-game-options',
  templateUrl: './game-options.component.html',
  styleUrls: ['./game-options.component.css']
})
export class GameOptionsComponent {
  /** Options for the next game; supplied by the parent or created lazily. */
  @Input() public options: MinesweeperOptions;
  /** State of the game whose outcome this panel reflects. */
  @Input() public gameState: GameState;
  /** Emits the chosen options when the player submits the form. */
  @Output() public submitGame = new EventEmitter();

  public isGameWon = false;
  public isGameLost = false;
  public isNewGame = false;

  /** Raise the matching result banner, or fall through to a fresh game. */
  public initialize() {
    switch (this.gameState) {
      case GameState.Lost:
        this.isGameLost = true;
        break;
      case GameState.Won:
        this.isGameWon = true;
        break;
      default:
        this.newGame();
    }
  }

  /** Clear result banners and guarantee an options object exists. */
  public newGame() {
    this.isGameWon = false;
    this.isGameLost = false;
    if (this.options === undefined) {
      this.options = new MinesweeperOptions();
    }
    this.isNewGame = true;
  }

  /** Emit the options; returning false cancels the default form submit. */
  public submitOptions() {
    this.submitGame.emit(this.options);
    return false;
  }
}
|
#!/usr/bin/env bash
# Utility script to download and build LLVM & clang
#
# Copyright Contributors to the Open Shading Language project.
# SPDX-License-Identifier: BSD-3-Clause
# https://github.com/AcademySoftwareFoundation/OpenShadingLanguage

# Exit the whole script if any command fails; echo commands for CI logs.
set -ex

echo "Building LLVM"
uname

if [[ $(uname) == "Linux" ]] ; then
    LLVM_VERSION=${LLVM_VERSION:=13.0.0}
    LLVM_INSTALL_DIR=${LLVM_INSTALL_DIR:=${PWD}/llvm-install}
    # Pick the prebuilt-binary distro name matching the CI platform.
    if [[ "$GITHUB_WORKFLOW" != "" ]] ; then
        LLVM_DISTRO_NAME=${LLVM_DISTRO_NAME:=ubuntu-18.04}
    elif [[ "$TRAVIS_DIST" == "trusty" ]] ; then
        LLVM_DISTRO_NAME=${LLVM_DISTRO_NAME:=ubuntu-14.04}
    elif [[ "$TRAVIS_DIST" == "xenial" ]] ; then
        LLVM_DISTRO_NAME=${LLVM_DISTRO_NAME:=ubuntu-16.04}
    elif [[ "$TRAVIS_DIST" == "bionic" ]] ; then
        LLVM_DISTRO_NAME=${LLVM_DISTRO_NAME:=ubuntu-18.04}
    else
        LLVM_DISTRO_NAME=${LLVM_DISTRO_NAME:=error}
    fi
    LLVMTAR=clang+llvm-${LLVM_VERSION}-x86_64-linux-gnu-${LLVM_DISTRO_NAME}.tar.xz
    echo "LLVMTAR = $LLVMTAR"
    # Releases >= 10.0.0 are hosted on GitHub; older ones on releases.llvm.org.
    if [[ "$LLVM_VERSION" == "10.0.0" ]] || [[ "$LLVM_VERSION" == "11.0.0" ]] \
       || [[ "$LLVM_VERSION" == "11.1.0" ]] || [[ "$LLVM_VERSION" == "12.0.0" ]] \
       || [[ "$LLVM_VERSION" == "13.0.0" ]] || [[ "$LLVM_VERSION" == "14.0.0" ]] ;
    then
        curl --location "https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/${LLVMTAR}" -o "$LLVMTAR"
    else
        curl --location "http://releases.llvm.org/${LLVM_VERSION}/${LLVMTAR}" -o "$LLVMTAR"
    fi
    ls -l "$LLVMTAR"
    tar xf "$LLVMTAR"
    rm -f "$LLVMTAR"
    mkdir -p "$LLVM_INSTALL_DIR"
    mv clang+llvm*/* "$LLVM_INSTALL_DIR"
    # Report success only after the files are actually in place (the old
    # message was printed before the install happened).
    echo "Installed ${LLVM_VERSION} in ${LLVM_INSTALL_DIR}"
    export LLVM_DIRECTORY=$LLVM_INSTALL_DIR
    export PATH=${LLVM_INSTALL_DIR}/bin:$PATH
    # ls -a $LLVM_DIRECTORY
fi
|
#!/bin/bash
#--- run one instance of relay (along with the server)
# Usage: run.sh <ciltrees-dir> [extra relay args...]
CILTREES=$1
shift

# Launch the server in the background, then run the analysis client,
# capturing its combined output in $CILTREES/log.relay.
function doAnalysis () {
  CONFIG=client.cfg
  LOG=$CILTREES/log.relay
  ./server.sh "$CILTREES" -cc "$CONFIG" > /dev/null 2>&1 &
  # Merge relay's stderr into the pipe *before* tee so errors land in the
  # log too (the old '| tee $LOG 2>&1' only redirected tee's own stderr).
  ./relay.sh "$CILTREES" -su "$CONFIG" "$@" 2>&1 | tee "$LOG"
  killall server.exe
}

doAnalysis "$@"
|
// Counts case-insensitive whole-word occurrences of `word` across all
// `strings`, using a \b-bounded regular expression so substrings like
// "cats" or "cathedral" do not match "cat".
// NOTE(review): `word` is interpolated into a regex pattern unescaped --
// words containing regex metacharacters would misbehave; confirm callers
// only pass plain words.
func totalWordOccurrences(word: String, in strings: [String]) -> Int {
var totalCount = 0
let lowercasedWord = word.lowercased()
for str in strings {
let lowercasedStr = str.lowercased()
var index = lowercasedStr.startIndex
while index < lowercasedStr.endIndex {
// Search the remaining substring; on a hit, jump past the match so the
// same occurrence is not counted twice.
if let range = lowercasedStr[index...].range(of: "\\b\(lowercasedWord)\\b", options: .regularExpression) {
totalCount += 1
index = range.upperBound
} else {
// No further match anywhere in the tail; advancing one character at a
// time terminates the scan (albeit slowly for long strings).
index = lowercasedStr.index(after: index)
}
}
}
return totalCount
}
let strings = ["The cat is on the mat.", "Cats are cute animals.", "The cathedral is beautiful."]
let word = "cat"
let result = totalWordOccurrences(word: word, in: strings) // 1 -- only the standalone "cat" matches; "\b" excludes "Cats" and "cathedral" (the old "Output: 3" comment was wrong)
#!/usr/bin/env python
"""Packaging configuration for the nexusutils package."""
# distutils.core.setup() silently ignores install_requires, so the declared
# dependencies were never enforced at install time. setuptools honours them;
# fall back to distutils only where setuptools is unavailable.
try:
    from setuptools import setup
except ImportError:  # pragma: no cover - very old environments only
    from distutils.core import setup

setup(
    name="nexusutils",
    version="1.0",
    description="Python NeXus Utilities",
    url="https://github.com/ess-dmsc/python-nexus-utilities",
    packages=["nexusutils"],
    install_requires=[
        "appdirs",
        "h5py",
        "numexpr",
        "numpy",
        "packaging",
        "pyparsing",
        "matplotlib",
        "tabulate",
    ],
)
|
/*
 * @Date: 2022-04-08 09:44:21
 * @LastEditors: huangzh873
 * @LastEditTime: 2022-04-08 09:47:28
 * @FilePath: /vt-cesium2.0/src/components/jt-toolbar/config/contents/effect/index.ts
 */
import { Content } from '../Types'
import groups from './groups'

// Toolbar tab definition for the "effects" section: the display name plus
// the button groups defined in ./groups. (name is user-facing Chinese text.)
const content: Content = {
name: '效果',
groups,
}
export default content
|
/* eslint-env jest */
const Assert = require('chai').assert
const Parsers = require('../../../util/parser/parsers')

// Tests for LegislativeActivityXmlParser: parsing a LegisInfo RSS feed into
// legislative-activity records.
describe('LegislativeActivityXmlParser.js', () => {
let underTest
let xml
// Build a two-item RSS fixture once; each test gets a fresh parser over it.
beforeAll(() => {
xml = '<rss version="2.0">\n' +
'<channel>\n' +
'<title>Custom RSS Feed</title>\n' +
'<language>en</language>\n' +
'<description>Custom RSS Feed/Fil RSS personnalisé</description>\n' +
'<link>https://www.parl.ca/LegisInfo/</link>\n' +
'<atom:link xmlns:atom="http://www.w3.org/2005/Atom" href="https://www.parl.ca/LegisInfo/RSSFeed.aspx?download=rss&Language=E&Mode=1&Source=LegislativeFilteredBills&AllBills=1&HOCEventTypes=60110,60111,60146,60306,60122,60115,60119,60121,60124,60125,60126,60127,60285,60145,60307,60128,60131,60132,60133,60134,60174,60112,60163,60304,60303,60139,60144,60136,60138,60142&SenateEventTypes=60109,60110,60111,60115,60118,60119,60120,60123,60124,60305,60286,60130,60129,60302,60131,60132,60133,60134,60147,60304,60303,60140,60143,60135,60137,60141,60149&RelInfo=MajSpeach,LOPLegSum,JRNLArt,DepartmentPressRel,SpeakRule,NewsArt,PartyPressRel,ComingIntoForce" rel="self" type="application/rss+xml"/>\n' +
'<item>\n' +
'<title>C-4, Debate at 3rd Reading in the House of Commons</title>\n' +
'<link>\n' +
'https://www.parl.ca/LegisInfo/BillDetails.aspx?Language=E&billId=10615191\n' +
'</link>\n' +
'<description>\n' +
'C-4, An Act to implement the Agreement between Canada, the United States of America and the United Mexican States\n' +
'</description>\n' +
'<pubDate>Tue, 10 Mar 2020 00:00:00 EST</pubDate>\n' +
'</item>\n' +
'<item>\n' +
'<title>C-4, Debate at 3rd Reading in the House of Commons</title>\n' +
'<link>\n' +
'https://www.parl.ca/LegisInfo/BillDetails.aspx?Language=E&billId=10615191\n' +
'</link>\n' +
'<description>\n' +
'C-4, An Act to implement the Agreement between Canada, the United States of America and the United Mexican States\n' +
'</description>\n' +
'<pubDate>Tue, 10 Mar 2020 00:00:00 EST</pubDate>\n' +
'</item>' +
'</channel>' +
'</rss>'
})
beforeEach(() => {
underTest = new Parsers.LegislativeActivityXmlParser(xml)
})
test('LegislativeActivityXmlParser.js::generateNewParser creates a new Legislative activity Parser', () => {
// A derived parser should be scoped to the <channel> contents.
const parser = underTest.generateNewParser(underTest.xml)
Assert.equal(parser.xml, underTest.getXmlInTag('channel'))
})
test('LegislativeActivityXmlParser.js::hasData returns true on has content, false otherwise', () => {
Assert(underTest.hasData())
Assert(underTest.parser.hasData())
// A channel with no <item> elements counts as "no data".
Assert.isFalse(underTest.generateNewParser('<rss><channel>channel stuff</channel></rss>').hasData())
})
test('LegislativeActivityXmlParser.js::buildJson returns valid legislative activity', () => {
// Both fixture items should surface; yes/no vote counts default to 0.
const activities = underTest.getAllFromXml()
Assert.equal(activities.length, 2)
Assert.equal(activities[0].yes, 0)
Assert.equal(activities[0].no, 0)
Assert.equal(activities[0].title, 'C-4, Debate at 3rd Reading in the House of Commons')
Assert.equal(activities[1].yes, 0)
Assert.equal(activities[1].no, 0)
Assert.equal(activities[1].title, 'C-4, Debate at 3rd Reading in the House of Commons')
})
})
|
package com.pharmacySystem.service.implementations;
import java.time.LocalDate;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.pharmacySystem.DTOs.MedicineOrderDTO;
import com.pharmacySystem.DTOs.MedicineOrderDisplayDTO;
import com.pharmacySystem.DTOs.PharmacyMedicineCreateDTO;
import com.pharmacySystem.mappers.MedicineOrderMapper;
import com.pharmacySystem.model.medicine.Medicine;
import com.pharmacySystem.model.medicineOrder.MedicineOffer;
import com.pharmacySystem.model.medicineOrder.MedicineOrder;
import com.pharmacySystem.model.medicineOrder.MedicineOrderItem;
import com.pharmacySystem.model.medicineOrder.OrderStatus;
import com.pharmacySystem.model.pharmacy.Pharmacy;
import com.pharmacySystem.repository.MedicineOfferRepository;
import com.pharmacySystem.repository.MedicineOrderItemRepository;
import com.pharmacySystem.repository.MedicineOrderRepository;
import com.pharmacySystem.repository.MedicineRepository;
import com.pharmacySystem.repository.PharmacyMedicineRepository;
import com.pharmacySystem.repository.PharmacyRepository;
import com.pharmacySystem.service.interfaces.IMedicineOrderService;
import com.pharmacySystem.service.interfaces.IPharmacyMedicineService;
@Service
public class MedicineOrderService implements IMedicineOrderService {
    @Autowired
    private MedicineOrderRepository medicineOrderRepository;
    @Autowired
    private MedicineOrderItemRepository medicineOrderItemRepository;
    @Autowired
    private PharmacyRepository pharmacyRepository;
    @Autowired
    private MedicineRepository medicineRepository;
    @Autowired
    private PharmacyMedicineRepository pharmacyMedicineRepository;
    @Autowired
    private IPharmacyMedicineService pharmacyMedicineService;
    @Autowired
    private MedicineOfferRepository medicineOfferRepository;

    /**
     * Creates a new PENDING order for the given pharmacy.
     *
     * @param medicineOrderDTO due date plus parallel lists of medicine ids and amounts
     * @param pharmacyId       pharmacy placing the order
     * @return display DTO of the saved order, or {@code null} if the pharmacy does not exist
     */
    @Override
    public MedicineOrderDisplayDTO createMedicineOrder(MedicineOrderDTO medicineOrderDTO, long pharmacyId) {
        Pharmacy pharmacy = pharmacyRepository.findById(pharmacyId).orElse(null);
        if (pharmacy == null) {
            return null;
        }
        MedicineOrder medicineOrder = new MedicineOrder();
        medicineOrder.setDueDate(medicineOrderDTO.getDueDate());
        medicineOrder.setMedicineOrderItems(createMedicineOrderItems(medicineOrderDTO, pharmacyId));
        medicineOrder.setPharmacy(pharmacy);
        medicineOrder.setStatus(OrderStatus.PENDING);
        medicineOrderRepository.save(medicineOrder);
        return MedicineOrderMapper.medicineOrderDisplayDTOFromMedicineOrder(medicineOrder);
    }

    /**
     * Builds and persists the order items for the DTO's medicine id/amount pairs.
     * Medicines not yet stocked by the pharmacy are registered with a zero
     * quantity and a default price of 2000.
     */
    private Set<MedicineOrderItem> createMedicineOrderItems(MedicineOrderDTO medicineOrderDTO, long pharmacyId) {
        Set<MedicineOrderItem> medicineOrderItems = new HashSet<MedicineOrderItem>();
        Date startDate = new Date();
        Date endDate = new Date();
        // Convert once up front; the original called toArray() on every iteration.
        Object[] medicineIds = medicineOrderDTO.getMedicineIds().toArray();
        Object[] amount = medicineOrderDTO.getAmount().toArray();
        for (int i = 0; i < medicineIds.length; i++) {
            MedicineOrderItem medicineOrderItem = new MedicineOrderItem();
            Medicine medicine = medicineRepository.findById((Long) medicineIds[i]).orElse(null);
            // NOTE(review): an unknown medicine id would NPE below -- confirm ids
            // are validated upstream before this service is called.
            if (pharmacyMedicineRepository.findByPharmacyAndMedicineId(pharmacyId, medicine.getId()) == null) {
                PharmacyMedicineCreateDTO pharmacyMedicineCreateDTO = new PharmacyMedicineCreateDTO(0, 2000,
                        startDate, endDate, medicine.getId());
                pharmacyMedicineService.addMedicineToPharmacy(pharmacyMedicineCreateDTO, pharmacyId);
            }
            medicineOrderItem.setMedicine(medicine);
            medicineOrderItem.setAmountToOrder((int) amount[i]);
            medicineOrderItems.add(medicineOrderItem);
            medicineOrderItemRepository.save(medicineOrderItem);
        }
        return medicineOrderItems;
    }

    /** Returns display DTOs for every order placed by the given pharmacy. */
    @Override
    public Set<MedicineOrderDisplayDTO> findAllByPharmacy(long pharmacyId) {
        Set<MedicineOrder> medicineOrders = medicineOrderRepository.findAllByPharmacy(pharmacyId);
        return MedicineOrderMapper.createMedicineOrderDisplayDTOsFromMedicineOrders(medicineOrders);
    }

    /**
     * Updates an order's due date and items, but only while no supplier offers
     * exist for it yet.
     *
     * @return the updated display DTO, or {@code null} if the order is missing,
     *         already has offers, or references an unknown medicine
     */
    @Override
    public MedicineOrderDisplayDTO updateMedicineOrder(long pharmacyId,
            MedicineOrderDisplayDTO medicineOrderDisplayDTO) {
        MedicineOrder medicineOrder = medicineOrderRepository.findById(medicineOrderDisplayDTO.getId()).orElse(null);
        // Null-check BEFORE dereferencing: the original called
        // medicineOrder.getPharmacy() first and NPE'd on missing orders.
        if (medicineOrder == null) {
            return null;
        }
        Set<MedicineOffer> medicineOffers = medicineOfferRepository.allMedicineOffersForMedicineOrder(
                medicineOrder.getPharmacy().getId(), medicineOrderDisplayDTO.getId());
        if (!medicineOffers.isEmpty()) {
            return null;
        }
        medicineOrder.setDueDate(LocalDate.parse(medicineOrderDisplayDTO.getDueDate()));
        Set<MedicineOrderItem> medicineOrderItems = new HashSet<>();
        for (int i = 0; i < medicineOrder.getMedicineOrderItems().size(); i++) {
            Medicine medicine = medicineRepository.findById(medicineOrderDisplayDTO.getMedicineId().get(i)).orElse(null);
            if (medicine == null) {
                return null;
            }
            MedicineOrderItem medicineOrderItem = new MedicineOrderItem(medicineOrder.getId(), medicine,
                    medicineOrderDisplayDTO.getAmount().get(i));
            medicineOrderItems.add(medicineOrderItem);
        }
        medicineOrderItemRepository.saveAll(medicineOrderItems);
        medicineOrder.setMedicineOrderItems(medicineOrderItems);
        medicineOrderRepository.save(medicineOrder);
        return MedicineOrderMapper.medicineOrderDisplayDTOFromMedicineOrder(medicineOrder);
    }

    /**
     * "Deletes" an order by detaching it from its pharmacy (the row itself is
     * kept), allowed only while no offers exist for it.
     */
    @Override
    public MedicineOrderDisplayDTO deleteMedicineOrder(long pharmacyId, long id) {
        MedicineOrder medicineOrder = medicineOrderRepository.findById(id).orElse(null);
        if (medicineOrder == null) {
            return null;
        }
        Set<MedicineOffer> medicineOffers = medicineOfferRepository.allMedicineOffersForMedicineOrder(pharmacyId, id);
        if (!medicineOffers.isEmpty()) {
            return null;
        }
        medicineOrder.setPharmacy(null);
        medicineOrderRepository.save(medicineOrder);
        return MedicineOrderMapper.medicineOrderDisplayDTOFromMedicineOrderDelete(medicineOrder.getId(),
                medicineOrder.getDueDate().toString(), medicineOrder.getStatus());
    }

    /** Looks up a single order by id; {@code null} if it does not exist. */
    @Override
    public MedicineOrderDisplayDTO findMedicineOrderById(long id) {
        MedicineOrder medicineOrder = medicineOrderRepository.findById(id).orElse(null);
        if (medicineOrder != null) {
            return MedicineOrderMapper.medicineOrderDisplayDTOFromMedicineOrder(medicineOrder);
        }
        return null;
    }
}
|
TERMUX_PKG_HOMEPAGE=https://github.com/fornwall/apksigner
TERMUX_PKG_DESCRIPTION="APK signing tool"
TERMUX_PKG_LICENSE="Apache-2.0"
TERMUX_PKG_VERSION=0.7
TERMUX_PKG_REVISION=1
TERMUX_PKG_SHA256=340560c4f75af3501f037452bcf184fa48fd18bc877a4cce9a51a3fa047b4b38
TERMUX_PKG_SRCURL=https://github.com/fornwall/apksigner/archive/v${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_BUILD_IN_SRC=yes
TERMUX_PKG_PLATFORM_INDEPENDENT=true

# Build the jar with gradle, dex it with d8, install the dex plus man page,
# and generate a small dalvikvm launcher script.
termux_step_make() {
	mkdir -p "$TERMUX_PREFIX"/share/{dex,man/man1}
	cp apksigner.1 "$TERMUX_PREFIX/share/man/man1/"
	GRADLE_OPTS=" -Dorg.gradle.daemon=false" ./gradlew
	# $TERMUX_D8 is left unquoted on purpose: it may carry extra arguments --
	# TODO confirm against the Termux build environment.
	$TERMUX_D8 \
		--classpath "$ANDROID_HOME/platforms/android-$TERMUX_PKG_API_LEVEL/android.jar" \
		--release \
		--min-api "$TERMUX_PKG_API_LEVEL" \
		--output "$TERMUX_PKG_TMPDIR" \
		./build/libs/src-all.jar
	cd "$TERMUX_PKG_TMPDIR"
	jar cf apksigner.jar classes.dex
	mv apksigner.jar "$TERMUX_PREFIX/share/dex/apksigner.jar"
	echo '#!/bin/sh' > "$TERMUX_PREFIX/bin/apksigner"
	# Emit "$@" (quoted) into the wrapper so arguments containing spaces
	# survive; the old wrapper used a bare $@ and split them.
	echo "dalvikvm -cp $TERMUX_PREFIX/share/dex/apksigner.jar net.fornwall.apksigner.Main \"\$@\"" >> "$TERMUX_PREFIX/bin/apksigner"
	chmod +x "$TERMUX_PREFIX/bin/apksigner"
}
|
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "../include/simulator.h"
/* ******************************************************************
ALTERNATING8BIT AND GO-BACK-N NETWORK EMULATOR: VERSION 1.1 J.F.Kurose
This code should be used for PA2, unidirectional data transfer
protocols (from A to B). Network properties:
- one way network delay averages five time units (longer if there
are other messages in the channel for GBN), but can be larger
- packets can be corrupted (either the header or the data portion)
or lost, according to user-defined probabilities
- packets will be delivered in the order in which they were sent
(although some can be lost).
**********************************************************************/
/* Retransmission timer interval, in simulator time units. */
#define TIMEOUT 20.0
//the base of the window (oldest unacknowledged sequence number)
int base;
//the size of the window (set from getwinsize() in A_init)
int N;
// next sequence number A will assign to an outgoing packet
int nextseqnum;
// next in-order sequence number B expects to deliver to layer 5
int nextacknum;
//the sending packet array on the A(sender) side, indexed by seqnum
struct pkt sndPkt[1000];
//the receving buffer array on the B(receiver) side, indexed by seqnum;
//a negative acknum marks a slot as empty (see B_init)
struct pkt recvBufPkt[1000];
//array to save the timeout times based on the sim_time();
//-1.0 = unused slot, 0.0 = acknowledged, >0 = absolute expiry time
float timeoutArray[1000];
/********* STUDENTS WRITE THE NEXT SEVEN ROUTINES *********/
/* Compute a simple additive checksum over the sequence number, ack number
 * and the fixed 20-byte payload of a packet (the checksum field itself is
 * excluded). */
int calculate_checksum(packet)
struct pkt packet;
{
    int checksum = 0;
    checksum += packet.seqnum;
    checksum += packet.acknum;
    /* Sum all 20 payload bytes. The original also called strlen() on the
     * payload into an unused variable; the payload is a fixed-size buffer
     * that is not guaranteed to be NUL-terminated, so that strlen() could
     * read past the end of the array. It has been removed. */
    for (int i = 0; i < 20; i++) {
        checksum += packet.payload[i];
    }
    return checksum;
}
/* Returns nonzero when the packet's stored checksum matches a freshly
 * computed one (i.e. the packet is not corrupted).
 * NOTE(review): the name is a typo for "validate_checksum", but callers in
 * this file use the misspelled form, so it is kept. */
int vaildiate_checksum(packet)
struct pkt packet;
{
int expectedChecksum = calculate_checksum(packet);
return (expectedChecksum == packet.checksum);
}
/* called from layer 5, passed the data to be sent to other side */
void A_output(message)
struct msg message;
{
    struct pkt sendingpkt;

    /* Build the outgoing packet. acknum is explicitly zeroed: the original
     * left it uninitialized, so calculate_checksum() read an indeterminate
     * value (undefined behaviour). */
    strncpy(sendingpkt.payload, message.data, 20);
    sendingpkt.seqnum = nextseqnum;
    sendingpkt.acknum = 0;
    sendingpkt.checksum = calculate_checksum(sendingpkt);
    sndPkt[nextseqnum] = sendingpkt;

    /* Record this packet's absolute expiry time. TIMEOUT replaces the
     * literal 20.0 the original duplicated here. */
    timeoutArray[nextseqnum] = get_sim_time() + TIMEOUT;

    /* Transmit only when the packet falls inside the send window; start the
     * timer when it is the oldest unacknowledged packet. Packets outside the
     * window are still buffered in sndPkt for later retransmission. */
    if (nextseqnum < base + N) {
        tolayer3(0, sndPkt[nextseqnum]);
        if (nextseqnum == base) {
            starttimer(0, TIMEOUT);
        }
    }
    nextseqnum += 1;
}
/* called from layer 3, when a packet arrives for layer 4 */
/* Handles an ACK at the sender: marks the acked packet, slides the window
 * base past every already-acked packet, and restarts the timer for the
 * oldest still-outstanding one.
 * NOTE(review): the ACK's checksum is never validated here -- confirm that
 * is acceptable for this assignment's protocol. */
void A_input(packet)
struct pkt packet;
{
// printf("run A_input\n");
// printf("sim time:%f\n", get_sim_time());
//
// printf("A receving ack: %d, nextseqnum is: %d, base: %d\n",
// packet.acknum, nextseqnum, base);
// Ignore ACKs for sequence numbers we have not sent yet.
if (packet.acknum > nextseqnum){
return;
}
// 0.0 in timeoutArray marks this sequence number as acknowledged.
timeoutArray[packet.acknum] = 0.0;
if (base == packet.acknum){
stoptimer(0);
// printf("stop timer for seq: %d\n", base);
base ++;
// Slide base over any packets that were already acked out of order, and
// restart the timer for the first packet that is still pending.
for (int i = base; i < nextseqnum; i++){
if (timeoutArray[i] > 0){
starttimer(0, timeoutArray[i] - get_sim_time());
break;
}else if(timeoutArray[i] == 0.0){
base = i + 1;
}
}
}
//printf("\n");
}
/* called when A's timer goes off */
/* Retransmits only the window base packet and restarts the timer; later
 * unacked packets get their turn when they become the base. */
void A_timerinterrupt()
{
// printf("run A_timerinterrupt\n");
// printf("sim time:%f\n", get_sim_time());
starttimer(0, TIMEOUT);
// printf("start timer for seq: %d\n", base);
tolayer3(0, sndPkt[base]);
// printf("A resending: %s, seq: %d\n",
// sndPkt[base].payload, sndPkt[base].seqnum);
//printf("\n");
}
/* One-time sender initialization, invoked before any other entity-A
 * routine: reset the window state and mark every timer slot unused. */
void A_init()
{
    base = 0;
    nextseqnum = 0;
    N = getwinsize();

    /* A negative expiry time flags a timeoutArray slot as "no packet
     * outstanding" (see A_input / A_output). */
    for (int i = 0; i < 1000; ++i) {
        timeoutArray[i] = -1.0;
    }
}
/* Note that with simplex transfer from a-to-B, there is no B_output() */
/* called from layer 3, when a packet arrives for layer 4 at B*/
/* Receiver logic: drop corrupted packets silently (sender retransmits on
 * timeout); deliver in-order packets plus any buffered successors; buffer
 * out-of-order packets; re-ACK duplicates. Every valid packet is ACKed
 * with its own sequence number.
 * NOTE(review): the ACK packet's seqnum/checksum/payload are left
 * uninitialized -- A_input never inspects them, but this is technically
 * undefined behaviour; confirm intended. */
void B_input(packet)
struct pkt packet;
{
struct pkt ackPkt;
// printf("run B_input\n");
// printf("B receving: %s, seqnum: %d, current acknum: %d\n",
// packet.payload, packet.seqnum, nextacknum);
//compare checksum
int isCheckSumVaild = vaildiate_checksum(packet);
if (isCheckSumVaild){
//receving packet
//check if deplicate pkt
/*if the packet seq is equal to nextacknum, check if there is buffer,
if have, send the buffer to upper layer, and send back the acknum to A,
if have no buffer, just send the packet to layer5
*/
if (packet.seqnum == nextacknum){
tolayer5(1, packet.payload);
// printf("sending :%d to layer5: %s\n", packet.seqnum, packet.payload);
//check if there is buffer
// Flush consecutively buffered packets (a negative acknum marks an
// empty slot); each delivery advances nextacknum by one.
for (int i = packet.seqnum + 1; i < packet.seqnum + N && i < 1000; i++){
if (recvBufPkt[i].acknum < 0){
break;
}
if (recvBufPkt[i].acknum >= 0){
// printf("sending :%d to layer5: %s\n",i, recvBufPkt[i].payload);
tolayer5(1, recvBufPkt[i].payload);
nextacknum += 1;
}
}
//send ack packet
ackPkt.acknum = packet.seqnum;
// printf("B sending ack: %d\n", ackPkt.acknum);
tolayer3(1, ackPkt);
// Final increment accounts for the packet just delivered directly.
nextacknum += 1;
}else if (packet.seqnum > nextacknum){
// Out-of-order: buffer it and ACK it individually.
// printf("B add buffer seq: %d\n", packet.seqnum);
recvBufPkt[packet.seqnum] = packet;
ackPkt.acknum = packet.seqnum;
// printf("B sending ack: %d\n", ackPkt.acknum);
tolayer3(1, ackPkt);
}else{
//duplicate packet: re-ACK so the sender can advance its window
ackPkt.acknum = packet.seqnum;
// printf("B sending ack: %d\n", ackPkt.acknum);
tolayer3(1, ackPkt);
}
}
//printf("\n");
}
/* One-time receiver initialization, invoked before any other entity-B
 * routine. */
void B_init()
{
    /* Mark every receive-buffer slot empty; B_input treats a negative
     * acknum as "nothing buffered at this sequence number". */
    for (int i = 0; i < 1000; ++i) {
        recvBufPkt[i].acknum = -1;
    }
}
|
<reponame>WebFlight/sleeper<filename>lib/parser.js
var parser = {
  /**
   * Add two integers.
   * @param {int} a first addend
   * @param {int} b second addend
   * @return {int} the sum a + b
   */
  sum: function(a, b) {
    return a + b;
  }
};

module.exports = parser;
|
#!/bin/bash
set -e

# Refuse to run unless the user has explicitly accepted the Octopus EULA.
if [[ "$ACCEPT_EULA" != "Y" ]]; then
  echo "ERROR: You must accept the EULA at https://octopus.com/company/legal by passing an environment variable 'ACCEPT_EULA=Y'"
  exit 1
fi

# Tentacle Docker images only support once instance per container. Running multiple instances can be achieved by running multiple containers.
instanceName=Tentacle
configurationDirectory=/etc/octopus
applicationsDirectory=/home/Octopus/Applications
alreadyConfiguredSemaphore="$configurationDirectory/.configuredSemaphore"
internalListeningPort=10933

mkdir -p "$configurationDirectory"
mkdir -p "$applicationsDirectory"

# Expose the Tentacle binary on PATH.
if [ ! -f /usr/bin/tentacle ]; then
  ln -s /opt/octopus/tentacle/Tentacle /usr/bin/tentacle
fi

# The semaphore file makes configuration idempotent across container restarts.
if [ -f "$alreadyConfiguredSemaphore" ]; then
  echo "Octopus Tentacle is already configured. Skipping reconfiguration."
  echo "If you want to force reconfiguration, please delete the file $alreadyConfiguredSemaphore and re-launch the container."
  exit 0
fi
# Resolve the hostname to register with the Octopus Server, selected by
# $PublicHostNameConfiguration:
#   PublicIp     - public IP as reported by api.ipify.org
#   FQDN         - fully qualified domain name
#   ComputerName - plain hostname
#   otherwise    - the literal $CustomPublicHostName value
function getPublicHostName() {
  if [[ "$PublicHostNameConfiguration" == "PublicIp" ]]; then
    curl https://api.ipify.org/
  elif [[ "$PublicHostNameConfiguration" == "FQDN" ]]; then
    hostname --fqdn
  elif [[ "$PublicHostNameConfiguration" == "ComputerName" ]]; then
    hostname
  else
    # printf with a quoted expansion preserves the value exactly; the old
    # unquoted 'echo $CustomPublicHostName' word-split it and collapsed
    # internal whitespace.
    printf '%s\n' "$CustomPublicHostName"
  fi
}
# Validate the environment-variable contract before configuring anything,
# then echo a summary of the effective settings. Exits 1 on any violation:
# requires either ServerApiKey or ServerUsername/ServerPassword, requires
# ServerUrl, and requires either TargetWorkerPool (worker mode) or both
# TargetEnvironment and TargetRole (deployment-target mode) -- the two
# modes are mutually exclusive.
function validateVariables() {
  if [[ -z "$ServerApiKey" ]]; then
    if [[ -z "$ServerPassword" || -z "$ServerUsername" ]]; then
      echo "Please specify either an API key or a username/password with the 'ServerApiKey' or 'ServerUsername'/'ServerPassword' environment variables" >&2
      exit 1
    fi
  fi

  if [[ -z "$ServerUrl" ]]; then
    echo "Please specify an Octopus Server with the 'ServerUrl' environment variable" >&2
    exit 1
  fi

  # Worker registration and environment/role registration are exclusive.
  if [[ ! -z "$TargetWorkerPool" ]]; then
    if [[ ! -z "$TargetEnvironment" ]]; then
      echo "The 'TargetEnvironment' environment variable is not valid in combination with the 'TargetWorkerPool' variable" >&2
      exit 1
    fi
    if [[ ! -z "$TargetRole" ]]; then
      echo "The 'TargetRole' environment variable is not valid in combination with the 'TargetWorkerPool' variable" >&2
      exit 1
    fi
  else
    if [[ -z "$TargetEnvironment" ]]; then
      echo "Please specify an environment name with the 'TargetEnvironment' environment variable" >&2
      exit 1
    fi
    if [[ -z "$TargetRole" ]]; then
      echo "Please specify a role name with the 'TargetRole' environment variable" >&2
      exit 1
    fi
  fi

  # Summary of the effective configuration (API key is never printed).
  echo " - server endpoint '$ServerUrl'"
  echo " - api key '##########'"
  # A non-empty ServerPort selects polling (active) mode.
  if [[ ! -z "$ServerPort" ]]; then
    echo " - communication mode 'Polling' (Active)"
    echo " - server port $ServerPort"
  else
    echo " - communication mode 'Listening' (Passive)"
    echo " - registered port $ListeningPort"
  fi
  if [[ ! -z "$TargetWorkerPool" ]]; then
    echo " - worker pool '$TargetWorkerPool'"
  else
    echo " - environment '$TargetEnvironment'"
    echo " - role '$TargetRole'"
  fi
  echo " - host '$PublicHostNameConfiguration'"
  if [[ ! -z "$TargetName" ]]; then
    echo " - name '$TargetName'"
  fi
  if [[ ! -z "$TargetTenant" ]]; then
    echo " - tenant '$TargetTenant'"
  fi
  if [[ ! -z "$TargetTenantTag" ]]; then
    echo " - tenant tag '$TargetTenantTag'"
  fi
  if [[ ! -z "$TargetTenantedDeploymentParticipation" ]]; then
    echo " - tenanted deployment participation '$TargetTenantedDeploymentParticipation'"
  fi
  if [[ ! -z "$Space" ]]; then
    echo " - space '$Space'"
  fi
}
# Create and configure the local Tentacle instance: directory paths,
# listening vs polling mode (a non-empty ServerPort means polling/noListen),
# trust reset, and a certificate if none exists yet.
function configureTentacle() {
  tentacle create-instance --instance "$instanceName" --config "$configurationDirectory/tentacle.config"

  echo "Setting directory paths ..."
  tentacle configure --instance "$instanceName" --app "$applicationsDirectory"

  echo "Configuring communication type ..."
  if [[ ! -z "$ServerPort" ]]; then
    # Polling mode: the Tentacle dials out, so it must not listen.
    tentacle configure --instance "$instanceName" --noListen "True"
  else
    tentacle configure --instance "$instanceName" --port $internalListeningPort --noListen "False"
  fi

  echo "Updating trust ..."
  tentacle configure --instance "$instanceName" --reset-trust

  echo "Creating certificate ..."
  # --if-blank keeps an existing certificate on reconfiguration.
  tentacle new-certificate --instance "$instanceName" --if-blank
}
# Register the configured Tentacle with the Octopus Server, either as a
# worker (TargetWorkerPool set) or as a deployment target (environments +
# roles, optionally tenants/tenant tags). Comma-separated env vars expand
# into one repeated CLI flag per value. All expansions are quoted so values
# containing spaces survive (the original left several unquoted).
function registerTentacle() {
  echo "Registering with server ..."

  local ARGS=()

  if [[ ! -z "$TargetWorkerPool" ]]; then
    ARGS+=('register-worker')

    IFS=',' read -ra WORKER_POOLS <<< "$TargetWorkerPool"
    for i in "${WORKER_POOLS[@]}"; do
      ARGS+=('--workerpool' "$i")
    done
  else
    ARGS+=('register-with')

    if [[ ! -z "$TargetEnvironment" ]]; then
      IFS=',' read -ra ENVIRONMENTS <<< "$TargetEnvironment"
      for i in "${ENVIRONMENTS[@]}"; do
        ARGS+=('--environment' "$i")
      done
    fi

    if [[ ! -z "$TargetRole" ]]; then
      IFS=',' read -ra ROLES <<< "$TargetRole"
      for i in "${ROLES[@]}"; do
        ARGS+=('--role' "$i")
      done
    fi

    if [[ ! -z "$TargetTenant" ]]; then
      IFS=',' read -ra TENANTS <<< "$TargetTenant"
      for i in "${TENANTS[@]}"; do
        ARGS+=('--tenant' "$i")
      done
    fi

    if [[ ! -z "$TargetTenantTag" ]]; then
      IFS=',' read -ra TENANTTAGS <<< "$TargetTenantTag"
      for i in "${TENANTTAGS[@]}"; do
        ARGS+=('--tenanttag' "$i")
      done
    fi
  fi

  ARGS+=(
    '--instance' "$instanceName"
    '--server' "$ServerUrl"
    '--space' "$Space"
    '--policy' "$MachinePolicy"
    '--force')

  # Non-empty ServerPort selects polling (active) communication.
  if [[ ! -z "$ServerPort" ]]; then
    ARGS+=(
      '--comms-style' 'TentacleActive'
      '--server-comms-port' "$ServerPort")
  else
    ARGS+=(
      '--comms-style' 'TentaclePassive'
      '--publicHostName' "$(getPublicHostName)")

    if [[ ! -z "$ListeningPort" && "$ListeningPort" != "$internalListeningPort" ]]; then
      ARGS+=('--tentacle-comms-port' "$ListeningPort")
    fi
  fi

  if [[ ! -z "$ServerApiKey" ]]; then
    echo "Registering Tentacle with API key"
    ARGS+=('--apiKey' "$ServerApiKey")
  else
    echo "Registering Tentacle with username/password"
    ARGS+=(
      '--username' "$ServerUsername"
      '--password' "$ServerPassword")
  fi

  if [[ ! -z "$TargetName" ]]; then
    ARGS+=('--name' "$TargetName")
  fi

  if [[ ! -z "$TargetTenantedDeploymentParticipation" ]]; then
    ARGS+=('--tenanted-deployment-participation' "$TargetTenantedDeploymentParticipation")
  fi

  tentacle "${ARGS[@]}"
}
# Main flow: validate inputs, configure the instance, register it with the
# server, then drop the semaphore so re-runs become no-ops.
echo "==============================================="
echo "Configuring Octopus Deploy Tentacle"
validateVariables
echo "==============================================="
configureTentacle
registerTentacle
# Creating the semaphore last means a failed run will retry from scratch.
touch $alreadyConfiguredSemaphore
echo "Configuration successful."
echo ""
|
#!/bin/bash -eux
# Install QEMU guest tooling (guest agent + SPICE agent) inside a Packer
# build, but only when the active builder is QEMU. PKG_MGR is expected to
# be set by the calling Packer template (dnf, apt-get, ...).
if [[ $PACKER_BUILDER_TYPE =~ qemu ]]; then
echo "==> Installing QEMU guest additions"
# Assume that we've installed all the prerequisites:
# kernel-headers-$(uname -r) kernel-devel-$(uname -r) gcc make perl
# from the install media via ks.cfg
# Except on Fedora 22 (which uses dnf)
if [ "${PKG_MGR}" == "dnf" ]; then
#${PKG_MGR} -y install kernel-headers-$(uname -r) kernel-devel-$(uname -r) gcc make perl
${PKG_MGR} -y install qemu-guest-agent spice-vdagent
elif [ "${PKG_MGR}" == "apt-get" ]; then
#${PKG_MGR} -y install kernel-headers-$(uname -r) kernel-devel-$(uname -r) gcc make perl
${PKG_MGR} -y install qemu-guest-agent spice-vdagent
fi
#echo "==> Removing packages needed for building guest tools"
#${PKG_MGR} -y remove gcc cpp kernel-devel kernel-headers perl
fi
|
<reponame>bgromov/pymetawear<gh_stars>10-100
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:mod:'accelerometer_logging.py'
==================
Updated by dmatthes1982 <<EMAIL>>
Created by hbldh <<EMAIL>>
Created on 2018-04-20
"""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import time
from pymetawear.discover import select_device
from pymetawear.client import MetaWearClient
from pymetawear.exceptions import PyMetaWearException, PyMetaWearDownloadTimeout

# Interactively pick a MetaWear board and connect to it.
address = select_device()
client = MetaWearClient(str(address), debug=False)
print("New client created: {0}".format(client))

# Show what the board's accelerometer supports before configuring it.
settings = client.accelerometer.get_possible_settings()
print("Possible accelerometer settings of client:")
for k, v in settings.items():
print(k, v)

# Configure sampling: 400 Hz, +/-4 g.
print("Write accelerometer settings...")
client.accelerometer.set_settings(data_rate=400, data_range=4.0)
settings = client.accelerometer.get_current_settings()
print("Accelerometer settings of client: {0}".format(settings))

# Log on-board for 10 seconds (no live streaming).
client.accelerometer.high_frequency_stream = False
client.accelerometer.start_logging()
print("Logging accelerometer data...")
time.sleep(10.0)
client.accelerometer.stop_logging()
print("Logging stopped.")

# Download the log, retrying up to 3 times across reconnects since BLE
# transfers of large logs can time out mid-download.
print("Downloading data...")
download_done = False
n = 0
data = None
while (not download_done) and n < 3:
try:
data = client.accelerometer.download_log()
download_done = True
except PyMetaWearDownloadTimeout:
print("Download of log interrupted. Trying to reconnect...")
client.disconnect()
client.connect()
n += 1
if data is None:
raise PyMetaWearException("Download of logging data failed.")
for d in data:
print(d)

print("Disconnecting...")
client.disconnect()
|
<reponame>xoebus/cryptdo
// Package flag contains flag unmarshalers for the github.com/jessevdk/go-flags
// parsing library.
package flag
import (
"errors"
"strings"
)
// Ext represents a file extension supplied by the user on the command line.
// After a successful UnmarshalFlag call it always carries a leading dot.
type Ext string

// ErrEmptyExt is returned when the user provides an empty extension.
var ErrEmptyExt = errors.New("extension may not be empty")

// UnmarshalFlag parses the raw command-line string into a usable Ext,
// normalizing it to include a leading dot. An empty value yields ErrEmptyExt.
func (e *Ext) UnmarshalFlag(value string) error {
	if value == "" {
		return ErrEmptyExt
	}
	if strings.HasPrefix(value, ".") {
		*e = Ext(value)
	} else {
		*e = Ext("." + value)
	}
	return nil
}
|
__version__ = "2.1.2"
|
<filename>src/helper/MyDatatableImporter.js<gh_stars>0
import React, {Component} from "react";
import {FileUpload} from "primereact/fileupload";
const csv = require('csv');
export default class MyDatatableImporter extends Component {
static FILETYPE_JSON = "json";
static FILETYPE_CSV = "csv";
constructor(props) {
super(props);
}
handleParsedResources(resources){
if(this.props.onResourceParse){
this.props.onResourceParse(resources);
this.fileupload.clear();
}
}
myUploaderJSON(fileText){
console.log(fileText);
let resources = JSON.parse(fileText);
console.log(resources);
this.handleParsedResources(resources);
}
myUploaderCSV(fileText){
//TODO ask user which delimiter he has ?
let instance = this;
csv.parse(fileText,{delimiter: this.props.delimiter},function(err,output){
//console.log(output);
if(err){
console.log(err);
} else {
if(output){
let attributeFields = output[0];
let resources = [];
for(let i=1; i<output.length; i++){
let row = output[i];
let resource = {};
for(let j=0; j<attributeFields.length; j++){
let field = attributeFields[j];
let value = row[j];
resource[field] = value;
}
resources.push(resource);
}
instance.handleParsedResources(resources);
} else {
console.log("Unkown CSV Import error");
}
}
})
}
myUploader(event) {
console.log("Uploaded: "+event.files.length);
let amount = event.files.length;
if(amount===1){
let file = event.files[0];
console.log(file);
console.log(Object.keys(file));
console.log(file.name);
let fileName = file.name;
let extension = fileName.split('.').pop();
let uploader = null;
if(extension===MyDatatableImporter.FILETYPE_CSV){
uploader=this.myUploaderCSV.bind(this);
}
if(extension===MyDatatableImporter.FILETYPE_JSON){
uploader=this.myUploaderJSON.bind(this);
}
let reader = new FileReader();
reader.onload = function(event) {
if(!!uploader){
let result = event.target.result;
uploader(result);
}
}
reader.readAsText(file)
}
}
renderImportPanel(){
if(this.props.fileExtension){
return <FileUpload ref={(el) => this.fileupload = el} multiple={false} name="demo[]" url="./upload" customUpload uploadHandler={this.myUploader.bind(this)} accept={"."+this.props.fileExtension} />
} else {
return null;
}
}
render(){
return (
<div>
<div className="content-section introduction">
<div className="feature-intro">
<h1>Import Menu</h1>
<p>Please select your {this.props.fileExtension} file you want to Import</p>
</div>
</div>
<div className="content-section implementation">
{this.renderImportPanel()}
</div>
</div>
);
}
}
|
#!/bin/sh
# Snapshot selected environment variables into /home/opencloset/.env.sh as
# "export NAME=value" lines, so that cron jobs (which start with a minimal
# environment) can source them later.
#
# NOTE(review): values are written unquoted — a value containing spaces or
# quotes would break when the file is sourced; confirm the matched variables
# are always simple tokens. `grep PATH` also matches any variable whose name
# merely contains "PATH" — TODO confirm that is intended.
env | grep MOJO | sed 's/^\(.*\)$/export \1/g' > /home/opencloset/.env.sh
env | grep PATH | sed 's/^\(.*\)$/export \1/g' >> /home/opencloset/.env.sh
env | grep OPENCLOSET | sed 's/^\(.*\)$/export \1/g' >> /home/opencloset/.env.sh
chmod o+x /home/opencloset/.env.sh

# Run the command on container startup: keep cron in the foreground so the
# container stays alive.
cron -f
|
#!/bin/bash
# Launch the Julia LanguageServer on stdin/stdout and babysit it: if this
# wrapper becomes an orphan (its parent editor dies), kill the server and
# its child processes so nothing is left running.

JULIABIN="julia"

# Option parsing: -j/--julia <path> selects the Julia executable.
while [[ $# -gt 0 ]]
do
  key="$1"
  case $key in
    -j|--julia)
      JULIABIN="$2"
      shift
      ;;
    *)
      echo "unknown option: $key" >&2
      # Exit non-zero so callers can detect bad usage (the original exited 0).
      exit 1
      ;;
  esac
  shift
done

$JULIABIN --startup-file=no --history-file=no -e \
  "using LanguageServer; import SymbolServer; server = LanguageServer.LanguageServerInstance(stdin, stdout); server.runlinter = true; run(server);" \
  <&0 >&1 &

PID=$!

while true; do
  # quit if server exited
  kill -0 "$PID" || break
  # If the current process is an orphan (re-parented to PID 1), kill the
  # server and its children. The command substitution is deliberately
  # unquoted so word splitting strips ps's whitespace padding.
  if [ $(ps -o ppid= -p "$$") -eq 1 ]; then
    kill -9 "$PID" $(pgrep -P "$PID")
    exit
  fi
  sleep 1
done
|
import os
import subprocess
import sys

# Step 1: ask setup.py to build the GLPK extension.
os.environ['BUILD_GLPK'] = '1'

# Step 2: run the setup script with the same interpreter that is executing
# this file (sys.executable), so the install always targets the environment
# this script runs in — a bare "python" on PATH may be a different one.
python_command = sys.executable
setup_script = 'setup.py'

try:
    subprocess.check_call([python_command, setup_script, 'install'])
    print("Installation successful")
except subprocess.CalledProcessError as e:
    print(f"Installation failed with error code {e.returncode}")
#!/usr/bin/env bash
# Deployment pipeline: deploys smart contracts, REST servers and apps.
# The sourced .bluemix libraries provide helpers used below (install_nodejs,
# provision_blockchain, do_curl, update_blockchain_deploy_status, ...).
set -ex
source .bluemix/pipeline-COMMON.sh
source .bluemix/pipeline-CLOUDANT.sh
source .bluemix/pipeline-BLOCKCHAIN.sh
# One entry per directory under contracts/ and apps/.
export CONTRACTS=$(ls contracts)
export APPS=$(ls apps)
# Composer contracts are recognized by the presence of a package.json.
if ls contracts/*/package.json > /dev/null 2>&1
then
  export HAS_COMPOSER_CONTRACTS=true
fi
# JSON object mapping business network names to their REST server URLs,
# filled in later by gather_rest_server_urls.
export REST_SERVER_URLS={}
# Deploy every contract found under contracts/.
deploy_contracts() {
  local contract_name
  for contract_name in ${CONTRACTS}
  do
    deploy_contract "${contract_name}"
  done
}

# Deploy a single contract, dispatching on its type: a package.json marks a
# Composer contract, *.go sources mark a Fabric chaincode contract.
deploy_contract() {
  CONTRACT=$1
  if [ -f "contracts/${CONTRACT}/package.json" ]
  then
    deploy_composer_contract "${CONTRACT}"
  elif ls contracts/${CONTRACT}/*.go > /dev/null 2>&1
  then
    deploy_fabric_contract "${CONTRACT}"
  else
    echo "unrecognized contract type ${CONTRACT}"
    exit 1
  fi
}
# Deploy a Composer business network: install each .bna archive, start (or
# upgrade) the network, and import the admin card.
function deploy_composer_contract {
  CONTRACT=$1
  echo deploying composer contract ${CONTRACT}
  pushd contracts/${CONTRACT}
  BUSINESS_NETWORK_NAME=$(jq --raw-output '.name' package.json)
  BUSINESS_NETWORK_VERSION=$(jq --raw-output '.version' package.json)
  BUSINESS_NETWORK_ARCHIVES=$(ls dist/*.bna)
  BUSINESS_NETWORK_CARD=admin@${BUSINESS_NETWORK_NAME}
  for BUSINESS_NETWORK_ARCHIVE in ${BUSINESS_NETWORK_ARCHIVES}
  do
    # Reset per-archive state. Without this, one upgrade would make every
    # following archive (and every later contract in this shell) wrongly
    # take the upgrade path too.
    BUSINESS_NETWORK_UPGRADE=false
    # Install the archive currently being processed. The original passed the
    # whole ${BUSINESS_NETWORK_ARCHIVES} list here, which breaks the command
    # when dist/ contains more than one .bna file.
    if ! OUTPUT=$(composer network install -c ${BLOCKCHAIN_NETWORK_CARD} -a ${BUSINESS_NETWORK_ARCHIVE} 2>&1)
    then
      if [[ "${OUTPUT}" != *"already installed"* ]]
      then
        echo failed to install composer contract ${CONTRACT}
        exit 1
      fi
    fi
    # Start the network; retry on transient errors, and fall through to the
    # upgrade path when this chaincode already exists.
    while ! OUTPUT=$(composer network start -c ${BLOCKCHAIN_NETWORK_CARD} -n ${BUSINESS_NETWORK_NAME} -V ${BUSINESS_NETWORK_VERSION} -A ${BLOCKCHAIN_NETWORK_ENROLL_ID} -S ${BLOCKCHAIN_NETWORK_ENROLL_SECRET} -f adminCard.card 2>&1)
    do
      if [[ "${OUTPUT}" = *"REQUEST_TIMEOUT"* ]]
      then
        sleep 30
      elif [[ "${OUTPUT}" = *"premature execution"* ]]
      then
        sleep 30
      elif [[ "${OUTPUT}" = *"chaincode exists"* ]]
      then
        BUSINESS_NETWORK_UPGRADE=true
        break
      else
        echo failed to start composer contract ${CONTRACT}
        exit 1
      fi
    done
    if [[ "${BUSINESS_NETWORK_UPGRADE}" = "true" ]]
    then
      # Network already running: upgrade in place, with the same
      # transient-error retry loop as the start path.
      while ! OUTPUT=$(composer network upgrade -c ${BLOCKCHAIN_NETWORK_CARD} -n ${BUSINESS_NETWORK_NAME} -V ${BUSINESS_NETWORK_VERSION} 2>&1)
      do
        if [[ "${OUTPUT}" = *"REQUEST_TIMEOUT"* ]]
        then
          sleep 30
        elif [[ "${OUTPUT}" = *"premature execution"* ]]
        then
          sleep 30
        elif [[ "${OUTPUT}" = *"version already exists for chaincode"* ]]
        then
          break
        else
          echo failed to upgrade composer contract ${CONTRACT}
          exit 1
        fi
      done
    else
      # Fresh start: replace any stale admin card with the new one.
      if composer card list -c ${BUSINESS_NETWORK_CARD} > /dev/null 2>&1
      then
        composer card delete -c ${BUSINESS_NETWORK_CARD}
      fi
      composer card import -f adminCard.card -c ${BUSINESS_NETWORK_CARD}
    fi
    # Smoke-test connectivity with the admin card.
    composer network ping -c ${BUSINESS_NETWORK_CARD}
  done
  popd
}
# Deploy a Fabric (Go) chaincode contract: upload all *.go files through the
# blockchain REST API, then instantiate the chaincode on the default channel.
function deploy_fabric_contract {
  CONTRACT=$1
  echo deploying fabric contract ${CONTRACT}
  pushd contracts/${CONTRACT}
  # version.env is expected to define CHAINCODE_ID and CHAINCODE_VERSION.
  source version.env
  CHAINCODE_FILES=$(find . -name "*.go")
  CHAINCODE_FILE_OPTS=""
  CHANNEL=defaultchannel
  # Build one "-F files[]=@<path>" multipart option per source file.
  for CHAINCODE_FILE in ${CHAINCODE_FILES}
  do
    CHAINCODE_FILE_OPTS="${CHAINCODE_FILE_OPTS} -F files[]=@${CHAINCODE_FILE}"
  done
  # Install the chaincode; tolerate "already exists" responses.
  if ! OUTPUT=$(do_curl -X POST -u ${BLOCKCHAIN_KEY}:${BLOCKCHAIN_SECRET} ${CHAINCODE_FILE_OPTS} -F chaincode_id=${CHAINCODE_ID} -F chaincode_version=${CHAINCODE_VERSION} ${BLOCKCHAIN_URL}/api/v1/networks/${BLOCKCHAIN_NETWORK_ID}/chaincode/install)
  then
    if [[ "${OUTPUT}" != *"chaincode code"*"exists"* ]]
    then
      echo failed to install fabric contract ${CONTRACT}
      exit 1
    fi
  fi
  # Request body for instantiation; the heredoc expands the shell variables.
  cat << EOF > request.json
{
  "chaincode_id": "${CHAINCODE_ID}",
  "chaincode_version": "${CHAINCODE_VERSION}",
  "chaincode_arguments": "[\"12345\"]"
}
EOF
  # Instantiate; retry on transient gateway errors, accept "already exists".
  while ! OUTPUT=$(do_curl -X POST -H 'Content-Type: application/json' -u ${BLOCKCHAIN_KEY}:${BLOCKCHAIN_SECRET} --data-binary @request.json ${BLOCKCHAIN_URL}/api/v1/networks/${BLOCKCHAIN_NETWORK_ID}/channels/${CHANNEL}/chaincode/instantiate)
  do
    if [[ "${OUTPUT}" = *"Failed to establish a backside connection"* ]]
    then
      sleep 30
    elif [[ "${OUTPUT}" = *"premature execution"* ]]
    then
      sleep 30
    elif [[ "${OUTPUT}" = *"version already exists for chaincode"* ]]
    then
      break
    else
      echo failed to start fabric contract ${CONTRACT}
      exit 1
    fi
  done
  rm -f request.json
  popd
}
# Deploy a REST server for every contract.
deploy_rest_servers() {
  local contract_name
  for contract_name in ${CONTRACTS}
  do
    deploy_rest_server "${contract_name}"
  done
}

# Deploy the REST server for one contract; only Composer contracts
# (identified by a package.json) have REST server support.
deploy_rest_server() {
  CONTRACT=$1
  if [ -f "contracts/${CONTRACT}/package.json" ]
  then
    deploy_composer_rest_server "${CONTRACT}"
  else
    echo "rest server not supported for contract type ${CONTRACT}"
  fi
}
# Push (but do not start) a Cloud Foundry app running the extended
# composer-rest-server docker image for one business network, and configure
# it through environment variables.
deploy_composer_rest_server() {
  CONTRACT=$1
  echo "deploying rest server for composer contract ${CONTRACT}"
  pushd contracts/${CONTRACT}
  BUSINESS_NETWORK_NAME=$(jq --raw-output '.name' package.json)
  BUSINESS_NETWORK_CARD=admin@${BUSINESS_NETWORK_NAME}
  CF_APP_NAME=composer-rest-server-${BUSINESS_NETWORK_NAME}
  cf push "${CF_APP_NAME}" \
    --docker-image debashish2016/composer-rest-server-extended \
    -i 1 \
    -m 256M \
    --no-start \
    --no-manifest
  cf set-env "${CF_APP_NAME}" NODE_CONFIG "${NODE_CONFIG}"
  cf set-env "${CF_APP_NAME}" COMPOSER_CARD "${BUSINESS_NETWORK_CARD}"
  cf set-env "${CF_APP_NAME}" COMPOSER_NAMESPACES required
  cf set-env "${CF_APP_NAME}" COMPOSER_WEBSOCKETS true
  popd
}
# Deploy every application found under apps/.
deploy_apps() {
  local app_name
  for app_name in ${APPS}
  do
    deploy_app "${app_name}"
  done
}

# Deploy one application, dispatching on its type: a manifest.yml marks a
# Cloud Foundry app, a Dockerfile marks a docker app.
deploy_app() {
  APP=$1
  if [ -f "apps/${APP}/manifest.yml" ]
  then
    deploy_cf_app "${APP}"
  elif [ -f "apps/${APP}/Dockerfile" ]
  then
    deploy_docker_app "${APP}"
  else
    echo "unrecognized app type ${APP}"
    exit 1
  fi
}

# Push a Cloud Foundry app (not started yet) and bind it read-only to the
# blockchain service instance.
deploy_cf_app() {
  APP=$1
  echo "deploying cloud foundry app ${APP}"
  pushd apps/${APP}
  cf push "${APP}" -i 1 -m 128M --no-start
  cf bind-service "${APP}" "${BLOCKCHAIN_SERVICE_INSTANCE}" -c '{"permissions":"read-only"}'
  popd
}

# Placeholder: docker app deployment is not implemented yet.
deploy_docker_app() {
  APP=$1
  echo "deploying docker app ${APP}"
  pushd apps/${APP}
  echo "cannot deploy docker apps just yet"
  popd
}
# Collect the REST server URL for every contract into REST_SERVER_URLS.
gather_rest_server_urls() {
  local contract_name
  for contract_name in ${CONTRACTS}
  do
    gather_rest_server_url "${contract_name}"
  done
}

# Gather the REST server URL for one contract; only Composer contracts
# (identified by a package.json) run a REST server.
gather_rest_server_url() {
  CONTRACT=$1
  if [ -f "contracts/${CONTRACT}/package.json" ]
  then
    gather_composer_rest_server_url "${CONTRACT}"
  else
    echo "rest server not supported for contract type ${CONTRACT}"
  fi
}

# Look up the route of the contract's composer-rest-server app and merge it
# into the REST_SERVER_URLS JSON object under the business network name.
gather_composer_rest_server_url() {
  CONTRACT=$1
  echo "gathering rest server url for composer contract ${CONTRACT}"
  pushd contracts/${CONTRACT}
  BUSINESS_NETWORK_NAME=$(jq --raw-output '.name' package.json)
  CF_APP_NAME=composer-rest-server-${BUSINESS_NETWORK_NAME}
  REST_SERVER_URL=$(cf app ${CF_APP_NAME} | grep routes: | awk '{print $2}')
  export REST_SERVER_URLS=$(echo ${REST_SERVER_URLS} | jq ". + {\"${BUSINESS_NETWORK_NAME}\":\"https://${REST_SERVER_URL}\"}")
  popd
}

# Collect the public URL of every app.
gather_app_urls() {
  local app_name
  for app_name in ${APPS}
  do
    gather_app_url "${app_name}"
  done
}

# Gather the URL for one app, dispatching on its type.
gather_app_url() {
  APP=$1
  if [ -f "apps/${APP}/manifest.yml" ]
  then
    gather_cf_app_url "${APP}"
  elif [ -f "apps/${APP}/Dockerfile" ]
  then
    gather_docker_app_url "${APP}"
  else
    echo "unrecognized app type ${APP}"
    exit 1
  fi
}

# Record the sample app's route in BLOCKCHAIN_SAMPLE_URL.
gather_cf_app_url() {
  APP=$1
  echo "gathering url for cloud foundry app ${APP}"
  pushd apps/${APP}
  if [[ "${APP}" = "${BLOCKCHAIN_SAMPLE_APP}" ]]
  then
    export BLOCKCHAIN_SAMPLE_URL=$(cf app ${APP} | grep routes: | awk '{print $2}')
  fi
  popd
}

# Placeholder: docker app URL gathering is not implemented yet.
gather_docker_app_url() {
  APP=$1
  echo "gathering url for docker app ${APP}"
  pushd apps/${APP}
  echo "cannot gather urls for docker apps just yet"
  popd
}
# Start the REST server for every contract.
start_rest_servers() {
  local contract_name
  for contract_name in ${CONTRACTS}
  do
    start_rest_server "${contract_name}"
  done
}

# Start the REST server for one contract; only Composer contracts have one.
start_rest_server() {
  CONTRACT=$1
  if [ -f "contracts/${CONTRACT}/package.json" ]
  then
    start_composer_rest_server "${CONTRACT}"
  else
    echo "rest server not supported for contract type ${CONTRACT}"
  fi
}

# Start the previously pushed composer-rest-server app for one network.
start_composer_rest_server() {
  CONTRACT=$1
  echo "starting rest server for composer contract ${CONTRACT}"
  pushd contracts/${CONTRACT}
  BUSINESS_NETWORK_NAME=$(jq --raw-output '.name' package.json)
  CF_APP_NAME=composer-rest-server-${BUSINESS_NETWORK_NAME}
  cf start "${CF_APP_NAME}"
  popd
}

# Start every app.
start_apps() {
  local app_name
  for app_name in ${APPS}
  do
    start_app "${app_name}"
  done
}

# Start one app, dispatching on its type.
start_app() {
  APP=$1
  if [ -f "apps/${APP}/manifest.yml" ]
  then
    start_cf_app "${APP}"
  elif [ -f "apps/${APP}/Dockerfile" ]
  then
    start_docker_app "${APP}"
  else
    echo "unrecognized app type ${APP}"
    exit 1
  fi
}

# Hand the collected REST server URLs to the app, then start it.
start_cf_app() {
  APP=$1
  echo "starting cloud foundry app ${APP}"
  pushd apps/${APP}
  cf set-env "${APP}" REST_SERVER_URLS "${REST_SERVER_URLS}"
  cf start "${APP}"
  popd
}

# Placeholder: starting docker apps is not implemented yet.
start_docker_app() {
  APP=$1
  echo "starting docker app ${APP}"
  pushd apps/${APP}
  echo "cannot start docker apps just yet"
  popd
}
# ---- main pipeline sequence ----
# Prepare tooling; Composer contracts additionally need Cloudant for the
# card wallet.
install_nodejs
if [[ "${HAS_COMPOSER_CONTRACTS}" = "true" ]]
then
  install_composer
  provision_cloudant
  create_cloudant_database
  configure_composer_wallet
fi
provision_blockchain
if [[ "${HAS_COMPOSER_CONTRACTS}" = "true" ]]
then
  create_blockchain_network_card
fi
# Deploy contracts, REST servers and apps in parallel, reporting progress
# through numbered status updates as each background job finishes.
update_blockchain_deploy_status 1
deploy_contracts &
DEPLOY_CONTRACTS_PID=$!
deploy_rest_servers &
DEPLOY_REST_SERVERS_PID=$!
deploy_apps &
DEPLOY_APPS_PID=$!
wait ${DEPLOY_CONTRACTS_PID}
update_blockchain_deploy_status 2
wait ${DEPLOY_REST_SERVERS_PID}
update_blockchain_deploy_status 3
wait ${DEPLOY_APPS_PID}
update_blockchain_deploy_status 4
# URL gathering must run after deployment so the routes exist.
gather_rest_server_urls
update_blockchain_deploy_status 5
gather_app_urls
update_blockchain_deploy_status 6
# Start REST servers and apps in parallel; apps receive REST_SERVER_URLS.
start_rest_servers &
START_REST_SERVERS_PID=$!
start_apps &
START_APPS_PID=$!
wait ${START_REST_SERVERS_PID}
update_blockchain_deploy_status 7
wait ${START_APPS_PID}
update_blockchain_deploy_status 8
|
-- Count() returns the number of rows matching the given criteria.
-- (Statement terminators added: the original ran the SELECTs together,
-- which is not parseable as a single script.)

-- List all employees.
SELECT Emp.* FROM EMPLOYEE Emp;

-- Count the rows in the EMPLOYEE table.
SELECT Count(Emp.EMP_ID) AS Nb_Employes
FROM EMPLOYEE Emp;

-- Show the ACCOUNT table.
SELECT * FROM ACCOUNT;

-- Count the number of distinct customers holding at least one account.
SELECT Count(DISTINCT Acc.CUST_ID) AS ClientDistinct
FROM ACCOUNT Acc;

-- List customers with their number of accounts (grouped by customer id).
SELECT Acc.CUST_ID,
       Count(Acc.ACCOUNT_ID) AS NbComptes
FROM ACCOUNT Acc
GROUP BY Acc.CUST_ID;
package main

import (
	"helloworld"
	"routes"
	"sayhi"
)

// main prints greetings via the sayhi and helloworld packages, then calls
// routes.Serve with port "9080" — presumably starting a blocking server;
// confirm against the routes package.
func main() {
	sayhi.Sayhi()
	helloworld.Print_Hello_World()
	routes.Serve("9080")
}
|
#!/usr/bin/bash
# install.sh - prepare the dependencies for the run.sh
#
# It only handles installing from scratch and will probably fail on a subsequent run.
# The downloads stay parallel so this could be easily converted into a
# Dockerfile, just by adding `RUN` directives (and `COPY requirements.txt .`).
set -euo pipefail

cd "$(dirname "$0")"

# Download the three tools in parallel, capturing each PID. A bare `wait`
# returns 0 even when a background curl failed, so each PID is waited on
# individually — under `set -e` a failed download now aborts the script.
curl -sL https://github.com/terraform-docs/terraform-docs/releases/download/v0.15.0/terraform-docs-v0.15.0-linux-amd64.tar.gz > terraform-docs.tar.gz &
PID_DOCS=$!
curl -sL https://github.com/tfsec/tfsec/releases/download/v0.34.0/tfsec-linux-amd64 > tfsec &
PID_TFSEC=$!
curl -sL https://github.com/terraform-linters/tflint/releases/download/v0.29.0/tflint_linux_amd64.zip > tflint.zip &
PID_TFLINT=$!
wait "$PID_DOCS"
wait "$PID_TFSEC"
wait "$PID_TFLINT"
echo Finished successfully all parallel downloads ------------------------------------------------------------------

# Unpack and install each tool into /usr/local/bin.
tar zxf terraform-docs.tar.gz
rm terraform-docs.tar.gz
mv terraform-docs /usr/local/bin/
chmod +x tfsec
mv tfsec /usr/local/bin/
unzip tflint.zip
rm tflint.zip
mv tflint /usr/local/bin/

# Sanity checks: every tool must be runnable.
git --version
terraform-docs --version
tfsec --version
tflint --version
terraform version

# Informational: print the newest available release of each tool.
echo "Also, the newest release: $(curl -s https://api.github.com/repos/terraform-docs/terraform-docs/releases/latest | grep -o -E "https://.+?-linux-amd64")"
echo "Also, the newest release: $(curl -s https://api.github.com/repos/tfsec/tfsec/releases/latest | grep -o -E "https://.+?tfsec-linux-amd64")"
echo "Also, the newest release: $(curl -s https://api.github.com/repos/terraform-linters/tflint/releases/latest | grep -o -E "https://.+?_linux_amd64.zip")"

python3 -m pip install -r requirements.txt
|
document.addEventListener("DOMContentLoaded", function(event) {

    // Wires up the hamburger toggle: clicking it shows/hides the side nav
    // and shifts the body/header padding accordingly.
    const showNavbar = (toggleId, navId, bodyId, headerId) => {
        const toggle = document.getElementById(toggleId),
            nav = document.getElementById(navId),
            bodypd = document.getElementById(bodyId),
            headerpd = document.getElementById(headerId)

        // Only attach the handler when every element exists on the page.
        if (toggle && nav && bodypd && headerpd) {
            toggle.addEventListener('click', () => {
                // show navbar
                nav.classList.toggle('show')
                // change icon
                toggle.classList.toggle('bx-x')
                // add padding to body
                bodypd.classList.toggle('body-pd')
                // add padding to header
                headerpd.classList.toggle('body-pd')
            })
        }
    }

    showNavbar('header-toggle', 'nav-bar', 'body-pd', 'header')

    /*===== LINK ACTIVE =====*/
    // Highlight the clicked nav link and clear 'active' from the others.
    const linkColor = document.querySelectorAll('.nav_link')

    function colorLink() {
        if (linkColor) {
            linkColor.forEach(l => l.classList.remove('active'))
            this.classList.add('active')
        }
    }
    linkColor.forEach(l => l.addEventListener('click', colorLink))
});

document.addEventListener("DOMContentLoaded", function() {
    // Pin the top navbar once the page scrolls past 50px, compensating for
    // the element leaving the document flow with body padding.
    window.addEventListener('scroll', function() {
        if (window.scrollY > 50) {
            document.getElementById('navbar_top').classList.add('fixed-top');
            // Declared locally: the original leaked navbar_height as an
            // implicit global, which throws in strict mode / modules.
            const navbarHeight = document.querySelector('.navbar').offsetHeight;
            document.body.style.paddingTop = navbarHeight + 'px';
        } else {
            document.getElementById('navbar_top').classList.remove('fixed-top');
            // remove padding top from body
            document.body.style.paddingTop = '0';
        }
    });
});
// DOMContentLoaded end
// NOTE(review): this looks like Doxygen-generated navigation data (xhtml
// anchors for BOOST_FIXTURE_TEST_CASE entries of _quantize_8cpp) — if so,
// it is regenerated by the docs build and should not be edited by hand.
var _quantize_8cpp =
[
    [ "BOOST_FIXTURE_TEST_CASE", "_quantize_8cpp.xhtml#a8afe4d873af2ca2fa76942b05bc3e399", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_quantize_8cpp.xhtml#a65910a5d3bb4d526dd587e72a2c7e916", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_quantize_8cpp.xhtml#a8cd3a73f0826951c602e46ed49adf3e0", null ]
];
<reponame>TaiXuan91/md2html
// Usage
// command [source file or directories] [-o output directory]

// Setting
// Path separator (Windows-style backslash; used when expanding directories
// and later when flattening output names).
var pathSeparator='\\'

// Read command line.
const argsObj = require('command-line-parser')();
console.log('receive command line imformations:')
console.log(argsObj)

// Load renders.
// Load syntax highlight.
var hljs = require('highlight.js'); // https://highlightjs.org/

// Load markdown-it with highlight.js: fenced code blocks are highlighted
// when the language is recognized, otherwise fall back to default escaping.
var mdpre = require('markdown-it')({
  highlight: function (str, lang) {
    if (lang && hljs.getLanguage(lang)) {
      try {
        return hljs.highlight(lang, str).value;
      } catch (__) {}
    }
    return ''; // use external default escaping
  }
});

// Load math render (KaTeX via markdown-it-texmath).
let kt = require('katex'),
    tm = require('markdown-it-texmath').use(kt),
    md = mdpre.use(tm);

// HTML page template wrapping rendered markdown output.
// The style from VSCode and Markdown+Math plugin.
// I copied them from source code of Markdown+Math.
const clipTmpl = (html,usrcss) => `<!doctype html><html><head><meta charset='utf-8'>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/github-markdown-css/2.4.1/github-markdown.min.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/9.11.0/styles/default.min.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/KaTeX/0.8.3/katex.min.css">
<link rel="stylesheet" href="https://gitcdn.xyz/repo/goessner/mdmath/master/css/texmath.css">
<link rel="stylesheet" href="https://gitcdn.xyz/repo/goessner/mdmath/master/css/vscode-texmath.css">
${usrcss ? `<link rel="stylesheet" href="${usrcss}">` : ''}
</head><body class="markdown-body">
${html}
</body></html>`.replace('vscode-resource:','');

// Load file system.
var fs=require('fs');
//var ph = require('path');
function exists(filePath){
return fs.existsSync(filePath);
}
function isDir(path){
return exists(path)&&fs.statSync(path).isDirectory();
}
// Check if it is a md file.
function isMarkdownFile(name){
const regex1=/.md$/;
const regex2=/.markdown$/;
if(regex1.exec(name)||regex2.exec(name)){
return true;
}
else{
return false;
}
}
// Filter out markdown files.
function filterOutMarkdownFile(nameList){
var tempList=new Array();
for(var index in nameList){
if(isMarkdownFile(nameList[index])){
tempList.push(nameList[index]);
}
}
return tempList;
}
// Get names of markdown files.
var totalNameList=new Array();
var aList=argsObj._args;
for (var nameIndex in aList){
if(!exists(aList[nameIndex])){
continue;
}
if(isDir(aList[nameIndex])){
var tempArray=fs.readdirSync(aList[nameIndex])
var tempArray2=new Array();
for(var relativePath in tempArray){
tempArray2.push(aList[nameIndex]+pathSeparator+tempArray[relativePath]);
}
totalNameList=totalNameList.concat(tempArray2)
}
else{
totalNameList.push(aList[nameIndex]);
}
}
console.log('Detected files:')
console.log(totalNameList)
console.log('Markdown files:')
console.log(filterOutMarkdownFile(totalNameList));
var markdownFiles=filterOutMarkdownFile(totalNameList);
// Detect output dir.
var outputPath=''
if(argsObj.o){
outputPath=argsObj.o;
}else{
outputPath='output'
}
if(!exists(outputPath)){
fs.mkdirSync(outputPath)
}
outputPath=outputPath+pathSeparator
for(var index in markdownFiles){
var tempFile=fs.readFileSync(markdownFiles[index])
var result=clipTmpl(md.render(tempFile.toString()))
var fileNewName=markdownFiles[index].replace(/.md$/,'.html')
// lint fileNewName
fileNewName=fileNewName.replace(/^\./,'')
fileNewName=fileNewName.replace(/^\\/,'')
fileNewName=fileNewName.replace(/\\/g,'_')
var savePath=outputPath+fileNewName
fs.writeFile(savePath,result,function(err){
if(err){
console.log(err);
}
})
}
// Generate an index page linking to every converted document.
var indexFile = '';
const indexLine = (fileName) => `* [${fileName}](${fileName}.html)\n`;
indexFile += '# Index\n\n';
for (var index in markdownFiles) {
    // Escaped dot so only a real ".md" suffix is stripped (the original
    // /.md$/ treated the dot as a wildcard).
    var fileNewName = markdownFiles[index].replace(/\.md$/, '')
    // lint fileNewName (same flattening as the HTML output names)
    fileNewName = fileNewName.replace(/^\./, '')
    fileNewName = fileNewName.replace(/^\\/, '')
    fileNewName = fileNewName.replace(/\\/g, '_')
    indexFile += indexLine(fileNewName)
}

// Output index markdown for debug.
fs.writeFile(outputPath + 'index.md', indexFile, function(err) {
    if (err) {
        console.log(err);
    }
})

// Compile index file to HTML with the same template as the documents.
var resultIndex = clipTmpl(md.render(indexFile))
fs.writeFile(outputPath + 'index.html', resultIndex, function(err) {
    if (err) {
        console.log(err);
    }
})
|
const express = require('express');
const { BotFrameworkAdapter } = require('botbuilder');
const MyBot = require('./bot');

const myBot = new MyBot();
const app = express();

// Credentials come from the environment; never hard-code them.
// (The original contained a corrupted "<PASSWORD>" placeholder instead of
// process.env, which is a ReferenceError at startup.)
const adapter = new BotFrameworkAdapter({
    appId: process.env.BOT_APP_ID,
    appPassword: process.env.BOT_APP_PASSWORD
});

// Global turn-error handler: tell the user something went wrong.
adapter.onTurnError = async context => {
    await context.sendActivity('Oops. Something went wrong!');
};

// Bot Framework endpoint: every incoming activity is routed to MyBot.
app.post('/api/messages', (req, res) => {
    adapter.processActivity(req, res, async context => {
        await myBot.onTurn(context);
    });
});

app.listen(process.env.port || process.env.PORT || 3978);
console.log('Bot listening');
|
// Copyright 2018-2019 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef _dsps_biquad_gen_H_
#define _dsps_biquad_gen_H_
#include "dsp_err.h"
#ifdef __cplusplus
extern "C"
{
#endif
// Common rules for all generated coefficients.
// The coefficients placed to the array as follows:
// coeffs[0] = b0;
// coeffs[1] = b1;
// coeffs[2] = b2;
// coeffs[3] = a1;
// coeffs[4] = a2;
// a0 - are not placed and expected always as == 1
/**
* @brief LPF IIR filter coefficients
* Coefficients for low pass 2nd order IIR filter (bi-quad)
* The implementation use ANSI C and could be compiled and run on any platform
*
* @param coeffs: result coefficients. b0,b1,b2,a1,a2, a0 are not placed to the array and expected by IIR as 1
* @param f: filter cut off frequency in range of 0..0.5 (normalized to sample frequency)
* @param qFactor: Q factor of filter
*
* @return
* - ESP_OK on success
* - One of the error codes from DSP library
*/
esp_err_t dsps_biquad_gen_lpf_f32(float *coeffs, float f, float qFactor);
/**
* @brief HPF IIR filter coefficients
*
* Coefficients for high pass 2nd order IIR filter (bi-quad)
* The implementation use ANSI C and could be compiled and run on any platform
*
* @param coeffs: result coefficients. b0,b1,b2,a1,a2, a0 are not placed to the array and expected by IIR as 1
* @param f: filter cut off frequency in range of 0..0.5 (normalized to sample frequency)
* @param qFactor: Q factor of filter
*
* @return
* - ESP_OK on success
* - One of the error codes from DSP library
*/
esp_err_t dsps_biquad_gen_hpf_f32(float *coeffs, float f, float qFactor);
/**
* @brief BPF IIR filter coefficients
*
* Coefficients for band pass 2nd order IIR filter (bi-quad)
* The implementation use ANSI C and could be compiled and run on any platform
*
* @param coeffs: result coefficients. b0,b1,b2,a1,a2, a0 are not placed to the array and expected by IIR as 1
* @param f: filter center frequency in range of 0..0.5 (normalized to sample frequency)
* @param qFactor: Q factor of filter
*
* @return
* - ESP_OK on success
* - One of the error codes from DSP library
*/
esp_err_t dsps_biquad_gen_bpf_f32(float *coeffs, float f, float qFactor);
/**
* @brief 0 dB BPF IIR filter coefficients
*
* Coefficients for band pass 2nd order IIR filter (bi-quad) with 0 dB gain in passband
* The implementation use ANSI C and could be compiled and run on any platform
*
* @param coeffs: result coefficients. b0,b1,b2,a1,a2, a0 are not placed to the array and expected by IIR as 1
* @param f: filter center frequency in range of 0..0.5 (normalized to sample frequency)
* @param qFactor: Q factor of filter
*
* @return
* - ESP_OK on success
* - One of the error codes from DSP library
*/
esp_err_t dsps_biquad_gen_bpf0db_f32(float *coeffs, float f, float qFactor);
/**
* @brief Notch IIR filter coefficients
*
* Coefficients for notch 2nd order IIR filter (bi-quad)
* The implementation use ANSI C and could be compiled and run on any platform
*
* @param coeffs: result coefficients. b0,b1,b2,a1,a2, a0 are not placed to the array and expected by IIR as 1
* @param f: filter notch frequency in range of 0..0.5 (normalized to sample frequency)
* @param gain: gain in stopband in dB
* @param qFactor: Q factor of filter
*
* @return
* - ESP_OK on success
* - One of the error codes from DSP library
*/
esp_err_t dsps_biquad_gen_notch_f32(float *coeffs, float f, float gain, float qFactor);
/**
* @brief Allpass 360 degree IIR filter coefficients
*
* Coefficients for all pass 2nd order IIR filter (bi-quad) with 360 degree phase shift
* The implementation use ANSI C and could be compiled and run on any platform
*
* @param coeffs: result coefficients. b0,b1,b2,a1,a2, a0 are not placed to the array and expected by IIR as 1
* @param f: filter notch frequency in range of 0..0.5 (normalized to sample frequency)
* @param qFactor: Q factor of filter
*
* @return
* - ESP_OK on success
* - One of the error codes from DSP library
*/
esp_err_t dsps_biquad_gen_allpass360_f32(float *coeffs, float f, float qFactor);
/**
* @brief Allpass 180 degree IIR filter coefficients
*
* Coefficients for all pass 2nd order IIR filter (bi-quad) with 180 degree phase shift
* The implementation use ANSI C and could be compiled and run on any platform
*
* @param coeffs: result coefficients. b0,b1,b2,a1,a2, a0 are not placed to the array and expected by IIR as 1
* @param f: filter notch frequency in range of 0..0.5 (normalized to sample frequency)
* @param qFactor: Q factor of filter
*
* @return
* - ESP_OK on success
* - One of the error codes from DSP library
*/
esp_err_t dsps_biquad_gen_allpass180_f32(float *coeffs, float f, float qFactor);
/**
* @brief peak IIR filter coefficients
*
* Coefficients for peak 2nd order IIR filter (bi-quad)
* The implementation use ANSI C and could be compiled and run on any platform
*
* @param coeffs: result coefficients. b0,b1,b2,a1,a2, a0 are not placed to the array and expected by IIR as 1
 * @param f: filter center frequency in range of 0..0.5 (normalized to sample frequency)
* @param qFactor: Q factor of filter
*
* @return
* - ESP_OK on success
* - One of the error codes from DSP library
*/
esp_err_t dsps_biquad_gen_peakingEQ_f32(float *coeffs, float f, float qFactor);
/**
* @brief low shelf IIR filter coefficients
*
* Coefficients for low pass Shelf 2nd order IIR filter (bi-quad)
* The implementation use ANSI C and could be compiled and run on any platform
*
* @param coeffs: result coefficients. b0,b1,b2,a1,a2, a0 are not placed to the array and expected by IIR as 1
 * @param f: shelf corner frequency in range of 0..0.5 (normalized to sample frequency)
 * @param gain: shelf gain in dB
* @param qFactor: Q factor of filter
*
* @return
* - ESP_OK on success
* - One of the error codes from DSP library
*/
esp_err_t dsps_biquad_gen_lowShelf_f32(float *coeffs, float f, float gain, float qFactor);
/**
* @brief high shelf IIR filter coefficients
*
* Coefficients for high pass Shelf 2nd order IIR filter (bi-quad)
* The implementation use ANSI C and could be compiled and run on any platform
*
* @param coeffs: result coefficients. b0,b1,b2,a1,a2, a0 are not placed to the array and expected by IIR as 1
 * @param f: shelf corner frequency in range of 0..0.5 (normalized to sample frequency)
 * @param gain: shelf gain in dB
* @param qFactor: Q factor of filter
*
* @return
* - ESP_OK on success
* - One of the error codes from DSP library
*/
esp_err_t dsps_biquad_gen_highShelf_f32(float *coeffs, float f, float gain, float qFactor);
#ifdef __cplusplus
}
#endif
#endif // _dsps_biquad_gen_H_ |
// Returns the nth Fibonacci number, caching intermediate terms so the
// naive recursion collapses to linear time.
function memoizedFibonacci(n) {
    // Cache of previously computed terms, shared by the recursive helper.
    const seen = {};

    const fib = (k) => {
        // Serve a previously computed term straight from the cache.
        if (seen[k]) {
            return seen[k];
        }
        // Base cases of the recurrence.
        if (k <= 0) {
            return 0;
        }
        if (k === 1) {
            return 1;
        }
        // Compute, remember, and return the kth term.
        const value = fib(k - 1) + fib(k - 2);
        seen[k] = value;
        return value;
    };

    return fib(n);
}

// NOTE: beyond Number.MAX_SAFE_INTEGER the printed value is approximate.
console.log(memoizedFibonacci(95));
<gh_stars>0
import React from 'react'
import { Link } from 'react-router-dom'
import { Home, User } from 'react-feather'
function Header({title}){
return <div className="header">
<table><tbody><tr>
<td><Link to="/dashboard"><Home /></Link></td>
<th>{title}</th>
<td><Link to="/login"><User /></Link></td>
</tr></tbody></table>
</div>
}
export default Header |
#!/bin/sh
# SLURM batch script: run the SNPE-C simulation-based-calibration job for the
# Hodgkin-Huxley model on the 'lu' partition (account lu2020-2-7).
#SBATCH -A lu2020-2-7
#SBATCH -p lu
# time consumption HH:MM:SS
#SBATCH -t 10:00:00
# one node, one task per node
#SBATCH -N 1
#SBATCH --tasks-per-node=1
# #SBATCH --exclusive
# name for script
#SBATCH -J snpec_sbc
# control job outputs (%j expands to the job id)
#SBATCH -o lunarc_output/lunarc_output_snpec_sbc_%j.out
#SBATCH -e lunarc_output/lunarc_output_snpec_sbc_%j.err
# notification
#SBATCH --mail-user=samuel.wiqvist@matstat.lu.se
#SBATCH --mail-type=ALL

# load modules
ml load GCC/8.3.0
ml load CUDA/10.1.243
ml load OpenMPI/3.1.4
ml load PyTorch/1.6.0-Python-3.7.4

# run program
# NOTE(review): positional args "1 10 snl 45" — presumably run configuration
# (seed/counts/method); confirm against run_script_sbc_snpe_c.py's parser.
python /home/samwiq/snpla/'seq-posterior-approx-w-nf-dev'/'hodgkin_huxley'/run_script_sbc_snpe_c.py 1 10 snl 45
|
#!/bin/bash
# Thin wrapper: load the project's shell libraries and list docker volumes.

# Determine the directory this script resides in
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# shellcheck disable=SC1090
source "$SCRIPT_DIR/lib/header.sh"
# shellcheck disable=SC1090
source "$LIBRARY_DIRECTORY/docker/volume/volume.sh"
# docker_volume_list is provided by volume.sh sourced above.
docker_volume_list
|
CATALINA_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=8686 -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Djava.security.egd=file:/dev/./urandom" # USED FOR FASTER STARTUP OF TOMCAT. WITHOUT THIS, CAN TAKE UP TO 10 MINUTES OR EVEN MORE |
package heap;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.PriorityQueue;
import java.util.StringTokenizer;

/**
 * Baekjoon 2014: products of primes.
 *
 * Reads K primes and finds the N-th smallest number whose prime factors are
 * all drawn from that set, using a min-heap of candidate products.
 *
 * @author minchoba
 * @see https://www.acmicpc.net/problem/2014/
 */
public class Boj2014 {
    public static void main(String[] args) throws Exception {
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer tokens = new StringTokenizer(reader.readLine());

        int K = Integer.parseInt(tokens.nextToken());
        int N = Integer.parseInt(tokens.nextToken());

        long[] primes = new long[K];
        tokens = new StringTokenizer(reader.readLine());
        for (int i = 0; i < K; i++) {
            primes[i] = Integer.parseInt(tokens.nextToken());
        }

        System.out.println(getValue(K, N, primes));
    }

    /**
     * Returns the n-th smallest product composed of the given primes.
     * Assumes {@code arr} holds exactly {@code k} primes in ascending order
     * (as supplied by the problem input).
     */
    private static long getValue(int k, int n, long[] arr) {
        // Min-heap seeded with the primes themselves (the 1-factor products).
        PriorityQueue<Long> heap = new PriorityQueue<>();
        for (long prime : arr) {
            heap.offer(prime);
        }

        // Pop the smallest candidate n-1 times; after the loop the head is
        // the n-th smallest product overall.
        for (int popped = 1; popped < n; popped++) {
            long smallest = heap.poll();
            for (int j = 0; j < k; j++) {
                heap.add(smallest * arr[j]);
                // De-duplication trick: once `smallest` is divisible by
                // arr[j], any later multiples would be generated again from
                // another chain, so stop expanding here.
                if (smallest % arr[j] == 0) break;
            }
        }
        return heap.poll();
    }
}
<reponame>antoine-amara/rome<gh_stars>1-10
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {Consumer, consumeUnknown} from '@romejs/consume';
import {
LSPDiagnostic,
LSPDiagnosticRelatedInformation,
LSPPosition,
LSPRange,
LSPResponseMessage,
LSPTextEdit,
} from './types';
import Master, {MasterClient} from '../Master';
import {
AbsoluteFilePath,
AbsoluteFilePathMap,
AbsoluteFilePathSet,
createAbsoluteFilePath,
} from '@romejs/path';
import {DiagnosticLocation, Diagnostics} from '@romejs/diagnostics';
import {Position} from '@romejs/parser-core';
import {Number0, ob1Coerce1To0, ob1Inc, ob1Number0} from '@romejs/ob1';
import {markupToPlainTextString} from '@romejs/string-markup';
import {
MasterQueryResponse,
PartialMasterQueryRequest,
} from '@romejs/core/common/bridges/MasterBridge';
import Linter from '../linter/Linter';
import MasterRequest, {EMPTY_SUCCESS_RESPONSE} from '../MasterRequest';
import {DEFAULT_CLIENT_REQUEST_FLAGS} from '@romejs/core/common/types/client';
import stringDiff, {Diffs, diffConstants} from '@romejs/string-diff';
import {JSONObject, JSONPropertyValue} from '@romejs/codec-json';
import {
Reporter,
ReporterProgress,
ReporterProgressBase,
ReporterProgressOptions,
} from '@romejs/cli-reporter';
// Wire-protocol parser state: idle, reading the header block, or reading the
// Content-Length-sized JSON payload.
type Status = 'IDLE' | 'WAITING_FOR_HEADERS_END' | 'WAITING_FOR_RESPONSE_END';

// Parsed LSP base-protocol headers for one message.
type Headers = {
  // Value of Content-Length: size of the JSON payload that follows.
  length: number;
  // Remaining headers, keyed by lower-cased header name.
  extra: Map<string, string>;
};

// Terminator between the header block and the payload (LSP base protocol).
const HEADERS_END = '\r\n\r\n';
// Parse an LSP base-protocol header block (the text before HEADERS_END).
// Each line must be `Name: value`; names are lower-cased. Content-Length is
// required, extracted into `length`, and removed from the extra-header map.
// Throws on any malformed line or if Content-Length is absent.
function parseHeaders(buffer: string): Headers {
  const extra: Map<string, string> = new Map();

  for (const rawLine of buffer.split('\n')) {
    const line = rawLine.trim();
    const parsed = line.match(/^(.*?): (.*?)$/);
    if (parsed == null) {
      throw new Error(`Invalid header: ${line}`);
    }
    const [, name, value] = parsed;
    extra.set(name.toLowerCase(), value);
  }

  const rawLength = extra.get('content-length');
  if (rawLength === undefined) {
    throw new Error('Expected Content-Length');
  }
  extra.delete('content-length');

  return {
    length: Number(rawLength),
    extra,
  };
}
// Convert a Rome parser Position to an LSP position. Rome lines are 1-based
// (hence the ob1Coerce1To0) while columns are already 0-based; an unknown
// position maps to 0:0.
function convertPositionToLSP(pos: undefined | Position): LSPPosition {
  if (pos === undefined) {
    return {line: ob1Number0, character: ob1Number0};
  }
  return {
    line: ob1Coerce1To0(pos.line),
    character: pos.column,
  };
}
// Convert a diagnostic's start/end positions into an LSP range.
function convertDiagnosticLocationToLSPRange(
  location: DiagnosticLocation,
): LSPRange {
  const {start, end} = location;
  return {
    start: convertPositionToLSP(start),
    end: convertPositionToLSP(end),
  };
}
// Convert Rome diagnostics to LSP diagnostics. A `log` advice item that is
// immediately followed by a `frame` advice item is translated into an LSP
// relatedInformation entry pointing at that frame's file location.
function convertDiagnosticsToLSP(
  diagnostics: Diagnostics,
  master: Master,
): Array<LSPDiagnostic> {
  const converted: Array<LSPDiagnostic> = [];

  for (const {description, location} of diagnostics) {
    const relatedInformation: Array<LSPDiagnosticRelatedInformation> = [];

    const {advice} = description;
    advice.forEach((item, i) => {
      const next = advice[i + 1];
      // Only log-followed-by-frame pairs carry a usable location.
      if (item.type !== 'log' || next === undefined || next.type !== 'frame') {
        return;
      }

      const abs = master.projectManager.getFilePathFromUidOrAbsolute(
        next.location.filename,
      );
      if (abs !== undefined) {
        relatedInformation.push({
          message: markupToPlainTextString(item.text),
          location: {
            uri: `file://${abs.join()}`,
            range: convertDiagnosticLocationToLSPRange(next.location),
          },
        });
      }
    });

    converted.push({
      severity: 1,
      range: convertDiagnosticLocationToLSPRange(location),
      message: markupToPlainTextString(description.message.value),
      code: description.category,
      source: 'rome',
      relatedInformation,
    });
  }

  return converted;
}
// Extract the absolute file path from an LSP TextDocumentIdentifier-like
// object (reads its `uri` property).
function getPathFromTextDocument(consumer: Consumer): AbsoluteFilePath {
  const uri = consumer.get('uri').asString();
  return createAbsoluteFilePath(uri);
}
// Compute LSP text edits that transform `original` into `desired` by running
// a string diff and converting each hunk into an insert or delete edit.
// A cursor (0-based line/character) is advanced only through text that exists
// in `original` (EQUAL and DELETE hunks), never through insertions.
function diffTextEdits(original: string, desired: string): Array<LSPTextEdit> {
  const edits: Array<LSPTextEdit> = [];

  const diffs: Diffs = stringDiff(original, desired);

  let line: Number0 = ob1Number0;
  let character: Number0 = ob1Number0;

  // Move the cursor forward across `text` within `original`.
  function advance(text: string) {
    for (const char of text) {
      if (char === '\n') {
        line = ob1Inc(line);
        character = ob1Number0;
      } else {
        character = ob1Inc(character);
      }
    }
  }

  // Snapshot the current cursor as an LSP position.
  function cursor(): LSPPosition {
    return {line, character};
  }

  for (const [op, text] of diffs) {
    if (op === diffConstants.ADD) {
      // Insertion: a zero-width range at the current cursor.
      const pos = cursor();
      edits.push({
        range: {start: pos, end: pos},
        newText: text,
      });
    } else if (op === diffConstants.DELETE) {
      // Deletion: the range spanning the removed text, replaced by nothing.
      const start = cursor();
      advance(text);
      edits.push({
        range: {start, end: cursor()},
        newText: '',
      });
    } else if (op === diffConstants.EQUAL) {
      advance(text);
    }
  }

  return edits;
}
// Monotonic token source so each progress bar gets a unique LSP token.
let progressTokenCounter = 0;

// Bridges Rome's reporter-progress API onto LSP `$/progress` notifications.
// Sends `begin` on construction, `report` on render, and `end` on end.
// NOTE(review): these messages use a `type` field while other writes in this
// file use `method` — confirm the client expects `type` here.
class LSPProgress extends ReporterProgressBase {
  constructor(
    server: LSPServer,
    reporter: Reporter,
    opts?: ReporterProgressOptions,
  ) {
    super(reporter, opts);
    this.server = server;
    this.token = progressTokenCounter++;
    this.lastRenderKey = '';

    server.write({
      type: '$/progress',
      params: {
        token: this.token,
        value: {
          kind: 'begin',
          cancellable: false,
          title: this.title,
          percentage: 0,
        },
      },
    });
  }

  // Dedup key of the last report sent (percentage + text).
  lastRenderKey: string;
  // LSP progress token identifying this bar.
  token: number;
  server: LSPServer;

  render() {
    // NOTE(review): when total is unset this divides by 0 — percentage becomes
    // Infinity/NaN; confirm whether callers always set total before render.
    const total = this.total === undefined ? 0 : this.total;
    const percentage = Math.floor(100 / total * this.current);

    // Make sure we don't send pointless duplicate messages
    const renderKey = `percent:${percentage},text:${this.text}`;
    if (this.lastRenderKey === renderKey) {
      return;
    }

    this.lastRenderKey = renderKey;
    this.server.write({
      type: '$/progress',
      params: {
        token: this.token,
        value: {
          kind: 'report',
          cancellable: false,
          message: this.text,
          percentage,
        },
      },
    });
  }

  end() {
    this.server.write({
      type: '$/progress',
      params: {
        token: this.token,
        value: {
          kind: 'end',
        },
      },
    });
  }
}
// Language Server Protocol server. Consumes a byte stream (append), frames it
// into Content-Length-delimited JSON-RPC messages (process/onMessage), and
// dispatches requests and notifications. Each watched project root gets a
// long-lived "lint session" MasterRequest that streams diagnostics back to
// the client.
export default class LSPServer {
  constructor(request: MasterRequest) {
    this.status = 'IDLE';
    this.buffer = '';
    this.nextHeaders = undefined;

    this.request = request;
    this.master = request.master;
    this.client = request.client;

    this.lintSessionsPending = new AbsoluteFilePathSet();
    this.lintSessions = new AbsoluteFilePathMap();

    // Tear down all lint sessions when the owning request ends.
    request.endEvent.subscribe(() => {
      this.shutdown();
    });
  }

  request: MasterRequest;
  client: MasterClient;
  master: Master;

  // Headers of the message currently being received, if any.
  nextHeaders: undefined | Headers;
  // Position in the wire-protocol state machine.
  status: Status;
  // Raw input not yet consumed by the parser.
  buffer: string;

  // Project roots currently being attached (guards concurrent watchProject).
  lintSessionsPending: AbsoluteFilePathSet;
  // Project root -> lint-session request watching that root.
  lintSessions: AbsoluteFilePathMap<MasterRequest>;

  // Serialize a message and send it with LSP base-protocol framing.
  // NOTE(review): the spec defines Content-Length in bytes; `json.length`
  // counts UTF-16 code units, which differs for non-ASCII payloads — confirm.
  write(res: JSONObject) {
    const json = JSON.stringify(res);
    const out = `Content-Length: ${String(json.length)}${HEADERS_END}${json}`;
    this.client.bridge.lspFromServerBuffer.send(out);
  }

  // Build a synthetic MasterRequest (used for lint sessions) that shares this
  // server's client/master but carries its own command name and args.
  createFakeMasterRequest(
    commandName: string,
    args: Array<string> = [],
  ): MasterRequest {
    return new MasterRequest({
      client: this.client,
      master: this.master,
      query: {
        requestFlags: DEFAULT_CLIENT_REQUEST_FLAGS,
        commandFlags: {},
        args,
        commandName,
        silent: true,
        noData: false,
        terminateWhenIdle: false,
      },
    });
  }

  // Stop watching a project root and tear down its lint session, if any.
  unwatchProject(path: AbsoluteFilePath) {
    // TODO maybe unset all buffers?
    const req = this.lintSessions.get(path);
    if (req !== undefined) {
      req.teardown(EMPTY_SUCCESS_RESPONSE);
      this.lintSessions.delete(path);
    }
  }

  // Factory handed to the linter so its progress renders as LSP $/progress.
  createProgress(opts?: ReporterProgressOptions): ReporterProgress {
    return new LSPProgress(this, this.request.reporter, opts);
  }

  // Start a lint session watching `path`, publishing diagnostics to the
  // client as files change. No-op for paths already watched/pending, or for
  // paths that are not inside a Rome project.
  async watchProject(path: AbsoluteFilePath) {
    if (this.lintSessions.has(path) || this.lintSessionsPending.has(path)) {
      return;
    }

    this.lintSessionsPending.add(path);

    const project = await this.master.projectManager.findProject(path);

    if (project === undefined) {
      // Not a Rome project
      this.lintSessionsPending.delete(path);
      return;
    }

    const req = this.createFakeMasterRequest('lsp_project', [path.join()]);
    await req.init();

    const linter = new Linter(
      req,
      {
        save: false,
        hasDecisions: false,
        formatOnly: false,
      },
    );

    const subscription = await linter.watch({
      onRunStart: () => {},
      createProgress: () => {
        return this.createProgress();
      },
      onChanges: ({changes}) => {
        for (const {ref, diagnostics} of changes) {
          if (ref === undefined) {
            // Cannot display diagnostics without a reference
            continue;
          }

          // We want to filter pendingFixes because we'll autoformat the file on save if necessary and it's just noise
          const processor = this.request.createDiagnosticsProcessor();
          processor.addFilter({
            category: 'lint/pendingFixes',
          });
          processor.addDiagnostics(diagnostics);

          this.write({
            method: 'textDocument/publishDiagnostics',
            params: {
              uri: `file://${ref.real.join()}`,
              diagnostics: convertDiagnosticsToLSP(
                processor.getDiagnostics(),
                this.master,
              ),
            },
          });
        }
      },
    });

    // Unsubscribe from the watcher when the session request is torn down.
    req.endEvent.subscribe(() => {
      subscription.unsubscribe();
    });

    this.lintSessions.set(path, req);
    this.lintSessionsPending.delete(path);
  }

  // Tear down every lint session (invoked on request end and LSP 'shutdown').
  shutdown() {
    for (const path of this.lintSessions.keys()) {
      this.unwatchProject(path);
    }
    this.lintSessions.clear();
  }

  // Forward an arbitrary query to the master on behalf of this client.
  async sendClientRequest(
    req: PartialMasterQueryRequest,
  ): Promise<MasterQueryResponse> {
    return this.master.handleRequest(
      this.client,
      {
        silent: true,
        ...req,
      },
    );
  }

  // Handle a JSON-RPC *request* (message with an id) and return its result.
  // Unknown methods return null.
  async handleRequest(
    method: string,
    params: Consumer,
  ): Promise<JSONPropertyValue> {
    switch (method) {
      case 'initialize': {
        // NOTE(review): watchProject is async but not awaited here — the
        // initialize response can be sent before sessions exist; confirm
        // this fire-and-forget is intentional.
        const rootUri = params.get('rootUri');
        if (rootUri.exists()) {
          this.watchProject(createAbsoluteFilePath(rootUri.asString()));
        }

        const workspaceFolders = params.get('workspaceFolders');
        if (workspaceFolders.exists()) {
          for (const elem of workspaceFolders.asArray()) {
            this.watchProject(getPathFromTextDocument(elem));
          }
        }

        return {
          capabilities: {
            textDocumentSync: {
              openClose: true,
              // This sends over the full text on change. We should make this incremental later
              change: 1,
            },
            documentFormattingProvider: true,
            workspaceFolders: {
              supported: true,
              changeNotifications: true,
            },
          },
          serverInfo: {
            name: 'rome',
          },
        };
      }

      case 'textDocument/formatting': {
        const path = getPathFromTextDocument(params.get('textDocument'));

        const project = this.master.projectManager.findProjectExisting(path);
        if (project === undefined) {
          // Not in a Rome project
          return null;
        }

        const res = await this.request.requestWorkerFormat(path, {});
        if (res === undefined) {
          // Not a file we support formatting
          return null;
        }

        // Return minimal edits that turn the current text into the formatted text.
        return diffTextEdits(res.original, res.formatted);
      }

      case 'shutdown': {
        this.shutdown();
        break;
      }
    }

    return null;
  }

  // Handle a JSON-RPC *notification* (no id, no response expected).
  async handleNotification(method: string, params: Consumer): Promise<void> {
    switch (method) {
      case 'workspace/didChangeWorkspaceFolders': {
        for (const elem of params.get('added').asArray()) {
          this.watchProject(getPathFromTextDocument(elem));
        }
        for (const elem of params.get('removed').asArray()) {
          this.unwatchProject(getPathFromTextDocument(elem));
        }
        break;
      }

      case 'textDocument/didChange': {
        // Full-document sync: the first content change carries the whole text
        // (see `change: 1` advertised in initialize).
        const path = getPathFromTextDocument(params.get('textDocument'));
        const content = params.get('contentChanges').asArray()[0].get('text').asString();
        await this.request.requestWorkerUpdateBuffer(path, content);
        break;
      }
    }
  }

  // Parse a message payload into a Consumer; returns undefined (and logs) on
  // malformed JSON instead of crashing the server.
  normalizeMessage(content: string): undefined | Consumer {
    try {
      const data = JSON.parse(content);
      const consumer = consumeUnknown(data, 'lsp/parse');
      return consumer;
    } catch (err) {
      if (err instanceof SyntaxError) {
        console.error('JSON parse error', content);
        return undefined;
      } else {
        throw err;
      }
    }
  }

  // Dispatch one complete framed message: requests (with id) get a response
  // or a -32603 (internal error) on throw; notifications are fire-and-forget.
  async onMessage(headers: Headers, content: string) {
    const consumer = this.normalizeMessage(content);
    if (consumer === undefined) {
      return;
    }

    if (!consumer.has('method')) {
      console.error('NO METHOD', content);
      return;
    }

    const method: string = consumer.get('method').asString();
    const params = consumer.get('params');

    if (consumer.has('id')) {
      const id = consumer.get('id').asNumber();

      try {
        const res: LSPResponseMessage = {
          id,
          result: await this.handleRequest(method, params),
        };
        this.write(res);
      } catch (err) {
        const res: LSPResponseMessage = {
          id,
          error: {
            code: -32_603,
            message: err.message,
          },
        };
        this.write(res);
      }
    } else {
      await this.handleNotification(method, params);
    }
  }

  // Advance the framing state machine as far as the buffered input allows.
  // Each state transition re-invokes process() to consume follow-on data.
  process() {
    switch (this.status) {
      case 'IDLE': {
        if (this.buffer.length > 0) {
          this.status = 'WAITING_FOR_HEADERS_END';
          this.process();
        }
        break;
      }

      case 'WAITING_FOR_HEADERS_END': {
        const endIndex = this.buffer.indexOf(HEADERS_END);
        if (endIndex !== -1) {
          // Parse headers
          const rawHeaders = this.buffer.slice(0, endIndex);
          this.nextHeaders = parseHeaders(rawHeaders);

          // Process rest of the buffer
          this.status = 'WAITING_FOR_RESPONSE_END';
          this.buffer = this.buffer.slice(endIndex + HEADERS_END.length);
          this.process();
        }
        break;
      }

      case 'WAITING_FOR_RESPONSE_END': {
        const headers = this.nextHeaders;
        if (headers === undefined) {
          throw new Error('Expected headers due to our status');
        }
        if (this.buffer.length >= headers.length) {
          const content = this.buffer.slice(0, headers.length);
          // NOTE(review): onMessage is async and not awaited — messages can
          // be handled out of order; confirm ordering is not required.
          this.onMessage(headers, content);

          // Reset headers and trim content
          this.nextHeaders = undefined;
          this.buffer = this.buffer.slice(headers.length);

          // Process rest of the buffer
          this.status = 'IDLE';
          this.process();
        }
        break;
      }
    }
  }

  // Entry point for incoming bytes from the transport.
  append(data: string) {
    this.buffer += data;
    this.process();
  }
}
|
<gh_stars>1-10
import sys
import os
import row_style
import openpyxl as xl
import constants as cosnt
def check_required_files_existence(file_path, source_file_name=None, construction_site=None):
    """Check that required files/folders are (or are not) present.

    Two modes:
    - With ``source_file_name``: ``file_path`` is a required input file. It
      must not use the reserved template name ``base.xlsx`` and must exist.
    - Without ``source_file_name``: ``file_path`` is the output folder for
      ``construction_site`` and must NOT already exist (to avoid overwriting
      previous results).

    On any failed check a message is printed and the program terminates via
    the module-level ``exit(fail=True)`` helper.

    Parameters:
        file_path (str): path to be checked
        source_file_name (str or None): name of the source file when checking input
        construction_site (str or None): site name used in the output-folder message
    """
    if source_file_name is not None:
        # BUG FIX: the original used `elif source_file_name == "base.xlsx"`
        # on the branch taken only when source_file_name is None, so the
        # reserved-name check could never trigger. Check it here instead.
        if source_file_name == "base.xlsx":
            print(cosnt.source_file_name_base_msg)
            exit(fail=True)
        elif not os.path.exists(file_path):
            print(cosnt.required_files_chack_fail_msg(source_file_name))
            exit(fail=True)
    else:
        # Output folder must not exist yet.
        if os.path.exists(file_path):
            print(cosnt.output_folder_check_fail_msg(construction_site))
            exit(fail=True)
def exit(fail=False):
    """Terminate the program after the user presses a key.

    Blocks on ``input()`` so the console window stays open long enough for the
    user to read the final message, then calls ``sys.exit()``.

    NOTE: intentionally shadows the builtin ``exit``; callers in this module
    rely on this behavior.

    Parameters:
        fail (bool): True when the program is exiting due to an error;
            selects which final message is shown.
    """
    if fail == False:
        input("\nData was converted successfully.\nPress key to exit...")
    else:
        input("\nSomething went wrong.\nCheck shown information and try again.\nPress key to exit...")
    sys.exit()
|
// Copyright 2018-2020 opcua authors. All rights reserved.
// Use of this source code is governed by a MIT-style license that can be
// found in the LICENSE file.
package ua
import (
"testing"
)
// TestDecodeServerOnNetwork round-trips single ServerOnNetwork values against
// hand-encoded wire bytes (little-endian u32 record id, then length-prefixed
// strings and a length-prefixed capability array).
func TestDecodeServerOnNetwork(t *testing.T) {
	cases := []CodecTestCase{
		{
			Name: "single-cap",
			Struct: &ServerOnNetwork{
				RecordID:           1,
				ServerName:         "server-name",
				DiscoveryURL:       "discov-uri",
				ServerCapabilities: []string{"server-cap-1"},
			},
			Bytes: []byte{
				// RecordID
				0x01, 0x00, 0x00, 0x00,
				// ServerName
				0x0b, 0x00, 0x00, 0x00,
				0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x6e, 0x61, 0x6d, 0x65,
				// DiscoveryURI
				0x0a, 0x00, 0x00, 0x00,
				0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x2d, 0x75, 0x72, 0x69,
				// ServerCapabilities
				0x01, 0x00, 0x00, 0x00,
				0x0c, 0x00, 0x00, 0x00,
				0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x61, 0x70, 0x2d, 0x31,
			},
		},
		{
			Name: "multiple-caps",
			Struct: &ServerOnNetwork{
				RecordID:           1,
				ServerName:         "server-name",
				DiscoveryURL:       "discov-uri",
				ServerCapabilities: []string{"server-cap-1", "server-cap-2"},
			},
			Bytes: []byte{
				// RecordID
				0x01, 0x00, 0x00, 0x00,
				// ServerName
				0x0b, 0x00, 0x00, 0x00,
				0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x6e, 0x61, 0x6d, 0x65,
				// DiscoveryURI
				0x0a, 0x00, 0x00, 0x00,
				0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x2d, 0x75, 0x72, 0x69,
				// ServerCapabilities
				0x02, 0x00, 0x00, 0x00,
				0x0c, 0x00, 0x00, 0x00,
				0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x61, 0x70, 0x2d, 0x31,
				0x0c, 0x00, 0x00, 0x00,
				0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x61, 0x70, 0x2d, 0x32,
			},
		},
	}
	RunCodecTest(t, cases)
}
// TestServerOnNetworkArray round-trips a length-prefixed array of
// ServerOnNetwork values (u32 element count, then each element's encoding).
func TestServerOnNetworkArray(t *testing.T) {
	cases := []CodecTestCase{
		{
			Name: "normal",
			// gofmt -s: the element type is implied by the slice literal, so
			// the redundant `&ServerOnNetwork` prefix is elided.
			Struct: []*ServerOnNetwork{
				{
					RecordID:           1,
					ServerName:         "server-name",
					DiscoveryURL:       "discov-uri",
					ServerCapabilities: []string{"server-cap-1"},
				},
				{
					RecordID:           1,
					ServerName:         "server-name",
					DiscoveryURL:       "discov-uri",
					ServerCapabilities: []string{"server-cap-1", "server-cap-2"},
				},
			},
			Bytes: []byte{
				// ArraySize
				0x02, 0x00, 0x00, 0x00,
				// RecordID
				0x01, 0x00, 0x00, 0x00,
				// ServerName
				0x0b, 0x00, 0x00, 0x00,
				0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x6e, 0x61, 0x6d, 0x65,
				// DiscoveryURI
				0x0a, 0x00, 0x00, 0x00,
				0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x2d, 0x75, 0x72, 0x69,
				// ServerCapabilities
				0x01, 0x00, 0x00, 0x00,
				0x0c, 0x00, 0x00, 0x00,
				0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x61, 0x70, 0x2d, 0x31,
				// RecordID
				0x01, 0x00, 0x00, 0x00,
				// ServerName
				0x0b, 0x00, 0x00, 0x00,
				0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x6e, 0x61, 0x6d, 0x65,
				// DiscoveryURI
				0x0a, 0x00, 0x00, 0x00,
				0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x2d, 0x75, 0x72, 0x69,
				// ServerCapabilities
				0x02, 0x00, 0x00, 0x00,
				0x0c, 0x00, 0x00, 0x00,
				0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x61, 0x70, 0x2d, 0x31,
				0x0c, 0x00, 0x00, 0x00,
				0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x61, 0x70, 0x2d, 0x32,
			},
		},
	}
	RunCodecTest(t, cases)
}
|
def find_duplicates(inputstring):
    """Return the characters that occur more than once in *inputstring*.

    Results are ordered by each character's first appearance (Counter is a
    dict subclass, so iteration follows insertion order), matching the
    original hand-rolled counting loop.

    Parameters:
        inputstring (str): string (or any iterable of hashables) to scan
    Returns:
        list: elements whose count is greater than one
    """
    # Idiom: use the stdlib Counter instead of a manual counting loop.
    from collections import Counter

    counts = Counter(inputstring)
    return [ch for ch, count in counts.items() if count > 1]
package gitlab
import (
"context"
)
// AllowedParams compose set of parameters required to call 'GitlabAPI.Allowed' method.
type AllowedParams struct {
	// RepoPath is an absolute path to the repository.
	RepoPath string
	// GitObjectDirectory is a path to git object directory.
	GitObjectDirectory string
	// GitAlternateObjectDirectories are the paths to alternate object directories.
	GitAlternateObjectDirectories []string
	// GLRepository is a name of the repository.
	GLRepository string
	// GLID is an identifier of the repository.
	// NOTE(review): in GitLab internals GL_ID usually identifies the user or
	// key performing the change — confirm whether this comment is accurate.
	GLID string
	// GLProtocol is a protocol used for operation.
	GLProtocol string
	// Changes is a set of changes to be applied.
	Changes string
}

// PostReceiveMessage encapsulates a message from the /post_receive endpoint that gets printed to stdout
type PostReceiveMessage struct {
	Message string `json:"message"`
	Type    string `json:"type"`
}

// CheckInfo represents the response of GitLabs `check` API endpoint
type CheckInfo struct {
	// Version of the GitLab Rails component
	Version string `json:"gitlab_version"`
	// Revision of the Git object of the running GitLab
	Revision string `json:"gitlab_revision"`
	// APIVersion of GitLab, expected to be v4
	APIVersion string `json:"api_version"`
	// RedisReachable shows if GitLab can reach Redis. This can be false
	// while the check itself succeeds. Normal hook API calls will likely
	// fail.
	RedisReachable bool `json:"redis"`
}

// Client is an interface for accessing the GitLab internal API
type Client interface {
	// Allowed queries the gitlab internal api /allowed endpoint to determine if a ref change for a given repository and user is allowed
	Allowed(ctx context.Context, params AllowedParams) (bool, string, error)
	// Check verifies that GitLab can be reached, and authenticated to
	Check(ctx context.Context) (*CheckInfo, error)
	// PreReceive queries the gitlab internal api /pre_receive to increase the reference counter
	PreReceive(ctx context.Context, glRepository string) (bool, error)
	// PostReceive queries the gitlab internal api /post_receive to decrease the reference counter
	PostReceive(ctx context.Context, glRepository, glID, changes string, pushOptions ...string) (bool, []PostReceiveMessage, error)
}
|
<reponame>nabeelkhan/Oracle-DBA-Life
REM anonymousDependencies.sql
REM Chapter 10, Oracle9i PL/SQL Programming by <NAME>
REM This package is used to illustrate dependencies between an
REM anonymous calling block and package runtime state.

REM Spec: a single package-state variable (initialized to 1) and a
REM procedure that mutates it.
CREATE OR REPLACE PACKAGE SimplePkg AS
  v_GlobalVar NUMBER := 1;
  PROCEDURE UpdateVar;
END SimplePkg;
/

REM Body: UpdateVar overwrites the package variable with 7.
CREATE OR REPLACE PACKAGE BODY SimplePkg AS
  PROCEDURE UpdateVar IS
  BEGIN
    v_GlobalVar := 7;
  END UpdateVar;
END SimplePkg;
/
|
<reponame>tuminoid/kisakone-dev
// Nightwatch end-to-end tests for the Kisakone admin login/logout flows,
// including the "remember me" variant. Assertion strings are Finnish UI text
// and must match the deployed site exactly.
module.exports = {
  // Plain login: submit credentials, verify the logged-in confirmation text.
  'Log in as admin' : function (client) {
    client
      .url(client.launch_url)
      .waitForElementVisible('body', 200)
      .assert.title('Ajankohtaiset kilpailut - Kisakone')
      .click('#login_link')
      .waitForElementVisible('#login_form', 200)
      .setValue('#loginUsernameInput', 'admin')
      .setValue('#loginPassword', '<PASSWORD>')
      .click('#loginSubmit')
      .pause(200)
      .waitForElementVisible('td#content', 200)
      .assert.containsText('td#content', 'Olet nyt kirjautunut sisään')
      .assert.containsText('.loginbox', 'Olet kirjautuneena tunnuksella')
  },
  /* TODO
  'Check cookie validity' : function (client) {
    client
      .getCookie(function callback(result) {
        this.assert.equals(result.name, 'kisakone_login');
      })
  },
  */
  // Log out and verify the login link reappears.
  'Log out admin' : function (client) {
    client
      .waitForElementVisible('#header .loginbox', 200)
      .assert.containsText('.loginbox', 'Kirjaudu ulos')
      .click('#logout')
      .waitForElementVisible('body', 200)
      .assert.containsText('#login_link', 'Kirjaudu sisään')
  },
  // Same login flow with the "remember me" checkbox ticked.
  'Remember login as admin' : function (client) {
    client
      .url(client.launch_url)
      .waitForElementVisible('body', 200)
      .assert.title('Ajankohtaiset kilpailut - Kisakone')
      .click('#login_link')
      .waitForElementVisible('#login_form', 200)
      .setValue('#loginUsernameInput', 'admin')
      .setValue('#loginPassword', '<PASSWORD>')
      .click('#loginRememberMe')
      .click('#loginSubmit')
      .pause(200)
      .waitForElementVisible('td#content', 200)
      .assert.containsText('.loginbox', 'Olet kirjautuneena tunnuksella')
      .assert.containsText('td#content', 'Olet nyt kirjautunut sisään')
  },
  /* TODO
  'Check cookie validity' : function (client) {
    client
      .getCookie(function callback(result) {
        this.assert.equals(result.name, 'kisakone_login');
      })
  },
  */
  // Final logout; .end() closes the browser session.
  'Log out' : function (client) {
    client
      .waitForElementVisible('#header .loginbox', 200)
      .assert.containsText('.loginbox', 'Kirjaudu ulos')
      .click('#logout')
      .waitForElementVisible('body', 200)
      .assert.containsText('#login_link', 'Kirjaudu sisään')
      .end()
  },
}
|
<filename>tests/unit/test-tracker.js
// Nodeunit tests for the tracker client: token management and the
// propagation of config (trackerToken) through chained resource accessors.
var nodeunit = require('nodeunit'),
    utils = require('./utils'),
    tracker = require('../../lib/tracker'),
    project = require('../../lib/resources/project');

// NOTE(review): `constructors` is not referenced in this file — possibly
// consumed by ./utils or simply dead; confirm before removing.
var constructors = {
  "tracker.Client": {
    fn: tracker.Client,
    args: []
  }
};

// useToken() should replace the token set by the constructor.
exports.test_useToken = function(test) {
  test.expect(2);
  var origToken = '<PASSWORD>';
  var client = new tracker.Client({trackerToken: origToken});
  test.equal(origToken, client.config.trackerToken, 'Constructor should set config.trackerToken to '+origToken);
  var newToken = '<PASSWORD>!';
  client.useToken(newToken);
  test.equal(newToken, client.config.trackerToken, 'useToken(newToken) should set config.trackerToken to '+newToken);
  test.done();
};

// Config should chain from client through every derived resource, and a
// later useToken() should be visible to resources created earlier.
exports.test_configChain = function(test) {
  test.expect(26);
  var origToken = '<PASSWORD>';
  var client = new tracker.Client({trackerToken: origToken});
  test.equal(origToken, client.config.trackerToken, 'Constructor should set config.trackerToken to '+origToken);
  var newToken = '<PASSWORD>!';
  client.useToken(newToken);
  test.equal(newToken, client.config.trackerToken, 'useToken(newToken) should set config.trackerToken to '+newToken);
  // Single-resource accessors (note: this `project` shadows the module-level
  // require above).
  var project = client.project(123);
  var story = project.story(567);
  var task = story.task(456);
  var label = story.label(890);
  var comment = story.comment(122);
  test.equal(newToken, project.config.trackerToken, 'client -> project token chaining should set project.config.trackerToken to '+newToken);
  test.equal(newToken, story.config.trackerToken, 'project -> story token chaining should set project.config.trackerToken to '+newToken);
  test.equal(newToken, task.config.trackerToken, 'story -> task token chaining should set project.config.trackerToken to '+newToken);
  test.equal(newToken, label.config.trackerToken, 'story -> label token chaining should set project.config.trackerToken to '+newToken);
  test.equal(newToken, comment.config.trackerToken, 'story -> comment token chaining should set project.config.trackerToken to '+newToken);
  // Collection accessors.
  var projects = client.projects;
  var stories = projects.stories;
  var tasks = stories.tasks;
  var labels = stories.labels;
  var comments = stories.comments;
  test.equal(newToken, projects.config.trackerToken, 'client -> projects token chaining should set project.config.trackerToken to '+newToken);
  test.equal(newToken, stories.config.trackerToken, 'projects -> stories token chaining should set project.config.trackerToken to '+newToken);
  test.equal(newToken, tasks.config.trackerToken, 'stories -> tasks token chaining should set project.config.trackerToken to '+newToken);
  test.equal(newToken, labels.config.trackerToken, 'stories -> labels token chaining should set project.config.trackerToken to '+newToken);
  test.equal(newToken, comments.config.trackerToken, 'stories -> comments token chaining should set project.config.trackerToken to '+newToken);
  // Changing the token again must propagate to all previously created
  // resources and collections (live config, not a copy).
  var newNewToken = 'new NEW token!';
  client.useToken(newNewToken);
  test.equal(newNewToken, project.config.trackerToken, 'client -> project token chaining should auto-update token to '+newNewToken);
  test.equal(newNewToken, story.config.trackerToken, 'project -> story token chaining should auto-update token to '+newNewToken);
  test.equal(newNewToken, task.config.trackerToken, 'story -> task token chaining should auto-update token to '+newNewToken);
  test.equal(newNewToken, label.config.trackerToken, 'story -> label token chaining should auto-update token to '+newNewToken);
  test.equal(newNewToken, comment.config.trackerToken, 'story -> comment token chaining should auto-update token to '+newNewToken);
  test.equal(newNewToken, projects.config.trackerToken, 'client -> projects token chaining should auto-update token to '+newNewToken);
  test.equal(newNewToken, stories.config.trackerToken, 'projects -> stories token chaining should auto-update token to '+newNewToken);
  test.equal(newNewToken, tasks.config.trackerToken, 'stories -> tasks token chaining should set auto-update token to '+newNewToken);
  test.equal(newNewToken, labels.config.trackerToken, 'stories -> labels token chaining should auto-update token to '+newNewToken);
  test.equal(newNewToken, comments.config.trackerToken, 'stories -> comments token chaining should auto-update token to '+newNewToken);
  // Path construction: an undefined project id yields a project-less path.
  test.equal(123, client.project(123).story(456).projectId, 'stories -> project identifier should match');
  test.equal(null, client.project(undefined).story(456).projectId, 'stories -> project identifier should match');
  test.equal('projects/123/stories/456', client.project(123).story(456).pathSegments().join('/'), 'stories -> path segments should match');
  test.equal('stories/456', client.project(undefined).story(456).pathSegments().join('/'), 'stories -> path segments should match');
  test.done();
};
#to be sourced
# Environment setup for an Intel-2020a toolchain cluster: loads the build
# modules, then activates the project's virtualenv. Must be sourced (not
# executed) so the module paths and venv persist in the calling shell.
# Load the modules we need:
module load IntelPython3-Packages
module load Boost/1.73.0-intel-2020a
module load HDF5/1.10.6-intel-2020a-noMPI
module load SQLite/3.31.1-intel-2020a
module load git
# List the loaded modules (goes to the job/terminal log for reproducibility):
module li
# Activate the project's virtual environment:
# (assuming current directory is project directory)
. .venv/bin/activate
/// Extracts the names of all `func name(...) {` declarations found inside the
/// outermost `{ ... }` region of `input` (e.g. a class body).
///
/// - Parameter input: Swift source text to scan.
/// - Returns: Method names in order of appearance; empty when no braced
///   region or no methods are found.
func extractMethodNames(_ input: String) -> [String] {
    var methodNames: [String] = []

    guard let startIndex = input.firstIndex(of: "{"),
          let endIndex = input.lastIndex(of: "}") else {
        return methodNames
    }

    // BUG FIX: `input[start...end]` is a Substring, but NSRegularExpression's
    // `enumerateMatches(in:)` and `Range(_:in:)` require a String — the
    // original did not compile. Materialize the slice as a String.
    let classContent = String(input[startIndex...endIndex])

    // Matches `func <name>(<params>) {` and captures the name.
    let methodRegex = try! NSRegularExpression(
        pattern: "func\\s+([a-zA-Z0-9_]+)\\s*\\([^\\)]*\\)\\s*\\{",
        options: []
    )

    let fullRange = NSRange(classContent.startIndex..<classContent.endIndex, in: classContent)
    methodRegex.enumerateMatches(in: classContent, options: [], range: fullRange) { match, _, _ in
        if let match = match,
           let range = Range(match.range(at: 1), in: classContent) {
            methodNames.append(String(classContent[range]))
        }
    }

    return methodNames
}
<reponame>raidenei0626/dev-resources-backend
module.exports = (url, domain) => {
let normalizedUrl = '';
if (/^\//.test(url)) {
normalizedUrl = domain + url;
} else {
normalizedUrl = url;
}
return normalizedUrl;
};
|
<gh_stars>0
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var core_1 = require("./core");
if (!core_1.isUnitSupported('bit')) {
Intl.NumberFormat = core_1.UnifiedNumberFormat;
}
//# sourceMappingURL=polyfill.js.map |
<filename>fbcms/src/main/java/com/comp/admin/controller/GroupController.java<gh_stars>0
package com.comp.admin.controller;

import java.util.List;

import javax.servlet.http.HttpServletRequest;

import com.comp.admin.biz.AccountGroupBo;
import com.comp.admin.entities.AccountGroup;
import com.comp.admin.vo.AccountGroupVo;
import com.fbcms.util.DataResult;
import com.fbcms.util.StringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;

/**
 * Admin CRUD endpoints for account groups, mounted under /preferences/e.
 * Page rendering goes through ModelAndView; data operations are AJAX
 * endpoints returning DataResult JSON.
 */
@Controller
@RequestMapping("/preferences/e")
public class GroupController extends BaseController{
    // NOTE(review): logger is currently unused in this class.
    private static final Logger logger = LoggerFactory.getLogger(GroupController.class);

    @Autowired
    private AccountGroupBo accountGroupBo;

    /** Renders the group list page. */
    @RequestMapping("/group")
    public ModelAndView groupList(HttpServletRequest request) {
        return new ModelAndView("/admin/group_list").addObject("userType","e");
    }

    /** Paged group listing plus total count for the list page's grid. */
    @RequestMapping("/group/ajax/list")
    @ResponseBody
    public DataResult ajaxGroupList(AccountGroupVo queryVo) {
        DataResult res = new DataResult();
        res.put("listData", accountGroupBo.pageQuery(queryVo));
        res.put("listCount", accountGroupBo.count(queryVo));
        return res;
    }

    /** Upsert: inserts when the incoming record has no id, updates otherwise. */
    @RequestMapping("/group/ajax/save")
    @ResponseBody
    public DataResult ajaxSave(AccountGroup inVo) {
        DataResult res = new DataResult();
        if(StringUtil.isEmptyOrNull(inVo.getId())){
            accountGroupBo.insert(inVo);
        }else{
            accountGroupBo.update(inVo);
        }
        return res;
    }

    /** Deletes the group identified by the request's id. */
    @RequestMapping("/group/ajax/del")
    @ResponseBody
    public DataResult del(AccountGroup queryVo) {
        DataResult res = new DataResult();
        accountGroupBo.deleteById(queryVo.getId());
        return res;
    }

    /** Returns all groups (pagination disabled via Integer.MAX_VALUE limit). */
    @RequestMapping("/group/ajax/all")
    @ResponseBody
    public Object ajaxlist(HttpServletRequest request) {
        AccountGroupVo queryVo = new AccountGroupVo();
        queryVo.setOffset(0);
        queryVo.setLimit(Integer.MAX_VALUE);
        List<AccountGroup> list = accountGroupBo.pageQuery(queryVo);
        return list;
    }
}
<reponame>totatoti/monogram<gh_stars>1-10
import { FingerprintIntarface, Attribute } from './fingerprintIntarface'
import * as twgl from 'twgl.js'
import sha3 from 'crypto-js/sha3'
import Hex from 'crypto-js/enc-hex'
export class Webgl implements FingerprintIntarface {
  /**
   * Collects WebGL-based fingerprint attributes into `fingerprints` and
   * returns the same map: vendor/renderer/version strings (including the
   * unmasked values when WEBGL_debug_renderer_info is available) plus a hash
   * of a rendered test scene.
   *
   * A hidden probe canvas is appended to document.body for the duration of
   * the probe and is always removed, even if a WebGL call throws.
   */
  fingerprint(fingerprints: Map<string, Attribute>): Map<string, Attribute> {
    const width = 200
    const height = 200
    const canvasElement = document.createElement('canvas')
    canvasElement.hidden = true
    const canvas = document.body.appendChild(canvasElement)
    canvas.width = width
    canvas.height = height
    try {
      // preserveDrawingBuffer keeps the frame around so readPixels() in
      // webglHash() can see what was drawn. Fall through newest-to-oldest
      // context names for browser compatibility.
      const ctx =
        canvas.getContext('webgl2', { preserveDrawingBuffer: true }) ||
        canvas.getContext('experimental-webgl2', { preserveDrawingBuffer: true }) ||
        canvas.getContext('webgl', { preserveDrawingBuffer: true }) ||
        canvas.getContext('experimental-webgl', { preserveDrawingBuffer: true })
      if (ctx != null) {
        // Unmasked vendor/renderer require a debug extension some browsers block.
        const debugInfo = ctx.getExtension('WEBGL_debug_renderer_info')
        if (debugInfo != null) {
          fingerprints.set('webgl_unmasked_vendor', new Attribute(ctx.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL), 1))
          fingerprints.set(
            'webgl_unmasked_renderer',
            new Attribute(ctx.getParameter(debugInfo.UNMASKED_RENDERER_WEBGL), 1)
          )
        }
        fingerprints.set(
          'webgl_shader_language_version',
          new Attribute(ctx.getParameter(ctx.SHADING_LANGUAGE_VERSION), 1)
        )
        fingerprints.set('webgl_vendor', new Attribute(ctx.getParameter(ctx.VENDOR), 1))
        fingerprints.set('webgl_version', new Attribute(ctx.getParameter(ctx.VERSION), 1))
        fingerprints.set('webgl_renderer', new Attribute(ctx.getParameter(ctx.RENDERER), 1))
        const webglHash = this.webglHash(ctx, width, height)
        if (webglHash != null) {
          fingerprints.set('webgl_hash', new Attribute(webglHash, 1))
        }
      }
    } finally {
      // Robustness fix: previously the probe canvas leaked into the DOM if any
      // WebGL call above threw. Always detach it.
      document.body.removeChild(canvasElement)
    }
    return fingerprints
  }

  /**
   * Renders a fixed gradient quad with twgl and returns the SHA3-512 hex
   * digest of the RGBA framebuffer, or null if the shader program could not
   * be built (e.g. shader compilation failure on this driver).
   */
  private webglHash(ctx: WebGLRenderingContext, width: number, height: number): string | null {
    const vs = `
    attribute vec3 position;
    attribute vec4 color;
    uniform   mat4 worldViewProjection;
    varying   vec4 vColor;
    void main(void){
        vColor = color;
        gl_Position = worldViewProjection * vec4(position, 1.0);
    }
    `
    const fs = `
    precision mediump float;
    varying vec4 vColor;
    void main(void){
        gl_FragColor = vColor;
    }
    `
    const programInfo = twgl.createProgramInfo(ctx, [vs, fs])
    if (programInfo != null) {
      // Unit quad with a distinct color at each corner; rasterization
      // differences between GPUs/drivers show up in the interpolated pixels.
      const arrays = {
        position: [1.0, 1.0, 0.0, 1.0, -1.0, 0.0, -1.0, 1.0, 0.0, -1.0, -1.0, 0.0],
        color: [1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
        indices: [0, 1, 2, 1, 2, 3],
      }
      const bufferInfo = twgl.createBufferInfoFromArrays(ctx, arrays)
      ctx.clearColor(0.0, 0.0, 0.0, 1.0)
      ctx.clear(ctx.COLOR_BUFFER_BIT | ctx.DEPTH_BUFFER_BIT)
      const m4 = twgl.m4
      // Fixed camera so the rendered scene is deterministic per device.
      const projection = m4.perspective((30 * Math.PI) / 180, width / height, 0.01, 100)
      const eye = [-2, -2, -3.5]
      const target = [0, 0, 0]
      const up = [0, 1, 0]
      const camera = m4.lookAt(eye, target, up)
      const view = m4.inverse(camera)
      const viewProjection = m4.multiply(projection, view)
      const world = m4.rotationY(0)
      const uniforms = {
        worldViewProjection: m4.multiply(viewProjection, world),
      }
      ctx.useProgram(programInfo.program)
      twgl.setBuffersAndAttributes(ctx, programInfo, bufferInfo)
      twgl.setUniforms(programInfo, uniforms)
      ctx.drawElements(ctx.TRIANGLES, bufferInfo.numElements, ctx.UNSIGNED_SHORT, 0)
      ctx.flush()
      const n = new Uint8Array(width * height * 4)
      ctx.readPixels(0, 0, width, height, ctx.RGBA, ctx.UNSIGNED_BYTE, n)
      let hex = ''
      for (let i = 0; i < n.byteLength; i++) {
        // Bug fix: zero-pad each byte to two hex digits. Unpadded toString(16)
        // makes the encoding ambiguous (e.g. bytes [0x01,0x23] and [0x12,0x03]
        // both serialized to "123"), so distinct framebuffers could collide.
        // NOTE(review): this changes hash values relative to builds that used
        // the unpadded encoding.
        hex += n[i].toString(16).padStart(2, '0')
      }
      const hash = sha3(hex, { outputLength: 512 })
      return hash.toString(Hex)
    }
    return null
  }
}
|
// Remote-control / keyboard keyCode map for TV-style navigation.
// A value of -1 marks a key this platform exposes no usable code for.
export default {
  // D-pad navigation (standard DOM arrow keyCodes).
  LEFT: 37,
  RIGHT: 39,
  UP: 38,
  DOWN: 40,
  ENTER: 13,
  // "RETURN" here is the TV back button, mapped to the Backspace keyCode.
  RETURN: 8,
  // Digit row 0-9.
  ZERO: 48,
  ONE: 49,
  TWO: 50,
  THREE: 51,
  FOUR: 52,
  FIVE: 53,
  SIX: 54,
  SEVEN: 55,
  EIGHT: 56,
  NINE: 57,
  // Color keys. NOTE(review): RED/GREEN/BLUE follow the common CEA remote
  // sequence 403-406 where YELLOW is usually 405; confirm 502 is intentional
  // for the target platform rather than a typo.
  RED: 403,
  GREEN: 404,
  YELLOW: 502,
  BLUE: 406,
  // Media transport keys.
  PLAY: 415,
  PAUSE: 19,
  STOP: 413,
  REC: -1,
  FF: 465,
  RW: 412,
  // Platform-specific keys not available here.
  TOOLS: -1,
  PUP: -1,
  PDOWN: -1,
  CHLIST: -1,
  PRECH: -1,
  TXTMIX: -1,
  FAVCH: -1,
  EXIT: -1,
  INFO: 469
};
|
<reponame>kalebm1/react-d3-tree
// eslint-disable-next-line
const raf = global.requestAnimationFrame = cb => {
  // Test/SSR shim for requestAnimationFrame.
  // Fixes vs. the previous version:
  //  - the callback now receives a timestamp argument, as real rAF callbacks
  //    do (a DOMHighResTimeStamp in browsers; Date.now() is close enough here);
  //  - the timer handle is returned so callers can cancel the scheduled frame.
  return setTimeout(() => cb(Date.now()), 0)
}
export default raf
public class TestTwiceTheSum {
public static void main(String[] args) {
String[] a = {"1", "4", "3", "8"};
TwiceTheSum sum1 = new TwiceTheSum(a);
String[] b = {"132", "289", "893", "1", "839", "56"};
TwiceTheSum sum2 = new TwiceTheSum(b);
String[] c = {"857882923723748", "8493849384340394920", "483948454982399"};
TwiceTheSum sum3 = new TwiceTheSum(c);
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.