text stringlengths 1 1.05M |
|---|
package org.slos.battle.abilities.rule.target;
import org.slos.battle.GameContext;
import org.slos.battle.monster.MonsterBattleStats;
import java.util.List;
/**
 * Strategy for choosing which monsters an ability/attack should affect.
 */
public interface TargetRule {
/**
 * Selects the targets for the acting monster.
 *
 * @param monsterBattleStats the monster performing the action
 * @param selectFrom         candidate monsters the rule may pick from
 * @param gameContext        current battle context
 * @return the chosen targets (presumably a subset of {@code selectFrom} --
 *         TODO confirm implementations never return outside candidates)
 */
List<MonsterBattleStats> selectTargets(MonsterBattleStats monsterBattleStats, List<MonsterBattleStats> selectFrom, GameContext gameContext);
}
|
#!/bin/bash
# Step performed after the cluster validation, to remove:
# 1. bad clusters (≥ 10% bad-aligned ORFs)
# 2. shadow clusters (≥ 30% shadow ORFs)
# 3. single rejected ORFs (shadow, spurious and bad-aligned)
# From the validation results we already have a table with the good clusters:
# good_cl.tsv (in the directory with the validation results)
MD=data
GOOD="${MD}"/cluster_validation/good_cl.tsv
OUTDIR="${MD}"/cluster_refinement
if [[ ! -d "${OUTDIR}" ]]; then
mkdir -p "${OUTDIR}"
fi
# Cluster db
CLSTR="${MD}"/mmseqs_clustering/marine_hmp_db_03112017_clu_fa
# Table with info about shadow and spurious ORFs (gzip-compressed)
# Column: orf - length - cl_name - cl_size - prop_shadow - is.shadow - is.spurious
SHSP="${MD}"/spurious_and_shadows/marine_hmp_info_shadow_spurious.tsv.gz
# Remove the clusters with ≥ 30% shadows
join -11 -21 -v2 <(awk '$5>=0.3 && !seen[$2]++{print $2}' <(zcat ${SHSP}) | sort -k1,1) \
<(awk '{print $1}' ${GOOD} | sort -k1,1) > ${OUTDIR}/marine_hmp_good_less30_cl.txt
# retrieve the subdb with the ORFs of these clusters
~/opt/MMseqs2/bin/mmseqs createsubdb ${OUTDIR}/marine_hmp_good_less30_cl.txt ${CLSTR} ${OUTDIR}/marine_hmp_kept_less30_clu_fa
# retrieve the bad-aligned sequences in these clusters
#awk '$1!="cl_name"' ${OUTDIR}/marine_hmp_good_less30_cl.txt | parallel --progress -j 20 cat "${MD}"/cluster_validation/compositional/results/{}_rejected.txt > ${OUTDIR}/marine_hmp_good_less30_cl_rejected.txt
# Or if we have instead the file with all the rejected sequences
join -11 -21 <(sort -k1,1 ${OUTDIR}/marine_hmp_good_less30_cl.txt) \
<(sort -k1,1 ${MD}/cluster_validation/rejected_seqs_cl.tsv) > ${OUTDIR}/marine_hmp_good_less30_cl_rejected.txt
# Add the bad-aligned sequences to the spurious and shadows
# BUGFIX: ${SHSP} is gzip-compressed (it is zcat'ed above); awk-ing the raw
# file produced no matches, so read it through zcat here as well.
cat ${OUTDIR}/marine_hmp_good_less30_cl_rejected.txt <(awk '$5<0.3 && $6!="FALSE" || $5<0.3 && $7!="FALSE"{print $1}' <(zcat ${SHSP})) > ${OUTDIR}/marine_hmp_orfs_to_remove.txt
# add cluster membership
# BUGFIX: this previously read ${OUTDIR}/${SMPL}_orfs_to_remove.txt, but
# ${SMPL} is never defined in this script; every other path here uses the
# literal "marine_hmp" prefix.
join -11 -21 <(sort ${OUTDIR}/marine_hmp_orfs_to_remove.txt) \
<(awk '{print $2,$1}' <(zcat ${MD}/mmseqs_clustering/marine_hmp_db_03112017_clu_info.tsv.gz) | sort -k1,1) > ${OUTDIR}/marine_hmp_orfs_to_remove_cl.tsv
# remove the single orfs from the clusters with less than 10% bad-aligned ORFs and less than 30% shadows.
mpirun -np 12 ~/opt/ffindex_mg_updt/bin/ffindex_apply_mpi \
${OUTDIR}/marine_hmp_kept_less30_clu_fa ${OUTDIR}/marine_hmp_kept_less30_clu_fa.index \
-i ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_fa.ffindex -d ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_fa.ffdata -- ${PWD}/scripts/C_refinement/remove_orfs.sh ${OUTDIR}/marine_hmp_orfs_to_remove_cl.tsv
# From the refined clusters select the annotated and the not annotated for the following classification steps
# Create tables with new seqs and new clusters for some stats and checking the numbers
# BUGFIX: zcat here too -- ${SHSP} is gzip-compressed.
join -11 -22 <(awk '{print $1}' ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_fa.ffindex | sort -k1,1) \
<(awk '{print $1,$2}' <(zcat ${SHSP}) | sort -k2,2) > ${OUTDIR}/tmp
join -12 -21 -v1 <(sort -k2,2 ${OUTDIR}/tmp) \
<(sort -k1,1 ${OUTDIR}/marine_hmp_orfs_to_remove.txt) > ${OUTDIR}/marine_hmp_refined_cl.tsv
rm ${OUTDIR}/tmp
# annotated (check those left with no-annotated sequences) --> join with file with all annotated clusters..for annotations
join -11 -21 <(sort -k1,1 ${OUTDIR}/marine_hmp_refined_cl.tsv) \
<(awk '{print $2,$1}' ${MD}/annot_and_clust/marine_hmp_clu_ge10_annot.tsv \
| sort -k1,1) > ${OUTDIR}/marine_hmp_refined_annot_cl.tsv
join -11 -21 <( awk '{print $3,$2}' ${OUTDIR}/marine_hmp_refined_annot_cl.tsv | sort -k1,1) \
<(awk '{print $1,$2,$3}' ${MD}/annot_and_clust/marine_hmp_clu_ge10_annot.tsv \
| sort -k1,1) > ${OUTDIR}/marine_hmp_refined_all_annot_cl.tsv
# find clusters with no annotated members
sort -k1,1 ${OUTDIR}/marine_hmp_refined_all_annot_cl.tsv \
| awk '!seen[$2,$4]++{print $2,$4}' \
| awk 'BEGIN{ getline; id=$1;l1=$1;l2=$2;} { if ($1 != id) { print l1,l2; l1=$1;l2=$2;} else { l2=l2"|"$2;} id=$1;} END { print l1,l2;}' \
| grep -v '|' | awk '$2=="NA"{print $1}' > ${OUTDIR}/marine_hmp_new_unkn_cl.txt
# BUGFIX: the original tested `! -s` (file empty/missing), which meant the
# unknown clusters were never actually moved; the move must run when the
# unknown-cluster list is NON-empty.
if [[ -s ${OUTDIR}/marine_hmp_new_unkn_cl.txt ]]; then
# move the clusters left with no annotated member to the not annotated
join -12 -21 -v1 <(awk '!seen[$1,$2,$3]++' ${OUTDIR}/marine_hmp_refined_all_annot_cl.tsv | sort -k2,2) \
<(sort ${OUTDIR}/marine_hmp_new_unkn_cl.txt) > ${OUTDIR}/marine_hmp_refined_annot_cl.tsv
# BUGFIX: read the *_all_annot_* table here; the original read
# marine_hmp_refined_annot_cl.tsv, which the command above has just rewritten
# WITHOUT the unknown clusters, so this join always produced an empty file.
join -12 -21 <(awk '!seen[$1,$2,$3]++' ${OUTDIR}/marine_hmp_refined_all_annot_cl.tsv | sort -k2,2) \
<(sort ${OUTDIR}/marine_hmp_new_unkn_cl.txt) > ${OUTDIR}/marine_hmp_refined_noannot_cl.tsv
# not annotated
join -12 -21 <(sort -k2,2 ${OUTDIR}/marine_hmp_refined_cl.tsv) \
<(awk '$4=="noannot"{print $1,$2}' ${GOOD} | sort -k1,1) >> ${OUTDIR}/marine_hmp_refined_noannot_cl.tsv
else
# not annotated
join -12 -21 <(sort -k2,2 ${OUTDIR}/marine_hmp_refined_cl.tsv) \
<(awk '$4=="noannot"{print $1,$2}' ${GOOD} | sort -k1,1) > ${OUTDIR}/marine_hmp_refined_noannot_cl.tsv
fi
# Using the cluster ids retrieve the two sub databases, annotated clusters and not
ln -s ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_fa.ffindex ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_fa.index
ln -s ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_fa.ffdata ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_fa
# NOTE(review): the createsubdb call earlier uses ~/opt/MMseqs2 while these
# use ~/MMseqs2 -- confirm which install is intended before unifying.
~/MMseqs2/bin/mmseqs createsubdb <(awk '!seen[$1]++{print $1}' ${OUTDIR}/marine_hmp_refined_annot_cl.tsv) ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_fa ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_annot_fa
~/MMseqs2/bin/mmseqs createsubdb <(awk '!seen[$1]++{print $1}' ${OUTDIR}/marine_hmp_refined_noannot_cl.tsv) ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_fa ${OUTDIR}/ffindex_files/marine_hmp_refined_cl_noannot_fa
|
<reponame>threathunterX/offline
package com.threathunter.bordercollie.slot.api;
import com.threathunter.bordercollie.slot.util.SlotVariableMetaRegister;
import com.threathunter.bordercollie.slot.util.StrategyInfoCache;
import com.threathunter.config.CommonDynamicConfig;
import com.threathunter.model.BaseEventMeta;
import com.threathunter.model.EventMetaRegistry;
import com.threathunter.model.VariableMeta;
import com.threathunter.persistent.core.EventSchemaRegister;
import com.threathunter.variable.VariableMetaBuilder;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Loads slot configuration (event metas, offline variable metas, strategy
 * info) into the process-wide registries. In the "dev" environment the data
 * comes from bundled JSON classpath resources; otherwise it is fetched over
 * HTTP from the configured Nebula endpoints.
 */
public class SlotConfigUpdater {
    // todo move to nebula_slot_compute
    private static final Logger logger = LoggerFactory.getLogger(SlotConfigUpdater.class);
    private static final SlotConfigUpdater configUtil = new SlotConfigUpdater();

    /** Process-wide singleton accessor. */
    public static SlotConfigUpdater getInstance() {
        return configUtil;
    }

    /**
     * Refreshes event metas, variable metas and strategy info. Any failure is
     * logged with its cause and rethrown wrapped in a RuntimeException.
     */
    public void doUpdates() throws Exception {
        try {
            if ("dev".equals(CommonDynamicConfig.getInstance().getString("environment"))) {
                initEventMeta();
                logger.warn("update event metas success (json)");
                initVariablemeta();
                logger.warn("update variable metas success (json)");
                initStrategyInfoCache();
                logger.warn("update strategy info cache success (json)");
                return;
            }
            updateEventMetas();
            logger.warn("update event metas success");
            updateOfflineVariableNodes();
            logger.warn("update offline variable success");
            doPollingStrategiesInfo();
            logger.warn("update strategy info success");
        } catch (Exception e) {
            // Fix: pass the exception to the logger -- the original dropped
            // the stack trace here.
            logger.error("error for update", e);
            throw new RuntimeException("fail to update variable", e);
        }
    }

    /** Fetches event metas from nebula.meta.poller.eventurl and registers them. */
    private void updateEventMetas() throws Exception {
        String eventMetasUrl = CommonDynamicConfig.getInstance().getString("nebula.meta.poller.eventurl");
        if (eventMetasUrl == null || eventMetasUrl.isEmpty()) {
            throw new Exception("event metas is empty");
        }
        String body = getRestfulResult(eventMetasUrl);
        if (body.isEmpty()) {
            // Fix: use the logger instead of System.out for diagnostics.
            logger.error("empty event metas response from {}", eventMetasUrl);
            throw new Exception("fail to fetch event metas ");
        }
        ObjectMapper mapper = new ObjectMapper();
        Map<Object, Object> response = mapper.readValue(body, Map.class);
        List<Object> eventsObjects = (List<Object>) response.get("values");
        eventsObjects.forEach(event -> EventMetaRegistry.getInstance().addEventMeta(BaseEventMeta.from_json_object(event)));
    }

    /** Fetches offline variable metas and publishes them to the slot register. */
    private void updateOfflineVariableNodes() throws Exception {
        String offlineVariablesUrl = CommonDynamicConfig.getInstance().getString("nebula.meta.offline.variableurl");
        if (offlineVariablesUrl == null || offlineVariablesUrl.isEmpty()) {
            throw new Exception("offline variables is empty");
        }
        String body = getRestfulResult(offlineVariablesUrl);
        if (body.isEmpty()) {
            logger.error("empty offline variables response from {}", offlineVariablesUrl);
            throw new Exception("fetch offline variables is empty");
        }
        ObjectMapper mapper = new ObjectMapper();
        Map<Object, Object> response = mapper.reader(Map.class).readValue(body);
        List<Object> variableObjects = (List<Object>) response.get("values");
        VariableMetaBuilder builder = new VariableMetaBuilder();
        List<VariableMeta> metas = builder.buildFromJson(variableObjects);
        logger.info("init in common base: metas size = {}", metas.size());
        SlotVariableMetaRegister.getInstance().update(metas);
    }

    /** Fetches strategy info and updates the strategy cache. */
    private void doPollingStrategiesInfo() throws Exception {
        String strategyInfoUrl = CommonDynamicConfig.getInstance().getString("nebula.meta.strategyurl");
        if (strategyInfoUrl == null || strategyInfoUrl.isEmpty()) {
            throw new Exception("strategy information url is empty");
        }
        String body = getRestfulResult(strategyInfoUrl);
        // Fix: validate the body like the other update methods do, instead of
        // letting the JSON parser fail cryptically on an empty string.
        if (body.isEmpty()) {
            logger.error("empty strategy info response from {}", strategyInfoUrl);
            throw new Exception("fetch strategy info is empty");
        }
        ObjectMapper mapper = new ObjectMapper();
        Map<Object, Object> response = mapper.reader(Map.class).readValue(body);
        List<Map<String, Object>> strategyObjects = (List<Map<String, Object>>) response.get("values");
        StrategyInfoCache.getInstance().update(strategyObjects);
    }

    /**
     * GETs the given url (with the configured auth token appended) and
     * returns the response body as a string.
     */
    private String getRestfulResult(String url) throws Exception {
        String sep = url.contains("?") ? "&" : "?";
        String authUrl = String.format("%s%sauth=%s", url, sep, CommonDynamicConfig.getInstance().getString("auth"));
        HttpURLConnection conn = getHttpURLConnection(authUrl);
        // try-with-resources replaces the original null-checked finally block.
        try (InputStream inputStream = conn.getInputStream()) {
            return readInputStream(inputStream);
        }
    }

    /** Builds a GET connection with connect/read timeouts for the JSON API. */
    private HttpURLConnection getHttpURLConnection(String curEventUrl) throws Exception {
        URL u = new URL(curEventUrl);
        HttpURLConnection conn = (HttpURLConnection) u.openConnection();
        conn.setConnectTimeout(10000);
        conn.setReadTimeout(1000 * 30);
        conn.setRequestMethod("GET");
        conn.setInstanceFollowRedirects(false);
        conn.setRequestProperty("accept", "*/*");
        conn.setRequestProperty("connection", "Keep-Alive");
        conn.setRequestProperty("content-type", "application/json");
        conn.setDoOutput(false);
        conn.setDoInput(true);
        return conn;
    }

    /** Reads the whole stream into a string, decoding as UTF-8. */
    private String readInputStream(InputStream in) throws IOException {
        char[] buffer = new char[2000];
        StringBuilder result = new StringBuilder();
        // Fix: decode explicitly as UTF-8 -- the requests declare JSON
        // content-type, and the original relied on the platform default.
        InputStreamReader ins = new InputStreamReader(in, StandardCharsets.UTF_8);
        int readBytes;
        while ((readBytes = ins.read(buffer, 0, 2000)) >= 0) {
            result.append(buffer, 0, readBytes);
        }
        return result.toString();
    }

    /** Dev mode: loads strategy.json from the classpath into the cache. */
    private void initStrategyInfoCache() {
        ObjectMapper mapper = new ObjectMapper();
        List<Map<String, Object>> strategyObjects;
        try {
            URL in = SlotConfigUpdater.class.getClassLoader().getResource("strategy.json");
            strategyObjects = mapper.reader(List.class).readValue(in);
        } catch (Exception e) {
            throw new RuntimeException("strategy cache init", e);
        }
        StrategyInfoCache.getInstance().update(strategyObjects);
    }

    /** Dev mode: loads events.json from the classpath into the event registry. */
    private static void initEventMeta() {
        ObjectMapper mapper = new ObjectMapper();
        List<Object> eventsObjects;
        try {
            URL in = SlotConfigUpdater.class.getClassLoader().getResource("events.json");
            eventsObjects = mapper.reader(List.class).readValue(in);
        } catch (Exception e) {
            // Fix: the original printed the trace and then NPE'd on the null
            // list below; fail fast with the real cause instead.
            throw new RuntimeException("event meta init", e);
        }
        eventsObjects.forEach(event -> EventMetaRegistry.getInstance().addEventMeta(BaseEventMeta.from_json_object(event)));
    }

    /** Dev mode: loads slot_metas.json from the classpath and registers the metas. */
    private static void initVariablemeta() {
        ObjectMapper mapper = new ObjectMapper();
        List<Object> variableObjects;
        try {
            URL in = SlotConfigUpdater.class.getClassLoader().getResource("slot_metas.json");
            variableObjects = mapper.reader(List.class).readValue(in);
        } catch (Exception e) {
            // Fix: same fail-fast as initEventMeta (was printStackTrace + NPE).
            throw new RuntimeException("variable meta init", e);
        }
        VariableMetaBuilder builder = new VariableMetaBuilder();
        List<VariableMeta> metas = builder.buildFromJson(variableObjects);
        logger.info("init in common base: metas size = {}", metas.size());
        SlotVariableMetaRegister.getInstance().update(metas);
    }
}
|
package org.ywb.scgextend.common;
import com.google.common.base.Strings;
import lombok.Getter;
import java.io.Serializable;
import java.util.Objects;
import java.util.function.Supplier;
/**
 * Generic API response wrapper carrying a status code, a human-readable
 * message, and an optional payload.
 *
 * @param <T> payload type
 */
@Getter
public class ResultVO<T> implements Serializable {
    /** Response status code, see {@link ResultCode}. */
    private String code;
    /** Human-readable description of the result. */
    private String message;
    /** Payload data. */
    private T data;

    private ResultVO(String message, String code) {
        this.message = message;
        this.code = code;
    }

    /**
     * Builds a VO from a predefined {@link ResultCode} enum constant.
     *
     * @param resultCode {@link ResultCode}
     * @param <D>        data type
     * @return VO created from the enum
     */
    private static <D> ResultVO<D> buildByEnum(ResultCode resultCode) {
        return new ResultVO<>(resultCode.getMessage(), resultCode.getCode());
    }

    /**
     * Builds a failure VO from a predefined {@link ResultCode} enum constant.
     *
     * @param resultCode {@link ResultCode}
     * @param <D>        data type
     * @return failure VO
     */
    public static <D> ResultVO<D> buildFailure(ResultCode resultCode) {
        return buildByEnum(resultCode);
    }

    /** Builds a failure VO from an explicit code and message. */
    public static <D> ResultVO<D> buildFailure(String code, String message) {
        return new ResultVO<>(message, code);
    }

    /** Builds a failure VO carrying an explicit code, message and payload. */
    public static <D> ResultVO<D> buildFailure(String code, String message, D data) {
        ResultVO<D> vo = buildFailure(code, message);
        vo.data = data;
        return vo;
    }

    /**
     * Builds a success VO carrying the given payload.
     *
     * @param data payload
     * @param <D>  payload type
     * @return success VO
     */
    public static <D> ResultVO<D> buildSuccess(D data) {
        ResultVO<D> vo = buildByEnum(ResultCode.SUCCESS);
        vo.data = data;
        return vo;
    }

    /**
     * Builds a success VO with no payload.
     *
     * @param <D> payload type
     * @return success VO
     */
    public static <D> ResultVO<D> buildSuccess() {
        return buildByEnum(ResultCode.SUCCESS);
    }

    /**
     * Returns the payload of a successful VO; throws
     * {@link IllegalArgumentException} with {@code message} otherwise.
     *
     * @param resultVO VO to unwrap
     * @param message  exception message used on failure
     * @param <D>      payload type
     * @return the payload
     */
    public static <D> D getDataIfSuccess(ResultVO<D> resultVO, String message) {
        assertSuccess(resultVO, message);
        return resultVO.data;
    }

    /**
     * Returns the payload of a successful VO; throws
     * {@link IllegalArgumentException} with the supplied message otherwise.
     *
     * @param resultVO        VO to unwrap
     * @param messageSupplier lazily-evaluated exception message
     * @param <D>             payload type
     * @return the payload
     */
    public static <D> D getDataIfSuccess(ResultVO<D> resultVO, Supplier<String> messageSupplier) {
        assertSuccess(resultVO, nullSafeGet(messageSupplier));
        return resultVO.data;
    }

    /**
     * Tells whether the VO represents a successful result: non-null, with a
     * non-empty code equal to {@code ResultCode.SUCCESS}.
     *
     * @param resultVO VO to check (may be null)
     * @param <D>      payload type
     * @return true when successful
     */
    public static <D> boolean isSuccess(ResultVO<D> resultVO) {
        return !Objects.isNull(resultVO)
                && !Strings.isNullOrEmpty(resultVO.code)
                && resultVO.code.equals(ResultCode.SUCCESS.getCode());
    }

    /**
     * Asserts that the VO is successful; on failure throws
     * {@link IllegalArgumentException} carrying {@code message}.
     *
     * @param resultVO VO to check
     * @param message  exception message used on failure
     * @param <D>      payload type
     */
    public static <D> void assertSuccess(ResultVO<D> resultVO, String message) {
        // Same checks as the original two-step version, folded into one guard.
        if (!isSuccess(resultVO)) {
            throw new IllegalArgumentException(message);
        }
    }

    /**
     * Asserts that the VO is successful; on failure throws
     * {@link IllegalArgumentException} carrying the supplied message.
     *
     * @param resultVO        VO to check
     * @param messageSupplier lazily-evaluated exception message
     * @param <D>             payload type
     */
    public static <D> void assertSuccess(ResultVO<D> resultVO, Supplier<String> messageSupplier) {
        if (!isSuccess(resultVO)) {
            throw new IllegalArgumentException(nullSafeGet(messageSupplier));
        }
    }

    /** Evaluates the supplier, tolerating a null supplier. */
    private static String nullSafeGet(Supplier<String> messageSupplier) {
        return messageSupplier == null ? null : messageSupplier.get();
    }

    @Override
    public String toString() {
        return "ResultVO{" +
                "message='" + message + '\'' +
                ", code='" + code + '\'' +
                ", data=" + data +
                '}';
    }
}
|
/**
 * Minimal demo of two threads sharing one Runnable: each prints its own name
 * 25 times, with the interleaving decided by the scheduler.
 */
public class RunThreads implements Runnable {
    public static void main(String[] args) {
        RunThreads shared = new RunThreads();
        // Name the threads via the constructor instead of setName().
        Thread alpha = new Thread(shared, "Alpha thread");
        Thread beta = new Thread(shared, "Beta thread");
        alpha.start();
        beta.start();
    }

    @Override
    public void run() {
        int iteration = 0;
        while (iteration < 25) {
            System.out.println(Thread.currentThread().getName() + " is running");
            iteration++;
        }
    }
}
|
#!/bin/sh
# Load the 2014-format CSV test fixture into the test Postgres database.
# Flags are defined by scripts/loaddb.py: -s source kind, -d destination kind,
# -p input CSV path, -v target name; -u/-w/-b presumably user/password/database
# (all set to the test account) -- TODO confirm against loaddb.py's arg parser.
python scripts/loaddb.py -s csvfiles -d postgresdb -p test/data/format2014.csv -u splqueryutils_test_postgres -w splqueryutils_test_postgres -b splqueryutils_test_postgres -v format_2014
|
import React from 'react'
import styled from 'styled-components'
// Themed outer wrapper for the header banner.
const Container = styled.div`
  position: relative;
  padding: 32px 16px;
  background: ${(props) => props.theme.colors.primary};
`
// Large uppercase heading styled from the theme's secondary font.
const Title = styled.h1`
  text-align: center;
  position: relative;
  font-size: 64px;
  text-transform: uppercase;
  color: #313131;
  font-family: ${(props) => props.theme.font.secondary}
`
const Header = () => (
<Container>
<Title>
<span>Palette Makr</span>
</Title>
</Container>
)
export default Header
|
#!/usr/bin/env bash
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Resolve the repository root (parent of this script's directory) and pull in
# the shared helpers (presumably where ok/warning/add_to_path used below are
# defined -- they are not defined in this file).
TOP_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd -P)"
source ${TOP_DIR}/scripts/apollo.bashrc
# Detect whether this shell runs inside a Docker container (Docker creates the
# /.dockerenv sentinel in every container) and export APOLLO_IN_DOCKER.
function check_in_docker() {
if [ -f /.dockerenv ]; then
APOLLO_IN_DOCKER=true
else
APOLLO_IN_DOCKER=false
fi
export APOLLO_IN_DOCKER
}
# Assemble LD_LIBRARY_PATH, PYTHONPATH and PATH for the Apollo runtime.
# Layout differs between release containers ($RELEASE_DOCKER == 1) and dev
# builds; $USE_GPU selects CPU vs GPU libtorch. Later exports prepend, so
# entries added last win library lookup.
function set_lib_path() {
export LD_LIBRARY_PATH=/usr/lib:/usr/lib/x86_64-linux-gnu
if [ "$RELEASE_DOCKER" == 1 ]; then
local CYBER_SETUP="/apollo/cyber/setup.bash"
if [ -e "${CYBER_SETUP}" ]; then
source "${CYBER_SETUP}"
fi
PY_LIB_PATH=/apollo/lib
PY_TOOLS_PATH=/apollo/modules/tools
else
local CYBER_SETUP="/apollo/cyber/setup.bash"
if [ -e "${CYBER_SETUP}" ]; then
source "${CYBER_SETUP}"
fi
PY_LIB_PATH=${APOLLO_ROOT_DIR}/py_proto
PY_TOOLS_PATH=${APOLLO_ROOT_DIR}/modules/tools
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/apollo/lib:/apollo/bazel-genfiles/external/caffe/lib
fi
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/apollo/lib:/usr/local/apollo/local_integ/lib
export LD_LIBRARY_PATH=/usr/local/adolc/lib64:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/usr/local/Qt5.12.2/5.12.2/gcc_64/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/usr/local/fast-rtps/lib:$LD_LIBRARY_PATH
# CPU and GPU libtorch builds are mutually exclusive on the library path.
if [ "$USE_GPU" != "1" ];then
export LD_LIBRARY_PATH=/usr/local/apollo/libtorch/lib:$LD_LIBRARY_PATH
else
export LD_LIBRARY_PATH=/usr/local/apollo/libtorch_gpu/lib:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH=/usr/local/apollo/boost/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/usr/local/apollo/paddlepaddle_dep/mkldnn/lib/:$LD_LIBRARY_PATH
export PYTHONPATH=${PY_LIB_PATH}:${PY_TOOLS_PATH}:${PYTHONPATH}
# Set teleop paths
export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
export PYTHONPATH=/apollo/modules/teleop/common:${PYTHONPATH}
add_to_path "/apollo/modules/teleop/common/scripts"
# CUDA additions only when a CUDA install is present.
if [ -e /usr/local/cuda/ ];then
add_to_path "/usr/local/cuda/bin"
export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH
export C_INCLUDE_PATH=/usr/local/cuda/include:$C_INCLUDE_PATH
export CPLUS_INCLUDE_PATH=/usr/local/cuda/include:$CPLUS_INCLUDE_PATH
fi
}
# Create the runtime data directories (log/bag/core). Release containers use
# ${HOME}/data; dev builds use ${APOLLO_ROOT_DIR}/data.
function create_data_dir() {
local DATA_DIR=""
if [ "$RELEASE_DOCKER" != "1" ];then
DATA_DIR="${APOLLO_ROOT_DIR}/data"
else
DATA_DIR="${HOME}/data"
fi
mkdir -p "${DATA_DIR}/log"
mkdir -p "${DATA_DIR}/bag"
mkdir -p "${DATA_DIR}/core"
}
# Point APOLLO_BIN_PREFIX at bazel-bin when a build tree exists, otherwise at
# the source root, and export it for the start_* helpers below.
function determine_bin_prefix() {
APOLLO_BIN_PREFIX=$APOLLO_ROOT_DIR
if [ -e "${APOLLO_ROOT_DIR}/bazel-bin" ]; then
APOLLO_BIN_PREFIX="${APOLLO_ROOT_DIR}/bazel-bin"
fi
export APOLLO_BIN_PREFIX
}
# Print "--device X:X" docker arguments for every /dev node matching the
# pattern in ${1}; warns (helper from apollo.bashrc) when nothing matches.
function find_device() {
# ${1} = device pattern
local device_list=$(find /dev -name "${1}")
if [ -z "${device_list}" ]; then
warning "Failed to find device with pattern \"${1}\" ..."
else
local devices=""
# Fix: iterate over the first scan's result instead of re-running `find`;
# the original scanned /dev twice, and the device set could change between
# the two scans.
for device in ${device_list}; do
ok "Found device: ${device}."
devices="${devices} --device ${device}:${device}"
done
echo "${devices}"
fi
}
# Create/initialize the device nodes the stack needs (CAN and NVIDIA).
# Linux-only; requires sudo for ln/mknod/ip/modprobe.
function setup_device() {
if [ $(uname -s) != "Linux" ]; then
echo "Not on Linux, skip mapping devices."
return
fi
# setup CAN device
for INDEX in `seq 0 3`
do
# soft link if sensorbox exist
if [ -e /dev/zynq_can${INDEX} ] && [ ! -e /dev/can${INDEX} ]; then
sudo ln -s /dev/zynq_can${INDEX} /dev/can${INDEX}
fi
# otherwise create the CAN char device node (major 52) directly
if [ ! -e /dev/can${INDEX} ]; then
sudo mknod --mode=a+rw /dev/can${INDEX} c 52 $INDEX
fi
done
MACHINE_ARCH=$(uname -m)
# On aarch64 boards bring can0 up at 500 kbit/s.
if [ "$MACHINE_ARCH" == 'aarch64' ]; then
sudo ip link set can0 type can bitrate 500000
sudo ip link set can0 up
fi
# setup nvidia device
sudo /sbin/modprobe nvidia
sudo /sbin/modprobe nvidia-uvm
if [ ! -e /dev/nvidia0 ];then
sudo mknod -m 666 /dev/nvidia0 c 195 0
fi
if [ ! -e /dev/nvidiactl ];then
sudo mknod -m 666 /dev/nvidiactl c 195 255
fi
if [ ! -e /dev/nvidia-uvm ];then
sudo mknod -m 666 /dev/nvidia-uvm c 243 0
fi
if [ ! -e /dev/nvidia-uvm-tools ];then
sudo mknod -m 666 /dev/nvidia-uvm-tools c 243 1
fi
}
# Pick a directory for recorded bags: largest NVMe mount, else largest
# /media/${DOCKER_USER} mount, else /apollo. Creates the per-task directory
# and exports TASK_ID / TASK_DIR.
function decide_task_dir() {
# Try to find largest NVMe drive.
DISK="$(df | grep "^/dev/nvme" | sort -nr -k 4 | \
awk '{print substr($0, index($0, $6))}')"
# Try to find largest external drive.
if [ -z "${DISK}" ]; then
DISK="$(df | grep "/media/${DOCKER_USER}" | sort -nr -k 4 | \
awk '{print substr($0, index($0, $6))}')"
fi
if [ -z "${DISK}" ]; then
echo "Cannot find portable disk. Fallback to apollo data dir."
DISK="/apollo"
fi
# Create task dir.
BAG_PATH="${DISK}/data/bag"
TASK_ID=$(date +%Y-%m-%d-%H-%M-%S)
TASK_DIR="${BAG_PATH}/${TASK_ID}"
mkdir -p "${TASK_DIR}"
echo "Record bag to ${TASK_DIR}..."
export TASK_ID="${TASK_ID}"
export TASK_DIR="${TASK_DIR}"
}
# Exit-status convention (inverted from what the name suggests): returns 1
# when NO process for modules/<path>/launch/<module>.launch is running (the
# module IS stopped), 0 when one is running. Callers test `$? -eq 1`.
function is_stopped_customized_path() {
MODULE_PATH=$1
MODULE=$2
NUM_PROCESSES="$(pgrep -c -f "modules/${MODULE_PATH}/launch/${MODULE}.launch")"
if [ "${NUM_PROCESSES}" -eq 0 ]; then
return 1
else
return 0
fi
}
# Launch a module via cyber_launch in the background unless it is already
# running. Returns 0 on successful launch, 1 on failure, 2 if already running.
function start_customized_path() {
MODULE_PATH=$1
MODULE=$2
shift 2
is_stopped_customized_path "${MODULE_PATH}" "${MODULE}"
if [ $? -eq 1 ]; then
eval "nohup cyber_launch start /apollo/modules/${MODULE_PATH}/launch/${MODULE}.launch &"
# Give the launcher a moment, then verify the module actually came up.
sleep 0.5
is_stopped_customized_path "${MODULE_PATH}" "${MODULE}"
if [ $? -eq 0 ]; then
echo "Launched module ${MODULE}."
return 0
else
echo "Could not launch module ${MODULE}. Is it already built?"
return 1
fi
else
echo "Module ${MODULE} is already running - skipping."
return 2
fi
}
# Convenience wrapper: module path equals module name.
function start() {
MODULE=$1
shift
start_customized_path $MODULE $MODULE "$@"
}
# Launch a module's binary under gperftools CPU profiling (requires a
# `bash apollo.sh build_prof` build). Profile goes to /tmp/<module>.prof,
# stdout/stderr to data/log/<module>.out.
# NOTE(review): the stopped-check greps for the .launch file but this starts
# the raw binary -- confirm the post-launch check actually detects it.
function start_prof_customized_path() {
MODULE_PATH=$1
MODULE=$2
shift 2
echo "Make sure you have built with 'bash apollo.sh build_prof'"
LOG="${APOLLO_ROOT_DIR}/data/log/${MODULE}.out"
is_stopped_customized_path "${MODULE_PATH}" "${MODULE}"
if [ $? -eq 1 ]; then
PROF_FILE="/tmp/$MODULE.prof"
rm -rf $PROF_FILE
BINARY=${APOLLO_BIN_PREFIX}/modules/${MODULE_PATH}/${MODULE}
eval "CPUPROFILE=$PROF_FILE $BINARY \
--flagfile=modules/${MODULE_PATH}/conf/${MODULE}.conf \
--log_dir=${APOLLO_ROOT_DIR}/data/log $@ </dev/null >${LOG} 2>&1 &"
sleep 0.5
is_stopped_customized_path "${MODULE_PATH}" "${MODULE}"
if [ $? -eq 0 ]; then
echo -e "Launched module ${MODULE} in prof mode. \nExport profile by command:"
echo -e "${YELLOW}google-pprof --pdf $BINARY $PROF_FILE > ${MODULE}_prof.pdf${NO_COLOR}"
return 0
else
echo "Could not launch module ${MODULE}. Is it already built?"
return 1
fi
else
echo "Module ${MODULE} is already running - skipping."
return 2
fi
}
# Convenience wrapper: module path equals module name.
function start_prof() {
MODULE=$1
shift
start_prof_customized_path $MODULE $MODULE "$@"
}
# Launch a module in the foreground (no nohup/background) via cyber_launch.
function start_fe_customized_path() {
MODULE_PATH=$1
MODULE=$2
shift 2
is_stopped_customized_path "${MODULE_PATH}" "${MODULE}"
if [ $? -eq 1 ]; then
eval "cyber_launch start /apollo/modules/${MODULE_PATH}/launch/${MODULE}.launch"
else
echo "Module ${MODULE} is already running - skipping."
return 2
fi
}
# Convenience wrapper: module path equals module name.
function start_fe() {
MODULE=$1
shift
start_fe_customized_path $MODULE $MODULE "$@"
}
# Launch a module's binary under gdb with its standard flag file.
function start_gdb_customized_path() {
MODULE_PATH=$1
MODULE=$2
shift 2
eval "gdb --args ${APOLLO_BIN_PREFIX}/modules/${MODULE_PATH}/${MODULE} \
--flagfile=modules/${MODULE_PATH}/conf/${MODULE}.conf \
--log_dir=${APOLLO_ROOT_DIR}/data/log $@"
}
# Convenience wrapper: module path equals module name.
function start_gdb() {
MODULE=$1
shift
start_gdb_customized_path $MODULE $MODULE "$@"
}
# Stop a running module via cyber_launch; prints a message and returns early
# when the module is not running.
function stop_customized_path() {
MODULE_PATH=$1
MODULE=$2
is_stopped_customized_path "${MODULE_PATH}" "${MODULE}"
if [ $? -eq 1 ]; then
echo "${MODULE} process is not running!"
return
fi
cyber_launch stop "/apollo/modules/${MODULE_PATH}/launch/${MODULE}.launch"
if [ $? -eq 0 ]; then
echo "Successfully stopped module ${MODULE}."
else
echo "Module ${MODULE} is not running - skipping."
fi
}
# Convenience wrapper: module path equals module name.
function stop() {
MODULE=$1
stop_customized_path $MODULE $MODULE
}
# Note: This 'help' function here will overwrite the bash builtin command 'help'.
# TODO: add a command to query known modules.
# Print usage for the module-control commands added by sourcing this file.
# Intentionally shadows the bash builtin `help` (see the comment above).
function help() {
cat <<EOF
Invoke ". scripts/apollo_base.sh" within docker to add the following commands to the environment:
Usage: COMMAND [<module_name>]
COMMANDS:
help: show this help message
start: start the module in background
start_fe: start the module without putting in background
start_gdb: start the module with gdb
stop: stop the module
EOF
}
# Dispatch a subcommand (start/start_fe/start_gdb/start_prof/stop/help) for a
# module; any unrecognized command is passed through to start_customized_path
# as an extra argument.
function run_customized_path() {
local module_path=$1
local module=$2
local cmd=$3
shift 3
case $cmd in
start)
start_customized_path $module_path $module "$@"
;;
start_fe)
start_fe_customized_path $module_path $module "$@"
;;
start_gdb)
start_gdb_customized_path $module_path $module "$@"
;;
start_prof)
start_prof_customized_path $module_path $module "$@"
;;
stop)
stop_customized_path $module_path $module
;;
help)
help
;;
*)
start_customized_path $module_path $module $cmd "$@"
;;
esac
}
# Write log to a file about the env when record a bag.
# Snapshot git state (branch, HEAD commit, working and staged diffs) into
# Bag_Env_<TASK_ID>.log so a recorded bag can be tied to the exact code that
# produced it. No-op (with message) outside a git repo.
function record_bag_env_log() {
if [ -z "${TASK_ID}" ]; then
TASK_ID=$(date +%Y-%m-%d-%H-%M)
fi
git status >/dev/null 2>&1
if [ $? -ne 0 ]; then
echo "Not in Git repo, maybe because you are in release container."
echo "Skip log environment."
return
fi
commit=$(git log -1)
echo -e "Date:$(date)\n" >> Bag_Env_$TASK_ID.log
git branch | awk '/\*/ { print "current branch: " $2; }' >> Bag_Env_$TASK_ID.log
echo -e "\nNewest commit:\n$commit" >> Bag_Env_$TASK_ID.log
echo -e "\ngit diff:" >> Bag_Env_$TASK_ID.log
git diff >> Bag_Env_$TASK_ID.log
echo -e "\n\n\n\n" >> Bag_Env_$TASK_ID.log
echo -e "git diff --staged:" >> Bag_Env_$TASK_ID.log
git diff --staged >> Bag_Env_$TASK_ID.log
}
# run command_name module_name
# Entry point used as `run <module> <cmd> [args...]`; module path = name.
function run() {
local module=$1
shift
run_customized_path $module $module "$@"
}
# ---- executed at source time ----
check_in_docker
# Presumably cleared so modules choose their own thread count -- TODO confirm.
unset OMP_NUM_THREADS
if [ $APOLLO_IN_DOCKER = "true" ]; then
CYBER_SETUP="/apollo/cyber/setup.bash"
if [ -e "${CYBER_SETUP}" ]; then
source "${CYBER_SETUP}"
fi
create_data_dir
# NOTE(review): set_lib_path takes no positional parameters; the $1 forwarded
# here (from the sourcing shell) appears unused -- confirm before removing.
set_lib_path $1
if [ -z $APOLLO_BASE_SOURCED ]; then
determine_bin_prefix
export APOLLO_BASE_SOURCED=1
fi
fi
|
from bs4 import BeautifulSoup
def extractFormData(html):
    """Extract the username/password values from the form with id 'data-form'.

    Args:
        html: HTML document as a string (or bytes) parseable by BeautifulSoup.

    Returns:
        Dict with 'username' and 'password' keys. Missing form, missing
        inputs, or absent ``value`` attributes yield empty strings instead of
        raising (the original crashed with AttributeError when the form or an
        input was not found).
    """
    soup = BeautifulSoup(html, 'html.parser')
    form = soup.find('form', id='data-form')
    if form is None:
        return {"username": "", "password": ""}
    username_input = form.find('input', id='username')
    password_input = form.find('input', id='password')
    # Tag.get mirrors the original "'value' in attrs" check, with a default.
    username = username_input.get('value', '') if username_input is not None else ''
    password = password_input.get('value', '') if password_input is not None else ''
    return {"username": username, "password": password}
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.raigad.utils;
import com.google.common.base.Charsets;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import com.google.common.io.Files;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.List;
public class SystemUtils
{
private static final Logger logger = LoggerFactory.getLogger(SystemUtils.class);
public static final String NOT_FOUND_STR = "NOT_FOUND";
/**
 * GETs {@code url} with 1s connect/read timeouts and returns the response
 * body decoded as UTF-8. Returns {@link #NOT_FOUND_STR} on HTTP 404; wraps
 * any other failure (including non-200 status) in a RuntimeException.
 */
public static String getDataFromUrl(String url) {
    HttpURLConnection conn = null;
    try {
        conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setConnectTimeout(1000);
        conn.setReadTimeout(1000);
        conn.setRequestMethod("GET");
        if (conn.getResponseCode() == 404) {
            return NOT_FOUND_STR;
        }
        if (conn.getResponseCode() != 200) {
            throw new RuntimeException("Unable to get data for URL " + url);
        }
        byte[] b = new byte[2048];
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        // Fix: close the stream deterministically -- the original never
        // closed it on any path.
        try (DataInputStream d = new DataInputStream((FilterInputStream) conn.getContent())) {
            int c;
            while ((c = d.read(b, 0, b.length)) != -1) {
                bos.write(b, 0, c);
            }
        }
        String return_ = new String(bos.toByteArray(), Charsets.UTF_8);
        logger.info("Calling URL API: {} returns: {}", url, return_);
        return return_;
    }
    catch (Exception ex)
    {
        throw new RuntimeException(ex);
    }
    finally {
        // Fix: always release the connection; the original skipped
        // disconnect() on the 404 and error paths.
        if (conn != null) {
            conn.disconnect();
        }
    }
}
/**
 * Executes an HTTP GET against {@code url} with 1s timeouts and returns the
 * response body.
 *
 * @throws ESHttpException on a null response/entity, a non-200 status
 *         (message carries the body), or any transport failure
 */
public static String runHttpGetCommand(String url) throws Exception
{
    String return_;
    DefaultHttpClient client = new DefaultHttpClient();
    InputStream isStream = null;
    try {
        HttpParams httpParameters = new BasicHttpParams();
        int timeoutConnection = 1000;
        int timeoutSocket = 1000;
        HttpConnectionParams.setConnectionTimeout(httpParameters, timeoutConnection);
        HttpConnectionParams.setSoTimeout(httpParameters, timeoutSocket);
        client.setParams(httpParameters);
        HttpGet getRequest = new HttpGet(url);
        getRequest.setHeader("Content-type", "application/json");
        HttpResponse resp = client.execute(getRequest);
        if (resp == null || resp.getEntity() == null) {
            throw new ESHttpException("Unable to execute GET URL (" + url + ") Exception Message: < Null Response or Null HttpEntity >");
        }
        isStream = resp.getEntity().getContent();
        if (resp.getStatusLine().getStatusCode() != 200) {
            throw new ESHttpException("Unable to execute GET URL (" + url + ") Exception Message: (" + IOUtils.toString(isStream,StandardCharsets.UTF_8.toString()) + ")");
        }
        return_=IOUtils.toString(isStream,StandardCharsets.UTF_8.toString());
        logger.debug("GET URL API: {} returns: {}", url, return_);
    }
    catch(ESHttpException e)
    {
        // Fix: re-throw our own exception type unchanged; the original
        // re-wrapped it, mangling the status/body message built above.
        throw e;
    }
    catch(Exception e)
    {
        throw new ESHttpException("Caught an exception during execution of URL (" + url + ")Exception Message: (" + e + ")");
    }
    finally{
        if (isStream != null)
            isStream.close();
    }
    return return_;
}
public static String runHttpPutCommand(String url,String jsonBody) throws IOException
{
String return_;
DefaultHttpClient client = new DefaultHttpClient();
InputStream isStream = null;
try {
HttpParams httpParameters = new BasicHttpParams();
int timeoutConnection = 1000;
int timeoutSocket = 1000;
HttpConnectionParams.setConnectionTimeout(httpParameters, timeoutConnection);
HttpConnectionParams.setSoTimeout(httpParameters, timeoutSocket);
client.setParams(httpParameters);
HttpPut putRequest = new HttpPut(url);
putRequest.setEntity(new StringEntity(jsonBody, StandardCharsets.UTF_8));
putRequest.setHeader("Content-type", "application/json");
HttpResponse resp = client.execute(putRequest);
if (resp == null || resp.getEntity() == null) {
throw new ESHttpException("Unable to execute PUT URL (" + url + ") Exception Message: < Null Response or Null HttpEntity >");
}
isStream = resp.getEntity().getContent();
if (resp.getStatusLine().getStatusCode() != 200) {
throw new ESHttpException("Unable to execute PUT URL (" + url + ") Exception Message: (" + IOUtils.toString(isStream,StandardCharsets.UTF_8.toString()) + ")");
}
return_=IOUtils.toString(isStream,StandardCharsets.UTF_8.toString());
logger.debug("PUT URL API: {} with JSONBody {} returns: {}", url, jsonBody, return_);
}
catch(Exception e)
{
throw new ESHttpException("Caught an exception during execution of URL (" + url + ")Exception Message: (" + e + ")");
}
finally{
if (isStream != null)
isStream.close();
}
return return_;
}
public static String runHttpPostCommand(String url,String jsonBody) throws IOException
{
String return_;
DefaultHttpClient client = new DefaultHttpClient();
InputStream isStream = null;
try {
HttpParams httpParameters = new BasicHttpParams();
int timeoutConnection = 1000;
int timeoutSocket = 1000;
HttpConnectionParams.setConnectionTimeout(httpParameters, timeoutConnection);
HttpConnectionParams.setSoTimeout(httpParameters, timeoutSocket);
client.setParams(httpParameters);
HttpPost postRequest = new HttpPost(url);
if(StringUtils.isNotEmpty(jsonBody))
postRequest.setEntity(new StringEntity(jsonBody, StandardCharsets.UTF_8));
postRequest.setHeader("Content-type", "application/json");
HttpResponse resp = client.execute(postRequest);
if (resp == null || resp.getEntity() == null) {
throw new ESHttpException("Unable to execute POST URL (" + url + ") Exception Message: < Null Response or Null HttpEntity >");
}
isStream = resp.getEntity().getContent();
if (resp.getStatusLine().getStatusCode() != 200) {
throw new ESHttpException("Unable to execute POST URL (" + url + ") Exception Message: (" + IOUtils.toString(isStream,StandardCharsets.UTF_8.toString()) + ")");
}
return_=IOUtils.toString(isStream,StandardCharsets.UTF_8.toString());
logger.debug("POST URL API: {} with JSONBody {} returns: {}", url, jsonBody, return_);
}
catch(Exception e)
{
throw new ESHttpException("Caught an exception during execution of URL (" + url + ")Exception Message: (" + e + ")");
}
finally{
if (isStream != null)
isStream.close();
}
return return_;
}
/**
* delete all the files/dirs in the given Directory but dont delete the dir
* itself.
*/
public static void cleanupDir(String dirPath, List<String> childdirs) throws IOException
{
if (childdirs == null || childdirs.size() == 0)
FileUtils.cleanDirectory(new File(dirPath));
else
{
for (String cdir : childdirs)
FileUtils.cleanDirectory(new File(dirPath + "/" + cdir));
}
}
public static void createDirs(String location)
{
File dirFile = new File(location);
if (dirFile.exists() && dirFile.isFile())
{
dirFile.delete();
dirFile.mkdirs();
}
else if (!dirFile.exists())
dirFile.mkdirs();
}
public static byte[] md5(byte[] buf)
{
try
{
MessageDigest mdigest = MessageDigest.getInstance("MD5");
mdigest.update(buf, 0, buf.length);
return mdigest.digest();
}
catch (Exception e)
{
throw new RuntimeException(e);
}
}
/**
* Get a Md5 string which is similar to OS Md5sum
*/
public static String md5(File file)
{
try
{
HashCode hc = Files.hash(file, Hashing.md5());
return toHex(hc.asBytes());
}
catch (Exception e)
{
throw new RuntimeException(e);
}
}
public static String toHex(byte[] digest)
{
StringBuffer sb = new StringBuffer(digest.length * 2);
for (int i = 0; i < digest.length; i++)
{
String hex = Integer.toHexString(digest[i]);
if (hex.length() == 1)
{
sb.append("0");
}
else if (hex.length() == 8)
{
hex = hex.substring(6);
}
sb.append(hex);
}
return sb.toString().toLowerCase();
}
public static String toBase64(byte[] md5)
{
byte encoded[] = Base64.encodeBase64(md5, false);
return new String(encoded);
}
public static String formatDate(DateTime dateTime,String dateFormat)
{
DateTimeFormatter fmt = DateTimeFormat.forPattern(dateFormat);
return dateTime.toString(fmt);
}
public static String[] getSecurityGroupIds(String MAC_ID)
{
String sec_group_ids = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/network/interfaces/macs/" + MAC_ID + "/security-group-ids/").trim();
if (sec_group_ids.isEmpty())
throw new RuntimeException("Security Group IDs are Null or Empty, Something is Wrong, hence failing !!");
return sec_group_ids.split("\n");
}
}
|
// Auto-generated Doxygen navigation-tree data for the
// armnn::LayerTypeOfImpl<LayerType::Transpose> specialization page.
// Do not edit by hand; regenerate with Doxygen instead.
var structarmnn_1_1_layer_type_of_impl_3_01_layer_type_1_1_transpose_01_4 =
[
    [ "Type", "structarmnn_1_1_layer_type_of_impl_3_01_layer_type_1_1_transpose_01_4.xhtml#a00cf7c4d734f497092d8084da1c7d727", null ]
];
package com.mrh0.createaddition.blocks.base;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.server.level.ServerLevel;
import net.minecraft.world.Containers;
import net.minecraft.world.inventory.AbstractContainerMenu;
import net.minecraft.world.item.context.BlockPlaceContext;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.block.AbstractFurnaceBlock;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.HorizontalDirectionalBlock;
import net.minecraft.world.level.block.Mirror;
import net.minecraft.world.level.block.RenderShape;
import net.minecraft.world.level.block.Rotation;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.state.BlockBehaviour;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.level.block.state.StateDefinition;
import net.minecraft.world.level.block.state.properties.BooleanProperty;
import net.minecraft.world.level.block.state.properties.DirectionProperty;
public abstract class AbstractBurnerBlock extends Block {
public static final DirectionProperty FACING = HorizontalDirectionalBlock.FACING;
public static final BooleanProperty LIT = AbstractFurnaceBlock.LIT;
protected AbstractBurnerBlock(BlockBehaviour.Properties p_48687_) {
super(p_48687_);
this.registerDefaultState(
this.stateDefinition.any().setValue(FACING, Direction.NORTH).setValue(LIT, Boolean.valueOf(false)));
}
public BlockState getStateForPlacement(BlockPlaceContext p_48689_) {
return this.defaultBlockState().setValue(FACING, p_48689_.getHorizontalDirection().getOpposite());
}
public void onRemove(BlockState state, Level level, BlockPos pos, BlockState p_48716_,
boolean p_48717_) {
if (!state.is(p_48716_.getBlock())) {
BlockEntity blockentity = level.getBlockEntity(pos);
if (blockentity instanceof AbstractBurnerBlockEntity) {
if (level instanceof ServerLevel) {
Containers.dropContents(level, pos, (AbstractBurnerBlockEntity) blockentity);
}
level.updateNeighbourForOutputSignal(pos, this);
}
super.onRemove(state, level, pos, p_48716_, p_48717_);
}
}
public boolean hasAnalogOutputSignal(BlockState p_48700_) {
return true;
}
public int getAnalogOutputSignal(BlockState p_48702_, Level p_48703_, BlockPos p_48704_) {
return AbstractContainerMenu.getRedstoneSignalFromBlockEntity(p_48703_.getBlockEntity(p_48704_));
}
public RenderShape getRenderShape(BlockState p_48727_) {
return RenderShape.MODEL;
}
public BlockState rotate(BlockState p_48722_, Rotation p_48723_) {
return p_48722_.setValue(FACING, p_48723_.rotate(p_48722_.getValue(FACING)));
}
public BlockState mirror(BlockState state, Mirror p_48720_) {
return state.rotate(p_48720_.getRotation(state.getValue(FACING)));
}
protected void createBlockStateDefinition(StateDefinition.Builder<Block, BlockState> p_48725_) {
p_48725_.add(FACING, LIT);
}
} |
#!/bin/sh
# Bootstrap a throw-away local MogileFS installation (used for client tests):
# install the Perl server and utilities, create the tracker database, start
# the storage and tracker daemons, register one host/device/domain/class,
# verify the setup, then stop the daemons again.

# -f forces install even if tests fail (the server suite is flaky in CI).
cpanm -f MogileFS::Server
cpanm MogileFS::Utils
cpanm DBD::mysql

# Create the tracker database and its schema.
mysql -u root -e 'create database if not exists mogilefs;'
mogdbsetup --yes --dbname=mogilefs --dbuser=root

# Start the storage node and the tracker in the background.
mogstored -c ./mogilefs/mogstored.conf &
mogilefsd -c ./mogilefs/mogilefsd.conf &

# Register one host, one device, and a test domain/class with the tracker.
mogadm --trackers=127.0.0.1:7001 host add localhost --ip=127.0.0.1 --port=7500 --status=alive
mogadm --trackers=127.0.0.1:7001 device add localhost 1
mogadm --trackers=127.0.0.1:7001 domain add go-mogilefs-client
mogadm --trackers=127.0.0.1:7001 class add go-mogilefs-client test --mindevcount=1

# Backing directory for device 1 of the storage node.
mkdir -p mogilefs/var/mogdata/dev1
mogadm check

# Tear the daemons down again.
pkill -f mogilefsd
pkill -f mogstored
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.analysis.solvers;
import org.apache.commons.math3.exception.NoBracketingException;
import org.apache.commons.math3.exception.TooManyEvaluationsException;
import org.apache.commons.math3.exception.NumberIsTooLargeException;
import org.apache.commons.math3.util.FastMath;
import org.apache.commons.math3.util.Precision;
/**
* This class implements the <a href="http://mathworld.wolfram.com/BrentsMethod.html">
* Brent algorithm</a> for finding zeros of real univariate functions.
* The function should be continuous but not necessarily smooth.
* The {@code solve} method returns a zero {@code x} of the function {@code f}
* in the given interval {@code [a, b]} to within a tolerance
* {@code 6 eps abs(x) + t} where {@code eps} is the relative accuracy and
* {@code t} is the absolute accuracy.
* The given interval must bracket the root.
*
* @version $Id: BrentSolver.java 1379560 2012-08-31 19:40:30Z erans $
*/
public class BrentSolver extends AbstractUnivariateSolver {

    /** Default absolute accuracy. */
    private static final double DEFAULT_ABSOLUTE_ACCURACY = 1e-6;

    /**
     * Construct a solver with default accuracy (1e-6).
     */
    public BrentSolver() {
        this(DEFAULT_ABSOLUTE_ACCURACY);
    }
    /**
     * Construct a solver.
     *
     * @param absoluteAccuracy Absolute accuracy.
     */
    public BrentSolver(double absoluteAccuracy) {
        super(absoluteAccuracy);
    }
    /**
     * Construct a solver.
     *
     * @param relativeAccuracy Relative accuracy.
     * @param absoluteAccuracy Absolute accuracy.
     */
    public BrentSolver(double relativeAccuracy,
                       double absoluteAccuracy) {
        super(relativeAccuracy, absoluteAccuracy);
    }
    /**
     * Construct a solver.
     *
     * @param relativeAccuracy Relative accuracy.
     * @param absoluteAccuracy Absolute accuracy.
     * @param functionValueAccuracy Function value accuracy.
     */
    public BrentSolver(double relativeAccuracy,
                       double absoluteAccuracy,
                       double functionValueAccuracy) {
        super(relativeAccuracy, absoluteAccuracy, functionValueAccuracy);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected double doSolve()
        throws NoBracketingException,
               TooManyEvaluationsException,
               NumberIsTooLargeException {
        double min = getMin();
        double max = getMax();
        final double initial = getStartValue();
        final double functionValueAccuracy = getFunctionValueAccuracy();
        verifySequence(min, initial, max);
        // Return the initial guess if it is good enough.
        double yInitial = computeObjectiveValue(initial);
        if (FastMath.abs(yInitial) <= functionValueAccuracy) {
            return initial;
        }
        // Return the first endpoint if it is good enough.
        double yMin = computeObjectiveValue(min);
        if (FastMath.abs(yMin) <= functionValueAccuracy) {
            return min;
        }
        // Reduce interval if min and initial bracket the root.
        if (yInitial * yMin < 0) {
            return brent(min, initial, yMin, yInitial);
        }
        // Return the second endpoint if it is good enough.
        double yMax = computeObjectiveValue(max);
        if (FastMath.abs(yMax) <= functionValueAccuracy) {
            return max;
        }
        // Reduce interval if initial and max bracket the root.
        if (yInitial * yMax < 0) {
            return brent(initial, max, yInitial, yMax);
        }
        // Neither sub-interval shows a sign change: the root is not bracketed.
        throw new NoBracketingException(min, max, yMin, yMax);
    }

    /**
     * Search for a zero inside the provided interval.
     * This implementation is based on the algorithm described at page 58 of
     * the book
     * <quote>
     *  <b>Algorithms for Minimization Without Derivatives</b>
     *  <it>Richard P. Brent</it>
     *  Dover 0-486-41998-3
     * </quote>
     *
     * @param lo Lower bound of the search interval.
     * @param hi Higher bound of the search interval.
     * @param fLo Function value at the lower bound of the search interval.
     * @param fHi Function value at the higher bound of the search interval.
     * @return the value where the function is zero.
     */
    private double brent(double lo, double hi,
                         double fLo, double fHi) {
        // Invariant: b is the current best approximation, a the previous
        // iterate, and c an older point chosen so that [b, c] brackets the root.
        double a = lo;
        double fa = fLo;
        double b = hi;
        double fb = fHi;
        double c = a;
        double fc = fa;
        double d = b - a;
        double e = d;
        final double t = getAbsoluteAccuracy();
        final double eps = getRelativeAccuracy();
        while (true) {
            // Keep |f(b)| <= |f(c)| by swapping roles when needed.
            if (FastMath.abs(fc) < FastMath.abs(fb)) {
                a = b;
                b = c;
                c = a;
                fa = fb;
                fb = fc;
                fc = fa;
            }
            final double tol = 2 * eps * FastMath.abs(b) + t;
            final double m = 0.5 * (c - b);
            // Converged: interval small enough, or exact zero found.
            if (FastMath.abs(m) <= tol ||
                Precision.equals(fb, 0))  {
                return b;
            }
            if (FastMath.abs(e) < tol ||
                FastMath.abs(fa) <= FastMath.abs(fb)) {
                // Force bisection.
                d = m;
                e = d;
            } else {
                double s = fb / fa;
                double p;
                double q;
                // The equality test (a == c) is intentional,
                // it is part of the original Brent's method and
                // it should NOT be replaced by proximity test.
                if (a == c) {
                    // Linear interpolation.
                    p = 2 * m * s;
                    q = 1 - s;
                } else {
                    // Inverse quadratic interpolation.
                    q = fa / fc;
                    final double r = fb / fc;
                    p = s * (2 * m * q * (q - r) - (b - a) * (r - 1));
                    q = (q - 1) * (r - 1) * (s - 1);
                }
                if (p > 0) {
                    q = -q;
                } else {
                    p = -p;
                }
                s = e;
                e = d;
                if (p >= 1.5 * m * q - FastMath.abs(tol * q) ||
                    p >= FastMath.abs(0.5 * s * q)) {
                    // Inverse quadratic interpolation gives a value
                    // in the wrong direction, or progress is slow.
                    // Fall back to bisection.
                    d = m;
                    e = d;
                } else {
                    d = p / q;
                }
            }
            a = b;
            fa = fb;
            // Take the step, but never move by less than the tolerance.
            if (FastMath.abs(d) > tol) {
                b += d;
            } else if (m > 0) {
                b += tol;
            } else {
                b -= tol;
            }
            fb = computeObjectiveValue(b);
            // If b and c are now on the same side of the root, reset the
            // bracket using the previous iterate a.
            if ((fb > 0 && fc > 0) ||
                (fb <= 0 && fc <= 0)) {
                c = a;
                fc = fa;
                d = b - a;
                e = d;
            }
        }
    }
}
|
# frozen_string_literal: true
module Demux
  # Demux::Demuxer is the heart of pairing signals to apps.
  # It's a base implementation of what needs to happen, but apps can
  # provide their own custom demuxer that calls out to this one.
  #
  # For example a host application will likely want to process signals in a
  # background queue and can supply their own demuxer with details on how that
  # should happen.
  class Demuxer
    # Return the arguments that will be needed to re-initialize a custom
    # demuxer. These might be used to pass to a job in which the custom demuxer
    # would call resolve_now
    #
    # @return [Hash] hash representing the signal_attributes for initializing
    #   the demuxer
    attr_reader :demuxer_arguments

    # @param args [Hash] keyword arguments describing the signal; must be
    #   understood by SignalAttributes (account_id, account_type, signal_class, ...)
    def initialize(**args)
      @demuxer_arguments = args
      @signal_attributes = SignalAttributes.new(**@demuxer_arguments)
      @account_id = @signal_attributes.account_id
      @account_type = @signal_attributes.account_type
      @signal_class = @signal_attributes.signal_class
    end

    # Called by signal to resolve transmissions.
    #
    # If you are implementing a custom demuxer, you can override this method
    # to provide your own implementation as long as you ultimately call
    # `#resolve_now` in your new implementation.
    #
    # @return [self]
    def resolve
      resolve_now

      self
    end

    # Called by the implementation of #resolve to immediately resolve
    # transmissions from a signal.
    #
    # @return [self]
    def resolve_now
      queue_transmissions

      queued_transmissions.each do |transmission|
        transmit(transmission)
      end

      self
    end

    # Called in `#resolve_now` when a resolved transmission is ready to be
    # transmitted. You can override this in a custom demuxer as long as you
    # ultimately call `#transmit` on the transmission.
    #
    # @return [self]
    def transmit(transmission)
      transmission.transmit

      self
    end

    private

    # Queued Transmission records for the listening apps that match this
    # signal's uniqueness hash.
    def queued_transmissions
      Transmission
        .queued
        .for_app(listening_apps)
        .where(uniqueness_hash: @signal_attributes.hashed)
    end

    # Ask every listening app to enqueue a transmission for this signal.
    def queue_transmissions
      listening_apps.transmission_requested_all(@signal_attributes)

      self
    end

    # Apps subscribed to this signal name for the signal's account.
    def listening_apps
      Demux::App.listening_for(
        signal_name: @signal_class.constantize.signal_name,
        account_id: @account_id,
        account_type: @account_type
      )
    end
  end
end
|
export class Quote {
public showDetails:boolean;
constructor (public name:string, public quote:string, public author:string, public upVote: number, public downVote: number, public postingDate:Date,){
this.showDetails;false;
}
} |
sunway_run_GFS_27km_ly.sh
|
'use babel';
'use strict';

// Specs for the learn-ide-material-ui theme's font-size setting: changing the
// config value should rescale the whole UI via the :root element's font-size.
//
// NOTE(review): arrow functions are used here, so `this` inside beforeEach is
// the module-level context rather than jasmine's per-spec context. It works
// because every arrow shares the same lexical `this`, but confirm before
// adding specs that rely on jasmine's own `this` handling.
describe('AMU Font Options', () => {
  beforeEach(() => {
    this.workspace = atom.views.getView(atom.workspace);
    jasmine.attachToDOM(this.workspace);
    // Activate the theme package before each spec so its config observers run.
    waitsForPromise('Theme Activation', () => {
      return atom.packages.activatePackage('learn-ide-material-ui');
    });
  });

  it('should be able to scale UI via font-size', () => {
    atom.config.set('learn-ide-material-ui.fonts.fontSize', '18');
    expect(document.querySelector(':root').style.fontSize).toBe('18px');
    atom.config.set('learn-ide-material-ui.fonts.fontSize', '16');
    expect(document.querySelector(':root').style.fontSize).toBe('16px');
  });
});
|
// YourBrainEatsYou/adventofcode2021
// Barrel file re-exporting the daily challenge modules under their own names.
import Challenge19 from './challenge-19';
import Challenge20 from './challenge-20';

export {
  Challenge19,
  Challenge20,
};
|
//
// LENTurnTablePreviewViewController.h
// TurnTable
//
// Created by 林南水 on 2019/7/18.
// Copyright © 2019 ledon. All rights reserved.
//
#import <UIKit/UIKit.h>

// Forward-declare the model class: it was referenced below without any
// import or declaration, which fails to compile wherever the header is
// included without LENTurnTableModel already in scope. Implementation files
// must #import the full model header.
@class LENTurnTableModel;

NS_ASSUME_NONNULL_BEGIN

/// Preview screen for a turn-table configuration.
@interface LENTurnTablePreviewViewController : UIViewController

/// The turn-table model being previewed.
@property (nonatomic, strong) LENTurnTableModel *model;
/// Container view hosting the preview content (from the storyboard/xib).
@property (weak, nonatomic) IBOutlet UIView *contentView;

@end

NS_ASSUME_NONNULL_END
|
#!/bin/bash
# Leader-follower swarm simulation bootstrap: ROS Kinetic + PX4 SITL + Gazebo.
# Spawns $num_uavs vehicles, arms them, starts the follow controllers, records
# odometry and inter-robot distance, then reports the average measure.
# (The duplicated shebang on the original second line has been removed.)
echo "++++++++INIT++++++++++"
source /simulation/leader-follower/inputs/parameters/swarm.sh
source /opt/ros/kinetic/setup.bash
source ~/catkin_ws/devel/setup.bash

## Previous clean-up
rm -rf /root/src/Firmware/Tools/sitl_gazebo/models/f450-tmp-*
rm -f /root/src/Firmware/posix-configs/SITL/init/lpe/f450-tmp-*
rm -f /root/src/Firmware/launch/posix_sitl_multi_tmp.launch

## world setup #
cp /simulation/leader-follower/inputs/world/empty.world /root/src/Firmware/Tools/sitl_gazebo/worlds/empty.world
cp /simulation/leader-follower/inputs/models/f450-1/f450-1.sdf /root/src/Firmware/Tools/sitl_gazebo/models/f450-1/f450-1.sdf
cp /simulation/leader-follower/inputs/setup/posix_sitl_openuav_swarm_base.launch /root/src/Firmware/launch/posix_sitl_openuav_swarm_base.launch

# -p: do not error when the outputs directory already exists on a re-run.
mkdir -p /simulation/leader-follower/outputs
rm -f /simulation/leader-follower/outputs/*.csv
rm -f /simulation/leader-follower/outputs/*.txt

echo "Setup..." #>> /tmp/debug
# Generate per-UAV launch files for gazebo spawning, PX4 SITL and mavros.
python /simulation/leader-follower/inputs/setup/gen_gazebo_ros_spawn.py $num_uavs
python /simulation/leader-follower/inputs/setup/gen_px4_sitl.py $num_uavs
python /simulation/leader-follower/inputs/setup/gen_mavros.py $num_uavs

# Launch each UAV: spawn the model, wait until gazebo reports it, start the
# PX4 SITL instance, then mavros, and wait for the FCU connection.
for((i=1;i<=$num_uavs;i+=1))
do
    echo "px4 posix_sitl_multi_gazebo_ros$num_uavs.launch"
    echo "launching uav$i ..." >> /tmp/debug
    roslaunch px4 posix_sitl_multi_gazebo_ros$i.launch &> /dev/null &
    until rostopic echo /gazebo/model_states | grep -m1 f450-tmp-$i ; do : ; done
    roslaunch px4 posix_sitl_multi_px4_sitl$i.launch &> /dev/null &
    sleep 2
    roslaunch px4 posix_sitl_multi_mavros$i.launch &> /dev/null &
    until rostopic echo /mavros$i/state | grep -m1 "connected: True" ; do : ; done
    echo "launched uav$i ..." >> /tmp/debug
done

# NOTE: measureInterRobotDistance.py was previously started twice (here and
# again under "Measures..."); it is now launched only once, below.
rosrun web_video_server web_video_server _port:=80 _server_threads:=100 &> /dev/null &
roslaunch rosbridge_server rosbridge_websocket.launch ssl:=false &> /dev/null &

# Leader flies a loop; all vehicles are armed.
python /simulation/leader-follower/inputs/controllers/test_1_Loop.py $LOOP_EDGE $ALTITUDE 1 0 &> /dev/null &
python /simulation/leader-follower/inputs/setup/testArmAll.py $num_uavs &> /dev/null &
sleep 3

# Each UAV i+1 follows UAV i.
for((i=1;i<$num_uavs;i+=1))
do
    one=1
    python /simulation/leader-follower/inputs/controllers/test_3_Follow.py $(( i + one)) $i $FOLLOW_D_GAIN &> /dev/null &
    sleep 1
done

roslaunch opencv_apps general_contours.launch image:=/uav_1_camera_front/image_raw debug_view:=false &> /dev/null &

echo "Measures..."
python /simulation/leader-follower/inputs/measures/measureInterRobotDistance.py $num_uavs 1 &> /dev/null &

# Record odometry for every UAV plus the distance measure as CSV.
for((i=1;i<=$num_uavs;i+=1))
do
    /usr/bin/python -u /opt/ros/kinetic/bin/rostopic echo -p /mavros$i/local_position/odom > /simulation/leader-follower/outputs/uav$i.csv &
done
/usr/bin/python -u /opt/ros/kinetic/bin/rostopic echo -p /measure > /simulation/leader-follower/outputs/measure.csv &

# Let the simulation run, then compute the mean of the recorded measure.
sleep $duration_seconds
cat /simulation/leader-follower/outputs/measure.csv | awk -F',' '{sum+=$2; ++n} END { print sum/n }' > /simulation/leader-follower/outputs/average_measure.txt
|
import { readFile } from 'fs';
import { log, warn, error } from '../log';
import { isNull, isArray } from '../util';
import { has } from '../util/objects';
// Registry of loaded systems, keyed by system code.
let systems = {};
// Fallback systems consulted by System#getUnit when a unit is missing
// from the current system (set by loadSystemData).
let commonSystem = null;
let premiumSystem = null;
// Pending file-load promises; ready() waits for all of them.
let promises = [];
// A rule system: a named collection of units addressable by id, with unit
// lookup falling back to the shared "common" and "premium" systems.
export class System {
  // system: parsed lib-<code>.json data ({ code, name, units: [{id, ...}] }).
  constructor(system) {
    this.code = system.code;
    this.name = system.name;
    this.units = {};
    // Index units by id for O(1) lookup in getUnit().
    system.units.forEach(unit => {
      this.units[unit.id] = unit;
    });
  }

  // Look up a unit by id in this system, then in the common and premium
  // fallback systems. Returns null (after logging) when not found anywhere.
  getUnit(code) {
    if (has(this.units, code)) {
      return this.units[code];
    }
    if (commonSystem !== null && has(commonSystem.units, code)) {
      return commonSystem.units[code];
    }
    if (premiumSystem !== null && has(premiumSystem.units, code)) {
      return premiumSystem.units[code];
    }
    warn("System", "Unknown unit:", code);
    return null;
  }

  // Resolve a list of unit ids, silently dropping unknown ones.
  getUnits(codes) {
    return codes.flatMap(code => {
      let unit = this.getUnit(code);
      return isNull(unit) ? [] : [unit];
    });
  }

  // Expand the given units with their transitive `require` dependencies.
  // A requirement may be conditional on another unit (`with`). Iterates to a
  // fixed point, capped at 10 rounds to guard against cycles.
  inferUnits(units) {
    // infer required units (to a finite depth)
    let more = true;
    for (let i = 0; more && i < 10; i++) {
      more = false;
      // log("System", "Checking for required units");
      let moreunits = [];
      let unitIds = units.map(unit => unit.id);
      // log("Character", "Unit IDs:", unitIds);
      units.forEach(unit => {
        if (has(unit, "require") && !isArray(unit.require)) {
          error("System", `Require not an array in ${unit.id}`, unit.require);
        }
        if (has(unit, "require")) {
          unit.require.forEach(req => {
            // log("System", `Unit ${unit.id} requires`, req);
            // check if the new unit is really new
            if (unitIds.includes(req.unit))
              return;
            // check if the new unit has dependencies on other units
            if (has(req, "with")) {
              if (!unitIds.includes(req.with))
                return;
            }
            const newunit = this.getUnit(req.unit);
            if (!isNull(newunit)) {
              moreunits.push(newunit);
              more = true; // let's do this again
            }
          });
        }
      });
      units = units.concat(moreunits);
    }
    // Deduplicate before returning.
    units = [...new Set(units)];
    return units;
  }
}
// Asynchronously load system definition files (lib-<code>.json) for the given
// codes into the module registry. Each load contributes a promise awaited by
// ready(); load failures are logged and skipped rather than fatal.
export function loadSystemData(codes) {
  codes.forEach(code => {
    const systemFile = __dirname + "/lib-" + code + ".json";
    // log("System", `Loading: ${systemFile}`);
    const promise = new Promise((resolve) => {
      readFile(systemFile, 'utf-8', (err, data) => {
        if (err) {
          error("System", `Error loading system file ${systemFile}:`, err);
          resolve();
          return;
        }
        // Guard the parse: a malformed file previously threw inside this
        // callback, leaving the promise pending forever and stalling ready().
        try {
          const systemData = JSON.parse(data);
          const system = new System(systemData);
          log("System", `Loaded ${system.name} (${systemData.units.length} units)`);
          systems[code] = system;
          if (code === "common") {
            commonSystem = system;
          } else if (code === "premium") {
            premiumSystem = system;
          }
        } catch (parseErr) {
          error("System", `Invalid system file ${systemFile}:`, parseErr);
        }
        resolve();
      });
    });
    promises.push(promise);
  });
}
// Invoke `callback` (with no arguments) once every pending system file load
// started by loadSystemData has settled.
export function ready(callback) {
  const allLoaded = Promise.all(promises);
  allLoaded.then(() => {
    callback();
  });
}
// Look up a previously loaded system by code; undefined if it was never
// loaded (callers should have awaited ready() first).
export function getSystem(code) {
  // log("System", `Get system: ${code}`);
  // log("System", "Systems", systems);
  return systems[code];
}
|
#include <iostream>
#include <string>
#include <algorithm>
using namespace std;
// Return a copy of `str` with every character upper-cased (C locale).
// The conversion goes through unsigned char: calling std::toupper with a
// plain (possibly negative) char is undefined behavior for bytes >= 0x80.
std::string toUpperCase(std::string str) {
    std::transform(str.begin(), str.end(), str.begin(),
                   [](unsigned char c) { return static_cast<char>(std::toupper(c)); });
    return str;
}
int main() {
string inputString = "Hello, World!";
string outputString = toUpperCase(inputString);
cout << outputString;
return 0;
} |
require 'spec_helper'
# Specs for Travis::Settings::Model: attribute definition, defaults, coercion,
# validation, serialization and encrypted-attribute handling.
describe Travis::Settings::Model do
  attr_reader :model_class

  before do
    # Anonymous model with one attribute of each interesting flavor.
    @model_class = Class.new(described_class) do
      attribute :name
      attribute :loves_travis, :Boolean
      attribute :height, Integer
      attribute :awesome, :Boolean, default: true
      attribute :secret, Travis::Settings::EncryptedValue
    end
  end

  it 'returns a default if it is set' do
    model_class.new.awesome.should be_true
  end

  it 'allows to override the default' do
    model_class.new(awesome: false).awesome.should be_false
  end

  it 'validates encrypted attributes properly' do
    model_class = Class.new(described_class) do
      attribute :secret, Travis::Settings::EncryptedValue
      validates :secret, presence: true
    end

    model = model_class.new
    model.should_not be_valid
    model.errors[:secret].should == [:blank]
  end

  it 'implements read_attribute_for_serialization method' do
    model = model_class.new(name: 'foo')
    model.read_attribute_for_serialization(:name).should == 'foo'
  end

  it 'does not coerce nil' do
    model = model_class.new(name: nil)
    model.name.should be_nil
  end

  it 'can be loaded from json' do
    encrypted = Travis::Model::EncryptedColumn.new(use_prefix: false).dump('foo')
    model = model_class.load(secret: encrypted)
    model.secret.decrypt.should == 'foo'
  end

  it 'allows to update attributes' do
    model = model_class.new
    model.update(name: 'Piotr', loves_travis: true, height: 178)
    model.name.should == 'Piotr'
    model.loves_travis.should be_true
    model.height.should == 178
  end

  it 'creates an instance with attributes' do
    model = model_class.new(name: 'Piotr', loves_travis: true, height: 178)
    model.name.should == 'Piotr'
    model.loves_travis.should be_true
    model.height.should == 178
  end

  it 'allows to overwrite values' do
    model = model_class.new(name: 'Piotr')
    model.name = 'Peter'
    model.name.should == 'Peter'
  end

  it 'coerces values by default' do
    # String inputs are cast to the declared attribute types.
    model = model_class.new(height: '178', loves_travis: 'true')
    model.height.should == 178
    model.loves_travis.should == true
  end

  it 'allows to override attribute methods' do
    model_class.class_eval do
      def name
        super.upcase
      end
    end

    model = model_class.new(name: 'piotr')
    model.name.should == 'PIOTR'
  end

  it 'handles validations' do
    model_class = Class.new(described_class) do
      attribute :name
      validates :name, presence: true

      def self.name; "Foo"; end
    end

    model = model_class.new
    model.should_not be_valid
    model.errors[:name].should == [:blank]
  end

  describe 'encryption' do
    before do
      @model_class = Class.new(described_class) do
        attribute :secret, Travis::Settings::EncryptedValue
      end
    end

    it 'returns EncryptedValue instance even for nil values' do
      model_class.new.secret.should be_a Travis::Settings::EncryptedValue
    end

    it 'automatically encrypts the data' do
      encrypted_column = Travis::Model::EncryptedColumn.new(use_prefix: false)
      model = model_class.new secret: 'foo'
      # Round-trips through the encrypted column in every representation.
      encrypted_column.load(model.secret).should == 'foo'
      model.secret.decrypt.should == 'foo'
      encrypted_column.load(model.to_hash[:secret].to_s).should == 'foo'
      encrypted_column.load(JSON.parse(model.to_json)['secret']).should == 'foo'
    end
  end
end
|
// include/triumf/nmr/nuclei.hpp
#ifndef TRIUMF_NMR_NUCLEI_HPP
#define TRIUMF_NMR_NUCLEI_HPP

// Added: std::numeric_limits was used below without this include, relying on
// transitive inclusion.
#include <limits>

#include <boost/math/constants/constants.hpp>

#include <triumf/nmr/utilities.hpp>

// TRIUMF: Canada's particle accelerator centre
namespace triumf {

// nuclear magnetic resonance (NMR)
namespace nmr {

// NMR probe nuclei and their properties
namespace nuclei {

// niobium-93
template <typename T = double> struct niobium_93 {
  // radioactive half-life (s) -- infinite: Nb-93 is stable
  static inline constexpr T half_life() {
    return std::numeric_limits<T>::infinity();
  }
  // radioactive lifetime (s) = half-life / ln(2)
  static inline constexpr T lifetime() {
    return half_life() / boost::math::constants::ln_two<T>();
  }
  // spin quantum number
  static inline constexpr T spin() { return 9.0 / 2.0; }
  // magnetic dipole moment (nm)
  static inline constexpr T magnetic_dipole_moment() { return 6.163; }
  // electric quadrupole moment (b)
  static inline constexpr T electric_quadrupole_moment() { return -0.32; }
  // gyromagnetic ratio (s^-1 T^-1)
  static inline constexpr T gyromagnetic_ratio() {
    return triumf::nmr::utilities::calculate_gamma<T>(magnetic_dipole_moment(),
                                                      spin());
  }
  // gyromagnetic ratio (MHz / T)
  static inline constexpr T gyromagnetic_ratio_in_MHz_T() {
    return gyromagnetic_ratio() / 1e6 / boost::math::constants::two_pi<T>();
  }
};

} // namespace nuclei

} // namespace nmr

} // namespace triumf

#endif // TRIUMF_NMR_NUCLEI_HPP
|
from typing import List
def final_value_of_num(assignments: List[str]) -> int:
    """Execute a sequence of Python assignment statements and return the final
    value of the variable ``num``, which starts at 0.

    Security note: ``exec`` runs arbitrary code — callers must only pass
    trusted assignment strings.

    :param assignments: statements such as ``"num = 5"`` or ``"num += 3"``.
    :return: the value of ``num`` after all statements have executed.
    """
    # Bug fix: in Python 3, exec() cannot rebind a function-local variable,
    # so the original `exec(assignment)` left the local `num` at 0 forever.
    # Run the statements in an explicit namespace and read `num` back out.
    namespace = {"num": 0}
    for assignment in assignments:
        exec(assignment, {}, namespace)
    return namespace["num"]
#ifndef INCLUDED_MODEL_MND
#define INCLUDED_MODEL_MND
// mixed normal distribution used in MCMC simulation
// number of normal distributions
#define NUM_NORM_DISTRIBS 4
// min, max of mu
// only used in init_rand
#define MU_MIN -5
#define MU_MAX 5
// max difference of mu and mu_next
#define DMU_MAX 5
#include "../randomDistribution/normalDistribution.hpp"
// State: the parameter vector of the mixed-normal model explored by the
// MCMC sampler (component means plus intensity separators).
class State{
public:
    // Constructor (no-argument); all parameters are initialized.
    State();
    // Number of free parameters in the state (index 0..num_params-1):
    // NND means plus NND-1 interior intensity separators.
    const int num_params=NND*2-1;
    // Serialize the state as a one-line text row (column order matches
    // StateHeader()).
    char* print();
    // Parse parameters from a one-line text row in the format written by
    // print(); returns 1 on success, a negative error code otherwise.
    int load(char* dataRow);
    // Write into *s_next a copy of this state with the parameter at `index`
    // perturbed according to the step width `sigma` (the update does not
    // always depend on sigma).
    void next(int index, double sigma, State* s_next);
    // Randomly initialize all parameters (mu drawn within [MU_MIN, MU_MAX],
    // see the macros above).
    void init_rand();
//---- added variables ----//
public:
    // Number of component normal distributions.
    static const int NND=NUM_NORM_DISTRIBS;
    // Means of the NND components.
    double mu[NND];
    // a[NND+1]: intensity separators with
    //   a[0]=0 < a[1] < ... < a[NND-1] < a[NND]=1.
    // a[0] and a[NND] are constants, not parameters.
    // Mixing weight (intensity) of component i = a[i+1]-a[i].
    double a[NND+1];
    // Mixture probability density evaluated at x.
    double probability(double x);
private:
    // scanf-style format for loading one parameter
    // (mu[0], ..., mu[NND-1], a[1], ..., a[NND-1]).
    const char* format_l="%lf";
    // Iterative format used by print().
    const char* format_iter="%s%16.8e ";
    // Iterative format used by load().
    const char* format_l_iter="%%*lf%s";
    // Normal-distribution helper (declared in normalDistribution.hpp).
    ND* nd;
};
// Returns the column-header text written as the first row of the output;
// column order matches the fields produced by State::print().
char* StateHeader();
// Hamiltonian: evaluates the energy of a State against the sampled
// histogram loaded from file; used by the MCMC / replica-exchange driver.
class Hamiltonian{
public:
    // Constructor (adequately modify MCMC_main.cpp according to the arguments).
    // sample_file: data file of samples (histogram)
    // Nbin: number of bins in the file
    Hamiltonian(char* sample_file, int Nbin);
    // Energy H(s) of the state s; used in the replica-exchange (RX) process.
    double energy(State* s);
    // beta*(H(s) - (1/beta) log(psi(s))) = beta*H(s) - log(psi(s)).
    // If psi(s) is a uniform prior distribution, the log(psi(s)) term can
    // be neglected.  Used in the MC process.
    double betaH(State* s, double beta);
//---- added variables ----//
    // Nbin: number of histogram bins.
    int Nbin;
    // Xvalue[Nbin]: x value of each bin.
    double* Xvalue;
    // Count[Nbin]: height (count) of each bin.
    int* Count;
};
#endif
|
from typing import Tuple
def finalPosition(commands: str) -> Tuple[int, int]:
    """Walk a 2D grid from the origin following single-character commands.

    Args:
        commands: A string of moves — 'U', 'D', 'L', 'R'.  Any other
            character is silently ignored.

    Returns:
        The final ``(x, y)`` position after applying every command.
    """
    # Dispatch table mapping each command to its (dx, dy) displacement.
    deltas = {'U': (0, 1), 'D': (0, -1), 'L': (-1, 0), 'R': (1, 0)}
    x = y = 0
    for command in commands:
        dx, dy = deltas.get(command, (0, 0))
        x += dx
        y += dy
    return (x, y)
<gh_stars>1-10
// Code generated by entc, DO NOT EDIT.
package network
const (
	// Label holds the string label denoting the network type in the database.
	Label = "network"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldIP holds the string denoting the ip field in the database.
	FieldIP = "ip"
	// FieldUseragent holds the string denoting the useragent field in the database.
	FieldUseragent = "useragent"
	// EdgeEvents holds the string denoting the events edge name in mutations.
	EdgeEvents = "events"
	// Table holds the table name of the network in the database.
	Table = "networks"
	// EventsTable is the table that holds the events relation/edge.
	EventsTable = "events"
	// EventsInverseTable is the table name for the Event entity.
	// It exists in this package in order to avoid circular dependency with the "event" package.
	EventsInverseTable = "events"
	// EventsColumn is the table column denoting the events relation/edge.
	EventsColumn = "event_network"
)
// Columns holds all SQL column names for network fields, in schema order.
// NOTE(review): file is entc-generated ("DO NOT EDIT") — regenerate rather
// than editing by hand.
var Columns = []string{
	FieldID,
	FieldIP,
	FieldUseragent,
}
|
package mezz.jei.render;
import java.awt.Color;
import java.awt.Rectangle;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import net.minecraftforge.fml.client.config.GuiUtils;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.util.ITooltipFlag;
import net.minecraft.item.ItemStack;
import net.minecraft.util.text.TextFormatting;
import com.google.common.base.Joiner;
import mezz.jei.Internal;
import mezz.jei.api.ingredients.IIngredientHelper;
import mezz.jei.api.ingredients.IIngredientRenderer;
import mezz.jei.color.ColorNamer;
import mezz.jei.config.Config;
import mezz.jei.config.Constants;
import mezz.jei.gui.TooltipRenderer;
import mezz.jei.gui.ingredients.IIngredientListElement;
import mezz.jei.startup.ForgeModIdHelper;
import mezz.jei.util.ErrorUtil;
import mezz.jei.util.Log;
import mezz.jei.util.Translator;
/**
 * Renders a single ingredient in the JEI ingredient list: the icon itself,
 * the mouse-over highlight, the edit-mode blacklist overlay, and the tooltip.
 *
 * @param <T> the ingredient type held by the wrapped list element
 */
public class IngredientRenderer<T> {
	/** Overlay color used in edit mode for config-blacklisted ingredients. */
	private static final int BLACKLIST_COLOR = Color.red.getRGB();
	/** Default 16x16 slot area, used until {@link #setArea(Rectangle)} is called. */
	private static final Rectangle DEFAULT_AREA = new Rectangle(0, 0, 16, 16);

	/** The ingredient plus its associated helper/renderer accessors. */
	protected final IIngredientListElement<T> element;
	/** Screen-space rectangle this ingredient is drawn into. */
	protected Rectangle area = DEFAULT_AREA;
	/** Pixel inset applied inside {@link #area} when rendering. */
	protected int padding;

	public IngredientRenderer(IIngredientListElement<T> element) {
		this.element = element;
	}

	public void setArea(Rectangle area) {
		this.area = area;
	}

	public void setPadding(int padding) {
		this.padding = padding;
	}

	public IIngredientListElement<T> getElement() {
		return element;
	}

	public Rectangle getArea() {
		return area;
	}

	/**
	 * Draws the ingredient (and, in edit mode, the blacklist overlay first).
	 * Render failures are wrapped with ingredient context for the crash report.
	 */
	public void renderSlow() {
		if (Config.isEditModeEnabled()) {
			renderEditMode(element, area, padding);
		}

		try {
			IIngredientRenderer<T> ingredientRenderer = element.getIngredientRenderer();
			T ingredient = element.getIngredient();
			ingredientRenderer.render(Minecraft.getMinecraft(), area.x + padding, area.y + padding, ingredient);
		} catch (RuntimeException | LinkageError e) {
			// LinkageError is caught too: broken third-party ingredient classes
			// must not crash the whole overlay.
			throw ErrorUtil.createRenderIngredientException(e, element.getIngredient());
		}
	}

	/**
	 * Matches the highlight code in {@link GuiContainer#drawScreen(int, int, float)}
	 */
	public void drawHighlight() {
		// GL state is set/restored in this exact order; do not reorder.
		GlStateManager.disableLighting();
		GlStateManager.disableDepth();
		GlStateManager.colorMask(true, true, true, false);
		GuiUtils.drawGradientRect(0, area.x, area.y, area.x + area.width, area.y + area.height, 0x80FFFFFF, 0x80FFFFFF);
		GlStateManager.colorMask(true, true, true, true);
		GlStateManager.enableDepth();
	}

	/** Draws the hover tooltip for this ingredient at the mouse position. */
	public void drawTooltip(Minecraft minecraft, int mouseX, int mouseY) {
		T ingredient = element.getIngredient();
		IIngredientRenderer<T> ingredientRenderer = element.getIngredientRenderer();
		List<String> tooltip = getTooltip(minecraft, element);
		FontRenderer fontRenderer = ingredientRenderer.getFontRenderer(minecraft, ingredient);

		IIngredientHelper<T> ingredientHelper = element.getIngredientHelper();
		ItemStack itemStack = ingredientHelper.getCheatItemStack(ingredient);
		TooltipRenderer.drawHoveringText(itemStack, minecraft, tooltip, mouseX, mouseY, fontRenderer);
	}

	/** Draws the red overlay over config-blacklisted ingredients (edit mode). */
	protected static <V> void renderEditMode(IIngredientListElement<V> element, Rectangle area, int padding) {
		V ingredient = element.getIngredient();
		IIngredientHelper<V> ingredientHelper = element.getIngredientHelper();

		if (Config.isIngredientOnConfigBlacklist(ingredient, ingredientHelper)) {
			GuiScreen.drawRect(area.x + padding, area.y + padding, area.x + 16 + padding, area.y + 16 + padding, BLACKLIST_COLOR);
			// drawRect leaves the GL color tinted; reset to white.
			GlStateManager.color(1f, 1f, 1f, 1f);
		}
	}

	/**
	 * Builds the full tooltip: base ingredient lines, mod name, optional
	 * color-search info, and optional edit-mode instructions.
	 */
	private static <V> List<String> getTooltip(Minecraft minecraft, IIngredientListElement<V> element) {
		List<String> tooltip = getIngredientTooltipSafe(minecraft, element);
		V ingredient = element.getIngredient();
		IIngredientHelper<V> ingredientHelper = element.getIngredientHelper();
		tooltip = ForgeModIdHelper.getInstance().addModNameToIngredientTooltip(tooltip, ingredient, ingredientHelper);

		// Wrap the appended info lines to the widest existing tooltip line
		// (capped below by MAX_TOOLTIP_WIDTH).
		int maxWidth = Constants.MAX_TOOLTIP_WIDTH;
		for (String tooltipLine : tooltip) {
			int width = minecraft.fontRenderer.getStringWidth(tooltipLine);
			if (width > maxWidth) {
				maxWidth = width;
			}
		}

		if (Config.getColorSearchMode() != Config.SearchMode.DISABLED) {
			addColorSearchInfoToTooltip(minecraft, element, tooltip, maxWidth);
		}

		if (Config.isEditModeEnabled()) {
			addEditModeInfoToTooltip(minecraft, tooltip, maxWidth);
		}

		return tooltip;
	}

	/**
	 * Asks the ingredient for its own tooltip; on any crash, logs the error
	 * and substitutes a localized error line instead of propagating.
	 */
	private static <V> List<String> getIngredientTooltipSafe(Minecraft minecraft, IIngredientListElement<V> element) {
		IIngredientRenderer<V> ingredientRenderer = element.getIngredientRenderer();
		V ingredient = element.getIngredient();
		try {
			ITooltipFlag.TooltipFlags tooltipFlag = minecraft.gameSettings.advancedItemTooltips ? ITooltipFlag.TooltipFlags.ADVANCED : ITooltipFlag.TooltipFlags.NORMAL;
			return ingredientRenderer.getTooltip(minecraft, ingredient, tooltipFlag);
		} catch (RuntimeException | LinkageError e) {
			Log.get().error("Tooltip crashed.", e);
		}

		List<String> tooltip = new ArrayList<>();
		tooltip.add(TextFormatting.RED + Translator.translateToLocal("jei.tooltip.error.crash"));
		return tooltip;
	}

	/** Appends the ingredient's color names (for color search) to the tooltip. */
	private static <V> void addColorSearchInfoToTooltip(Minecraft minecraft, IIngredientListElement<V> element, List<String> tooltip, int maxWidth) {
		ColorNamer colorNamer = Internal.getColorNamer();

		V ingredient = element.getIngredient();
		IIngredientHelper<V> ingredientHelper = element.getIngredientHelper();
		Iterable<Color> colors = ingredientHelper.getColors(ingredient);
		Collection<String> colorNames = colorNamer.getColorNames(colors, false);
		if (!colorNames.isEmpty()) {
			String colorNamesString = Joiner.on(", ").join(colorNames);
			String colorNamesLocalizedString = TextFormatting.GRAY + Translator.translateToLocalFormatted("jei.tooltip.item.colors", colorNamesString);
			tooltip.addAll(minecraft.fontRenderer.listFormattedStringToWidth(colorNamesLocalizedString, maxWidth));
		}
	}

	/** Appends the edit-mode hide/hide-wildcard key instructions to the tooltip. */
	private static void addEditModeInfoToTooltip(Minecraft minecraft, List<String> tooltip, int maxWidth) {
		tooltip.add("");
		tooltip.add(TextFormatting.DARK_GREEN + Translator.translateToLocal("gui.jei.editMode.description"));

		String controlKeyLocalization = Translator.translateToLocal(Minecraft.IS_RUNNING_ON_MAC ? "key.jei.ctrl.mac" : "key.jei.ctrl");

		String hideMessage = TextFormatting.GRAY + Translator.translateToLocal("gui.jei.editMode.description.hide").replace("%CTRL", controlKeyLocalization);
		tooltip.addAll(minecraft.fontRenderer.listFormattedStringToWidth(hideMessage, maxWidth));

		String hideWildMessage = TextFormatting.GRAY + Translator.translateToLocal("gui.jei.editMode.description.hide.wild").replace("%CTRL", controlKeyLocalization);
		tooltip.addAll(minecraft.fontRenderer.listFormattedStringToWidth(hideWildMessage, maxWidth));
	}
}
|
/*
* Client to access to energy counters without superuser privileges
*
* This file is part of Yeppp! library and licensed under the New BSD license.
* See LICENSE.txt for the full text of the license.
* Author: rschoene
*/
#include <arpa/inet.h>
#include <netdb.h>
#include <netinet/in.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>   /* memcpy */
#include <strings.h>  /* bzero */
#include <sys/socket.h>
#include <sys/types.h>
#include <unistd.h>

#include "yeppp_client.h"
/**
* Closes the socket and returns the given exit value.
* If closing the socket fails, return #ERROR_CODE_CLOSING_SOCKET
* @param[in] socketfd The file descriptor of the socket to close.
* @param[in] exit_value
*/
/*
 * Close the socket and return the given exit value; on close() failure the
 * return value becomes ERROR_CODE_CLOSING_SOCKET instead.
 */
int closing(int socketfd, int exit_value) {
    // close existing socket
    printf("Closing socket with fd=%d\n", socketfd);
    if (close(socketfd)) {
        /* Bug fix: perror() itself appends ": <reason>", so the message
         * must not end with ": " (the original printed a doubled colon). */
        perror("Error during closing the existing socket");
        exit_value = ERROR_CODE_CLOSING_SOCKET;
    }
    return exit_value;
}
/**
* Takes the request, sends it to the server and receive the response storing
* it in the request.
* For sending the request, only the kind_counter attribute is used.
* On failure the received request has an error_code greater than zero and an
* undetermined read_value. On success, the error_code is equal to zero and
* the read_value equals to the value of the read counter.
* @param[in/out] request The request to send. Will contain the received request as described above.
* @retval #ERROR_CODE_CONNCECTION Could not connect to the socket.
* @retval #ERROR_CODE_SEND_GENERAL Problems with sending the request.
* @retval #ERROR_CODE_SEND_BYTES Did not sent all bytes of the request.
* @retval #ERROR_CODE_RECV_SHUTDOWN The server socket has shut down the connection.
* @retval #ERROR_CODE_RECV_GENERAL Problems with receiving the response.
* @retval #ERROR_CODE_RECV_BYTES Did not receive all bytes of the response.
* @retval #EXIT_SUCCESS The response information was stored in the given request.
*/
int sendAndReceiveRequest(counter_data* request) {
    /* create the TCP socket */
    int socketfd = socket(AF_INET, SOCK_STREAM, 0);
    if(socketfd < 0) {
        perror("Error during socket init: ");
        return ERROR_CODE_INIT_SOCKET;
    }
    /* NOTE(review): SO_REUSEADDR on a connecting (client) socket is unusual;
     * a failure here is only logged, not treated as fatal. */
    int option_value = 1;
    if(setsockopt(socketfd, SOL_SOCKET, SO_REUSEADDR, &option_value, sizeof(option_value))) {
        // errors on setting option
        perror("Error during socket init (option SO_REUSEADDR): ");
    }
    /* prepare the server address (YEPPP_IP / YEPPP_PORT from the header) */
    struct sockaddr_in server;
    bzero((char *) &server, sizeof(server));
    unsigned long addr = inet_addr(YEPPP_IP);
    memcpy( (char*)&server.sin_addr, &addr, sizeof(addr));
    server.sin_family = AF_INET;
    server.sin_port = htons(YEPPP_PORT);

    // connect
    if(connect(socketfd, (struct sockaddr*)&server, sizeof(server)) == -1) {
        // Error occured
        perror("Error during connecting: ");
        return closing(socketfd, ERROR_CODE_CONNCECTION);
    }

    /* send the raw request structure
     * NOTE(review): a short send/recv is treated as a hard error instead of
     * being retried in a loop; TCP may legally split the transfer — confirm
     * sizeof(counter_data) always fits one segment, or add retry loops. */
    int n = sizeof(*request);
    int sent_bytes = send(socketfd, request, n, 0);
    if(sent_bytes == -1) {
        // Error occured
        perror("Error during sending: ");
        return closing(socketfd, ERROR_CODE_SEND_GENERAL);
    }
    else if(n - sent_bytes > 0) {
        // Did not sent all
        fprintf(stderr, "Did not sent all, just %d of %d bytes.\n", sent_bytes, n);
        return closing(socketfd, ERROR_CODE_SEND_BYTES);
    }

    /* receive the response, overwriting the request structure in place
     * (sent_bytes is reused here as the received-byte count) */
    sent_bytes = recv(socketfd, request, n, 0);
    if(sent_bytes == 0) {
        // Server has shut down connection
        fprintf(stderr, "Server has shut down connection.\n");
        return closing(socketfd, ERROR_CODE_RECV_SHUTDOWN);
    }
    else if(sent_bytes == -1) {
        // Error occured
        perror("Error during receiving: ");
        return closing(socketfd, ERROR_CODE_RECV_GENERAL);
    }
    else if(n - sent_bytes > 0) {
        fprintf(stderr, "Did not receive all, just %d of %d bytes.\n", sent_bytes, n);
        return closing(socketfd, ERROR_CODE_RECV_BYTES);
    }

    /* the server reports failures through the error_code field */
    if(request->error_code > 0) {
        fprintf(stderr, "Received error code %d.", request->error_code);
        return closing(socketfd, request->error_code);
    }

    // everything went fine
    return closing(socketfd, EXIT_SUCCESS);
}
/**
* Calls #sendAndReceiveRequest with either a value given as program argument,
* or if no such on is given, read one from stdin.
* Prints the result of the call to stdout.
*/
int main(int argc, char **argv) {
    /* NOTE(review): sendAndReceiveRequest takes counter_data*; this assumes
     * counter_data is (also) a struct tag declared in yeppp_client.h —
     * confirm, otherwise `struct counter_data` would not compile. */
    struct counter_data request;
    //TODO let ip and port be specified as arguments

    // prepare structure
    // -2=close server, 0=release, 1-8=acquire this kind of counter
    if(argc > 1) {
        // counter type given by first argument
        request.kind_counter = atoi(argv[1]);
    }
    else {
        // no argument: read the counter kind from stdin
        char buffer[256];
#if YEPPP_CLIENT_VERBOSE
        printf("Enter number (-2=close server, 0=release, 1-8=acquire this kind of counter)\n");
#endif
        bzero(buffer, 256);
        fgets(buffer, 255, stdin);
        request.kind_counter = atoi(buffer);
    }
    int retval = sendAndReceiveRequest(&request);
    printf("Retval: %d, kind_counter: %d, error_code: %d, read_value: %f\n",
            retval, request.kind_counter, request.error_code, request.read_value);
    /* NOTE(review): always exits with EXIT_SUCCESS even when retval reports
     * an error — intentional? Scripts cannot detect failures this way. */
    return EXIT_SUCCESS;
}
|
#!/bin/sh
# Apply the nav-add-ons SNMP patch set to an installed NAV tree.
# Exit immediately if a pipeline, which may consist of a single simple command,
# a list, or a compound command returns a non-zero status
set -e

# Abort with "undefined" if NAV_HOME is unset or empty.
NAV_HOME=${NAV_HOME:?undefined}
readonly NAV_VERSION=90d9a11

echo "Patching NAV $NAV_VERSION at $NAV_HOME..."
# Fix: quote every expansion so paths containing spaces or glob characters
# are not word-split or expanded by the shell.
patch "$NAV_HOME/ipdevpoll/neighbor.py" "/nav-add-ons/snmp/patches/nav/ipdevpoll/neighbor.py-$NAV_VERSION.patch"
patch "$NAV_HOME/ipdevpoll/plugins/interfaces.py" "/nav-add-ons/snmp/patches/nav/ipdevpoll/plugins/interfaces.py-$NAV_VERSION.patch"
patch "$NAV_HOME/ipdevpoll/plugins/sensors.py" "/nav-add-ons/snmp/patches/nav/ipdevpoll/plugins/sensors.py-$NAV_VERSION.patch"
patch "$NAV_HOME/ipdevpoll/plugins/statsystem.py" "/nav-add-ons/snmp/patches/nav/ipdevpoll/plugins/statsystem.py-$NAV_VERSION.patch"
patch "$NAV_HOME/ipdevpoll/shadows/__init__.py" "/nav-add-ons/snmp/patches/nav/ipdevpoll/shadows/__init__.py-$NAV_VERSION.patch"
patch "$NAV_HOME/mibs/if_mib.py" "/nav-add-ons/snmp/patches/nav/mibs/if_mib.py-$NAV_VERSION.patch"
echo ...done
|
#!/bin/bash
# Smoke-test the tic-tac-toe API: send an empty form-encoded POST to the
# root URL and print the response headers (--include) and body.
curl "http://tic-tac-toe.wdibos.com/" \
  --include \
  --request POST \
  --data-urlencode ""

echo
|
<filename>src/components/Day.js
import React, { useContext } from "react";
import { CalendarContext, sameDay } from "../context/CalendarContext";
import { contrast } from "../utils/utils";
import Task from "./Task";
function Day({ day, date }) {
const { setTask, setDate } = useContext(CalendarContext);
const getStyle = (color) => {
return { background: color, color: contrast(color) };
};
const selected = sameDay(day.date, date);
const style =
(day.date.getMonth() !== date.getMonth() ? " disabled" : "") +
(sameDay(day.date, new Date()) ? " current-day" : "") +
(selected ? " selected-day" : "");
return (
<div className={`day ${style}`} onClick={() => setDate(day.date)}>
<div className="task-day">
<h3> {day.date.getDate()} </h3>
</div>
<div className="tasks">
{day.tasks.map((task) => (
<Task key={task.id} task={task} style={getStyle(task.color)} />
))}
</div>
{selected ? (
<div
className="button button-blue add-button"
onClick={() => setTask({})}
>
+
</div>
) : null}
</div>
);
}
export default Day;
|
<gh_stars>1-10
// @flow
const util = require('../util/util');
const browser = require('../util/browser');
const window = require('../util/window');
const {HTMLImageElement} = require('../util/window');
const DOM = require('../util/dom');
const ajax = require('../util/ajax');
const Style = require('../style/style');
const AnimationLoop = require('../style/animation_loop');
const Painter = require('../render/painter');
const Transform = require('../geo/transform');
const Hash = require('./hash');
const bindHandlers = require('./bind_handlers');
const Camera = require('./camera');
const LngLat = require('../geo/lng_lat');
const LngLatBounds = require('../geo/lng_lat_bounds');
const Point = require('@mapbox/point-geometry');
const AttributionControl = require('./control/attribution_control');
const LogoControl = require('./control/logo_control');
const isSupported = require('mapbox-gl-supported');
require('./events'); // Pull in for documentation.js
import type {LngLatLike} from '../geo/lng_lat';
import type {LngLatBoundsLike} from '../geo/lng_lat_bounds';
import type {RequestParameters} from '../util/ajax';
import type {StyleOptions} from '../style/style';
import type {MapEvent, MapDataEvent} from './events';
import type {RGBAImage} from '../util/image';
import type ScrollZoomHandler from './handler/scroll_zoom';
import type BoxZoomHandler from './handler/box_zoom';
import type DragRotateHandler from './handler/drag_rotate';
import type DragPanHandler from './handler/drag_pan';
import type KeyboardHandler from './handler/keyboard';
import type DoubleClickZoomHandler from './handler/dblclick_zoom';
import type TouchZoomRotateHandler from './handler/touch_zoom_rotate';
// Valid map corners a control can be attached to.
type ControlPosition = 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right';
/* eslint-disable no-use-before-define */
// Contract implemented by anything passed to Map#addControl.
type IControl = {
    onAdd(map: Map): HTMLElement;
    onRemove(map: Map): void;

    +getDefaultPosition?: () => ControlPosition;
}
/* eslint-enable no-use-before-define */

// Resource categories a transformRequest callback may distinguish.
type ResourceTypeEnum = $Keys<typeof ajax.ResourceType>;
export type RequestTransformFunction = (url: string, resourceType?: ResourceTypeEnum) => RequestParameters;

// Options accepted by the Map constructor; defaults come from
// `defaultOptions` below, and each field is documented in the Map class doc.
type MapOptions = {
    hash?: boolean,
    interactive?: boolean,
    container: HTMLElement | string,
    bearingSnap?: number,
    classes?: Array<string>,
    attributionControl?: boolean,
    logoPosition?: ControlPosition,
    failIfMajorPerformanceCaveat?: boolean,
    preserveDrawingBuffer?: boolean,
    refreshExpiredTiles?: boolean,
    maxBounds?: LngLatBoundsLike,
    scrollZoom?: boolean,
    minZoom?: ?number,
    maxZoom?: ?number,
    boxZoom?: boolean,
    dragRotate?: boolean,
    dragPan?: boolean,
    keyboard?: boolean,
    doubleClickZoom?: boolean,
    touchZoomRotate?: boolean,
    trackResize?: boolean,
    center?: LngLatLike,
    zoom?: number,
    bearing?: number,
    pitch?: number,
    renderWorldCopies?: boolean,
    maxTileCacheSize?: number,
    transformRequest?: RequestTransformFunction
};
// Zoom limits applied when the caller does not override minZoom/maxZoom.
const defaultMinZoom = 0;
const defaultMaxZoom = 22;

// Baseline value for every MapOptions field; user options are merged on
// top of this object in the Map constructor (util.extend).
const defaultOptions = {
    center: [0, 0],
    zoom: 0,
    bearing: 0,
    pitch: 0,

    minZoom: defaultMinZoom,
    maxZoom: defaultMaxZoom,

    interactive: true,

    scrollZoom: true,
    boxZoom: true,
    dragRotate: true,
    dragPan: true,
    keyboard: true,
    doubleClickZoom: true,
    touchZoomRotate: true,

    bearingSnap: 7,

    hash: false,

    attributionControl: true,

    failIfMajorPerformanceCaveat: false,
    preserveDrawingBuffer: false,

    trackResize: true,

    renderWorldCopies: true,

    refreshExpiredTiles: true,

    maxTileCacheSize: null,

    transformRequest: null
};
/**
* The `Map` object represents the map on your page. It exposes methods
* and properties that enable you to programmatically change the map,
* and fires events as users interact with it.
*
* You create a `Map` by specifying a `container` and other options.
* Then Mapbox GL JS initializes the map on the page and returns your `Map`
* object.
*
* @extends Evented
* @param {Object} options
* @param {HTMLElement|string} options.container The HTML element in which Mapbox GL JS will render the map, or the element's string `id`. The specified element must have no children.
* @param {number} [options.minZoom=0] The minimum zoom level of the map (0-22).
* @param {number} [options.maxZoom=22] The maximum zoom level of the map (0-22).
* @param {Object|string} [options.style] The map's Mapbox style. This must be an a JSON object conforming to
* the schema described in the [Mapbox Style Specification](https://mapbox.com/mapbox-gl-style-spec/), or a URL to
* such JSON.
*
* To load a style from the Mapbox API, you can use a URL of the form `mapbox://styles/:owner/:style`,
* where `:owner` is your Mapbox account name and `:style` is the style ID. Or you can use one of the following
* [the predefined Mapbox styles](https://www.mapbox.com/maps/):
*
* * `mapbox://styles/mapbox/streets-v9`
* * `mapbox://styles/mapbox/outdoors-v9`
* * `mapbox://styles/mapbox/light-v9`
* * `mapbox://styles/mapbox/dark-v9`
* * `mapbox://styles/mapbox/satellite-v9`
* * `mapbox://styles/mapbox/satellite-streets-v9`
*
* Tilesets hosted with Mapbox can be style-optimized if you append `?optimize=true` to the end of your style URL, like `mapbox://styles/mapbox/streets-v9?optimize=true`.
* Learn more about style-optimized vector tiles in our [API documentation](https://www.mapbox.com/api-documentation/#retrieve-tiles).
*
* @param {boolean} [options.hash=false] If `true`, the map's position (zoom, center latitude, center longitude, bearing, and pitch) will be synced with the hash fragment of the page's URL.
* For example, `http://path/to/my/page.html#2.59/39.26/53.07/-24.1/60`.
* @param {boolean} [options.interactive=true] If `false`, no mouse, touch, or keyboard listeners will be attached to the map, so it will not respond to interaction.
* @param {number} [options.bearingSnap=7] The threshold, measured in degrees, that determines when the map's
* bearing (rotation) will snap to north. For example, with a `bearingSnap` of 7, if the user rotates
* the map within 7 degrees of north, the map will automatically snap to exact north.
* @param {boolean} [options.pitchWithRotate=true] If `false`, the map's pitch (tilt) control with "drag to rotate" interaction will be disabled.
* @param {boolean} [options.attributionControl=true] If `true`, an {@link AttributionControl} will be added to the map.
* @param {string} [options.logoPosition='bottom-left'] A string representing the position of the Mapbox wordmark on the map. Valid options are `top-left`,`top-right`, `bottom-left`, `bottom-right`.
* @param {boolean} [options.failIfMajorPerformanceCaveat=false] If `true`, map creation will fail if the performance of Mapbox
* GL JS would be dramatically worse than expected (i.e. a software renderer would be used).
* @param {boolean} [options.preserveDrawingBuffer=false] If `true`, the map's canvas can be exported to a PNG using `map.getCanvas().toDataURL()`. This is `false` by default as a performance optimization.
* @param {boolean} [options.refreshExpiredTiles=true] If `false`, the map won't attempt to re-request tiles once they expire per their HTTP `cacheControl`/`expires` headers.
* @param {LngLatBoundsLike} [options.maxBounds] If set, the map will be constrained to the given bounds.
* @param {boolean|Object} [options.scrollZoom=true] If `true`, the "scroll to zoom" interaction is enabled. An `Object` value is passed as options to {@link ScrollZoomHandler#enable}.
* @param {boolean} [options.boxZoom=true] If `true`, the "box zoom" interaction is enabled (see {@link BoxZoomHandler}).
* @param {boolean} [options.dragRotate=true] If `true`, the "drag to rotate" interaction is enabled (see {@link DragRotateHandler}).
* @param {boolean} [options.dragPan=true] If `true`, the "drag to pan" interaction is enabled (see {@link DragPanHandler}).
* @param {boolean} [options.keyboard=true] If `true`, keyboard shortcuts are enabled (see {@link KeyboardHandler}).
* @param {boolean} [options.doubleClickZoom=true] If `true`, the "double click to zoom" interaction is enabled (see {@link DoubleClickZoomHandler}).
* @param {boolean|Object} [options.touchZoomRotate=true] If `true`, the "pinch to rotate and zoom" interaction is enabled. An `Object` value is passed as options to {@link TouchZoomRotateHandler#enable}.
* @param {boolean} [options.trackResize=true] If `true`, the map will automatically resize when the browser window resizes.
* @param {LngLatLike} [options.center=[0, 0]] The inital geographical centerpoint of the map. If `center` is not specified in the constructor options, Mapbox GL JS will look for it in the map's style object. If it is not specified in the style, either, it will default to `[0, 0]` Note: Mapbox GL uses longitude, latitude coordinate order (as opposed to latitude, longitude) to match GeoJSON.
* @param {number} [options.zoom=0] The initial zoom level of the map. If `zoom` is not specified in the constructor options, Mapbox GL JS will look for it in the map's style object. If it is not specified in the style, either, it will default to `0`.
* @param {number} [options.bearing=0] The initial bearing (rotation) of the map, measured in degrees counter-clockwise from north. If `bearing` is not specified in the constructor options, Mapbox GL JS will look for it in the map's style object. If it is not specified in the style, either, it will default to `0`.
* @param {number} [options.pitch=0] The initial pitch (tilt) of the map, measured in degrees away from the plane of the screen (0-60). If `pitch` is not specified in the constructor options, Mapbox GL JS will look for it in the map's style object. If it is not specified in the style, either, it will default to `0`.
* @param {boolean} [options.renderWorldCopies=true] If `true`, multiple copies of the world will be rendered, when zoomed out.
* @param {number} [options.maxTileCacheSize=null] The maxiumum number of tiles stored in the tile cache for a given source. If omitted, the cache will be dynamically sized based on the current viewport.
* @param {string} [options.localIdeographFontFamily=null] If specified, defines a CSS font-family
* for locally overriding generation of glyphs in the 'CJK Unified Ideographs' and 'Hangul Syllables' ranges.
* In these ranges, font settings from the map's style will be ignored, except for font-weight keywords (light/regular/medium/bold).
* The purpose of this option is to avoid bandwidth-intensive glyph server requests. (see [Use locally generated ideographs](https://www.mapbox.com/mapbox-gl-js/example/local-ideographs))
* @param {RequestTransformFunction} [options.transformRequest=null] A callback run before the Map makes a request for an external URL. The callback can be used to modify the url, set headers, or set the credentials property for cross-origin requests.
* Expected to return an object with a `url` property and optionally `headers` and `credentials` properties.
* @example
* var map = new mapboxgl.Map({
* container: 'map',
* center: [-122.420679, 37.772537],
* zoom: 13,
* style: style_object,
* hash: true,
* transformRequest: (url, resourceType)=> {
* if(resourceType == 'Source' && url.startsWith('http://myHost') {
* return {
* url: url.replace('http', 'https'),
* headers: { 'my-custom-header': true},
* credentials: 'include' // Include cookies for cross-origin requests
* }
* }
* }
* });
* @see [Display a map](https://www.mapbox.com/mapbox-gl-js/examples/)
*/
class Map extends Camera {
style: Style;
painter: Painter;
animationLoop: AnimationLoop;
_classes: Array<string>;
_container: HTMLElement;
_missingCSSContainer: HTMLElement;
_canvasContainer: HTMLElement;
_controlContainer: HTMLElement;
_controlPositions: {[string]: HTMLElement};
_classOptions: ?{transition?: boolean};
_interactive: ?boolean;
_showTileBoundaries: ?boolean;
_showCollisionBoxes: ?boolean;
_showOverdrawInspector: boolean;
_repaint: ?boolean;
_vertices: ?boolean;
_canvas: HTMLCanvasElement;
_transformRequest: RequestTransformFunction;
_maxTileCacheSize: number;
_frameId: any;
_styleDirty: ?boolean;
_sourcesDirty: ?boolean;
_loaded: boolean;
_trackResize: boolean;
_preserveDrawingBuffer: boolean;
_failIfMajorPerformanceCaveat: boolean;
_refreshExpiredTiles: boolean;
_hash: Hash;
_delegatedListeners: any;
scrollZoom: ScrollZoomHandler;
boxZoom: BoxZoomHandler;
dragRotate: DragRotateHandler;
dragPan: DragPanHandler;
keyboard: KeyboardHandler;
doubleClickZoom: DoubleClickZoomHandler;
touchZoomRotate: TouchZoomRotateHandler;
    constructor(options: MapOptions) {
        // Merge user options over the defaults; never mutate the caller's object.
        options = util.extend({}, defaultOptions, options);

        if (options.minZoom != null && options.maxZoom != null && options.minZoom > options.maxZoom) {
            throw new Error(`maxZoom must be greater than minZoom`);
        }

        const transform = new Transform(options.minZoom, options.maxZoom, options.renderWorldCopies);
        super(transform, options);

        this._interactive = options.interactive;
        this._maxTileCacheSize = options.maxTileCacheSize;
        this._failIfMajorPerformanceCaveat = options.failIfMajorPerformanceCaveat;
        this._preserveDrawingBuffer = options.preserveDrawingBuffer;
        this._trackResize = options.trackResize;
        this._bearingSnap = options.bearingSnap;
        this._refreshExpiredTiles = options.refreshExpiredTiles;

        // Normalize transformRequest: always return a RequestParameters object,
        // falling back to the unmodified url when the callback returns nothing.
        const transformRequestFn = options.transformRequest;
        this._transformRequest = transformRequestFn ?  (url, type) => transformRequestFn(url, type) || ({ url }) : (url) => ({ url });

        // Resolve the container: an element id string or the element itself.
        if (typeof options.container === 'string') {
            const container = window.document.getElementById(options.container);
            if (!container) {
                throw new Error(`Container '${options.container}' not found.`);
            } else {
                this._container = container;
            }
        } else {
            this._container = options.container;
        }

        this.animationLoop = new AnimationLoop();

        if (options.maxBounds) {
            this.setMaxBounds(options.maxBounds);
        }

        // Bind callbacks that are handed to event listeners below.
        util.bindAll([
            '_onWindowOnline',
            '_onWindowResize',
            '_contextLost',
            '_contextRestored',
            '_update',
            '_render',
            '_onData',
            '_onDataLoading'
        ], this);

        this._setupContainer();
        this._setupPainter();

        this.on('move', this._update.bind(this, false));
        this.on('zoom', this._update.bind(this, true));
        this.on('moveend', () => {
            this.animationLoop.set(300); // text fading
            this._rerender();
        });

        if (typeof window !== 'undefined') {
            window.addEventListener('online', this._onWindowOnline, false);
            window.addEventListener('resize', this._onWindowResize, false);
        }

        bindHandlers(this, options);

        this._hash = options.hash && (new Hash()).addTo(this);
        // don't set position from options if set through hash
        if (!this._hash || !this._hash._onHashChange()) {
            this.jumpTo({
                center: options.center,
                zoom: options.zoom,
                bearing: options.bearing,
                pitch: options.pitch
            });
        }

        this.resize();

        if (options.style) this.setStyle(options.style, { localIdeographFontFamily: options.localIdeographFontFamily });

        if (options.attributionControl) this.addControl(new AttributionControl());
        this.addControl(new LogoControl(), options.logoPosition);

        this.on('style.load', function() {
            // Only apply the style's center/zoom/etc. if the user has not
            // positioned the map (via options or the URL hash).
            if (this.transform.unmodified) {
                this.jumpTo(this.style.stylesheet);
            }
            this.style.update({transition: false});
        });

        this.on('data', this._onData);
        this.on('dataloading', this._onDataLoading);
    }
/**
* Adds a {@link IControl} to the map, calling `control.onAdd(this)`.
*
* @param {IControl} control The {@link IControl} to add.
* @param {string} [position] position on the map to which the control will be added.
* Valid values are `'top-left'`, `'top-right'`, `'bottom-left'`, and `'bottom-right'`. Defaults to `'top-right'`.
* @returns {Map} `this`
* @see [Display map navigation controls](https://www.mapbox.com/mapbox-gl-js/example/navigation/)
*/
addControl(control: IControl, position?: ControlPosition) {
if (position === undefined && control.getDefaultPosition) {
position = control.getDefaultPosition();
}
if (position === undefined) {
position = 'top-right';
}
const controlElement = control.onAdd(this);
const positionContainer = this._controlPositions[position];
if (position.indexOf('bottom') !== -1) {
positionContainer.insertBefore(controlElement, positionContainer.firstChild);
} else {
positionContainer.appendChild(controlElement);
}
return this;
}
/**
* Removes the control from the map.
*
* @param {IControl} control The {@link IControl} to remove.
* @returns {Map} `this`
*/
    removeControl(control: IControl) {
        // Delegates all teardown to the control itself; onRemove() is expected
        // to detach the control's DOM element.
        // NOTE(review): there is no check that the control was previously
        // added — onRemove() runs unconditionally; confirm that is intended.
        control.onRemove(this);
        return this;
    }
/**
* Resizes the map according to the dimensions of its
* `container` element.
*
* This method must be called after the map's `container` is resized by another script,
* or when the map is shown after being initially hidden with CSS.
*
* @returns {Map} `this`
*/
resize() {
const dimensions = this._containerDimensions();
const width = dimensions[0];
const height = dimensions[1];
this._resizeCanvas(width, height);
this.transform.resize(width, height);
this.painter.resize(width, height);
return this
.fire('movestart')
.fire('move')
.fire('resize')
.fire('moveend');
}
/**
* Returns the map's geographical bounds.
*
* @returns {LngLatBounds} The map's geographical bounds.
*/
getBounds() {
const bounds = new LngLatBounds(
this.transform.pointLocation(new Point(0, this.transform.height)),
this.transform.pointLocation(new Point(this.transform.width, 0)));
if (this.transform.angle || this.transform.pitch) {
bounds.extend(this.transform.pointLocation(new Point(this.transform.size.x, 0)));
bounds.extend(this.transform.pointLocation(new Point(0, this.transform.size.y)));
}
return bounds;
}
/**
* Gets the map's geographical bounds.
*
* Returns the LngLatBounds by which pan and zoom operations on the map are constrained.
*
* @returns {LngLatBounds | null} The maximum bounds the map is constrained to, or `null` if none set.
*/
getMaxBounds () {
if (this.transform.latRange && this.transform.latRange.length === 2 &&
this.transform.lngRange && this.transform.lngRange.length === 2) {
return new LngLatBounds([this.transform.lngRange[0], this.transform.latRange[0]],
[this.transform.lngRange[1], this.transform.latRange[1]]);
} else {
return null;
}
}
/**
* Sets or clears the map's geographical bounds.
*
* Pan and zoom operations are constrained within these bounds.
* If a pan or zoom is performed that would
* display regions outside these bounds, the map will
* instead display a position and zoom level
* as close as possible to the operation's request while still
* remaining within the bounds.
*
* @param {LngLatBoundsLike | null | undefined} lnglatbounds The maximum bounds to set. If `null` or `undefined` is provided, the function removes the map's maximum bounds.
* @returns {Map} `this`
*/
setMaxBounds(lnglatbounds: LngLatBoundsLike) {
if (lnglatbounds) {
const b = LngLatBounds.convert(lnglatbounds);
this.transform.lngRange = [b.getWest(), b.getEast()];
this.transform.latRange = [b.getSouth(), b.getNorth()];
this.transform._constrain();
this._update();
} else if (lnglatbounds === null || lnglatbounds === undefined) {
this.transform.lngRange = null;
this.transform.latRange = null;
this._update();
}
return this;
}
/**
* Sets or clears the map's minimum zoom level.
* If the map's current zoom level is lower than the new minimum,
* the map will zoom to the new minimum.
*
* @param {number | null | undefined} minZoom The minimum zoom level to set (0-20).
* If `null` or `undefined` is provided, the function removes the current minimum zoom (i.e. sets it to 0).
* @returns {Map} `this`
*/
setMinZoom(minZoom?: ?number) {
minZoom = minZoom === null || minZoom === undefined ? defaultMinZoom : minZoom;
if (minZoom >= defaultMinZoom && minZoom <= this.transform.maxZoom) {
this.transform.minZoom = minZoom;
this._update();
if (this.getZoom() < minZoom) this.setZoom(minZoom);
return this;
} else throw new Error(`minZoom must be between ${defaultMinZoom} and the current maxZoom, inclusive`);
}
/**
* Returns the map's minimum allowable zoom level.
*
* @returns {number} minZoom
*/
getMinZoom() { return this.transform.minZoom; }
/**
* Sets or clears the map's maximum zoom level.
* If the map's current zoom level is higher than the new maximum,
* the map will zoom to the new maximum.
*
* @param {number | null | undefined} maxZoom The maximum zoom level to set.
* If `null` or `undefined` is provided, the function removes the current maximum zoom (sets it to 20).
* @returns {Map} `this`
*/
setMaxZoom(maxZoom?: ?number) {
maxZoom = maxZoom === null || maxZoom === undefined ? defaultMaxZoom : maxZoom;
if (maxZoom >= this.transform.minZoom) {
this.transform.maxZoom = maxZoom;
this._update();
if (this.getZoom() > maxZoom) this.setZoom(maxZoom);
return this;
} else throw new Error(`maxZoom must be greater than the current minZoom`);
}
/**
* Returns the map's maximum allowable zoom level.
*
* @returns {number} maxZoom
*/
getMaxZoom() { return this.transform.maxZoom; }
/**
* Returns a {@link Point} representing pixel coordinates, relative to the map's `container`,
* that correspond to the specified geographical location.
*
* @param {LngLatLike} lnglat The geographical location to project.
* @returns {Point} The {@link Point} corresponding to `lnglat`, relative to the map's `container`.
*/
project(lnglat: LngLatLike) {
return this.transform.locationPoint(LngLat.convert(lnglat));
}
/**
* Returns a {@link LngLat} representing geographical coordinates that correspond
* to the specified pixel coordinates.
*
* @param {PointLike} point The pixel coordinates to unproject.
* @returns {LngLat} The {@link LngLat} corresponding to `point`.
* @see [Show polygon information on click](https://www.mapbox.com/mapbox-gl-js/example/polygon-popup-on-click/)
*/
unproject(point: PointLike) {
return this.transform.pointLocation(Point.convert(point));
}
/**
* Adds a listener for events of a specified type.
*
* @method
* @name on
* @memberof Map
* @instance
* @param {string} type The event type to add a listen for.
* @param {Function} listener The function to be called when the event is fired.
* The listener function is called with the data object passed to `fire`,
* extended with `target` and `type` properties.
* @returns {Map} `this`
*/
/**
* Adds a listener for events of a specified type occurring on features in a specified style layer.
*
* @param {string} type The event type to listen for; one of `'mousedown'`, `'mouseup'`, `'click'`, `'dblclick'`,
* `'mousemove'`, `'mouseenter'`, `'mouseleave'`, `'mouseover'`, `'mouseout'`, `'contextmenu'`, `'touchstart'`,
* `'touchend'`, or `'touchcancel'`. `mouseenter` and `mouseover` events are triggered when the cursor enters
* a visible portion of the specified layer from outside that layer or outside the map canvas. `mouseleave`
* and `mouseout` events are triggered when the cursor leaves a visible portion of the specified layer, or leaves
* the map canvas.
* @param {string} layer The ID of a style layer. Only events whose location is within a visible
* feature in this layer will trigger the listener. The event will have a `features` property containing
* an array of the matching features.
* @param {Function} listener The function to be called when the event is fired.
* @returns {Map} `this`
*/
    on(type: MapEvent, layer: any, listener: any) {
        // Two-argument form (type, listener): plain Evented subscription.
        if (listener === undefined) {
            return super.on(type, layer);
        }
        // Three-argument form: wrap the user listener in delegates that fire
        // only when the event location hits rendered features of `layer`.
        const delegatedListener = (() => {
            if (type === 'mouseenter' || type === 'mouseover') {
                // Fire once when the cursor moves onto a feature of the layer;
                // `mousein` tracks the hover state so the listener re-arms
                // only after the cursor has left the features.
                let mousein = false;
                const mousemove = (e) => {
                    // If the layer is not (or no longer) in the style, treat
                    // it as having no features under the cursor.
                    const features = this.getLayer(layer) ? this.queryRenderedFeatures(e.point, {layers: [layer]}) : [];
                    if (!features.length) {
                        mousein = false;
                    } else if (!mousein) {
                        mousein = true;
                        listener.call(this, util.extend({features}, e, {type}));
                    }
                };
                const mouseout = () => {
                    // Leaving the map canvas re-arms enter detection.
                    mousein = false;
                };
                return {layer, listener, delegates: {mousemove, mouseout}};
            } else if (type === 'mouseleave' || type === 'mouseout') {
                // Fire once when the cursor leaves the layer's features,
                // either by moving off them or by leaving the canvas.
                let mousein = false;
                const mousemove = (e) => {
                    const features = this.getLayer(layer) ? this.queryRenderedFeatures(e.point, {layers: [layer]}) : [];
                    if (features.length) {
                        mousein = true;
                    } else if (mousein) {
                        mousein = false;
                        listener.call(this, util.extend({}, e, {type}));
                    }
                };
                const mouseout = (e) => {
                    if (mousein) {
                        mousein = false;
                        listener.call(this, util.extend({}, e, {type}));
                    }
                };
                return {layer, listener, delegates: {mousemove, mouseout}};
            } else {
                // All other event types: forward the event (with matching
                // features attached) only when at least one feature is hit.
                const delegate = (e) => {
                    const features = this.getLayer(layer) ? this.queryRenderedFeatures(e.point, {layers: [layer]}) : [];
                    if (features.length) {
                        listener.call(this, util.extend({features}, e));
                    }
                };
                return {layer, listener, delegates: {[type]: delegate}};
            }
        })();
        // Record the delegation so off(type, layer, listener) can later
        // unhook the exact same delegate functions.
        this._delegatedListeners = this._delegatedListeners || {};
        this._delegatedListeners[type] = this._delegatedListeners[type] || [];
        this._delegatedListeners[type].push(delegatedListener);
        for (const event in delegatedListener.delegates) {
            this.on((event: any), delegatedListener.delegates[event]);
        }
        return this;
    }
/**
* Removes an event listener previously added with `Map#on`.
*
* @method
* @name off
* @memberof Map
* @instance
* @param {string} type The event type previously used to install the listener.
* @param {Function} listener The function previously installed as a listener.
* @returns {Map} `this`
*/
/**
* Removes an event listener for layer-specific events previously added with `Map#on`.
*
* @param {string} type The event type previously used to install the listener.
* @param {string} layer The layer ID previously used to install the listener.
* @param {Function} listener The function previously installed as a listener.
* @returns {Map} `this`
*/
off(type: MapEvent, layer: any, listener: any) {
if (listener === undefined) {
return super.off(type, layer);
}
if (this._delegatedListeners && this._delegatedListeners[type]) {
const listeners = this._delegatedListeners[type];
for (let i = 0; i < listeners.length; i++) {
const delegatedListener = listeners[i];
if (delegatedListener.layer === layer && delegatedListener.listener === listener) {
for (const event in delegatedListener.delegates) {
this.off((event: any), delegatedListener.delegates[event]);
}
listeners.splice(i, 1);
return this;
}
}
}
return this;
}
/**
* Returns an array of [GeoJSON](http://geojson.org/)
* [Feature objects](http://geojson.org/geojson-spec.html#feature-objects)
* representing visible features that satisfy the query parameters.
*
* @param {PointLike|Array<PointLike>} [geometry] - The geometry of the query region:
* either a single point or southwest and northeast points describing a bounding box.
* Omitting this parameter (i.e. calling {@link Map#queryRenderedFeatures} with zero arguments,
* or with only a `parameters` argument) is equivalent to passing a bounding box encompassing the entire
* map viewport.
* @param {Object} [parameters]
* @param {Array<string>} [parameters.layers] An array of style layer IDs for the query to inspect.
* Only features within these layers will be returned. If this parameter is undefined, all layers will be checked.
* @param {Array} [parameters.filter] A [filter](https://www.mapbox.com/mapbox-gl-style-spec/#types-filter)
* to limit query results.
*
* @returns {Array<Object>} An array of [GeoJSON](http://geojson.org/)
* [feature objects](http://geojson.org/geojson-spec.html#feature-objects).
*
* The `properties` value of each returned feature object contains the properties of its source feature. For GeoJSON sources, only
* string and numeric property values are supported (i.e. `null`, `Array`, and `Object` values are not supported).
*
* Each feature includes a top-level `layer` property whose value is an object representing the style layer to
* which the feature belongs. Layout and paint properties in this object contain values which are fully evaluated
* for the given zoom level and feature.
*
* Features from layers whose `visibility` property is `"none"`, or from layers whose zoom range excludes the
* current zoom level are not included. Symbol features that have been hidden due to text or icon collision are
* not included. Features from all other layers are included, including features that may have no visible
* contribution to the rendered result; for example, because the layer's opacity or color alpha component is set to
* 0.
*
* The topmost rendered feature appears first in the returned array, and subsequent features are sorted by
* descending z-order. Features that are rendered multiple times (due to wrapping across the antimeridian at low
* zoom levels) are returned only once (though subject to the following caveat).
*
* Because features come from tiled vector data or GeoJSON data that is converted to tiles internally, feature
* geometries may be split or duplicated across tile boundaries and, as a result, features may appear multiple
* times in query results. For example, suppose there is a highway running through the bounding rectangle of a query.
* The results of the query will be those parts of the highway that lie within the map tiles covering the bounding
* rectangle, even if the highway extends into other tiles, and the portion of the highway within each map tile
* will be returned as a separate feature. Similarly, a point feature near a tile boundary may appear in multiple
* tiles due to tile buffering.
*
* @example
* // Find all features at a point
* var features = map.queryRenderedFeatures(
* [20, 35],
* { layers: ['my-layer-name'] }
* );
*
* @example
* // Find all features within a static bounding box
* var features = map.queryRenderedFeatures(
* [[10, 20], [30, 50]],
* { layers: ['my-layer-name'] }
* );
*
* @example
* // Find all features within a bounding box around a point
* var width = 10;
* var height = 20;
* var features = map.queryRenderedFeatures([
* [point.x - width / 2, point.y - height / 2],
* [point.x + width / 2, point.y + height / 2]
* ], { layers: ['my-layer-name'] });
*
* @example
* // Query all rendered features from a single layer
* var features = map.queryRenderedFeatures({ layers: ['my-layer-name'] });
* @see [Get features under the mouse pointer](https://www.mapbox.com/mapbox-gl-js/example/queryrenderedfeatures/)
* @see [Highlight features within a bounding box](https://www.mapbox.com/mapbox-gl-js/example/using-box-queryrenderedfeatures/)
* @see [Center the map on a clicked symbol](https://www.mapbox.com/mapbox-gl-js/example/center-on-symbol/)
*/
queryRenderedFeatures(...args: [PointLike | [PointLike, PointLike], Object] | [PointLike | [PointLike, PointLike]] | [Object]) {
let params = {};
let geometry;
if (args.length === 2) {
geometry = arguments[0];
params = arguments[1];
} else if (args.length === 1 && isPointLike(args[0])) {
geometry = args[0];
} else if (args.length === 1) {
params = args[0];
}
if (!this.style) {
return [];
}
return this.style.queryRenderedFeatures(
this._makeQueryGeometry(geometry),
params,
this.transform.zoom,
this.transform.angle
);
function isPointLike(input) {
return input instanceof Point || Array.isArray(input);
}
}
_makeQueryGeometry(pointOrBox?: PointLike | [PointLike, PointLike]) {
if (pointOrBox === undefined) {
// bounds was omitted: use full viewport
pointOrBox = [
Point.convert([0, 0]),
Point.convert([this.transform.width, this.transform.height])
];
}
let queryGeometry;
if (pointOrBox instanceof Point || typeof pointOrBox[0] === 'number') {
const point = Point.convert(pointOrBox);
queryGeometry = [point];
} else {
const box = [Point.convert(pointOrBox[0]), Point.convert(pointOrBox[1])];
queryGeometry = [
box[0],
new Point(box[1].x, box[0].y),
box[1],
new Point(box[0].x, box[1].y),
box[0]
];
}
queryGeometry = queryGeometry.map((p) => {
return this.transform.pointCoordinate(p);
});
return queryGeometry;
}
/**
* Returns an array of [GeoJSON](http://geojson.org/)
* [Feature objects](http://geojson.org/geojson-spec.html#feature-objects)
* representing features within the specified vector tile or GeoJSON source that satisfy the query parameters.
*
* @param {string} sourceID The ID of the vector tile or GeoJSON source to query.
* @param {Object} [parameters]
* @param {string} [parameters.sourceLayer] The name of the vector tile layer to query. *For vector tile
* sources, this parameter is required.* For GeoJSON sources, it is ignored.
* @param {Array} [parameters.filter] A [filter](https://www.mapbox.com/mapbox-gl-style-spec/#types-filter)
* to limit query results.
*
* @returns {Array<Object>} An array of [GeoJSON](http://geojson.org/)
* [Feature objects](http://geojson.org/geojson-spec.html#feature-objects).
*
* In contrast to {@link Map#queryRenderedFeatures}, this function
* returns all features matching the query parameters,
* whether or not they are rendered by the current style (i.e. visible). The domain of the query includes all currently-loaded
* vector tiles and GeoJSON source tiles: this function does not check tiles outside the currently
* visible viewport.
*
* Because features come from tiled vector data or GeoJSON data that is converted to tiles internally, feature
* geometries may be split or duplicated across tile boundaries and, as a result, features may appear multiple
* times in query results. For example, suppose there is a highway running through the bounding rectangle of a query.
* The results of the query will be those parts of the highway that lie within the map tiles covering the bounding
* rectangle, even if the highway extends into other tiles, and the portion of the highway within each map tile
* will be returned as a separate feature. Similarly, a point feature near a tile boundary may appear in multiple
* tiles due to tile buffering.
* @see [Filter features within map view](https://www.mapbox.com/mapbox-gl-js/example/filter-features-within-map-view/)
* @see [Highlight features containing similar data](https://www.mapbox.com/mapbox-gl-js/example/query-similar-features/)
*/
    querySourceFeatures(sourceID: string, parameters: ?{sourceLayer: ?string, filter: ?Array<any>}) {
        // Thin delegate to the style; see the jsdoc above for semantics.
        // NOTE(review): unlike queryRenderedFeatures, there is no `!this.style`
        // guard here — calling this before a style is set would throw; confirm.
        return this.style.querySourceFeatures(sourceID, parameters);
    }
/**
* Updates the map's Mapbox style object with a new value. If the given
* value is style JSON object, compares it against the the map's current
* state and perform only the changes necessary to make the map style match
* the desired state.
*
* @param style A JSON object conforming to the schema described in the
* [Mapbox Style Specification](https://mapbox.com/mapbox-gl-style-spec/), or a URL to such JSON.
* @param {Object} [options]
* @param {boolean} [options.diff=true] If false, force a 'full' update, removing the current style
* and adding building the given one instead of attempting a diff-based update.
* @param {string} [options.localIdeographFontFamily=null] If non-null, defines a css font-family
* for locally overriding generation of glyphs in the 'CJK Unified Ideographs' and 'Hangul Syllables'
* ranges. Forces a full update.
* @returns {Map} `this`
* @see [Change a map's style](https://www.mapbox.com/mapbox-gl-js/example/setstyle/)
*/
    setStyle(style: StyleSpecification | string | null, options?: {diff?: boolean} & StyleOptions) {
        // A diff-based update is attempted only when a style is already
        // present, diffing is not disabled, and no localIdeographFontFamily
        // is requested (that option forces a full rebuild).
        const shouldTryDiff = (!options || (options.diff !== false && !options.localIdeographFontFamily)) && this.style;
        // Diffing also requires the new style to be inline JSON (an object);
        // style URLs cannot be diffed.
        if (shouldTryDiff && style && typeof style === 'object') {
            try {
                if (this.style.setState(style)) {
                    this._update(true);
                }
                return this;
            } catch (e) {
                // Diff failed (e.g. an operation setState cannot express):
                // warn and fall through to the full rebuild below.
                util.warnOnce(`Unable to perform style diff: ${e.message || e.error || e}. Rebuilding the style from scratch.`);
            }
        }
        // Tear down the previous style and its placement event hooks.
        if (this.style) {
            this.style.setEventedParent(null);
            this.style._remove();
            this.off('rotate', this.style._redoPlacement);
            this.off('pitch', this.style._redoPlacement);
            this.off('move', this.style._redoPlacement);
        }
        // Passing null/undefined removes the style entirely.
        if (!style) {
            delete this.style;
            return this;
        } else {
            this.style = new Style(this, options || {});
        }
        this.style.setEventedParent(this, {style: this.style});
        // Strings are treated as style URLs; objects as inline style JSON.
        if (typeof style === 'string') {
            this.style.loadURL(style);
        } else {
            this.style.loadJSON(style);
        }
        // Re-run symbol placement whenever the camera rotates, pitches, or moves.
        this.on('rotate', this.style._redoPlacement);
        this.on('pitch', this.style._redoPlacement);
        this.on('move', this.style._redoPlacement);
        return this;
    }
/**
* Returns the map's Mapbox style object, which can be used to recreate the map's style.
*
* @returns {Object} The map's style object.
*/
getStyle() {
if (this.style) {
return this.style.serialize();
}
}
/**
* Returns a Boolean indicating whether the map's style is fully loaded.
*
* @returns {boolean} A Boolean indicating whether the style is fully loaded.
*/
    isStyleLoaded() {
        // With no style attached this returns util.warnOnce's return value
        // (presumably undefined, i.e. falsy — TODO confirm), not an explicit
        // false, after emitting the warning.
        if (!this.style) return util.warnOnce('There is no style added to the map.');
        return this.style.loaded();
    }
/**
* Adds a source to the map's style.
*
* @param {string} id The ID of the source to add. Must not conflict with existing sources.
* @param {Object} source The source object, conforming to the
* Mapbox Style Specification's [source definition](https://www.mapbox.com/mapbox-gl-style-spec/#sources).
* @param {string} source.type The source type, which must be either one of the core Mapbox GL source types defined in the style specification or a custom type that has been added to the map with {@link Map#addSourceType}.
* @fires source.add
* @returns {Map} `this`
* @see [Draw GeoJSON points](https://www.mapbox.com/mapbox-gl-js/example/geojson-markers/)
* @see [Style circles using data-driven styling](https://www.mapbox.com/mapbox-gl-js/example/data-driven-circle-colors/)
* @see [Set a point after Geocoder result](https://www.mapbox.com/mapbox-gl-js/example/point-from-geocoder-result/)
*/
    addSource(id: string, source: SourceSpecification) {
        // Delegate to the style, then force a full update (`true`) since the
        // set of available sources changed.
        this.style.addSource(id, source);
        this._update(true);
        return this;
    }
/**
* Returns a Boolean indicating whether the source is loaded.
*
* @param {string} id The ID of the source to be checked.
* @returns {boolean} A Boolean indicating whether the source is loaded.
*/
    isSourceLoaded(id: string) {
        // Look up the source cache; `this.style &&` guards the no-style case.
        const source = this.style && this.style.sourceCaches[id];
        if (source === undefined) {
            // Unknown ID: report via the event system (returning undefined)
            // rather than throwing.
            this.fire('error', {
                error: new Error(`There is no source with ID '${id}'`)
            });
            return;
        }
        return source.loaded();
    }
/**
* Returns a Boolean indicating whether all tiles in the viewport from all sources on
* the style are loaded.
*
* @returns {boolean} A Boolean indicating whether all tiles are loaded.
*/
areTilesLoaded() {
const sources = this.style && this.style.sourceCaches;
for (const id in sources) {
const source = sources[id];
const tiles = source._tiles;
for (const t in tiles) {
const tile = tiles[t];
if (!(tile.state === 'loaded' || tile.state === 'errored')) return false;
}
}
return true;
}
/**
* Adds a [custom source type](#Custom Sources), making it available for use with
* {@link Map#addSource}.
* @private
* @param {string} name The name of the source type; source definition objects use this name in the `{type: ...}` field.
* @param {Function} SourceType A {@link Source} constructor.
* @param {Function} callback Called when the source type is ready or with an error argument if there is an error.
*/
    addSourceType(name: string, SourceType: any, callback: Function) {
        // Thin delegate; the style owns the registry of custom source types.
        return this.style.addSourceType(name, SourceType, callback);
    }
/**
* Removes a source from the map's style.
*
* @param {string} id The ID of the source to remove.
* @returns {Map} `this`
*/
    removeSource(id: string) {
        // Delegate to the style, then force a full update (`true`) since the
        // set of available sources changed.
        this.style.removeSource(id);
        this._update(true);
        return this;
    }
/**
* Returns the source with the specified ID in the map's style.
*
* @param {string} id The ID of the source to get.
* @returns {?Object} The style source with the specified ID, or `undefined`
* if the ID corresponds to no existing sources.
* @see [Create a draggable point](https://www.mapbox.com/mapbox-gl-js/example/drag-a-point/)
* @see [Animate a point](https://www.mapbox.com/mapbox-gl-js/example/animate-point-along-line/)
* @see [Add live realtime data](https://www.mapbox.com/mapbox-gl-js/example/live-geojson/)
*/
    getSource(id: string) {
        // Thin delegate; returns undefined for unknown IDs (see jsdoc above).
        return this.style.getSource(id);
    }
/**
* Add an image to the style. This image can be used in `icon-image`,
* `background-pattern`, `fill-pattern`, and `line-pattern`. An
* {@link Map#error} event will be fired if there is not enough space in the
* sprite to add this image.
*
* @see [Add an icon to the map](https://www.mapbox.com/mapbox-gl-js/example/add-image/)
* @see [Add a generated icon to the map](https://www.mapbox.com/mapbox-gl-js/example/add-image-generated/)
* @param id The ID of the image.
* @param data The image as an `HTMLImageElement`, `ImageData`, or object with `width`, `height`, and `data`
* properties with the same format as `ImageData`.
* @param options
* @param options.pixelRatio The ratio of pixels in the image to physical pixels on the screen
* @param options.sdf Whether the image should be interpreted as an SDF image
*/
    addImage(id: string, data: HTMLImageElement | ImageData | {width: number, height: number, data: Uint8Array | Uint8ClampedArray},
             {pixelRatio = 1, sdf = false}: {pixelRatio?: number, sdf?: boolean} = {}) {
        // HTMLImageElements are first rasterized into ImageData-like pixels.
        if (data instanceof HTMLImageElement) {
            data = browser.getImageData(data);
        } else if (data.width === undefined || data.height === undefined) {
            // Anything else must already be ImageData-shaped (width, height,
            // data); invalid input is reported via 'error', not thrown.
            return this.fire('error', {error: new Error(
                'Invalid arguments to map.addImage(). The second argument must be an `HTMLImageElement`, `ImageData`, ' +
                'or object with `width`, `height`, and `data` properties with the same format as `ImageData`')});
        }
        // The double cast launders the union type into RGBAImage for the style.
        this.style.addImage(id, { data: ((data: any): RGBAImage), pixelRatio, sdf });
    }
/**
* Remove an image from the style (such as one used by `icon-image` or `background-pattern`).
*
* @param id The ID of the image.
*/
    removeImage(id: string) {
        // Thin delegate; the style owns the sprite/image registry.
        this.style.removeImage(id);
    }
/**
* Load an image from an external URL for use with `Map#addImage`. External
* domains must support [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/Access_control_CORS).
*
* @param {string} url The URL of the image file. Image file must be in png, webp, or jpg format.
* @param {Function} callback Expecting `callback(error, data)`. Called when the image has loaded or with an error argument if there is an error.
* @see [Add an icon to the map](https://www.mapbox.com/mapbox-gl-js/example/add-image/)
*/
    loadImage(url: string, callback: Function) {
        // Run the URL through the map's request transformer (e.g. for auth
        // headers) before fetching; callback receives (error, data).
        ajax.getImage(this._transformRequest(url, ajax.ResourceType.Image), callback);
    }
/**
* Adds a [Mapbox style layer](https://www.mapbox.com/mapbox-gl-style-spec/#layers)
* to the map's style.
*
* A layer defines styling for data from a specified source.
*
* @param {Object} layer The style layer to add, conforming to the Mapbox Style Specification's
* [layer definition](https://www.mapbox.com/mapbox-gl-style-spec/#layers).
* @param {string} [before] The ID of an existing layer to insert the new layer before.
* If this argument is omitted, the layer will be appended to the end of the layers array.
* @returns {Map} `this`
* @see [Create and style clusters](https://www.mapbox.com/mapbox-gl-js/example/cluster/)
* @see [Add a vector tile source](https://www.mapbox.com/mapbox-gl-js/example/vector-source/)
* @see [Add a WMS source](https://www.mapbox.com/mapbox-gl-js/example/wms/)
*/
    addLayer(layer: LayerSpecification, before?: string) {
        // Delegate to the style, then force a full update (`true`) since the
        // layer list changed.
        this.style.addLayer(layer, before);
        this._update(true);
        return this;
    }
/**
* Moves a layer to a different z-position.
*
* @param {string} id The ID of the layer to move.
* @param {string} [beforeId] The ID of an existing layer to insert the new layer before.
* If this argument is omitted, the layer will be appended to the end of the layers array.
* @returns {Map} `this`
*/
    moveLayer(id: string, beforeId?: string) {
        // Delegate to the style, then force a full update (`true`) since the
        // layer ordering changed.
        this.style.moveLayer(id, beforeId);
        this._update(true);
        return this;
    }
    /**
     * Removes the layer with the given id from the map's style.
     *
     * If no such layer exists, an `error` event is fired.
     *
     * @param {string} id id of the layer to remove
     * @fires error
     * @returns {Map} `this`
     */
    removeLayer(id: string) {
        this.style.removeLayer(id);
        this._update(true);
        return this;
    }
/**
* Returns the layer with the specified ID in the map's style.
*
* @param {string} id The ID of the layer to get.
* @returns {?Object} The layer with the specified ID, or `undefined`
* if the ID corresponds to no existing layers.
* @see [Filter symbols by toggling a list](https://www.mapbox.com/mapbox-gl-js/example/filter-markers/)
* @see [Filter symbols by text input](https://www.mapbox.com/mapbox-gl-js/example/filter-markers-by-input/)
*/
    getLayer(id: string) {
        // Thin delegate; returns undefined for unknown IDs (see jsdoc above).
        return this.style.getLayer(id);
    }
/**
* Sets the filter for the specified style layer.
*
* @param {string} layer The ID of the layer to which the filter will be applied.
* @param {Array | null | undefined} filter The filter, conforming to the Mapbox Style Specification's
* [filter definition](https://www.mapbox.com/mapbox-gl-style-spec/#types-filter). If `null` or `undefined` is provided, the function removes any existing filter from the layer.
* @returns {Map} `this`
* @example
* map.setFilter('my-layer', ['==', 'name', 'USA']);
* @see [Filter features within map view](https://www.mapbox.com/mapbox-gl-js/example/filter-features-within-map-view/)
* @see [Highlight features containing similar data](https://www.mapbox.com/mapbox-gl-js/example/query-similar-features/)
* @see [Create a timeline animation](https://www.mapbox.com/mapbox-gl-js/example/timeline-animation/)
*/
    setFilter(layer: string, filter: FilterSpecification) {
        // Delegate to the style, then force a full update (`true`) since the
        // set of rendered features may change.
        this.style.setFilter(layer, filter);
        this._update(true);
        return this;
    }
/**
* Sets the zoom extent for the specified style layer.
*
* @param {string} layerId The ID of the layer to which the zoom extent will be applied.
* @param {number} minzoom The minimum zoom to set (0-20).
* @param {number} maxzoom The maximum zoom to set (0-20).
* @returns {Map} `this`
* @example
* map.setLayerZoomRange('my-layer', 2, 5);
*/
    setLayerZoomRange(layerId: string, minzoom: number, maxzoom: number) {
        // Delegate to the style, then force a full update (`true`) since
        // layer visibility may change at the current zoom.
        this.style.setLayerZoomRange(layerId, minzoom, maxzoom);
        this._update(true);
        return this;
    }
/**
* Returns the filter applied to the specified style layer.
*
* @param {string} layer The ID of the style layer whose filter to get.
* @returns {Array} The layer's filter.
*/
    getFilter(layer: string) {
        // Thin delegate to the style's filter accessor.
        return this.style.getFilter(layer);
    }
/**
* Sets the value of a paint property in the specified style layer.
*
* @param {string} layer The ID of the layer to set the paint property in.
* @param {string} name The name of the paint property to set.
* @param {*} value The value of the paint propery to set.
* Must be of a type appropriate for the property, as defined in the [Mapbox Style Specification](https://www.mapbox.com/mapbox-gl-style-spec/).
* @returns {Map} `this`
* @example
* map.setPaintProperty('my-layer', 'fill-color', '#faafee');
* @see [Change a layer's color with buttons](https://www.mapbox.com/mapbox-gl-js/example/color-switcher/)
* @see [Adjust a layer's opacity](https://www.mapbox.com/mapbox-gl-js/example/adjust-layer-opacity/)
* @see [Create a draggable point](https://www.mapbox.com/mapbox-gl-js/example/drag-a-point/)
*/
    setPaintProperty(layer: string, name: string, value: any) {
        // Delegate to the style, then force a full update (`true`) so the
        // new paint value takes effect.
        this.style.setPaintProperty(layer, name, value);
        this._update(true);
        return this;
    }
/**
* Returns the value of a paint property in the specified style layer.
*
* @param {string} layer The ID of the layer to get the paint property from.
* @param {string} name The name of a paint property to get.
* @returns {*} The value of the specified paint property.
*/
    getPaintProperty(layer: string, name: string) {
        // Thin delegate to the style's paint-property accessor.
        return this.style.getPaintProperty(layer, name);
    }
/**
* Sets the value of a layout property in the specified style layer.
*
* @param {string} layer The ID of the layer to set the layout property in.
* @param {string} name The name of the layout property to set.
* @param {*} value The value of the layout propery. Must be of a type appropriate for the property, as defined in the [Mapbox Style Specification](https://www.mapbox.com/mapbox-gl-style-spec/).
* @returns {Map} `this`
* @example
* map.setLayoutProperty('my-layer', 'visibility', 'none');
*/
    setLayoutProperty(layer: string, name: string, value: any) {
        // Delegate to the style, then force a full update (`true`) so the
        // new layout value takes effect.
        this.style.setLayoutProperty(layer, name, value);
        this._update(true);
        return this;
    }
/**
* Returns the value of a layout property in the specified style layer.
*
* @param {string} layer The ID of the layer to get the layout property from.
* @param {string} name The name of the layout property to get.
* @returns {*} The value of the specified layout property.
*/
    getLayoutProperty(layer: string, name: string) {
        // Thin delegate to the style's layout-property accessor.
        return this.style.getLayoutProperty(layer, name);
    }
/**
* Sets the any combination of light values.
*
* @param light Light properties to set. Must conform to the [Mapbox Style Specification](https://www.mapbox.com/mapbox-gl-style-spec/).
* @returns {Map} `this`
*/
    setLight(light: LightSpecification) {
        // Delegate to the style, then force a full update (`true`) so the
        // new light values take effect.
        this.style.setLight(light);
        this._update(true);
        return this;
    }
/**
* Returns the value of the light object.
*
* @returns {Object} light Light properties of the style.
*/
    getLight() {
        // Thin delegate to the style's light accessor.
        return this.style.getLight();
    }
/**
* Returns the map's containing HTML element.
*
* @returns {HTMLElement} The map's container.
*/
    getContainer() {
        // The user-supplied container element the map was created in.
        return this._container;
    }
/**
* Returns the HTML element containing the map's `<canvas>` element.
*
* If you want to add non-GL overlays to the map, you should append them to this element.
*
* This is the element to which event bindings for map interactivity (such as panning and zooming) are
* attached. It will receive bubbled events from child elements such as the `<canvas>`, but not from
* map controls.
*
* @returns {HTMLElement} The container of the map's `<canvas>`.
* @see [Create a draggable point](https://www.mapbox.com/mapbox-gl-js/example/drag-a-point/)
* @see [Highlight features within a bounding box](https://www.mapbox.com/mapbox-gl-js/example/using-box-queryrenderedfeatures/)
*/
    getCanvasContainer() {
        // The wrapper div created around the canvas in _setupContainer().
        return this._canvasContainer;
    }
/**
* Returns the map's `<canvas>` element.
*
* @returns {HTMLCanvasElement} The map's `<canvas>` element.
* @see [Measure distances](https://www.mapbox.com/mapbox-gl-js/example/measure/)
* @see [Display a popup on hover](https://www.mapbox.com/mapbox-gl-js/example/popup-on-hover/)
* @see [Center the map on a clicked symbol](https://www.mapbox.com/mapbox-gl-js/example/center-on-symbol/)
*/
    getCanvas() {
        // The canvas element created in _setupContainer().
        return this._canvas;
    }
_containerDimensions() {
let width = 0;
let height = 0;
if (this._container) {
width = this._container.offsetWidth || 400;
height = this._container.offsetHeight || 300;
}
return [width, height];
}
/**
 * Builds the map's DOM scaffolding inside `this._container`: the
 * missing-CSS warning element, the canvas container with the `<canvas>`,
 * and the four positioned control containers.
 * @private
 */
_setupContainer() {
    const container = this._container;
    container.classList.add('mapboxgl-map');
    // Shown only when the stylesheet failed to load (the CSS hides it).
    const missingCSSContainer = this._missingCSSContainer = DOM.create('div', 'mapboxgl-missing-css', container);
    missingCSSContainer.innerHTML = 'Missing Mapbox GL JS CSS';
    const canvasContainer = this._canvasContainer = DOM.create('div', 'mapboxgl-canvas-container', container);
    if (this._interactive) {
        canvasContainer.classList.add('mapboxgl-interactive');
    }
    this._canvas = DOM.create('canvas', 'mapboxgl-canvas', canvasContainer);
    this._canvas.style.position = 'absolute';
    // Handlers were bound elsewhere; here we just attach them to the canvas.
    this._canvas.addEventListener('webglcontextlost', this._contextLost, false);
    this._canvas.addEventListener('webglcontextrestored', this._contextRestored, false);
    // Make the canvas keyboard-focusable and label it for screen readers.
    this._canvas.setAttribute('tabindex', '0');
    this._canvas.setAttribute('aria-label', 'Map');
    const dimensions = this._containerDimensions();
    this._resizeCanvas(dimensions[0], dimensions[1]);
    // One wrapper element per corner for controls added via addControl().
    const controlContainer = this._controlContainer = DOM.create('div', 'mapboxgl-control-container', container);
    const positions = this._controlPositions = {};
    ['top-left', 'top-right', 'bottom-left', 'bottom-right'].forEach((positionName) => {
        positions[positionName] = DOM.create('div', `mapboxgl-ctrl-${positionName}`, controlContainer);
    });
}
_resizeCanvas(width: number, height: number) {
const pixelRatio = window.devicePixelRatio || 1;
// Request the required canvas size taking the pixelratio into account.
this._canvas.width = pixelRatio * width;
this._canvas.height = pixelRatio * height;
// Maintain the same canvas size, potentially downscaling it for HiDPI displays
this._canvas.style.width = `${width}px`;
this._canvas.style.height = `${height}px`;
}
_setupPainter() {
const attributes = util.extend({
failIfMajorPerformanceCaveat: this._failIfMajorPerformanceCaveat,
preserveDrawingBuffer: this._preserveDrawingBuffer
}, isSupported.webGLContextAttributes);
const gl = this._canvas.getContext('webgl', attributes) ||
this._canvas.getContext('experimental-webgl', attributes);
if (!gl) {
this.fire('error', { error: new Error('Failed to initialize WebGL') });
return;
}
this.painter = new Painter(gl, this.transform);
}
_contextLost(event: Event) {
event.preventDefault();
if (this._frameId) {
browser.cancelFrame(this._frameId);
this._frameId = null;
}
this.fire('webglcontextlost', {originalEvent: event});
}
_contextRestored(event: Event) {
this._setupPainter();
this.resize();
this._update();
this.fire('webglcontextrestored', {originalEvent: event});
}
/**
* Returns a Boolean indicating whether the map is fully loaded.
*
* Returns `false` if the style is not yet fully loaded,
* or if there has been a change to the sources or style that
* has not yet fully loaded.
*
* @returns {boolean} A Boolean indicating whether the map is fully loaded.
*/
loaded() {
if (this._styleDirty || this._sourcesDirty)
return false;
if (!this.style || !this.style.loaded())
return false;
return true;
}
/**
* Update this map's style and sources, and re-render the map.
*
* @param {boolean} updateStyle mark the map's style for reprocessing as
* well as its sources
* @returns {Map} this
* @private
*/
_update(updateStyle?: boolean) {
if (!this.style) return this;
this._styleDirty = this._styleDirty || updateStyle;
this._sourcesDirty = true;
this._rerender();
return this;
}
/**
 * Call when a (re-)render of the map is required:
 * - The style has changed (`setPaintProperty()`, etc.)
 * - Source data has changed (e.g. tiles have finished loading)
 * - The map is moving (or just finished moving)
 * - A transition is in progress
 *
 * @returns {Map} this
 * @private
 */
_render() {
    // If the style has changed, the map is being zoomed, or a transition
    // is in progress:
    // - Apply style changes (in a batch)
    // - Recalculate zoom-dependent paint properties.
    if (this.style && this._styleDirty) {
        this._styleDirty = false;
        this.style.update();
        this.style._recalculate(this.transform.zoom);
    }
    // If we are in _render for any reason other than an in-progress paint
    // transition, update source caches to check for and load any tiles we
    // need for the current transform
    if (this.style && this._sourcesDirty) {
        this._sourcesDirty = false;
        this.style._updateSources(this.transform);
    }
    // Actually draw
    this.painter.render(this.style, {
        showTileBoundaries: this.showTileBoundaries,
        showOverdrawInspector: this._showOverdrawInspector,
        rotating: this.rotating,
        zooming: this.zooming
    });
    this.fire('render');
    // Fire 'load' exactly once: the first time everything settles.
    if (this.loaded() && !this._loaded) {
        this._loaded = true;
        this.fire('load');
    }
    // This frame has been consumed; _rerender() may schedule a new one.
    this._frameId = null;
    // Flag an ongoing transition
    if (!this.animationLoop.stopped()) {
        this._styleDirty = true;
    }
    // Schedule another render frame if it's needed.
    //
    // Even though `_styleDirty` and `_sourcesDirty` are reset in this
    // method, synchronous events fired during Style#update or
    // Style#_updateSources could have caused them to be set again.
    if (this._sourcesDirty || this._repaint || this._styleDirty) {
        this._rerender();
    }
    return this;
}
/**
 * Clean up and release all internal resources associated with this map.
 *
 * This includes DOM elements, event bindings, web workers, and WebGL resources.
 *
 * Use this method when you are done using the map and wish to ensure that it no
 * longer consumes browser resources. Afterwards, you must not call any other
 * methods on the map.
 */
remove() {
    // Stop syncing the camera with the URL hash, if that was enabled.
    if (this._hash) this._hash.remove();
    // Cancel any pending render frame.
    browser.cancelFrame(this._frameId);
    this._frameId = null;
    // Tears down the current style and its sources/workers.
    this.setStyle(null);
    if (typeof window !== 'undefined') {
        window.removeEventListener('resize', this._onWindowResize, false);
        window.removeEventListener('online', this._onWindowOnline, false);
    }
    // Proactively release the WebGL context where the extension exists.
    const extension = this.painter.gl.getExtension('WEBGL_lose_context');
    if (extension) extension.loseContext();
    removeNode(this._canvasContainer);
    removeNode(this._controlContainer);
    removeNode(this._missingCSSContainer);
    this._container.classList.remove('mapboxgl-map');
    this.fire('remove');
}
_rerender() {
if (this.style && !this._frameId) {
this._frameId = browser.frame(this._render);
}
}
// Connectivity returned: mark sources dirty so tile loads can be retried.
_onWindowOnline() {
    this._update();
}
_onWindowResize() {
if (this._trackResize) {
this.stop().resize()._update();
}
}
/**
* Gets and sets a Boolean indicating whether the map will render an outline
* around each tile. These tile boundaries are useful for debugging.
*
* @name showTileBoundaries
* @type {boolean}
* @instance
* @memberof Map
*/
get showTileBoundaries(): boolean { return !!this._showTileBoundaries; }
set showTileBoundaries(value: boolean) {
if (this._showTileBoundaries === value) return;
this._showTileBoundaries = value;
this._update();
}
/**
* Gets and sets a Boolean indicating whether the map will render boxes
* around all symbols in the data source, revealing which symbols
* were rendered or which were hidden due to collisions.
* This information is useful for debugging.
*
* @name showCollisionBoxes
* @type {boolean}
* @instance
* @memberof Map
*/
get showCollisionBoxes(): boolean { return !!this._showCollisionBoxes; }
set showCollisionBoxes(value: boolean) {
if (this._showCollisionBoxes === value) return;
this._showCollisionBoxes = value;
this.style._redoPlacement();
}
/**
 * Gets and sets a Boolean indicating whether the map should color-code
 * each fragment to show how many times it has been shaded.
 * White fragments have been shaded 8 or more times.
 * Black fragments have been shaded 0 times.
 * This information is useful for debugging.
 *
 * @name showOverdrawInspector
 * @type {boolean}
 * @instance
 * @memberof Map
 */
get showOverdrawInspector(): boolean { return !!this._showOverdrawInspector; }
set showOverdrawInspector(value: boolean) {
    if (this._showOverdrawInspector === value) return;
    this._showOverdrawInspector = value;
    this._update();
}
/**
* Gets and sets a Boolean indicating whether the map will
* continuously repaint. This information is useful for analyzing performance.
*
* @name repaint
* @type {boolean}
* @instance
* @memberof Map
*/
get repaint(): boolean { return !!this._repaint; }
set repaint(value: boolean) { this._repaint = value; this._update(); }
// show vertices
get vertices(): boolean { return !!this._vertices; }
set vertices(value: boolean) { this._vertices = value; this._update(); }
_onData(event: MapDataEvent) {
this._update(event.dataType === 'style');
this.fire(`${event.dataType}data`, event);
}
// Forwards 'dataloading' events from the style, namespaced by data type
// (e.g. 'styledataloading', 'sourcedataloading').
_onDataLoading(event: MapDataEvent) {
    this.fire(`${event.dataType}dataloading`, event);
}
}
// CommonJS export: the Map class is this module's sole public export.
module.exports = Map;
/**
 * Detaches `node` from the DOM. A node with no parent is a no-op.
 * @param node the element to remove
 */
function removeNode(node) {
    const parent = node.parentNode;
    if (parent) {
        parent.removeChild(node);
    }
}
/**
* Interface for interactive controls added to the map. This is a
* specification for implementers to model: it is not
* an exported method or class.
*
* Controls must implement `onAdd` and `onRemove`, and must own an
* element, which is often a `div` element. To use Mapbox GL JS's
* default control styling, add the `mapboxgl-ctrl` class to your control's
* node.
*
* @interface IControl
* @example
* // Control implemented as ES6 class
* class HelloWorldControl {
* onAdd(map) {
* this._map = map;
* this._container = document.createElement('div');
* this._container.className = 'mapboxgl-ctrl';
* this._container.textContent = 'Hello, world';
* return this._container;
* }
*
* onRemove() {
* this._container.parentNode.removeChild(this._container);
* this._map = undefined;
* }
* }
*
* // Control implemented as ES5 prototypical class
* function HelloWorldControl() { }
*
* HelloWorldControl.prototype.onAdd = function(map) {
* this._map = map;
* this._container = document.createElement('div');
* this._container.className = 'mapboxgl-ctrl';
* this._container.textContent = 'Hello, world';
* return this._container;
* };
*
* HelloWorldControl.prototype.onRemove = function () {
* this._container.parentNode.removeChild(this._container);
* this._map = undefined;
* };
*/
/**
* Register a control on the map and give it a chance to register event listeners
* and resources. This method is called by {@link Map#addControl}
* internally.
*
* @function
* @memberof IControl
* @instance
* @name onAdd
* @param {Map} map the Map this control will be added to
* @returns {HTMLElement} The control's container element. This should
* be created by the control and returned by onAdd without being attached
* to the DOM: the map will insert the control's element into the DOM
* as necessary.
*/
/**
* Unregister a control on the map and give it a chance to detach event listeners
* and resources. This method is called by {@link Map#removeControl}
* internally.
*
* @function
* @memberof IControl
* @instance
* @name onRemove
* @param {Map} map the Map this control will be removed from
* @returns {undefined} there is no required return value for this method
*/
/**
* Optionally provide a default position for this control. If this method
* is implemented and {@link Map#addControl} is called without the `position`
* parameter, the value returned by getDefaultPosition will be used as the
* control's position.
*
* @function
* @memberof IControl
* @instance
* @name getDefaultPosition
* @returns {string} a control position, one of the values valid in addControl.
*/
/**
* A [`Point` geometry](https://github.com/mapbox/point-geometry) object, which has
* `x` and `y` properties representing screen coordinates in pixels.
*
* @typedef {Object} Point
*/
/**
* A {@link Point} or an array of two numbers representing `x` and `y` screen coordinates in pixels.
*
* @typedef {(Point | Array<number>)} PointLike
*/
|
package com.weather.app.email.service;
import com.weather.app.email.domain.EmailRequestStatus;
/**
 * Contract for submitting a command that sends an email.
 */
public interface EmailServiceRequest {

    /**
     * Sends an email with the given body to the given recipient.
     *
     * @param toEmail recipient email address
     * @param body    message body
     * @return the resulting status of the email request
     */
    EmailRequestStatus sendEmailCommand(String toEmail, String body);
}
|
<reponame>safXcode/Article-preview-component
const socials = document.querySelector(".open-share")
const shareBtn = document.getElementById("share")
const removeShareBtn = document.getElementById("remove-share")
const struggleArrow = document.getElementById("struggle")

// Open/close the share panel and rotate the arrow in lockstep.
shareBtn.addEventListener('click', () => {
    socials.classList.toggle("clicked")
    struggleArrow.classList.toggle("open-share-beta")
})

// Close button: reset BOTH elements. Previously only "clicked" was
// removed, leaving "open-share-beta" toggled on the arrow, so the next
// share click opened the panel with the arrow state inverted.
removeShareBtn.addEventListener('click', () => {
    socials.classList.remove("clicked")
    struggleArrow.classList.remove("open-share-beta")
})
|
<gh_stars>0
/** Shape of an asymmetric key pair used for signing. */
export interface MSignatures {
    /** Private half of the key pair; must be kept secret. */
    privateKey: string
    /** Public half of the key pair; safe to share. */
    publicKey: string
}
/** Simple value object holding an asymmetric key pair. */
export class Signatures implements MSignatures {
    // Parameter properties declare and assign both fields in one step.
    constructor(public privateKey: string, public publicKey: string) {}
}
|
-- The pg_stat_statements extension requires its DSO to be loaded via
-- shared_preload_libraries. Luckily, the DSO is only needed when reading
-- from the pg_stat_statements view, so the extension creation below will
-- succeed either way.
CREATE EXTENSION IF NOT EXISTS pg_stat_statements;
|
import React from 'react';
import ProForm, { ProFormText } from '@ant-design/pro-form';
import { updateUserInfo } from '@/services/anew/user';
import { useModel } from 'umi';
import { message } from 'antd';
// Props for the profile form. `values` is the user record being edited;
// it supplies the fields' initial values and the update-target id.
export type BaseFormProps = {
    values?: API.UserInfo;
};
/**
 * Profile edit form (name / mobile / email). On a successful update it
 * mirrors the new values into the global `currentUser` state.
 */
const BaseForm: React.FC<BaseFormProps> = (props) => {
    const { values } = props;
    const { initialState, setInitialState } = useModel('@@initialState');

    // Persist the edited profile, then sync the app-wide user info.
    const handleFinish = async (v: API.UserInfo) => {
        const res = await updateUserInfo(v, values?.id);
        if (res.code === 200 && res.status === true) {
            message.success(res.message);
            const userInfo = {
                ...initialState?.currentUser,
                name: v.name,
                mobile: v.mobile,
                email: v.email,
            } as API.UserInfo;
            setInitialState({ ...initialState, currentUser: userInfo });
        }
    };

    return (
        <ProForm onFinish={handleFinish}>
            <ProForm.Group>
                <ProFormText
                    name="name"
                    label="姓名"
                    width="md"
                    rules={[{ required: true }]}
                    initialValue={values?.name}
                />
                <ProFormText
                    name="mobile"
                    label="手机"
                    width="md"
                    rules={[
                        {
                            pattern: /^1(?:70\d|(?:9[89]|8[0-24-9]|7[135-8]|66|5[0-35-9])\d|3(?:4[0-8]|[0-35-9]\d))\d{7}$/,
                            message: '请输入正确的手机号码',
                        },
                    ]}
                    initialValue={values?.mobile}
                />
                <ProFormText
                    name="email"
                    label="邮箱"
                    width="md"
                    rules={[
                        {
                            type: 'email',
                            message: '请输入正确的邮箱地址',
                        },
                    ]}
                    initialValue={values?.email}
                />
            </ProForm.Group>
        </ProForm>
    );
};

export default BaseForm;
|
#!/bin/bash
# Bioconda post-link script: download the RnaSeqSampleSizeData tarball
# from one of several mirrors, verify its md5, and install it into the
# environment's R library.
FN="RnaSeqSampleSizeData_1.24.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.13/data/experiment/src/contrib/RnaSeqSampleSizeData_1.24.0.tar.gz"
  "https://bioarchive.galaxyproject.org/RnaSeqSampleSizeData_1.24.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-rnaseqsamplesizedata/bioconductor-rnaseqsamplesizedata_1.24.0_src_all.tar.gz"
)
MD5="aa8a48d9a69e94a5e959ab84fc5fba19"

# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"

SUCCESS=0
for URL in "${URLS[@]}"; do
  # -f: fail on HTTP errors (a plain `curl url > file` exits 0 on a 404
  # and saves the error page); -L: follow mirror redirects.
  curl -fL "$URL" > "$TARBALL" || continue
  # Platform-specific md5sum checks (GNU md5sum on Linux, BSD md5 on macOS).
  if [[ $(uname -s) == "Linux" ]]; then
    # Check-file lines use the canonical two-space separator.
    if md5sum -c <<<"$MD5  $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done

if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi

# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
#!/usr/bin/env bash
# Prepares environment variables and deploys the sequential-jobs manifest.
# Fail fast: without this, a failed gcloud/sed step would still run
# `kubectl apply` against a broken manifest.
set -euo pipefail

echo "preparing..."
export GCLOUD_PROJECT=$(gcloud config get-value project)
export INSTANCE_REGION=europe-west6
export INSTANCE_ZONE=europe-west6-a
export PROJECT_NAME=batchjob
export CLUSTER_NAME=${PROJECT_NAME}-cluster
export CONTAINER_NAME=${PROJECT_NAME}-container

echo "setup"
gcloud config set compute/zone "${INSTANCE_ZONE}"

echo "name replace"
# Substitute the PROJECT_NAME placeholder in the manifest with the real
# GCP project id so image references resolve. Note: in-place edit; the
# substitution is a no-op on subsequent runs.
sed -i "s/PROJECT_NAME/${GCLOUD_PROJECT}/g" ../k8s/sequential-jobs.yaml

echo "create sequential job"
kubectl apply -f ../k8s/sequential-jobs.yaml
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2021.1 (64-bit)
#
# Filename : design_reciever.sh
# Simulator : Synopsys Verilog Compiler Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Fri Jan 14 22:31:43 +0800 2022
# SW Build 3247384 on Thu Jun 10 19:36:33 MDT 2021
#
# Copyright 1986-2021 Xilinx, Inc. All Rights Reserved.
#
# usage: design_reciever.sh [-help]
# usage: design_reciever.sh [-lib_map_path]
# usage: design_reciever.sh [-noclean_files]
# usage: design_reciever.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'design_reciever.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide: Logic Simulation (UG900)'
#
#*********************************************************************************************************
# Directory path for design sources and include directories (if any) wrt this path
ref_dir="."

# Honour the 'export_sim_ref_dir' environment variable when it is non-empty.
if [[ -n "$export_sim_ref_dir" ]]; then
  ref_dir="$export_sim_ref_dir"
fi

# Analyzer options: Verilog/SystemVerilog (vlogan) and VHDL (vhdlan)
vlogan_opts="-full64"
vhdlan_opts="-full64"

# VCS elaboration options
vcs_elab_opts="-full64 -debug_acc+pp+dmptf -t ps -licqueue -l elaborate.log"

# VCS simulation options
vcs_sim_opts="-ucli -licqueue -l simulate.log"

# Logical libraries the design sources are compiled into
design_libs=(xilinx_vip xpm xil_defaultlib lib_pkg_v1_0_2 fifo_generator_v13_2_5 lib_fifo_v1_0_14 lib_srl_fifo_v1_0_2 lib_cdc_v1_0_2 axi_datamover_v5_1_26 axi_sg_v4_1_13 axi_dma_v7_1_25 xlconcat_v2_1_4 xlconstant_v1_1_7 xlslice_v1_0_2 proc_sys_reset_v5_0_13 axi_lite_ipif_v3_0_4 interrupt_control_v3_1_4 axi_gpio_v2_0_26 smartconnect_v1_0 axi_infrastructure_v1_1_0 axi_register_slice_v2_1_24 axi_vip_v1_1_10 processing_system7_vip_v1_0_12)

# Root directory for the compiled simulation libraries
sim_lib_dir="vcs_lib"

# Banner
echo -e "design_reciever.sh - Script generated by export_simulation (Vivado v2021.1 (64-bit)-id)\n"
# Main steps: validate command-line arguments, prepare the library mappings /
# setup file, then drive the compile -> elaborate -> simulate flow in order.
run()
{
  # $1/$2: optional switch and its value (e.g. -lib_map_path <path>);
  # check_args and setup are defined later in this generated script.
  check_args $# $1
  setup $1 $2
  compile
  elaborate
  simulate
}
# RUN_STEP: <compile>
compile()
{
vlogan -work xilinx_vip $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/hdl/axi4stream_vip_axi4streampc.sv" \
"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/hdl/axi_vip_axi4pc.sv" \
"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/hdl/xil_common_vip_pkg.sv" \
"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/hdl/axi4stream_vip_pkg.sv" \
"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/hdl/axi_vip_pkg.sv" \
"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/hdl/axi4stream_vip_if.sv" \
"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/hdl/axi_vip_if.sv" \
"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/hdl/clk_vip_if.sv" \
"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/hdl/rst_vip_if.sv" \
2>&1 | tee -a vlogan.log
vlogan -work xpm $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"C:/Xilinx/Vivado/2021.1/data/ip/xpm/xpm_cdc/hdl/xpm_cdc.sv" \
"C:/Xilinx/Vivado/2021.1/data/ip/xpm/xpm_fifo/hdl/xpm_fifo.sv" \
"C:/Xilinx/Vivado/2021.1/data/ip/xpm/xpm_memory/hdl/xpm_memory.sv" \
2>&1 | tee -a vlogan.log
vhdlan -work xpm $vhdlan_opts \
"C:/Xilinx/Vivado/2021.1/data/ip/xpm/xpm_VCOMP.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/sim/urllc_core_inst_2.v" \
2>&1 | tee -a vlogan.log
vhdlan -work lib_pkg_v1_0_2 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/0513/hdl/lib_pkg_v1_0_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work fifo_generator_v13_2_5 $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/276e/simulation/fifo_generator_vlog_beh.v" \
2>&1 | tee -a vlogan.log
vhdlan -work fifo_generator_v13_2_5 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/276e/hdl/fifo_generator_v13_2_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work fifo_generator_v13_2_5 $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/276e/hdl/fifo_generator_v13_2_rfs.v" \
2>&1 | tee -a vlogan.log
vhdlan -work lib_fifo_v1_0_14 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/a5cb/hdl/lib_fifo_v1_0_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vhdlan -work lib_srl_fifo_v1_0_2 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/51ce/hdl/lib_srl_fifo_v1_0_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vhdlan -work lib_cdc_v1_0_2 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ef1e/hdl/lib_cdc_v1_0_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vhdlan -work axi_datamover_v5_1_26 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/1e21/hdl/axi_datamover_v5_1_vh_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vhdlan -work axi_sg_v4_1_13 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/4919/hdl/axi_sg_v4_1_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vhdlan -work axi_dma_v7_1_25 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/1faa/hdl/axi_dma_v7_1_vh_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_axi_dma_1_0/sim/urllc_core_inst_2_axi_dma_1_0.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work xlconcat_v2_1_4 $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/4b67/hdl/xlconcat_v2_1_vl_rfs.v" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconcat_1_0/sim/urllc_core_inst_2_xlconcat_1_0.v" \
2>&1 | tee -a vlogan.log
vlogan -work xlconstant_v1_1_7 $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/fcfc/hdl/xlconstant_v1_1_vl_rfs.v" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconstant_1_0/sim/urllc_core_inst_2_xlconstant_1_0.v" \
2>&1 | tee -a vlogan.log
vlogan -work xlslice_v1_0_2 $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/11d0/hdl/xlslice_v1_0_vl_rfs.v" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_1_0/sim/urllc_core_inst_2_xlslice_1_0.v" \
2>&1 | tee -a vlogan.log
vhdlan -work proc_sys_reset_v5_0_13 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/8842/hdl/proc_sys_reset_v5_0_vh_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_rst_ps7_0_200M_0/sim/urllc_core_inst_2_rst_ps7_0_200M_0.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconstant_0_0/sim/urllc_core_inst_2_xlconstant_0_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconcat_irq_0/sim/urllc_core_inst_2_xlconcat_irq_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_mux_0_0/sim/urllc_core_inst_2_mux_0_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_mux_1_0/sim/urllc_core_inst_2_mux_1_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_mux_3_0/sim/urllc_core_inst_2_mux_3_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_ad2dma_rtl_0_0/sim/urllc_core_inst_2_ad2dma_rtl_0_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_mux_2_0/sim/urllc_core_inst_2_mux_2_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_DDCWrapper_0_0/sim/urllc_core_inst_2_DDCWrapper_0_0.v" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ip/urllc_core_inst_2_FrameTrigger_0_0/sim/urllc_core_inst_2_FrameTrigger_0_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_div_n_0_0/sim/urllc_core_inst_2_div_n_0_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_mux_rtl_0/sim/urllc_core_inst_2_mux_rtl_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_ad_buf_0_0/sim/urllc_core_inst_2_ad_buf_0_0.v" \
2>&1 | tee -a vlogan.log
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_axi_dma_0_0/sim/urllc_core_inst_2_axi_dma_0_0.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconcat_0_0/sim/urllc_core_inst_2_xlconcat_0_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconstant_0_1/sim/urllc_core_inst_2_xlconstant_0_1.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_data_0/sim/urllc_core_inst_2_xlslice_data_0.v" \
2>&1 | tee -a vlogan.log
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_rst_ps7_0_120M_0/sim/urllc_core_inst_2_rst_ps7_0_120M_0.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconcat_1_1/sim/urllc_core_inst_2_xlconcat_1_1.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_ad2dma_rtl_0_1/sim/urllc_core_inst_2_ad2dma_rtl_0_1.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_frame_avaliable_0/sim/urllc_core_inst_2_xlslice_frame_avaliable_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_DUCWrapper_0_0/sim/urllc_core_inst_2_DUCWrapper_0_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_mux_0_1/sim/urllc_core_inst_2_mux_0_1.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconcat_irq_1/sim/urllc_core_inst_2_xlconcat_irq_1.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_mux_disable_sel2_0/sim/urllc_core_inst_2_mux_disable_sel2_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconstant_0_2/sim/urllc_core_inst_2_xlconstant_0_2.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_3_use_sender_iq_0/sim/urllc_core_inst_2_xlslice_3_use_sender_iq_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_1_use_sender_serial_0/sim/urllc_core_inst_2_xlslice_1_use_sender_serial_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_0_fun_out_0/sim/urllc_core_inst_2_xlslice_0_fun_out_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_2_disable_sync_0/sim/urllc_core_inst_2_xlslice_2_disable_sync_0.v" \
2>&1 | tee -a vlogan.log
vhdlan -work axi_lite_ipif_v3_0_4 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/66ea/hdl/axi_lite_ipif_v3_0_vh_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vhdlan -work interrupt_control_v3_1_4 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/a040/hdl/interrupt_control_v3_1_vh_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vhdlan -work axi_gpio_v2_0_26 $vhdlan_opts \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/65b4/hdl/axi_gpio_v2_0_vh_rfs.vhd" \
2>&1 | tee -a vhdlan.log
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_axi_gpio_0_0/sim/urllc_core_inst_2_axi_gpio_0_0.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconcat_0_1/sim/urllc_core_inst_2_xlconcat_0_1.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_4_psclk_0/sim/urllc_core_inst_2_xlslice_4_psclk_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_5_psen_0/sim/urllc_core_inst_2_xlslice_5_psen_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_6_psincdec_0/sim/urllc_core_inst_2_xlslice_6_psincdec_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_7_frame_trigger_clear_0/sim/urllc_core_inst_2_xlslice_7_frame_trigger_clear_0.v" \
2>&1 | tee -a vlogan.log
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_axi_gpio_1_0/sim/urllc_core_inst_2_axi_gpio_1_0.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlslice_8_use_input_serial_0/sim/urllc_core_inst_2_xlslice_8_use_input_serial_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconcat_1_2/sim/urllc_core_inst_2_xlconcat_1_2.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconstant_0_3/sim/urllc_core_inst_2_xlconstant_0_3.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_ila_0_0/sim/urllc_core_inst_2_ila_0_0.v" \
2>&1 | tee -a vlogan.log
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_proc_clock_reset_0/sim/urllc_core_inst_2_proc_clock_reset_0.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_static_0/urllc_core_inst_2_clk_wiz_static_0_clk_wiz.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_static_0/urllc_core_inst_2_clk_wiz_static_0.v" \
2>&1 | tee -a vlogan.log
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_proc_data_reset_8M_0/sim/urllc_core_inst_2_proc_data_reset_8M_0.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_0/sim/bd_b046_one_0.v" \
2>&1 | tee -a vlogan.log
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_1/sim/bd_b046_psr_aclk_0.vhd" \
2>&1 | tee -a vhdlan.log
vlogan -work smartconnect_v1_0 $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/sc_util_v1_0_vl_rfs.sv" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/c012/hdl/sc_switchboard_v1_0_vl_rfs.sv" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_2/sim/bd_b046_arinsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_3/sim/bd_b046_rinsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_4/sim/bd_b046_awinsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_5/sim/bd_b046_winsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_6/sim/bd_b046_binsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_7/sim/bd_b046_aroutsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_8/sim/bd_b046_routsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_9/sim/bd_b046_awoutsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_10/sim/bd_b046_woutsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_11/sim/bd_b046_boutsw_0.sv" \
2>&1 | tee -a vlogan.log
vlogan -work smartconnect_v1_0 $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/sc_node_v1_0_vl_rfs.sv" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_12/sim/bd_b046_arni_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_13/sim/bd_b046_rni_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_14/sim/bd_b046_awni_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_15/sim/bd_b046_wni_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_16/sim/bd_b046_bni_0.sv" \
2>&1 | tee -a vlogan.log
vlogan -work smartconnect_v1_0 $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ea34/hdl/sc_mmu_v1_0_vl_rfs.sv" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_17/sim/bd_b046_s00mmu_0.sv" \
2>&1 | tee -a vlogan.log
vlogan -work smartconnect_v1_0 $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/4fd2/hdl/sc_transaction_regulator_v1_0_vl_rfs.sv" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_18/sim/bd_b046_s00tr_0.sv" \
2>&1 | tee -a vlogan.log
vlogan -work smartconnect_v1_0 $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/8047/hdl/sc_si_converter_v1_0_vl_rfs.sv" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_19/sim/bd_b046_s00sic_0.sv" \
2>&1 | tee -a vlogan.log
vlogan -work smartconnect_v1_0 $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/b89e/hdl/sc_axi2sc_v1_0_vl_rfs.sv" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_20/sim/bd_b046_s00a2s_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_21/sim/bd_b046_sarn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_22/sim/bd_b046_srn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_23/sim/bd_b046_sawn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_24/sim/bd_b046_swn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_25/sim/bd_b046_sbn_0.sv" \
2>&1 | tee -a vlogan.log
# --- SmartConnect outer_0 (bd_b046), master-side path ------------------------
# Same shared-library-then-wrappers pattern as the slave side above.

# Shared sc_sc2axi RTL (SmartConnect -> AXI protocol bridge).
vlogan -work smartconnect_v1_0 $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/7005/hdl/sc_sc2axi_v1_0_vl_rfs.sv" \
2>&1 | tee -a vlogan.log
# bd_b046 master-port m00 instances ip_26..ip_31 (m00s2a + m00arn..m00bn).
vlogan -work xil_defaultlib $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_26/sim/bd_b046_m00s2a_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_27/sim/bd_b046_m00arn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_28/sim/bd_b046_m00rn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_29/sim/bd_b046_m00awn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_30/sim/bd_b046_m00wn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_31/sim/bd_b046_m00bn_0.sv" \
2>&1 | tee -a vlogan.log
# Shared sc_exit RTL (SmartConnect exit stage).
vlogan -work smartconnect_v1_0 $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/7bd7/hdl/sc_exit_v1_0_vl_rfs.sv" \
2>&1 | tee -a vlogan.log
# bd_b046 master-port instances ip_32..ip_53: exit stage m00e, then the full
# m01/m02/m03 port groups (s2a, arn/rn/awn/wn/bn nodes, and exit stage each).
vlogan -work xil_defaultlib $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_32/sim/bd_b046_m00e_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_33/sim/bd_b046_m01s2a_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_34/sim/bd_b046_m01arn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_35/sim/bd_b046_m01rn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_36/sim/bd_b046_m01awn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_37/sim/bd_b046_m01wn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_38/sim/bd_b046_m01bn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_39/sim/bd_b046_m01e_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_40/sim/bd_b046_m02s2a_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_41/sim/bd_b046_m02arn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_42/sim/bd_b046_m02rn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_43/sim/bd_b046_m02awn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_44/sim/bd_b046_m02wn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_45/sim/bd_b046_m02bn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_46/sim/bd_b046_m02e_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_47/sim/bd_b046_m03s2a_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_48/sim/bd_b046_m03arn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_49/sim/bd_b046_m03rn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_50/sim/bd_b046_m03awn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_51/sim/bd_b046_m03wn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_52/sim/bd_b046_m03bn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/ip/ip_53/sim/bd_b046_m03e_0.sv" \
2>&1 | tee -a vlogan.log
# --- bd_b046 top netlist, AXI support libraries, outer_0 wrapper -------------
# +v2k = Verilog-2001 analysis (plain .v files); -sverilog used for .sv files.

# bd_b046 top-level block-design netlist (instantiates everything above).
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/bd_0/sim/bd_b046.v" \
2>&1 | tee -a vlogan.log
# Shared axi_infrastructure library.
vlogan -work axi_infrastructure_v1_1_0 $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl/axi_infrastructure_v1_1_vl_rfs.v" \
2>&1 | tee -a vlogan.log
# Shared axi_register_slice library.
vlogan -work axi_register_slice_v2_1_24 $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/8f68/hdl/axi_register_slice_v2_1_vl_rfs.v" \
2>&1 | tee -a vlogan.log
# Shared AXI VIP (verification IP) library — SystemVerilog.
vlogan -work axi_vip_v1_1_10 $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/0980/hdl/axi_vip_v1_1_vl_rfs.sv" \
2>&1 | tee -a vlogan.log
# smartconnect_outer_0 top wrapper + clk_wiz_dynamic MMCM/PLL DRP controller.
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_0/sim/urllc_core_inst_2_smartconnect_outer_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/urllc_core_inst_2_clk_wiz_dynamic_0_mmcm_pll_drp.v" \
2>&1 | tee -a vlogan.log
# --- clk_wiz_dynamic VHDL sources (vhdlan) + clocking Verilog tops -----------
# VHDL analysis order matters: proc_common packages/utilities first, then the
# axi_lite_ipif blocks built on them, then the clk_wiz DRP / AXI config tops.
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/proc_common_v3_00_a/hdl/src/vhdl/urllc_core_inst_2_clk_wiz_dynamic_0_conv_funs_pkg.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/proc_common_v3_00_a/hdl/src/vhdl/urllc_core_inst_2_clk_wiz_dynamic_0_proc_common_pkg.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/proc_common_v3_00_a/hdl/src/vhdl/urllc_core_inst_2_clk_wiz_dynamic_0_ipif_pkg.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/proc_common_v3_00_a/hdl/src/vhdl/urllc_core_inst_2_clk_wiz_dynamic_0_family_support.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/proc_common_v3_00_a/hdl/src/vhdl/urllc_core_inst_2_clk_wiz_dynamic_0_family.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/proc_common_v3_00_a/hdl/src/vhdl/urllc_core_inst_2_clk_wiz_dynamic_0_soft_reset.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/proc_common_v3_00_a/hdl/src/vhdl/urllc_core_inst_2_clk_wiz_dynamic_0_pselect_f.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/axi_lite_ipif_v1_01_a/hdl/src/vhdl/urllc_core_inst_2_clk_wiz_dynamic_0_address_decoder.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/axi_lite_ipif_v1_01_a/hdl/src/vhdl/urllc_core_inst_2_clk_wiz_dynamic_0_slave_attachment.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/axi_lite_ipif_v1_01_a/hdl/src/vhdl/urllc_core_inst_2_clk_wiz_dynamic_0_axi_lite_ipif.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/urllc_core_inst_2_clk_wiz_dynamic_0_clk_wiz_drp.vhd" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/urllc_core_inst_2_clk_wiz_dynamic_0_axi_clk_config.vhd" \
2>&1 | tee -a vhdlan.log
# Verilog tops for clk_wiz_dynamic, xlconcat_0_2, and clk_wiz_100M.
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/urllc_core_inst_2_clk_wiz_dynamic_0_clk_wiz.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_dynamic_0/urllc_core_inst_2_clk_wiz_dynamic_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconcat_0_2/sim/urllc_core_inst_2_xlconcat_0_2.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_100M_0/urllc_core_inst_2_clk_wiz_100M_0_clk_wiz.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_clk_wiz_100M_0/urllc_core_inst_2_clk_wiz_100M_0.v" \
2>&1 | tee -a vlogan.log
# --- Zynq PS7 VIP, PS7 wrapper, and bd_7087 (smartconnect_outer_1) prelude ---

# Shared processing_system7 VIP library — SystemVerilog.
vlogan -work processing_system7_vip_v1_0_12 $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl/processing_system7_vip_v1_0_vl_rfs.sv" \
2>&1 | tee -a vlogan.log
# PS7 instance wrapper + bd_7087 constant driver ip_0 (bd_7087_one).
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_processing_system7_0_0/sim/urllc_core_inst_2_processing_system7_0_0.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_0/sim/bd_7087_one_0.v" \
2>&1 | tee -a vlogan.log
# bd_7087 reset synchronizer ip_1 (psr_aclk) — VHDL, hence vhdlan.
vhdlan -work xil_defaultlib $vhdlan_opts \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_1/sim/bd_7087_psr_aclk_0.vhd" \
2>&1 | tee -a vhdlan.log
# --- bd_7087 (smartconnect_outer_1) instance wrappers, ip_2..ip_62 -----------
# One vlogan call for all per-instance SystemVerilog wrappers: crossbar
# switches (arsw/rsw/awsw/wsw/bsw), per-slave-port s00..s04 chains
# (mmu -> tr -> sic -> a2s plus sarn/srn/sawn/swn/sbn nodes), and master
# ports m00..m02 (s2a, arn/rn/awn/wn/bn nodes, exit stage each).  These
# reference the shared smartconnect_v1_0 library analyzed earlier.
vlogan -work xil_defaultlib $vlogan_opts -sverilog +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_2/sim/bd_7087_arsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_3/sim/bd_7087_rsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_4/sim/bd_7087_awsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_5/sim/bd_7087_wsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_6/sim/bd_7087_bsw_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_7/sim/bd_7087_s00mmu_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_8/sim/bd_7087_s00tr_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_9/sim/bd_7087_s00sic_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_10/sim/bd_7087_s00a2s_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_11/sim/bd_7087_sarn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_12/sim/bd_7087_srn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_13/sim/bd_7087_sawn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_14/sim/bd_7087_swn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_15/sim/bd_7087_sbn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_16/sim/bd_7087_s01mmu_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_17/sim/bd_7087_s01tr_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_18/sim/bd_7087_s01sic_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_19/sim/bd_7087_s01a2s_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_20/sim/bd_7087_sarn_1.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_21/sim/bd_7087_srn_1.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_22/sim/bd_7087_s02mmu_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_23/sim/bd_7087_s02tr_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_24/sim/bd_7087_s02sic_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_25/sim/bd_7087_s02a2s_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_26/sim/bd_7087_sawn_1.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_27/sim/bd_7087_swn_1.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_28/sim/bd_7087_sbn_1.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_29/sim/bd_7087_s03mmu_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_30/sim/bd_7087_s03tr_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_31/sim/bd_7087_s03sic_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_32/sim/bd_7087_s03a2s_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_33/sim/bd_7087_sarn_2.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_34/sim/bd_7087_srn_2.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_35/sim/bd_7087_s04mmu_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_36/sim/bd_7087_s04tr_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_37/sim/bd_7087_s04sic_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_38/sim/bd_7087_s04a2s_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_39/sim/bd_7087_sawn_2.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_40/sim/bd_7087_swn_2.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_41/sim/bd_7087_sbn_2.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_42/sim/bd_7087_m00s2a_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_43/sim/bd_7087_m00arn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_44/sim/bd_7087_m00rn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_45/sim/bd_7087_m00awn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_46/sim/bd_7087_m00wn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_47/sim/bd_7087_m00bn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_48/sim/bd_7087_m00e_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_49/sim/bd_7087_m01s2a_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_50/sim/bd_7087_m01arn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_51/sim/bd_7087_m01rn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_52/sim/bd_7087_m01awn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_53/sim/bd_7087_m01wn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_54/sim/bd_7087_m01bn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_55/sim/bd_7087_m01e_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_56/sim/bd_7087_m02s2a_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_57/sim/bd_7087_m02arn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_58/sim/bd_7087_m02rn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_59/sim/bd_7087_m02awn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_60/sim/bd_7087_m02wn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_61/sim/bd_7087_m02bn_0.sv" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/ip/ip_62/sim/bd_7087_m02e_0.sv" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/6dcf" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/80cc/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/22b9/hdl/verilog" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/ec67/hdl" +incdir+"$ref_dir/../../../../urllc-demo-vivado.gen/sources_1/bd/design_reciever/bd/urllc_core_inst_2/ipshared/f42d/hdl" +incdir+"C:/Xilinx/Vivado/2021.1/data/xilinx_vip/include" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/bd_0/sim/bd_7087.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_smartconnect_outer_1/sim/urllc_core_inst_2_smartconnect_outer_1.v" \
"$ref_dir/../../../bd/urllc_core_inst_2/ip/urllc_core_inst_2_xlconcat_irq_all_0/sim/urllc_core_inst_2_xlconcat_irq_all_0.v" \
"$ref_dir/../../../bd/design_reciever/ip/design_reciever_xlconstant_0_0/sim/design_reciever_xlconstant_0_0.v" \
"$ref_dir/../../../bd/design_reciever/ip/design_reciever_xlconstant_1_0/sim/design_reciever_xlconstant_1_0.v" \
"$ref_dir/../../../bd/design_reciever/sim/design_reciever.v" \
2>&1 | tee -a vlogan.log
vlogan -work xil_defaultlib $vlogan_opts +v2k \
glbl.v \
2>&1 | tee -a vlogan.log
}
# RUN_STEP: <elaborate>
elaborate()
{
  # Elaborate the analyzed design with VCS; produces the simulation
  # executable 'design_reciever_simv' in the current directory.
  # $vcs_elab_opts is defined earlier in this script and must stay unquoted
  # so its options word-split correctly.
  vcs $vcs_elab_opts xil_defaultlib.design_reciever xil_defaultlib.glbl -o design_reciever_simv
}
# RUN_STEP: <simulate>
simulate()
{
  # Run the elaborated simulation executable, driven by simulate.do.
  # $vcs_sim_opts is defined earlier in this script (unquoted for word-splitting).
  ./design_reciever_simv $vcs_sim_opts -do simulate.do
}
# STEP: setup
setup()
{
  # Dispatch on the first command-line switch and prepare library mappings.
  # $1 = switch name, $2 = optional value for the switch.
  case $1 in
    "-lib_map_path" )
      if [[ ($2 == "") ]]; then
        echo -e "ERROR: Simulation library directory path not specified (type \"./design_reciever.sh -help\" for more information)\n"
        exit 1
      fi
      # Quote the path so directories containing spaces are handled correctly
      create_lib_mappings "$2"
      ;;
    "-reset_run" )
      reset_run
      echo -e "INFO: Simulation run files deleted.\n"
      exit 0
      ;;
    "-noclean_files" )
      # do not remove previous data
      ;;
    * )
      # Default: (re)create the library mappings; $2 may be empty here
      create_lib_mappings "$2"
  esac
  create_lib_dir
  # Add any setup/initialization commands here:-
  # <user specific commands>
}
# Define design library mappings
create_lib_mappings()
{
  # Write 'synopsys_sim.setup' mapping each design library to its compile
  # directory. With no argument ($1 empty) and an existing setup file, the
  # current mappings are kept; otherwise the file is regenerated.
  file="synopsys_sim.setup"
  if [[ -e "$file" ]]; then
    if [[ ($1 == "") ]]; then
      return
    else
      # Regular file: plain -f is sufficient (was 'rm -rf')
      rm -f "$file"
    fi
  fi
  touch "$file"
  lib_map_path=""
  if [[ ($1 != "") ]]; then
    lib_map_path="$1"
  fi
  # design_libs and sim_lib_dir are defined earlier in this script
  for (( i=0; i<${#design_libs[*]}; i++ )); do
    lib="${design_libs[i]}"
    mapping="$lib:$sim_lib_dir/$lib"
    # Quote expansions so paths containing spaces/globs are written verbatim
    echo "$mapping" >> "$file"
  done
  if [[ ($lib_map_path != "") ]]; then
    # Chain to the pre-compiled simulation library mappings
    incl_ref="OTHERS=$lib_map_path/synopsys_sim.setup"
    echo "$incl_ref" >> "$file"
  fi
}
# Create design library directory paths
create_lib_dir()
{
  # Recreate the per-library compile output directories from scratch.
  # sim_lib_dir and design_libs are defined earlier in this script;
  # expansions are quoted so paths with spaces are handled safely.
  if [[ -e "$sim_lib_dir" ]]; then
    rm -rf "$sim_lib_dir"
  fi
  for (( i=0; i<${#design_libs[*]}; i++ )); do
    lib="${design_libs[i]}"
    lib_dir="$sim_lib_dir/$lib"
    if [[ ! -e "$lib_dir" ]]; then
      mkdir -p "$lib_dir"
    fi
  done
}
# Delete generated data from the previous run
reset_run()
{
  # Delete all files/directories generated by a previous simulation run,
  # then recreate the (now empty) library directories.
  files_to_remove=(ucli.key design_reciever_simv vlogan.log vhdlan.log compile.log elaborate.log simulate.log .vlogansetup.env .vlogansetup.args .vcs_lib_lock scirocco_command.log 64 AN.DB csrc design_reciever_simv.daidir)
  # Idiomatic quoted array iteration (was an index-based C-style loop)
  for file in "${files_to_remove[@]}"; do
    if [[ -e "$file" ]]; then
      rm -rf "$file"
    fi
  done
  create_lib_dir
}
# Check command line arguments
check_args()
{
  # Validate the command line: $1 = number of arguments, $2 = first switch.
  # A single argument must be one of the supported switches; anything else
  # is reported and the script exits.
  if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
    echo -e "ERROR: Unknown option specified '$2' (type \"./design_reciever.sh -help\" for more information)\n"
    exit 1
  fi
  # -help/-h prints the usage text; usage() exits the script itself
  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}
# Script usage
usage()
{
  # Print the supported command-line switches and exit.
  # NOTE: $msg is intentionally unquoted in the echo below so the trailing
  # backslash-continuations collapse; the embedded \n sequences are expanded
  # by 'echo -e'. Do not re-quote without re-testing the output.
  msg="Usage: design_reciever.sh [-help]\n\
Usage: design_reciever.sh [-lib_map_path]\n\
Usage: design_reciever.sh [-reset_run]\n\
Usage: design_reciever.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
  echo -e $msg
  exit 1
}
# Launch script
# Entry point: run() (defined earlier in this file) validates the arguments
# and executes the requested steps. $1/$2 are deliberately unquoted so an
# absent switch is passed as "no argument" rather than an empty string.
run $1 $2
|
/*
* JaamSim Discrete Event Simulation
* Copyright (C) 2011 Ausenco Engineering Canada Inc.
* Copyright (C) 2018-2020 JaamSim Software Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jaamsim.ui;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.WindowEvent;
import java.awt.event.WindowFocusListener;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import javax.swing.JFrame;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JTree;
import javax.swing.event.TreeModelEvent;
import javax.swing.event.TreeModelListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import com.jaamsim.DisplayModels.IconModel;
import com.jaamsim.Graphics.DisplayEntity;
import com.jaamsim.Graphics.EntityLabel;
import com.jaamsim.basicsim.Entity;
import com.jaamsim.basicsim.EntityIterator;
import com.jaamsim.basicsim.ErrorException;
import com.jaamsim.basicsim.JaamSimModel;
import com.jaamsim.basicsim.ObjectType;
import com.jaamsim.basicsim.Simulation;
import com.jaamsim.input.Input;
import com.jaamsim.units.Unit;
/**
 * The "Object Selector" window: a tree of every named entity in the model,
 * organised as palette -> object type -> entity, with the Simulation instance
 * pinned to the top as a single leaf. Selecting a node selects that entity;
 * the tree also supports in-place renaming, Delete, Ctrl-C/Ctrl-V copy/paste,
 * F1 help, and a right-click context menu.
 */
public class ObjectSelector extends FrameBox {
    private static ObjectSelector myInstance;  // singleton instance; cleared by killInstance()
    // Tree view properties
    private final DefaultMutableTreeNode top;      // invisible root node of the tree
    private final DefaultTreeModel treeModel;
    private final JTree tree;
    private final JScrollPane treeView;
    static Entity currentEntity;   // entity presently selected in the tree (shared with the listeners)
    private long entSequence;      // model entity-sequence number at the last tree rebuild
    public ObjectSelector() {
        super( "Object Selector" );
        setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
        addWindowListener(FrameBox.getCloseListener("ShowObjectSelector"));
        addWindowFocusListener(new MyFocusListener());
        // Build an empty tree with a hidden root; updateTree() fills it in later
        top = new DefaultMutableTreeNode();
        treeModel = new DefaultTreeModel(top);
        tree = new JTree();
        tree.setModel(treeModel);
        tree.getSelectionModel().setSelectionMode( TreeSelectionModel.SINGLE_TREE_SELECTION );
        tree.setRootVisible(false);
        tree.setShowsRootHandles(true);
        tree.setInvokesStopCellEditing(true);
        treeView = new JScrollPane(tree);
        getContentPane().add(treeView);
        entSequence = 0;
        addComponentListener(FrameBox.getSizePosAdapter(this, "ObjectSelectorSize", "ObjectSelectorPos"));
        // Listeners: selection tracking, in-place rename, context menu, shortcuts
        tree.addTreeSelectionListener( new MyTreeSelectionListener() );
        treeModel.addTreeModelListener( new MyTreeModelListener(tree) );
        tree.addMouseListener(new MyMouseListener());
        tree.addKeyListener(new MyKeyListener());
    }
    /**
     * Makes the given entity the selected node in the tree, rebuilding the
     * tree first if the model's entity list has changed.
     * @param ent - entity to select, or null to clear the selection.
     */
    @Override
    public void setEntity(Entity ent) {
        if (ent == currentEntity)
            return;
        currentEntity = ent;
        if (tree == null)
            return;
        JaamSimModel simModel = GUIFrame.getJaamSimModel();
        if (simModel == null || simModel.getSimulation() == null)
            return;
        // Rebuild the tree if entities were added/removed since the last build
        long curSequence = simModel.getEntitySequence();
        if (entSequence != curSequence) {
            entSequence = curSequence;
            updateTree(simModel);
        }
        if (currentEntity == null) {
            tree.setSelectionPath(null);
            tree.setEditable(false);
            return;
        }
        // Search the whole tree for the node holding this entity and select it
        tree.setEditable(true);
        DefaultMutableTreeNode root = (DefaultMutableTreeNode)tree.getModel().getRoot();
        Enumeration<?> e = root.depthFirstEnumeration();
        while (e.hasMoreElements()) {
            DefaultMutableTreeNode aNode = (DefaultMutableTreeNode)e.nextElement();
            if (aNode.getUserObject() == currentEntity) {
                TreePath path = new TreePath(aNode.getPath());
                tree.scrollPathToVisible(path);
                tree.setSelectionPath(path);
                return;
            }
        }
        // Entity not found in the tree
        tree.setSelectionPath(null);
        tree.setEditable(false);
    }
    /**
     * Rebuilds the tree if the entity list has changed. Skipped while the
     * window is hidden or the simulation is running, to avoid rebuild churn.
     */
    @Override
    public void updateValues(double simTime) {
        GUIFrame gui = GUIFrame.getInstance();
        JaamSimModel simModel = GUIFrame.getJaamSimModel();
        if (simModel == null || simModel.getSimulation() == null)
            return;
        if (!this.isVisible() || gui == null || simModel.getSimState() == JaamSimModel.SIM_STATE_RUNNING)
            return;
        long curSequence = simModel.getEntitySequence();
        if (entSequence != curSequence) {
            entSequence = curSequence;
            updateTree(simModel);
        }
    }
    /**
     * Forces the next setEntity/updateValues call to rebuild the tree by
     * invalidating the cached entity-sequence number.
     */
    public static void allowUpdate() {
        if (myInstance == null)
            return;
        myInstance.entSequence = 0;
    }
    /**
     * Returns the only instance of the Object Selector
     */
    public static synchronized ObjectSelector getInstance() {
        if (myInstance == null)
            myInstance = new ObjectSelector();
        // Reset the horizontal scroll so the tree is shown from its left edge
        myInstance.treeView.getHorizontalScrollBar().getModel().setValue(0);
        return myInstance;
    }
    // Drops the singleton so the next getInstance() builds a fresh window
    private synchronized static void killInstance() {
        myInstance = null;
    }
    @Override
    public void dispose() {
        killInstance();
        currentEntity = null;
        super.dispose();
    }
    /**
     * Rebuilds the whole tree from the model's current entities, then
     * restores the previous selection and all previously expanded paths.
     */
    private void updateTree(JaamSimModel simModel) {
        if (tree == null || top == null)
            return;
        // Store all the expanded paths
        Enumeration<TreePath> expandedPaths = tree.getExpandedDescendants(new TreePath(top));
        // Identify the selected entity (cannot use currentEntity -- would race with setEntity)
        Entity selectedEnt = null;
        TreePath selectedPath = tree.getSelectionPath();
        if (selectedPath != null) {
            Object selectedObj = ((DefaultMutableTreeNode)selectedPath.getLastPathComponent()).getUserObject();
            if (selectedObj instanceof Entity)
                selectedEnt = (Entity)selectedObj;
        }
        // Clear the present tree
        top.removeAllChildren();
        // Add the instance for Simulation to the top of the tree as a single leaf node
        Simulation simulation = simModel.getSimulation();
        top.add(new DefaultMutableTreeNode(simulation, false));
        // Create the tree structure for palettes and object types in the correct order
        for (int i = 0; i < simModel.getObjectTypes().size(); i++) {
            try {
                final ObjectType type = simModel.getObjectTypes().get(i);
                if (type == null)
                    continue;
                String paletteName = type.getPaletteName();
                String typeName = type.getName();
                // Find or create the node for the palette
                DefaultMutableTreeNode paletteNode = getNodeFor_In(paletteName, top);
                if (paletteNode == null) {
                    paletteNode = new DefaultMutableTreeNode(paletteName);
                    top.add(paletteNode);
                }
                // Add the node for the Object Type to the palette
                // (a type named after its palette shares the palette node)
                if (typeName == null || typeName.equals(paletteName))
                    continue;
                DefaultMutableTreeNode typeNode = new DefaultMutableTreeNode(typeName, true);
                paletteNode.add(typeNode);
            }
            // The object type list may shrink while it is being scanned
            catch (IndexOutOfBoundsException e) {}
        }
        // Prepare a sorted list of entities
        EntityIterator<Entity> entIt = GUIFrame.getJaamSimModel().getClonesOfIterator(Entity.class);
        ArrayList<Entity> entityList = new ArrayList<>();
        for (Entity ent : entIt) {
            try {
                // The instance for Simulation has already been added
                if (ent == simulation)
                    continue;
                // Do not include the units
                if (ent instanceof Unit)
                    continue;
                // Do not include the icons for objects
                if (ent instanceof IconModel)
                    continue;
                entityList.add(ent);
            }
            catch (IndexOutOfBoundsException e) {}
        }
        try {
            Collections.sort(entityList, selectorSortOrder);
        }
        // Sorting may fail if entities are renamed/deleted during the sort
        catch (Throwable t) {}
        // Loop through the entities in the model
        for (int i=0; i<entityList.size(); i++) {
            try {
                final Entity ent = entityList.get(i);
                // Determine the object type for this entity
                final ObjectType type = ent.getObjectType();
                if (type == null)
                    continue;
                String paletteName = type.getPaletteName();
                String typeName = type.getName();
                // Find the palette node for this entity
                DefaultMutableTreeNode paletteNode = getNodeFor_In(paletteName, top);
                if (paletteNode == null)
                    continue;
                // Find the object type node for this entity
                DefaultMutableTreeNode typeNode = getNodeFor_In(typeName, paletteNode);
                if (typeName != null && typeName.equals(paletteName)) {
                    typeNode = paletteNode;
                }
                if (typeNode == null)
                    continue;
                // Add the entity to the object type node
                DefaultMutableTreeNode entityNode = new DefaultMutableTreeNode(ent, false);
                typeNode.add(entityNode);
            }
            catch (IndexOutOfBoundsException e) {}
        }
        // Remove any object type tree nodes that have no entities
        ArrayList<DefaultMutableTreeNode> nodesToRemove = new ArrayList<>();
        Enumeration<?> paletteEnum = top.children();
        while (paletteEnum.hasMoreElements()) {
            DefaultMutableTreeNode paletteNode = (DefaultMutableTreeNode)paletteEnum.nextElement();
            Enumeration<?> typeEnum = paletteNode.children();
            while (typeEnum.hasMoreElements()) {
                DefaultMutableTreeNode typeNode = (DefaultMutableTreeNode)typeEnum.nextElement();
                // Nodes that do not allow children are entity leaves, not types
                if (!typeNode.getAllowsChildren())
                    continue;
                if (typeNode.isLeaf())
                    nodesToRemove.add(typeNode);
            }
            // Removal is deferred so the enumeration above is not invalidated
            for (DefaultMutableTreeNode typeNode : nodesToRemove) {
                paletteNode.remove(typeNode);
            }
            nodesToRemove.clear();
        }
        // Remove any palettes that have no object types left
        paletteEnum = top.children();
        while (paletteEnum.hasMoreElements()) {
            DefaultMutableTreeNode paletteNode = (DefaultMutableTreeNode)paletteEnum.nextElement();
            // Do not remove any of the special nodes such as the instance for Simulation
            if (!paletteNode.getAllowsChildren())
                continue;
            if (paletteNode.isLeaf())
                nodesToRemove.add(paletteNode);
        }
        for (DefaultMutableTreeNode paletteNode : nodesToRemove) {
            top.remove(paletteNode);
        }
        // Refresh the tree
        treeModel.reload(top);
        // Restore the path to the selected entity
        if (selectedEnt != null) {
            TreePath path = ObjectSelector.getPathToEntity(selectedEnt, top);
            if (path != null)
                tree.setSelectionPath(path);
        }
        // Restore all the expanded paths
        // (old nodes are matched to the rebuilt nodes by palette/type name)
        while (expandedPaths != null && expandedPaths.hasMoreElements()) {
            TreePath oldPath = expandedPaths.nextElement();
            if (oldPath.getPathCount() < 2)
                continue;
            // Path to a palette
            DefaultMutableTreeNode oldPaletteNode = (DefaultMutableTreeNode) (oldPath.getPath())[1];
            String paletteName = (String) (oldPaletteNode.getUserObject());
            DefaultMutableTreeNode paletteNode = getNodeFor_In(paletteName, top);
            if (paletteNode == null)
                continue;
            if (oldPath.getPathCount() == 2) {
                Object[] nodeList = { top, paletteNode };
                tree.expandPath(new TreePath(nodeList));
                continue;
            }
            // Path to an object type
            DefaultMutableTreeNode oldTypeNode = (DefaultMutableTreeNode) (oldPath.getPath())[2];
            String typeName = (String) (oldTypeNode.getUserObject());
            DefaultMutableTreeNode typeNode = getNodeFor_In(typeName, paletteNode);
            if (typeNode == null)
                continue;
            Object[] nodeList = { top, paletteNode, typeNode };
            tree.expandPath(new TreePath(nodeList));
        }
    }
    // Orders entities by the model's UI sort order, with nulls last
    private static class EntityComparator implements Comparator<Entity> {
        @Override
        public int compare(Entity ent0, Entity ent1) {
            // Put any null entities at the end of the list
            if (ent0 == null && ent1 == null)
                return 0;
            if (ent0 != null && ent1 == null)
                return -1;
            if (ent0 == null && ent1 != null)
                return 1;
            // Otherwise, sort in natural order
            return Input.uiSortOrder.compare(ent0, ent1);
        }
    }
    private static final Comparator<Entity> selectorSortOrder = new EntityComparator();
    /**
     * Returns a tree node for the specified userObject in the specified parent.
     * If a node already exists for this parent, it is returned. If it does
     * not exist, then null is returned. Strings are matched by equals();
     * all other objects are matched by identity.
     * @param userObject - object for the tree node.
     * @param parent - object's parent
     * @return tree node for the object.
     */
    private static DefaultMutableTreeNode getNodeFor_In(Object userObject, DefaultMutableTreeNode parent) {
        // Loop through the parent's children
        Enumeration<?> enumeration = parent.children();
        while (enumeration.hasMoreElements()) {
            DefaultMutableTreeNode eachNode = (DefaultMutableTreeNode)enumeration.nextElement();
            if (eachNode.getUserObject() == userObject ||
                    userObject instanceof String && ((String) userObject).equals(eachNode.getUserObject()) )
                return eachNode;
        }
        return null;
    }
    /**
     * Returns the root -> palette -> type -> entity path for the given
     * entity, or null if any link of that chain is missing from the tree.
     */
    private static TreePath getPathToEntity(Entity ent, DefaultMutableTreeNode root) {
        final ObjectType type = ent.getObjectType();
        if (type == null)
            return null;
        DefaultMutableTreeNode paletteNode = getNodeFor_In(type.getPaletteName(), root);
        if (paletteNode == null)
            return null;
        DefaultMutableTreeNode typeNode = getNodeFor_In(type.getName(), paletteNode);
        if (typeNode == null)
            return null;
        DefaultMutableTreeNode entityNode = getNodeFor_In(ent, typeNode);
        if (entityNode == null)
            return null;
        Object[] nodeList = { root, paletteNode, typeNode, entityNode };
        return new TreePath(nodeList);
    }
    // Propagates tree selection to the rest of the UI
    static class MyTreeSelectionListener implements TreeSelectionListener {
        @Override
        public void valueChanged( TreeSelectionEvent e ) {
            JTree tree = (JTree) e.getSource();
            DefaultMutableTreeNode node = (DefaultMutableTreeNode)tree.getLastSelectedPathComponent();
            if(node == null) {
                // This occurs when we set no selected entity (null) and then
                // force the tree to have a null selected node
                return;
            }
            Object userObj = node.getUserObject();
            if (userObj instanceof Entity) {
                FrameBox.setSelectedEntity((Entity)userObj, false);
            }
            else {
                // Palette/type nodes carry Strings; clear the selection for those
                FrameBox.setSelectedEntity(null, false);
            }
        }
    }
    // Handles in-place renaming: after editing, the node's user object is a
    // String holding the new name; rename the entity and restore the object.
    static class MyTreeModelListener implements TreeModelListener {
        private final JTree tree;
        public MyTreeModelListener(JTree tree) {
            this.tree = tree;
        }
        @Override
        public void treeNodesChanged( TreeModelEvent e ) {
            DefaultMutableTreeNode node = (DefaultMutableTreeNode)tree.getLastSelectedPathComponent();
            String newName = ((String)node.getUserObject()).trim();
            try {
                GUIFrame.getInstance().renameEntity(currentEntity, newName);
                // Keep the on-screen label in sync with the new entity name
                if (currentEntity instanceof DisplayEntity) {
                    DisplayEntity dEnt = (DisplayEntity) currentEntity;
                    EntityLabel label = EntityLabel.getLabel(dEnt);
                    if (label != null)
                        label.updateForTargetNameChange();
                }
            }
            catch (ErrorException err) {
                GUIFrame.showErrorDialog("Input Error", err.getMessage());
            }
            finally {
                // Put the entity back as the node's user object (even on failure)
                node.setUserObject(currentEntity);
                GUIFrame.updateUI();
            }
        }
        @Override
        public void treeNodesInserted(TreeModelEvent e) {}
        @Override
        public void treeNodesRemoved(TreeModelEvent e) {}
        @Override
        public void treeStructureChanged(TreeModelEvent e) {}
    }
    // Shows the context menu on a right-click over the selected entity
    static class MyMouseListener implements MouseListener {
        private final JPopupMenu menu= new JPopupMenu();
        @Override
        public void mouseClicked(MouseEvent e) {
            if(e.getButton() != MouseEvent.BUTTON3)
                return;
            if(currentEntity == null)
                return;
            // Right mouse click on a movable DisplayEntity
            menu.removeAll();
            ContextMenu.populateMenu(menu, currentEntity, -1, e.getComponent(), e.getX(), e.getY());
            menu.show(e.getComponent(), e.getX(), e.getY());
        }
        @Override
        public void mouseEntered(MouseEvent e) {}
        @Override
        public void mouseExited(MouseEvent e) {}
        @Override
        public void mousePressed(MouseEvent e) {}
        @Override
        public void mouseReleased(MouseEvent e) {}
    }
    // Keyboard shortcuts: Delete, Ctrl-C/Ctrl-V, and F1 (context help)
    static class MyKeyListener implements KeyListener {
        @Override
        public void keyReleased(KeyEvent e) {
            if (e.getKeyCode() != KeyEvent.VK_DELETE)
                return;
            try {
                GUIFrame.getInstance().deleteEntity(currentEntity)
;
                FrameBox.setSelectedEntity(null, false);
            }
            catch (ErrorException err) {
                GUIFrame.invokeErrorDialog("User Error", err.getMessage());
            }
        }
        @Override
        public void keyPressed(KeyEvent e) {
            int keyCode = e.getKeyCode();
            boolean control = e.isControlDown();
            if (control && keyCode == KeyEvent.VK_C) {
                if (currentEntity != null) {
                    GUIFrame.getInstance().copyToClipboard(currentEntity);
                }
            }
            if (control && keyCode == KeyEvent.VK_V) {
                GUIFrame.getInstance().pasteEntityFromClipboard();
            }
            if (keyCode == KeyEvent.VK_F1) {
                // Open the help page for the selected entity's object type
                String topic = "";
                if (currentEntity != null)
                    topic = currentEntity.getObjectType().getName();
                HelpBox.getInstance().showDialog(topic);
            }
        }
        @Override
        public void keyTyped(KeyEvent e) {}
    }
    static class MyFocusListener implements WindowFocusListener {
        @Override
        public void windowGainedFocus(WindowEvent arg0) {}
        @Override
        public void windowLostFocus(WindowEvent e) {
            // Complete any editing that has started
            // NOTE(review): assumes myInstance is non-null whenever this window
            // loses focus -- confirm dispose() cannot race with this callback
            ObjectSelector.myInstance.tree.stopEditing();
        }
    }
}
|
<reponame>au-research/igsn-metadata-portal
'use strict'
const gulp = require('gulp')
const purgecss = require('gulp-purgecss')
const uglifycss = require('gulp-uglifycss')
const watch = require('gulp-watch')
const environments = require('gulp-environments')
const production = environments.production
const browserify = require('browserify')
const babelify = require('babelify')
const source = require('vinyl-source-stream')
const buffer = require('vinyl-buffer')
const sourcemaps = require('gulp-sourcemaps')
const uglify = require('gulp-terser')
const rename = require('gulp-rename')
const size = require('gulp-size')
gulp.task('css', function () {
  const postcss = require('gulp-postcss')
  // PostCSS pipeline: inline @imports, expand Tailwind, add vendor prefixes.
  const postcssPlugins = [
    require('postcss-import'),
    require('tailwindcss'),
    require('autoprefixer'),
  ]
  // Purge unused selectors and minify only for production builds.
  return gulp.src('src/main/resources/css/portal.css')
    .pipe(postcss(postcssPlugins))
    .pipe(production(purgecss({
      content: ['src/main/resources/templates/**/*.html'],
      defaultExtractor: (content) => content.match(/[A-Za-z0-9-_:/]+/g) || []
    })))
    .pipe(production(uglifycss()))
    .pipe(rename('bundle.css'))
    .pipe(size({ showFiles: true }))
    .pipe(gulp.dest('src/main/resources/static/css/'))
})
gulp.task('copy-template', function () {
  // Mirror the Thymeleaf templates into the compiled-classes directory.
  const templates = gulp.src('src/main/resources/templates/**/*.html')
  return templates.pipe(gulp.dest('target/classes/templates/'))
})
gulp.task('copy-static', function () {
  // Mirror the static assets into the compiled-classes directory.
  const assets = gulp.src('src/main/resources/static/**/*')
  return assets.pipe(gulp.dest('target/classes/static/'))
})
// Third-party browser libraries: bundled once into vendor.js (js-libs task)
// and marked as external in the application bundle (js task).
const libs = [
  'qrcode-generator',
  'wicket',
  'leaflet',
  'wicket/wicket-leaflet',
  'tippy.js'
]
gulp.task('js-libs', function () {
  // Bundle the third-party libraries into vendor.js (always minified),
  // with source maps written alongside in ./maps.
  const bundler = browserify({
    debug: true
  })
  for (const lib of libs) {
    bundler.require(lib)
  }
  return bundler.bundle()
    .pipe(source('vendor.js'))
    .pipe(buffer())
    .pipe(sourcemaps.init())
    .pipe(uglify())
    .pipe(sourcemaps.write('./maps'))
    .pipe(size({ showFiles: true }))
    .pipe(gulp.dest('./src/main/resources/static/js/'))
})
gulp.task('js', function () {
  // Transpile and bundle the application code into bundle.js; the vendor
  // libraries are marked external so they are served from vendor.js instead.
  const bundler = browserify({
    entries: ['./src/main/resources/js/app.js']
  })
  for (const lib of libs) {
    bundler.external(lib)
  }
  const transpiled = bundler.transform(babelify.configure({
    presets: ['@babel/preset-env']
  }))
  return transpiled.bundle()
    .pipe(source('bundle.js'))
    .pipe(buffer())
    .pipe(sourcemaps.init())
    .pipe(production(uglify()))  // minify only in production builds
    .pipe(sourcemaps.write('./maps'))
    .pipe(size({ showFiles: true }))
    .pipe(gulp.dest('./src/main/resources/static/js/'))
})
gulp.task('watch', function () {
  // Re-run the matching task whenever its sources change.
  const rules = [
    ['src/main/resources/css/**/*.css', 'css'],
    ['src/main/resources/js/**/*.js', 'js'],
    ['src/main/resources/static/**/*', 'copy-static'],
    ['src/main/resources/templates/**/*.html', 'copy-template'],
  ]
  rules.forEach(function ([glob, taskName]) {
    watch(glob, gulp.series(taskName))
  })
})
// Full build: templates first, then styles, vendor bundle, app bundle, assets.
gulp.task('build', gulp.series('copy-template', 'css', 'js-libs', 'js', 'copy-static'))
<filename>Boggle game/SC101_Assignment5/boggle.py<gh_stars>0
"""
File: boggle.py
Name: Sophie
----------------------------------------
TODO: after key in 16 letters with correct formate, app will show possible voc whose letters is longer than 3 letters.
"""
# This is the file name of the dictionary txt file
# we will be checking if a word exists by searching through it
FILE = 'dictionary.txt'
#globle variable
dic_list = {'a': [], 'b': [], 'c': [],'d': [],'e': [], 'f': [], 'g': [], 'h': [], 'i': [], 'j': [], 'k': [], 'l': [],
'm': [], 'n': [],'o': [], 'p': [], 'q': [], 'r': [], 's': [], 't': [], 'u': [], 'v': [], 'w': [], 'x': [],
'y': [], 'z': []}
#this control if 16 letters are key in with correct format
complete = None
def main():
    """
    Read the 4x4 board from the user; when the input format is correct,
    search the board and print every dictionary word of 4+ letters found.
    """
    read_dictionary()
    letter_list = []
    row_num = 1
    # Only an explicit False means bad input (None/True both proceed, as in
    # the original control flow).
    if input_boggle(row_num, letter_list) is not False:
        print('Searching....')
        found_voc_list = []
        for start in range(len(letter_list)):
            # Fresh working copy of the board for each starting cell.
            board = letter_list.copy()
            found_boggle(start, 0, '', board, letter_list, found_voc_list, [])
        print(f'There are {len(found_voc_list)} words in total.')
def found_boggle(start_position, steps_moved, current_string, current_list, letter_list, found_voc_list, possible_route):
    # Recursive backtracking search over the 4x4 board.
    #   start_position: index (0-15) of the cell the search is currently on
    #   steps_moved:    offset used for the previous move
    #   current_string: letters collected along the current path
    #   current_list:   working board; visited cells are overwritten with '1'
    #   letter_list:    the original, unmodified board letters (for restore)
    #   found_voc_list: accumulator of words already reported
    #   possible_route: index offsets for legal neighbour moves
    if len(current_string) >= 4 and current_string in dic_list[current_string[0]] and current_string not in found_voc_list:
        # Found a new word: report it, then keep extending the same path.
        # (This recursive call terminates because the word is now recorded
        # in found_voc_list, so this branch is not re-entered for it.)
        print(f'Found "{current_string}"')
        found_voc_list.append(current_string)
        # explore
        found_boggle(start_position, steps_moved, current_string, current_list, letter_list, found_voc_list, possible_route)
        # un-choose
        current_string = current_string[0:len(current_string) - 1]
        pass
    else:
        if len(current_string) == 0:
            # First cell of a path: take its letter and mark the cell used.
            current_string += current_list[start_position]
            current_list[start_position] = '1'
        # Define the possible moves (index offsets on a 4-wide grid).
        # Left-column cells exclude offsets that would wrap to the row above...
        if start_position == 0 or start_position == 4 or start_position == 8 or start_position == 12:
            possible_route = [-4, -3, 1, 4, 5]
        # ...and right-column cells exclude offsets that would wrap below.
        elif start_position == 3 or start_position == 7 or start_position == 11 or start_position == 15:
            possible_route = [-5, -4, -1, 3, 4]
        else:
            possible_route = [-5, -4, -3, -1, 1, 3, 4, 5]
        for next_position in possible_route:
            # choose: the bounds check handles top/bottom edges of the grid
            if 15 >= start_position + next_position >= 0:
                if current_list[start_position + next_position] != '1':
                    next_letter = current_list[start_position + next_position]
                    current_string += next_letter
                    if len(current_string) == 3:
                        # Prune 3-letter prefixes that start no dictionary word
                        if has_prefix(current_string) is False:
                            current_string = current_string[0:len(current_string) - 1]
                            pass
                        else:
                            current_list[start_position + next_position] = '1'
                            steps_moved = next_position
                            found_boggle(start_position + steps_moved, steps_moved, current_string, current_list, letter_list,
                                         found_voc_list, possible_route)
                            current_string = current_string[0:len(current_string) - 1]
                            current_list[start_position + next_position] = letter_list[start_position + next_position]
                    # explore
                    else:
                        current_list[start_position + next_position] = '1'
                        steps_moved = next_position
                        found_boggle(start_position + steps_moved, steps_moved, current_string, current_list, letter_list, found_voc_list, possible_route)
                        # un-choose
                        current_string = current_string[0:len(current_string)-1]
                        current_list[start_position + next_position] = letter_list[start_position + next_position]
def input_boggle(row_num, letter_list):
    """
    Ask the user for one row of four space-separated letters, recursing
    until all four rows have been entered.
    :param row_num: (int) 1-based index of the row being requested
    :param letter_list: (list) letters collected so far (appended in place)
    :return: (bool) True if all 16 letters were entered in the correct format
    """
    global complete
    # Base case: rows 1-4 have all been read successfully.
    if row_num == 5:
        complete = True
        return complete
    row = input(str(row_num) + ' row of letters:')
    # A legal row looks like 'a b c d': letters at indices 0/2/4/6,
    # single spaces at 1/3/5, and only spaces (if anything) from index 7 on.
    if len(row) < 7:
        print('Illegal input')
        complete = False
        return complete
    for i in (1, 3, 5):
        # BUG FIX: the original tested "row[1] and row[3] and row[5] != ' '",
        # which (by operator precedence) only ever checked row[5].
        if row[i] != ' ':
            print('Illegal input')
            complete = False
            return complete
    for i in range(7, len(row)):
        if row[i] != ' ':
            print('Illegal input')
            complete = False
            return complete
    for i in range(0, 7, 2):
        letter_list.append(row[i].lower())
    # Read the next row. BUG FIX: propagate failures from deeper rows --
    # the original's len(row) >= 8 branch dropped the result and returned
    # None, which main() treated as success.
    input_boggle(row_num + 1, letter_list)
    return complete is True
def read_dictionary():
    """
    Read FILE ("dictionary.txt") and append each word to dic_list under
    its first letter. Blank lines are skipped (the original raised an
    IndexError on them).
    """
    with open(FILE, 'r') as f:
        for line in f:
            parts = line.split()
            if not parts:
                # robustness: ignore blank / whitespace-only lines
                continue
            voc = parts[0]
            # NOTE(review): assumes every word starts with a lowercase a-z
            # letter (dic_list only has those keys) -- confirm dictionary.txt
            dic_list[voc[0]].append(voc)
def has_prefix(sub_s):
    """
    :param sub_s: (str) A substring that is constructed by neighboring letters on a 4x4 square grid
    :return: (bool) If there is any words with prefix stored in sub_s

    BUG FIX: the original fell off the end and returned None when the
    first-letter bucket was empty; callers compare the result with
    'is False', so this now always returns a real bool.
    """
    return any(voc.startswith(sub_s) for voc in dic_list[sub_s[0]])
# Run the solver only when executed as a script (not when imported).
if __name__ == '__main__':
    main()
|
import { _utils } from 'react-data-grid';
const { getMixedTypeValueRetriever, isImmutableCollection } = _utils;
// Three-way comparison: 1 when a > b, -1 when a < b, otherwise 0
// (including when the operands are incomparable, e.g. NaN).
export const comparer = (a, b) => Number(a > b) - Number(a < b);
/**
 * Returns the rows sorted by the given column and direction ('ASC'/'DESC').
 * 'NONE' returns the rows unchanged. The input array is never mutated.
 */
const sortRows = (rows, sortColumn, sortDirection) => {
  // Bail out first: the original built the value retriever and comparator
  // even when no sorting was requested.
  if (sortDirection === 'NONE') {
    return rows;
  }
  const retriever = getMixedTypeValueRetriever(isImmutableCollection(rows));
  const sortDirectionSign = sortDirection === 'ASC' ? 1 : -1;
  const rowComparer = (a, b) => {
    return sortDirectionSign * comparer(retriever.getValue(a, sortColumn), retriever.getValue(b, sortColumn));
  };
  // Sort a shallow copy so the caller's array is left untouched.
  return rows.slice().sort(rowComparer);
};
export default sortRows;
|
# Open a shell inside the container (dockerRun is defined elsewhere) and
# feed it the scan command via a heredoc to dump 'test_ns:test_table'.
dockerRun shell <<EOF
scan 'test_ns:test_table'
EOF
def rectangles_intersect(rect1, rect2):
    """
    Return True when two axis-aligned rectangles overlap.
    Rectangles that merely touch along an edge count as intersecting
    (the separation tests use strict '<').
    Each rectangle exposes x, y, width and height attributes.
    """
    separated_horizontally = (rect1.x + rect1.width < rect2.x
                              or rect2.x + rect2.width < rect1.x)
    separated_vertically = (rect1.y + rect1.height < rect2.y
                            or rect2.y + rect2.height < rect1.y)
    return not (separated_horizontally or separated_vertically)
/*
* Tencent is pleased to support the open source community by making TKEStack available.
*
* Copyright (C) 2012-2019 Tencent. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at
*
* https://opensource.org/licenses/Apache-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package k8s
import (
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"strings"
glog "k8s.io/klog"
)
/*
k8s cni args
Args: [][2]string{
{"IgnoreUnknown", "1"},
{"K8S_POD_NAMESPACE", podNs},
{"K8S_POD_NAME", podName},
{"K8S_POD_INFRA_CONTAINER_ID", podInfraContainerID.ID},
}
*/
const (
	// CNI args keys passed by kubelet (see the Args comment block above)
	K8S_POD_NAMESPACE          = "K8S_POD_NAMESPACE"
	K8S_POD_NAME               = "K8S_POD_NAME"
	K8S_POD_INFRA_CONTAINER_ID = "K8S_POD_INFRA_CONTAINER_ID"
	// stateDir is where the per-container port-mapping state files live
	stateDir = "/var/lib/cni/galaxy/port"
	// PortMappingPortsAnnotation is the pod annotation carrying port mappings
	PortMappingPortsAnnotation = "tkestack.io/portmapping"
)
// Port describes one host-port -> container-port mapping for a pod,
// as persisted by SavePort and read back by ConsumePort.
type Port struct {
	// This must be a valid port number, 0 <= x < 65536.
	// If HostNetwork is specified, this must match ContainerPort.
	HostPort int32 `json:"hostPort"`
	// Required: This must be a valid port number, 0 < x < 65536.
	ContainerPort int32 `json:"containerPort"`
	// Required: Supports "TCP" and "UDP".
	Protocol string `json:"protocol"`
	HostIP   string `json:"hostIP,omitempty"`
	PodName  string `json:"podName"`
	PodIP    string `json:"podIP"`
}
// SavePort persists the raw port-mapping data for the given container
// to a state file under stateDir, creating the directory if needed.
// Files are written 0600 (dir 0700) since they describe host networking.
func SavePort(containerID string, data []byte) error {
	if err := os.MkdirAll(stateDir, 0700); err != nil {
		return err
	}
	path := filepath.Join(stateDir, containerID)
	return ioutil.WriteFile(path, data, 0600)
}
// RemovePortFile deletes the saved port-mapping state file for the
// given container. It returns the error from os.Remove unchanged, so a
// missing file surfaces as an *os.PathError.
func RemovePortFile(containerID string) error {
	stateFile := filepath.Join(stateDir, containerID)
	return os.Remove(stateFile)
}
// ConsumePort reads back the port mappings previously stored with
// SavePort for the given container. It returns (nil, nil) for an empty
// state file, and a read/unmarshal error otherwise. The file itself is
// NOT removed here; callers use RemovePortFile for cleanup.
func ConsumePort(containerID string) ([]Port, error) {
	path := filepath.Join(stateDir, containerID)
	data, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, err
	}
	if len(data) == 0 {
		return nil, nil
	}
	var ports []Port
	if err := json.Unmarshal(data, &ports); err != nil {
		return nil, err
	}
	return ports, nil
}
// GetPodFullName returns a name that uniquely identifies a pod,
// formatted as "<podName>_<namespace>".
func GetPodFullName(podName, namespace string) string {
	return fmt.Sprintf("%s_%s", podName, namespace)
}
// flagHostnameOverride lets operators force the node name galaxy uses
// when looking up its Node object in the apiserver.
var flagHostnameOverride = flag.String("hostname-override", "", "kubelet hostname override, if set, galaxy use this"+
	" as node name to get node from apiserver")
// GetHostname resolves the node name with the following precedence:
// 1. the -hostname-override flag, 2. the MY_NODE_NAME environment
// variable, 3. the OS hostname. The result is lower-cased and trimmed.
// It exits the process (glog.Fatalf) if the OS hostname cannot be read.
func GetHostname() string {
	hostname := *flagHostnameOverride
	if hostname == "" {
		hostname = os.Getenv("MY_NODE_NAME")
		if hostname == "" {
			nodename, err := os.Hostname()
			if err != nil {
				glog.Fatalf("Couldn't determine hostname: %v", err)
			}
			hostname = nodename
		}
	}
	return strings.ToLower(strings.TrimSpace(hostname))
}
// PortMapConf mirrors the CNI runtimeConfig section that carries the
// port mappings requested for a container.
type PortMapConf struct {
	RuntimeConfig struct {
		PortMaps []Port `json:"portMappings,omitempty"`
	} `json:"runtimeConfig,omitempty"`
}
// The struct NetworkSelectionElement and the functions
// ParsePodNetworkAnnotation & parsePodNetworkObjectName are written to be
// compatible with multus-cni.
// Reference: https://github.com/intel/multus-cni/blob/master/k8sclient/k8sclient.go
// NetworkSelectionElement represents one element of the JSON format
// Network Attachment Selection Annotation as described in section 4.1.2
// of the CRD specification.
type NetworkSelectionElement struct {
	// Name contains the name of the Network object this element selects
	Name string `json:"name"`
	// Namespace contains the optional namespace that the network referenced
	// by Name exists in
	Namespace string `json:"namespace,omitempty"`
	// IPRequest contains an optional requested IP address for this network
	// attachment
	IPRequest string `json:"ips,omitempty"`
	// MacRequest contains an optional requested MAC address for this
	// network attachment
	MacRequest string `json:"mac,omitempty"`
	// InterfaceRequest contains an optional requested name for the
	// network interface this attachment will create in the container
	InterfaceRequest string `json:"interface,omitempty"`
}
// ParsePodNetworkAnnotation parses a pod's network annotation into a list
// of NetworkSelectionElement. Two formats are accepted (multus-compatible):
//   - a JSON list of selection elements, detected by the presence of any
//     of '[', '{' or '"' in the value;
//   - a comma-delimited list of "<namespace>/<network name>@<ifname>"
//     items, where the namespace part is parsed but discarded.
// An empty annotation is an error.
func ParsePodNetworkAnnotation(podNetworks string) ([]*NetworkSelectionElement, error) {
	var networks []*NetworkSelectionElement
	if podNetworks == "" {
		return nil, fmt.Errorf("parsePodNetworkAnnotation: pod annotation should be written as " +
			"<namespace>/<network name>@<ifname>")
	}
	//In multus-cni, network annotation written as <namespace>/<network name>@<ifname>
	//Actually, namespace in annotation will be ignored in parsing
	if strings.IndexAny(podNetworks, "[{\"") >= 0 {
		// JSON form: unmarshal the whole selection-element list directly.
		if err := json.Unmarshal([]byte(podNetworks), &networks); err != nil {
			return nil, fmt.Errorf("parsePodNetworkAnnotation: failed to parse pod Network Attachment Selection "+
				"Annotation JSON format: %v", err)
		}
	} else {
		// Comma-delimited list of network attachment object names
		for _, item := range strings.Split(podNetworks, ",") {
			// Remove leading and trailing whitespace.
			item = strings.TrimSpace(item)
			// Parse network name (i.e. <namespace>/<network name>@<ifname>)
			_, networkName, netIfName, err := parsePodNetworkObjectName(item)
			if err != nil {
				return nil, fmt.Errorf("parsePodNetworkAnnotation: %v", err)
			}
			networks = append(networks, &NetworkSelectionElement{
				Name:             networkName,
				InterfaceRequest: netIfName,
			})
		}
	}
	return networks, nil
}
// dns1123LabelRE validates a single DNS-1123 label: lower-case
// alphanumerics and '-', starting and ending alphanumerically.
// Compiled once at package init instead of on every loop iteration
// (the previous code recompiled the pattern per item and silently
// ignored the MatchString error).
var dns1123LabelRE = regexp.MustCompile("^[a-z0-9]([-a-z0-9]*[a-z0-9])?$")

// parsePodNetworkObjectName splits one "<namespace>/<network>@<ifname>"
// item into (namespace, network name, interface name). The namespace
// and interface parts are optional. Each non-empty part must be a
// valid DNS-1123 label, otherwise an error is returned.
func parsePodNetworkObjectName(podNetwork string) (string, string, string, error) {
	var netNsName string
	var netIfName string
	var networkName string
	glog.V(5).Infof("parsePodNetworkObjectName: %s", podNetwork)
	slashItems := strings.Split(podNetwork, "/")
	if len(slashItems) == 2 {
		netNsName = strings.TrimSpace(slashItems[0])
		networkName = slashItems[1]
	} else if len(slashItems) == 1 {
		networkName = slashItems[0]
	} else {
		return "", "", "", fmt.Errorf("Invalid network object %s (failed at '/') ", podNetwork)
	}
	atItems := strings.Split(networkName, "@")
	networkName = strings.TrimSpace(atItems[0])
	if len(atItems) == 2 {
		netIfName = strings.TrimSpace(atItems[1])
	} else if len(atItems) != 1 {
		return "", "", "", fmt.Errorf("Invalid network object (failed at '@') ")
	}
	// Check and see if each item matches the specification for valid attachment name.
	// "Valid attachment names must be comprised of units of the DNS-1123 label format"
	// It must start and end alphanumerically; empty parts are allowed.
	allItems := []string{netNsName, networkName, netIfName}
	for i := range allItems {
		if len(allItems[i]) > 0 && !dns1123LabelRE.MatchString(allItems[i]) {
			return "", "", "", fmt.Errorf("Failed to parse: one or more items did not match comma-delimited format"+
				" (must consist of lower case alphanumeric characters). Must start and end with an alphanumeric"+
				" character), mismatch @ '%v' ", allItems[i])
		}
	}
	glog.V(5).Infof("parsePodNetworkObjectName: parsed: %s, %s, %s", netNsName, networkName, netIfName)
	return netNsName, networkName, netIfName, nil
}
|
"""
Content-Based Recommendation System
"""
def generate_recommendation(users, content):
    """Build per-user content recommendations by matching interests.

    Args:
        users: list of dicts with keys 'name' (str) and 'interests'
            (list of category strings).
        content: list of dicts with keys 'name' and 'category'.

    Returns:
        Dict mapping each user's name to the list of content items whose
        category matches one of that user's interests, in interest order.

    Bug fixes versus the original:
    - the original keyed ``content_by_interest`` by the *list* of a
      user's interests (unhashable -> TypeError) instead of by each
      individual interest string;
    - the original comprehension referenced ``interest`` before its
      ``for interest in ...`` clause (NameError at runtime).
    """
    # Index content once per distinct interest string.
    content_by_interest = {}
    for user in users:
        for interest in user['interests']:
            if interest not in content_by_interest:
                content_by_interest[interest] = [
                    c for c in content if c['category'] == interest
                ]
    # Map each user to the content matching any of their interests.
    user_recommendations = {}
    for user in users:
        user_recommendations[user['name']] = [
            c
            for interest in user['interests']
            for c in content_by_interest.get(interest, [])
        ]
    return user_recommendations
if __name__ == '__main__':
    # Demo fixture: each user lists interest categories; each content
    # item belongs to exactly one category.
    users = [
        {'name': 'John', 'interests': ['sport', 'music']},
        {'name': 'Sarah', 'interests': ['photography', 'movies']},
        {'name': 'Sam', 'interests': ['travel', 'books']},
    ]
    content = [
        {'name': 'Sports app', 'category': 'sport'},
        {'name': 'Music app', 'category': 'music'},
        {'name': 'Photography app', 'category': 'photography'},
        {'name': 'Movies app', 'category': 'movies'},
        {'name': 'Travel app', 'category': 'travel'},
        {'name': 'Books app', 'category': 'books'},
    ]
    # Print the {user name: [matching content]} mapping.
    user_recommendations = generate_recommendation(users, content)
    print(user_recommendations)
#!/usr/bin/env bash
# Train a TSN action-recognition model (ResNet101-v1b backbone) on
# Kinetics-400 raw frames using all 8 GPUs. Paths below are specific to
# this machine's data layout; adjust data-dir/val-data-dir/lists as needed.
# Training schedule: cosine LR from 0.01, 100 epochs, gradient clipping
# at 40, checkpoints every 5 epochs under ./logs/.
CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 python ./scripts/action-recognition/train_recognizer.py \
    --dataset kinetics400 \
    --data-dir /home/ubuntu/third_disk/data/kinetics400/rawframes_train \
    --val-data-dir /home/ubuntu/third_disk/data/kinetics400/rawframes_val \
    --train-list /home/ubuntu/third_disk/data/kinetics400/kinetics400_train_list_rawframes_full.txt \
    --val-list /home/ubuntu/third_disk/data/kinetics400/kinetics400_val_list_rawframes_full.txt \
    --model resnet101_v1b_kinetics400 \
    --mode hybrid \
    --dtype float32 \
    --prefetch-ratio 1.0 \
    --num-classes 400 \
    --batch-size 8 \
    --num-segments 7 \
    --num-gpus 8 \
    --num-data-workers 32 \
    --new-height 256 \
    --new-width 340 \
    --new-length 1 \
    --new-step 1 \
    --input-size 224 \
    --lr-mode cosine \
    --lr 0.01 \
    --momentum 0.9 \
    --wd 0.0001 \
    --num-epochs 100 \
    --scale-ratios 1.0,0.8 \
    --save-frequency 5 \
    --clip-grad 40 \
    --log-interval 50 \
    --logging-file resnet101_v1b_kinetics400_tsn.log \
    --save-dir ./logs/
|
#!/bin/bash
# Cross-compile terraform-provider-rke for every OS/ARCH pair listed in
# $OS and $ARCH, optionally zipping each binary.
#
# Required environment:
#   OS, ARCH         - space-separated lists of GOOS / GOARCH targets
#   CURRENT_VERSION  - provider version embedded in the binary name
#   PROTOCOL_VERSION - terraform plugin protocol version suffix
#   BUILD_LDFLAGS    - ldflags passed to `go build`
#   ARCHIVE          - when non-empty, zip each binary and delete it
set -e
# mkdir -p never fails on an existing dir; don't hide real errors
# (the old `2>/dev/null` would mask e.g. permission problems).
mkdir -p bin/
for GOOS in $OS; do
  for GOARCH in $ARCH; do
    arch="$GOOS-$GOARCH"
    binary="terraform-provider-rke_v${CURRENT_VERSION}_x${PROTOCOL_VERSION}"
    # Windows binaries need the .exe suffix.
    if [ "$GOOS" = "windows" ]; then
      binary="${binary}.exe"
    fi
    echo "Building $binary $arch"
    GOOS=$GOOS GOARCH=$GOARCH CGO_ENABLED=0 \
      go build \
      -ldflags "$BUILD_LDFLAGS" \
      -o "bin/$binary" \
      main.go
    if [ -n "$ARCHIVE" ]; then
      # Quote "$binary" everywhere: versions could contain glob or
      # whitespace characters that word splitting would mangle.
      (cd bin/; zip -r "terraform-provider-rke_${CURRENT_VERSION}_$arch.zip" "$binary")
      rm -f "bin/$binary"
    fi
  done
done
|
package kbasesearchengine.system;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.SafeConstructor;
import kbasesearchengine.tools.Utils;
/** A type mapping parser for YAML formatted input with a specific document structure.
 *
 * Expected top-level document shape:
 * <pre>
 * storage-type: &lt;storage code&gt;
 * types:
 *   &lt;storage type name&gt;:
 *     types: [...]        # OR
 *     default: [...]
 *     versions:
 *       &lt;int&gt;: [...]
 * </pre>
 * where each [...] is a list of {type: name, version: int} mappings.
 *
 * TODO DOCS documentation of the file structure.
 * @author <EMAIL>
 *
 */
public class YAMLTypeMappingParser implements TypeMappingParser {

    //TODO TEST
    //TODO CODE there's got to be some code to validate the structure of arbitrary java objects, have a look around. Try jsonschema
    //TODO CODE error checking sucks

    // Root path of the per-storage-type section, used in error messages.
    private static final String TYPES_PATH = "/types";

    /** Parse the YAML stream into a set of type mappings.
     * @param input the YAML document (buffered here if not already).
     * @param sourceInfo free-text origin of the input, appended to error
     *        messages; may be null/empty.
     * @throws TypeParseException if the document structure is invalid.
     */
    @Override
    public Set<TypeMapping> parse(InputStream input, String sourceInfo)
            throws TypeParseException {
        Utils.nonNull(input, "input");
        if (!(input instanceof BufferedInputStream)) {
            input = new BufferedInputStream(input);
        }
        if (Utils.isNullOrEmpty(sourceInfo)) {
            sourceInfo = null;
        }
        final Yaml yaml = new Yaml(new SafeConstructor()); // not thread safe
        final Object predata = yaml.load(input);
        if (!(predata instanceof Map)) {
            throw new TypeParseException(
                    "Expected mapping in top level YAML." + sourceInfo);
        }
        @SuppressWarnings("unchecked")
        final Map<String, Object> data = (Map<String, Object>) predata;
        final String storageCode = getString(
                data, "storage-type", "storage-type", sourceInfo);
        final Map<String, Object> types = getStringMap(
                data, "types", TYPES_PATH, sourceInfo);
        final Set<TypeMapping> ret = new HashSet<>();
        // One TypeMapping per storage type name under "types".
        for (final String key: types.keySet()) {
            final Map<String, Object> typeinfo = getStringMap(
                    types, key, TYPES_PATH + "/" + key, sourceInfo);
            ret.add(processType(storageCode, key, typeinfo, sourceInfo));
        }
        return ret;
    }

    /** Build one TypeMapping from a storage type's sub-document.
     * A "types" list (if present and non-empty) short-circuits as the
     * default search types; otherwise "default" and per-version entries
     * under "versions" are consumed.
     */
    private TypeMapping processType(
            final String storageCode,
            final String type,
            final Map<String, Object> typeinfo,
            final String sourceInfo)
            throws TypeParseException {
        final TypeMapping.Builder b = TypeMapping.getBuilder(storageCode, type)
                .withNullableSourceInfo(sourceInfo);
        String pathPrefix = TYPES_PATH + "/" + type + "/";
        final List<SearchObjectType> searchTypes = getSearchTypes(
                typeinfo, "types", pathPrefix + "types", sourceInfo, false);
        if (!searchTypes.isEmpty()) {
            // "types" takes precedence over "default"/"versions".
            searchTypes.stream().forEach(t -> b.withDefaultSearchType(t));
            return b.build();
        }
        getSearchTypes(typeinfo, "default", pathPrefix + "default", sourceInfo, false).stream()
                .forEach(t -> b.withDefaultSearchType(t));
        final String verPathPrefix = pathPrefix + "versions";
        final Map<Integer, Object> versions = getIntMap(
                typeinfo, "versions", verPathPrefix, sourceInfo);
        for (final Integer v: versions.keySet()) {
            final String verpath = verPathPrefix + "/" + v;
            if (v < 0) {
                throw new TypeParseException(String.format(
                        "Version less than 0 at %s.%s", verpath, fmt(sourceInfo)));
            }
            getSearchTypes(versions, v, verpath, sourceInfo, true).stream()
                    .forEach(t -> b.withVersion(v, t));
        }
        if (!b.isBuildReady()) {
            throw new TypeParseException(String.format(
                    "No type mappings provided at %s.%s", pathPrefix, fmt(sourceInfo)));
        }
        return b.build();
    }

    /** Read a list of {type, version} mappings at map[key].
     * Returns an empty list when the key is absent and not required.
     */
    private List<SearchObjectType> getSearchTypes(
            final Map<?, Object> map,
            final Object key,
            final String path,
            final String sourceInfo,
            final boolean required)
            throws TypeParseException {
        final Object value = map.get(key);
        if (value == null) {
            if (required) {
                throw new TypeParseException("Missing value at " + path + "." + fmt(sourceInfo));
            } else {
                return Collections.emptyList();
            }
        }
        if (!(value instanceof List)) {
            throw new TypeParseException("Expected list at " + path + "." + fmt(sourceInfo));
        }
        @SuppressWarnings("unchecked")
        final List<Object> otypes = (List<Object>) value;
        final List<SearchObjectType> ret = new LinkedList<>();
        for (int i = 0; i < otypes.size(); i++) {
            final Object putativeType = otypes.get(i);
            if (!(putativeType instanceof Map)) {
                throw new TypeParseException(String.format(
                        "Expected mapping at %s position %s.%s", path, i, fmt(sourceInfo)));
            }
            @SuppressWarnings("unchecked")
            final Map<Object, Object> putativeType2 = (Map<Object, Object>) putativeType;
            final Object typeName = putativeType2.get("type");
            final Object typeVer = putativeType2.get("version");
            if (typeName == null || !(typeName instanceof String) ||
                    Utils.isNullOrEmpty((String) typeName)) {
                throw new TypeParseException(String.format(
                        "Expected type name at %s/%s/type.%s", path, i, fmt(sourceInfo)));
            }
            if (typeVer == null || !(typeVer instanceof Integer)) {
                throw new TypeParseException(String.format(
                        "Expected type version at %s/%s/version.%s", path, i, fmt(sourceInfo)));
            }
            ret.add(new SearchObjectType((String) typeName, (int) typeVer));
        }
        return ret;
    }

    /** Format sourceInfo for appending to an error message ("" when absent). */
    private String fmt(final String sourceInfo) {
        return sourceInfo == null ? "" :
            sourceInfo.trim().isEmpty() ? "" : " Source: " + sourceInfo;
    }

    // never required
    /** Read map[key] as a Map with Integer keys; empty map when absent. */
    private Map<Integer, Object> getIntMap(
            final Map<String, Object> map,
            final String key,
            final String path,
            final String sourceInfo)
            throws TypeParseException {
        final Object value = map.get(key);
        if (value == null) {
            return Collections.emptyMap();
        }
        if (!(value instanceof Map)) {
            throw new TypeParseException(
                    String.format("Expected map, got %s at %s.%s", value, path, fmt(sourceInfo)));
        }
        @SuppressWarnings("unchecked")
        final Map<Object, Object> value2 = (Map<Object, Object>) value;
        final Map<Integer, Object> ret = new HashMap<>();
        for (final Entry<Object, Object> e: value2.entrySet()) {
            if (!(e.getKey() instanceof Integer)) {
                throw new TypeParseException(String.format(
                        "Expected map with int keys, got key %s at %s.%s",
                        e.getKey(), path, fmt(sourceInfo)));
            }
            ret.put((Integer) e.getKey(), e.getValue());
        }
        return ret;
    }

    // always required.
    // could probably use generics here, but f it. C&P FTW
    /** Read map[key] as a Map with String keys; throws when absent. */
    private Map<String, Object> getStringMap(
            final Map<String, Object> map,
            final String key,
            final String path,
            final String sourceInfo)
            throws TypeParseException {
        final Object value = map.get(key);
        if (value == null) {
            throw new TypeParseException("Missing value at " + path + "." + fmt(sourceInfo));
        }
        if (!(value instanceof Map)) {
            throw new TypeParseException(
                    String.format("Expected map, got %s at %s.%s", value, path, fmt(sourceInfo)));
        }
        @SuppressWarnings("unchecked")
        final Map<Object, Object> value2 = (Map<Object, Object>) value;
        final Map<String, Object> ret = new HashMap<>();
        for (final Entry<Object, Object> e: value2.entrySet()) {
            if (!(e.getKey() instanceof String)) {
                throw new TypeParseException(String.format(
                        "Expected map with string keys, got key %s at %s.%s",
                        e.getKey(), path, fmt(sourceInfo)));
            }
            ret.put((String) e.getKey(), e.getValue());
        }
        return ret;
    }

    // always required
    /** Read map[key] as a non-empty String; throws when absent or empty. */
    private String getString(
            final Map<?, Object> map,
            final Object key,
            final String path,
            final String sourceInfo)
            throws TypeParseException {
        final Object value = map.get(key);
        if (value == null) {
            throw new TypeParseException("Missing value at " + path + "." + fmt(sourceInfo));
        }
        if (!(value instanceof String)) {
            throw new TypeParseException(
                    String.format("Expected string, got %s at %s.%s",
                            value, path, fmt(sourceInfo)));
        }
        if (Utils.isNullOrEmpty((String) value)) {
            throw new TypeParseException("Missing value at " + path + "." + fmt(sourceInfo));
        }
        return (String) value;
    }

    /** Ad-hoc manual test entry point: parses the example mapping file. */
    public static void main(final String[] args) throws FileNotFoundException, TypeParseException {
        System.out.println(new YAMLTypeMappingParser().parse(
                new FileInputStream(
                        new File("resources/typemappings/GenomeAndAssembly.yaml.example")),
                "me bum"));
    }
}
|
#!/usr/bin/env sh
# Deploy the VuePress site to the gh-pages branch.
# Abort on the first error.
set -e
# Build the static site.
npm run docs:build
# Enter the generated output folder.
cd docs/.vuepress/dist
# If deploying to a custom domain, write it to CNAME:
# echo 'www.example.com' > CNAME
git init
git add -A
git commit -m 'deploy'
# If deploying to https://<USERNAME>.github.io
# git push -f git@github.com:<USERNAME>/<USERNAME>.github.io.git master
# If deploying to https://<USERNAME>.github.io/<REPO>
git push -f git@github.com:houyaxin/vuepress-blog.git master:gh-pages
cd -
<reponame>acouvreur/skeleton-generator
package org.sklsft.generator.model.domain.database;
import java.util.ArrayList;
import java.util.List;
import org.sklsft.generator.model.domain.business.Property;
/**
 * representation of a unique constraint<br/>
 * Properties are willingly public because of their intensive use in file write
 * commands<br/>
 *
 * @author <NAME>
 *
 */
public class UniqueConstraint {

    // Constraint name as declared in the database schema.
    public String name;
    // Database columns covered by the constraint.
    public List<Column> columns = new ArrayList<>();
    // Business-level properties corresponding to the columns.
    public List<Property> properties = new ArrayList<>();
}
|
<reponame>PinoEire/archi
package com.archimatetool.editor.ui.dialog;
import org.eclipse.osgi.util.NLS;
/**
 * Localized message keys for the UI dialogs. The public fields are
 * populated reflectively from the "messages" resource bundle by
 * {@link NLS#initializeMessages}, so their names must match the bundle
 * keys exactly.
 */
public class Messages extends NLS {

    private static final String BUNDLE_NAME = "com.archimatetool.editor.ui.dialog.messages"; //$NON-NLS-1$

    public static String AboutDialog_0;
    public static String AboutDialog_1;
    public static String AboutDialog_2;
    public static String AboutDialog_3;
    public static String AboutDialog_4;
    public static String AboutDialog_5;
    public static String AboutDialog_6;
    public static String AboutDialog_7;
    public static String RelationshipsMatrixDialog_0;
    public static String RelationshipsMatrixDialog_1;
    public static String RelationshipsMatrixDialog_2;

    static {
        // initialize resource bundle
        NLS.initializeMessages(BUNDLE_NAME, Messages.class);
    }

    // Not instantiable: static message holder only.
    private Messages() {
    }
}
|
#include "global.h"
#include "Model.h"
#include "ModelTypes.h"
#include "RageMath.h"
#include "RageDisplay.h"
#include "RageUtil.h"
#include "RageTextureManager.h"
#include "XmlFile.h"
#include "RageFile.h"
#include "RageLog.h"
#include "ActorUtil.h"
#include "ModelManager.h"
#include "Foreach.h"
#include "LuaBinding.h"
#include "PrefsManager.h"
// Register Model with the Actor factory so it can be created by name.
REGISTER_ACTOR_CLASS( Model );
// Milkshape animation frames are interpreted at a fixed 30 FPS.
static const float FRAMES_PER_SECOND = 30;
// Name under which the bones loaded alongside the mesh are stored.
static const RString DEFAULT_ANIMATION_NAME = "default";
// Constructor: establish render defaults (Z-buffer on, back-face
// culling, texture wrapping) and reset all geometry/animation state.
Model::Model()
{
	SetUseZBuffer( true );
	SetCullMode( CULL_BACK );
	m_bTextureWrapping = true;
	m_bDrawCelShaded = false;
	m_bLoop = true;
	m_pGeometry = NULL;
	m_pTempGeometry = NULL;
	m_pCurAnimation = NULL;
	m_fDefaultAnimationRate = 1;
	m_fCurAnimationRate = 1;
}
// Destructor: release geometry, materials, bones and animations.
Model::~Model()
{
	Clear();
}
// Release all loaded model state: geometry (via ModelManager), bones,
// materials, animations, and the compiled temp geometry.
// Safe to call repeatedly; also invoked from the destructor and at the
// top of LoadPieces().
void Model::Clear()
{
	if( m_pGeometry )
	{
		MODELMAN->UnloadModel( m_pGeometry );
		m_pGeometry = NULL;
	}
	m_vpBones.clear();
	m_Materials.clear();
	m_mapNameToAnimation.clear();
	m_pCurAnimation = NULL;
	RecalcAnimationLengthSeconds();
	if( m_pTempGeometry )
	{
		DISPLAY->DeleteCompiledGeometry( m_pTempGeometry );
		// BUG FIX: the pointer was previously left dangling, causing a
		// double delete on a second Clear()/LoadPieces() call.
		m_pTempGeometry = NULL;
	}
}
// Load a model from a file path. Only Milkshape ASCII (".txt") is
// recognized; any other extension is silently ignored (no error).
void Model::Load( const RString &sFile )
{
	if( sFile == "" ) return;
	RString sExt = GetExtension(sFile);
	sExt.MakeLower();
	if( sExt=="txt" )
		LoadMilkshapeAscii( sFile );
	RecalcAnimationLengthSeconds();
}
// Shared parse-failure macro: relies on sPath, iLineNum and sLine being
// in scope at the expansion site (used by the material loader below).
#define THROW RageException::Throw( "Parse error in \"%s\" at line %d: \"%s\".", sPath.c_str(), iLineNum, sLine.c_str() )
// TODO: Move MS3D loading into its own class. - Colby
// Load meshes, materials and bones all from the same Milkshape file.
void Model::LoadMilkshapeAscii( const RString &sPath )
{
	LoadPieces( sPath, sPath, sPath );
}
// Load a model from (possibly distinct) mesh, material and bone files.
// Clears any previously loaded state first. Throws RageException on a
// mesh referencing a nonexistent material index.
void Model::LoadPieces( const RString &sMeshesPath, const RString &sMaterialsPath, const RString &sBonesPath )
{
	Clear();
	// TRICKY: Load materials before geometry so we can figure out whether the materials require normals.
	LoadMaterialsFromMilkshapeAscii( sMaterialsPath );
	ASSERT( m_pGeometry == NULL );
	m_pGeometry = MODELMAN->LoadMilkshapeAscii( sMeshesPath, this->MaterialsNeedNormals() );
	// Validate material indices.
	for( unsigned i = 0; i < m_pGeometry->m_Meshes.size(); ++i )
	{
		const msMesh *pMesh = &m_pGeometry->m_Meshes[i];
		if( pMesh->nMaterialIndex >= (int) m_Materials.size() )
			RageException::Throw( "Model \"%s\" mesh \"%s\" references material index %i, but there are only %i materials.",
				sMeshesPath.c_str(), pMesh->sName.c_str(), pMesh->nMaterialIndex, (int)m_Materials.size() );
	}
	// Bones are optional; when present, start the default animation.
	if( LoadMilkshapeAsciiBones( DEFAULT_ANIMATION_NAME, sBonesPath ) )
		PlayAnimation( DEFAULT_ANIMATION_NAME );
	// Setup temp vertices (if necessary): per-vertex bones require a
	// mutable copy of the meshes that is re-skinned every frame.
	if( m_pGeometry->HasAnyPerVertexBones() )
	{
		m_vTempMeshes = m_pGeometry->m_Meshes;
		m_pTempGeometry = DISPLAY->CreateCompiledGeometry();
		m_pTempGeometry->Set( m_vTempMeshes, this->MaterialsNeedNormals() );
	}
	RecalcAnimationLengthSeconds();
}
// Actor XML loader: reads optional "Meshes"/"Materials"/"Bones"
// attribute paths. Either all three are given or none (asserted).
void Model::LoadFromNode( const XNode* pNode )
{
	RString s1, s2, s3;
	ActorUtil::GetAttrPath( pNode, "Meshes", s1 );
	ActorUtil::GetAttrPath( pNode, "Materials", s2 );
	ActorUtil::GetAttrPath( pNode, "Bones", s3 );
	if( !s1.empty() || !s2.empty() || !s3.empty() )
	{
		ASSERT( !s1.empty() && !s2.empty() && !s3.empty() );
		LoadPieces( s1, s2, s3 );
	}
	Actor::LoadFromNode( pNode );
	RecalcAnimationLengthSeconds();
}
// Parse the "Materials:" section of a Milkshape ASCII file into
// m_Materials. Each material is 9 consecutive lines: name, ambient,
// diffuse, specular, emissive (RGBA each), shininess, transparency,
// diffuse texture, alpha texture. Texture paths are resolved relative
// to the file's directory; a missing texture file is a fatal error.
// Uses the THROW macro above on any malformed line.
void Model::LoadMaterialsFromMilkshapeAscii( const RString &_sPath )
{
	RString sPath = _sPath;
	FixSlashesInPlace(sPath);
	const RString sDir = Dirname( sPath );
	RageFile f;
	if( !f.Open( sPath ) )
		RageException::Throw( "Model::LoadMilkshapeAscii Could not open \"%s\": %s", sPath.c_str(), f.GetError().c_str() );
	RString sLine;
	int iLineNum = 0;
	while( f.GetLine( sLine ) > 0 )
	{
		iLineNum++;
		// Skip comment lines.
		if( !strncmp (sLine, "//", 2) )
			continue;
		int nFrame;
		if( sscanf(sLine, "Frames: %d", &nFrame) == 1 )
		{
			// ignore
			// m_pModel->nTotalFrames = nFrame;
		}
		if( sscanf(sLine, "Frame: %d", &nFrame) == 1 )
		{
			// ignore
			// m_pModel->nFrame = nFrame;
		}
		// materials
		int nNumMaterials = 0;
		if( sscanf(sLine, "Materials: %d", &nNumMaterials) == 1 )
		{
			m_Materials.resize( nNumMaterials );
			char szName[256];
			for( int i = 0; i < nNumMaterials; i++ )
			{
				msMaterial& Material = m_Materials[i];
				// name (quoted string)
				if( f.GetLine( sLine ) <= 0 )
					THROW;
				if( sscanf(sLine, "\"%255[^\"]\"", szName) != 1 )
					THROW;
				Material.sName = szName;
				// ambient
				if( f.GetLine( sLine ) <= 0 )
					THROW;
				RageVector4 Ambient;
				if( sscanf(sLine, "%f %f %f %f", &Ambient[0], &Ambient[1], &Ambient[2], &Ambient[3]) != 4 )
					THROW;
				memcpy( &Material.Ambient, &Ambient, sizeof(Material.Ambient) );
				// diffuse
				if( f.GetLine( sLine ) <= 0 )
					THROW;
				RageVector4 Diffuse;
				if( sscanf(sLine, "%f %f %f %f", &Diffuse[0], &Diffuse[1], &Diffuse[2], &Diffuse[3]) != 4 )
					THROW;
				memcpy( &Material.Diffuse, &Diffuse, sizeof(Material.Diffuse) );
				// specular
				if( f.GetLine( sLine ) <= 0 )
					THROW;
				RageVector4 Specular;
				if( sscanf(sLine, "%f %f %f %f", &Specular[0], &Specular[1], &Specular[2], &Specular[3]) != 4 )
					THROW;
				memcpy( &Material.Specular, &Specular, sizeof(Material.Specular) );
				// emissive
				if( f.GetLine( sLine ) <= 0 )
					THROW;
				RageVector4 Emissive;
				if( sscanf (sLine, "%f %f %f %f", &Emissive[0], &Emissive[1], &Emissive[2], &Emissive[3]) != 4 )
					THROW;
				memcpy( &Material.Emissive, &Emissive, sizeof(Material.Emissive) );
				// shininess
				if( f.GetLine( sLine ) <= 0 )
					THROW;
				float fShininess;
				if( !StringConversion::FromString(sLine, fShininess) )
					THROW;
				Material.fShininess = fShininess;
				// transparency
				if( f.GetLine( sLine ) <= 0 )
					THROW;
				float fTransparency;
				if( !StringConversion::FromString(sLine, fTransparency) )
					THROW;
				Material.fTransparency = fTransparency;
				// diffuse texture (quoted path; empty => blank texture)
				if( f.GetLine( sLine ) <= 0 )
					THROW;
				strcpy( szName, "" );
				sscanf( sLine, "\"%255[^\"]\"", szName );
				RString sDiffuseTexture = szName;
				if( sDiffuseTexture == "" )
				{
					Material.diffuse.LoadBlank();
				}
				else
				{
					RString sTexturePath = sDir + sDiffuseTexture;
					FixSlashesInPlace( sTexturePath );
					CollapsePath( sTexturePath );
					if( !IsAFile(sTexturePath) )
						RageException::Throw( "\"%s\" references a texture \"%s\" that does not exist.", sPath.c_str(), sTexturePath.c_str() );
					Material.diffuse.Load( sTexturePath );
				}
				// alpha texture (quoted path; empty => blank texture)
				if( f.GetLine( sLine ) <= 0 )
					THROW;
				strcpy( szName, "" );
				sscanf( sLine, "\"%255[^\"]\"", szName );
				RString sAlphaTexture = szName;
				if( sAlphaTexture == "" )
				{
					Material.alpha.LoadBlank();
				}
				else
				{
					RString sTexturePath = sDir + sAlphaTexture;
					FixSlashesInPlace( sTexturePath );
					CollapsePath( sTexturePath );
					if( !IsAFile(sTexturePath) )
						RageException::Throw( "\"%s\" references a texture \"%s\" that does not exist.", sPath.c_str(), sTexturePath.c_str() );
					Material.alpha.Load( sTexturePath );
				}
			}
		}
	}
}
// Load a bone animation from a Milkshape file and register it under
// sAniName. Returns true on success; on failure the (empty) map entry
// is removed and false is returned.
// NOTE(review): msAnimation::LoadMilkshapeAsciiBones appears to return
// nonzero on *failure* here — confirm against its declaration.
bool Model::LoadMilkshapeAsciiBones( const RString &sAniName, const RString &sPath )
{
	m_mapNameToAnimation[sAniName] = msAnimation();
	msAnimation &Animation = m_mapNameToAnimation[sAniName];
	if( Animation.LoadMilkshapeAsciiBones( sAniName, sPath ) )
	{
		m_mapNameToAnimation.erase( sAniName );
		return false;
	}
	return true;
}
// Skip drawing entirely when there is no geometry to render.
bool Model::EarlyAbortDraw() const
{
	if( m_pGeometry == NULL )
		return true;
	return m_pGeometry->m_Meshes.empty();
}
// Two-pass cel-shaded render: an outline "shell" pass drawn with
// front-face culling, then the toon-shaded pass with normal culling.
void Model::DrawCelShaded()
{
	// First pass: shell. We only want the backfaces for this.
	DISPLAY->SetCelShaded(1);
	DISPLAY->SetCullMode(CULL_FRONT);
	this->SetZWrite(false); // XXX: Why on earth isn't the culling working? -Colby
	this->Draw();
	// Second pass: cel shading
	DISPLAY->SetCelShaded(2);
	DISPLAY->SetCullMode(CULL_BACK);
	this->SetZWrite(true);
	this->Draw();
	// Restore non-cel-shaded rendering for subsequent draws.
	DISPLAY->SetCelShaded(0);
}
// Render all meshes: a diffuse pass (per-material, optionally
// multitextured with an additive alpha texture) followed by a glow
// pass. Render-state ordering here is deliberate; do not reorder.
void Model::DrawPrimitives()
{
	Actor::SetGlobalRenderStates();	// set Actor-specified render states
	// Don't if we're fully transparent
	if( m_pTempState->diffuse[0].a < 0.001f && m_pTempState->glow.a < 0.001f )
		return;
	DISPLAY->Scale( 1, -1, 1 );	// flip Y so positive is up
	//////////////////////
	// render the diffuse pass
	//////////////////////
	if( m_pTempState->diffuse[0].a > 0 )
	{
		DISPLAY->SetTextureMode( TextureUnit_1, TextureMode_Modulate );
		for( unsigned i = 0; i < m_pGeometry->m_Meshes.size(); ++i )
		{
			const msMesh *pMesh = &m_pGeometry->m_Meshes[i];
			if( pMesh->nMaterialIndex != -1 )	// has a material
			{
				// apply material, tinted by the actor's diffuse color
				msMaterial& mat = m_Materials[ pMesh->nMaterialIndex ];
				RageColor Emissive = mat.Emissive;
				RageColor Ambient = mat.Ambient;
				RageColor Diffuse = mat.Diffuse;
				Emissive *= m_pTempState->diffuse[0];
				Ambient *= m_pTempState->diffuse[0];
				Diffuse *= m_pTempState->diffuse[0];
				DISPLAY->SetMaterial( Emissive, Ambient, Diffuse, mat.Specular, mat.fShininess );
				// Optional scrolling-texture animation offset.
				RageVector2 vTexTranslate = mat.diffuse.GetTextureTranslate();
				if( vTexTranslate.x != 0 || vTexTranslate.y != 0 )
				{
					DISPLAY->TexturePushMatrix();
					DISPLAY->TextureTranslate( vTexTranslate.x, vTexTranslate.y );
				}
				/* There's some common code that could be folded out here, but
				 * it seems clearer to keep it separate. */
				bool bUseMultitexture = PREFSMAN->m_bAllowMultitexture && DISPLAY->GetNumTextureUnits() >= 2;
				if( bUseMultitexture )
				{
					// render the diffuse texture with texture unit 1
					DISPLAY->SetTexture( TextureUnit_1, mat.diffuse.GetCurrentTexture() ? mat.diffuse.GetCurrentTexture()->GetTexHandle() : 0 );
					Actor::SetTextureRenderStates();	// set Actor-specified render states
					DISPLAY->SetSphereEnvironmentMapping( TextureUnit_1, mat.diffuse.m_bSphereMapped );
					// render the additive texture with texture unit 2
					if( mat.alpha.GetCurrentTexture() )
					{
						DISPLAY->SetTexture( TextureUnit_2, mat.alpha.GetCurrentTexture() ? mat.alpha.GetCurrentTexture()->GetTexHandle() : 0 );
						Actor::SetTextureRenderStates();	// set Actor-specified render states
						DISPLAY->SetSphereEnvironmentMapping( TextureUnit_2, mat.alpha.m_bSphereMapped );
						DISPLAY->SetTextureMode( TextureUnit_2, TextureMode_Add );
						DISPLAY->SetTextureFiltering( TextureUnit_2, true );
					}
					else
					{
						DISPLAY->SetTexture( TextureUnit_2, 0 );
						// set current texture back to 0 or else texture
						// transform applied above isn't used. Why?!?
						DISPLAY->SetTexture( TextureUnit_1, mat.diffuse.GetCurrentTexture() ? mat.diffuse.GetCurrentTexture()->GetTexHandle() : 0 );
					}
					// go
					DrawMesh(i);
					// Turn off environment mapping on tex unit 0.
					DISPLAY->SetSphereEnvironmentMapping( TextureUnit_1, false );
				}
				else
				{
					// Single-texture fallback: two draw calls instead of
					// one multitextured call.
					// render the diffuse texture
					DISPLAY->SetTexture( TextureUnit_1, mat.diffuse.GetCurrentTexture() ? mat.diffuse.GetCurrentTexture()->GetTexHandle() : 0 );
					Actor::SetTextureRenderStates();	// set Actor-specified render states
					DISPLAY->SetSphereEnvironmentMapping( TextureUnit_1, mat.diffuse.m_bSphereMapped );
					DrawMesh( i );
					// render the additive texture
					if( mat.alpha.GetCurrentTexture() )
					{
						DISPLAY->SetTexture( TextureUnit_1, mat.alpha.GetCurrentTexture() ? mat.alpha.GetCurrentTexture()->GetTexHandle() : 0 );
						Actor::SetTextureRenderStates();	// set Actor-specified render states
						DISPLAY->SetSphereEnvironmentMapping( TextureUnit_1, mat.alpha.m_bSphereMapped );
						// UGLY: This overrides the Actor's BlendMode.
						DISPLAY->SetBlendMode( BLEND_ADD );
						DISPLAY->SetTextureFiltering( TextureUnit_1, true );
						DrawMesh( i );
					}
				}
				if( vTexTranslate.x != 0 || vTexTranslate.y != 0 )
					DISPLAY->TexturePopMatrix();
			}
			else
			{
				// No material: draw with a neutral default material.
				static const RageColor emissive( 0,0,0,0 );
				static const RageColor ambient( 0.2f,0.2f,0.2f,1 );
				static const RageColor diffuse( 0.7f,0.7f,0.7f,1 );
				static const RageColor specular( 0.2f,0.2f,0.2f,1 );
				static const float shininess = 1;
				DISPLAY->SetMaterial( emissive, ambient, diffuse, specular, shininess );
				DISPLAY->ClearAllTextures();
				DISPLAY->SetSphereEnvironmentMapping( TextureUnit_1, false );
				DrawMesh( i );
			}
			// Reset per-mesh state before the next iteration.
			DISPLAY->SetSphereEnvironmentMapping( TextureUnit_1, false );
			DISPLAY->SetBlendMode( BLEND_NORMAL );
		}
	}
	// render the glow pass
	if( m_pTempState->glow.a > 0.0001f )
	{
		DISPLAY->SetTextureMode( TextureUnit_1, TextureMode_Glow );
		for( unsigned i = 0; i < m_pGeometry->m_Meshes.size(); ++i )
		{
			const msMesh *pMesh = &m_pGeometry->m_Meshes[i];
			// apply material: glow color only, no lighting contribution
			RageColor emissive = RageColor(0,0,0,0);
			RageColor ambient = RageColor(0,0,0,0);
			RageColor diffuse = m_pTempState->glow;
			RageColor specular = RageColor(0,0,0,0);
			float shininess = 1;
			DISPLAY->SetMaterial( emissive, ambient, diffuse, specular, shininess );
			DISPLAY->ClearAllTextures();
			if( pMesh->nMaterialIndex != -1 )
			{
				msMaterial& mat = m_Materials[ pMesh->nMaterialIndex ];
				DISPLAY->SetTexture( TextureUnit_1, mat.diffuse.GetCurrentTexture() ? mat.diffuse.GetCurrentTexture()->GetTexHandle() : 0 );
				Actor::SetTextureRenderStates();	// set Actor-specified render states
			}
			else
			{
				// hey why is this otherwise empty else block here? -aj
			}
			DrawMesh( i );
		}
	}
}
// Draw one mesh by index, applying its per-mesh bone transform (if any)
// around the draw call. Uses the skinned temp geometry when present,
// otherwise the static compiled geometry.
void Model::DrawMesh( int i ) const
{
	const msMesh *pMesh = &m_pGeometry->m_Meshes[i];
	// apply mesh-specific bone (if any)
	if( pMesh->m_iBoneIndex != -1 )
	{
		DISPLAY->PushMatrix();
		const RageMatrix &mat = m_vpBones[pMesh->m_iBoneIndex].m_Final;
		DISPLAY->PreMultMatrix( mat );
	}
	// Draw it
	const RageCompiledGeometry* TempGeometry = m_pTempGeometry ? m_pTempGeometry : m_pGeometry->m_pCompiledGeometry;
	DISPLAY->DrawCompiledGeometry( TempGeometry, i, m_pGeometry->m_Meshes );
	if( pMesh->m_iBoneIndex != -1 )
		DISPLAY->PopMatrix();
}
// Record which animation (and play rate) to fall back to when the
// current animation runs off either end; see AdvanceFrame().
void Model::SetDefaultAnimation( RString sAnimation, float fPlayRate )
{
	m_fDefaultAnimationRate = fPlayRate;
	m_sDefaultAnimation = sAnimation;
}
// Switch to the named animation (no-op if unknown). Resets the frame
// counter and play rate, rebuilds the bone hierarchy for the new
// animation and re-bases all skinned vertices into bone-local space.
// Returns early (keeping the reset frame/rate) if the animation is
// already current.
void Model::PlayAnimation( const RString &sAniName, float fPlayRate )
{
	if( m_mapNameToAnimation.find(sAniName) == m_mapNameToAnimation.end() )
		return;
	const msAnimation *pNewAnimation = &m_mapNameToAnimation[sAniName];
	m_fCurFrame = 0;
	m_fCurAnimationRate = fPlayRate;
	if( m_pCurAnimation == pNewAnimation )
		return;
	m_pCurAnimation = pNewAnimation;
	// setup bones: compose each bone's rest transform with its parent's
	// (parents are assumed to precede children in the bone list).
	m_vpBones.resize( m_pCurAnimation->Bones.size() );
	for( unsigned i = 0; i < m_pCurAnimation->Bones.size(); i++ )
	{
		const msBone *pBone = &m_pCurAnimation->Bones[i];
		const RageVector3 &vRot = pBone->Rotation;
		RageMatrixAngles( &m_vpBones[i].m_Relative, vRot );
		m_vpBones[i].m_Relative.m[3][0] = pBone->Position[0];
		m_vpBones[i].m_Relative.m[3][1] = pBone->Position[1];
		m_vpBones[i].m_Relative.m[3][2] = pBone->Position[2];
		int nParentBone = m_pCurAnimation->FindBoneByName( pBone->sParentName );
		if( nParentBone != -1 )
		{
			RageMatrixMultiply( &m_vpBones[i].m_Absolute, &m_vpBones[nParentBone].m_Absolute, &m_vpBones[i].m_Relative );
		}
		else
		{
			m_vpBones[i].m_Absolute = m_vpBones[i].m_Relative;
		}
		m_vpBones[i].m_Final = m_vpBones[i].m_Absolute;
	}
	// subtract out the bone's resting position, so that applying a
	// bone's final matrix later moves vertices from rest to posed space
	for( unsigned i = 0; i < m_pGeometry->m_Meshes.size(); ++i )
	{
		msMesh *pMesh = &m_pGeometry->m_Meshes[i];
		vector<RageModelVertex> &Vertices = pMesh->Vertices;
		for( unsigned j = 0; j < Vertices.size(); j++ )
		{
			// int iBoneIndex = (pMesh->m_iBoneIndex!=-1) ? pMesh->m_iBoneIndex : bone;
			RageVector3 &pos = Vertices[j].p;
			int8_t bone = Vertices[j].bone;
			if( bone != -1 )
			{
				pos[0] -= m_vpBones[bone].m_Absolute.m[3][0];
				pos[1] -= m_vpBones[bone].m_Absolute.m[3][1];
				pos[2] -= m_vpBones[bone].m_Absolute.m[3][2];
				RageVector3 vTmp;
				RageMatrix inverse;
				RageMatrixTranspose( &inverse, &m_vpBones[bone].m_Absolute );	// transpose = inverse for rotation matrices
				RageVec3TransformNormal( &vTmp, &pos, &inverse );
				pos = vTmp;
			}
		}
	}
	// Set up m_vpBones, just in case we're drawn without being Update()d.
	SetBones( m_pCurAnimation, m_fCurFrame, m_vpBones );
	UpdateTempGeometry();
}
void Model::SetPosition( float fSeconds )
{
m_fCurFrame = FRAMES_PER_SECOND * fSeconds;
m_fCurFrame = clamp( m_fCurFrame, 0, (float) m_pCurAnimation->nTotalFrames );
}
/* Advance the current animation by fDeltaTime seconds (scaled by the play
 * rate), handling end-of-animation by switching to the default animation,
 * looping, or clamping -- in that priority order -- then re-pose the bones
 * and re-skin the vertices. */
void Model::AdvanceFrame( float fDeltaTime )
{
	if( m_pGeometry == NULL ||
	m_pGeometry->m_Meshes.empty() ||
	!m_pCurAnimation )
	{
		return; // bail early
	}
	// LOG->Trace( "m_fCurFrame = %f", m_fCurFrame );
	m_fCurFrame += FRAMES_PER_SECOND * fDeltaTime * m_fCurAnimationRate;
	// Note: a negative play rate can run m_fCurFrame below 0; that is handled
	// by the same out-of-range branch as running past the end.
	if( m_fCurFrame < 0 || m_fCurFrame >= m_pCurAnimation->nTotalFrames )
	{
		if( m_sDefaultAnimation != "" )
		{
			this->PlayAnimation( m_sDefaultAnimation, m_fDefaultAnimationRate );
			/* XXX: add to m_fCurFrame the wrapover from the previous
			* m_fCurFrame-m_pCurAnimation->nTotalFrames, so it doesn't skip */
		}
		else if( m_bLoop )
			wrap( m_fCurFrame, (float) m_pCurAnimation->nTotalFrames );
		else
			m_fCurFrame = clamp( m_fCurFrame, 0, (float) m_pCurAnimation->nTotalFrames );
	}
	SetBones( m_pCurAnimation, m_fCurFrame, m_vpBones );
	UpdateTempGeometry();
}
/* Pose the bone hierarchy for pAnimation at (fractional) frame fFrame.
 * For each bone, the position/rotation keyframes bracketing fFrame are
 * interpolated (linear for position, slerp for rotation), combined with the
 * bone's rest-pose matrix, and concatenated with the parent bone's final
 * matrix to produce m_Final. */
void Model::SetBones( const msAnimation* pAnimation, float fFrame, vector<myBone_t> &vpBones )
{
	for( size_t i = 0; i < pAnimation->Bones.size(); ++i )
	{
		const msBone *pBone = &pAnimation->Bones[i];

		// A bone with no keyframes at all keeps its rest pose.
		if( pBone->PositionKeys.size() == 0 && pBone->RotationKeys.size() == 0 )
		{
			vpBones[i].m_Final = vpBones[i].m_Absolute;
			continue;
		}

		// Search for the adjacent position keys: the last key strictly before
		// fFrame and the first key at or after it.
		const msPositionKey *pLastPositionKey = NULL, *pThisPositionKey = NULL;
		for( size_t j = 0; j < pBone->PositionKeys.size(); ++j )
		{
			const msPositionKey *pPositionKey = &pBone->PositionKeys[j];
			if( pPositionKey->fTime >= fFrame )
			{
				pThisPositionKey = pPositionKey;
				break;
			}
			pLastPositionKey = pPositionKey;
		}

		/* Default to no translation. This also covers a bone that has
		 * rotation keys but no position keys at all: both bracketing key
		 * pointers stay NULL, and the original code dereferenced a NULL
		 * pointer (and left vPos uninitialized) in that case. */
		RageVector3 vPos( 0, 0, 0 );
		if( pLastPositionKey != NULL && pThisPositionKey != NULL )
		{
			const float s = SCALE( fFrame, pLastPositionKey->fTime, pThisPositionKey->fTime, 0, 1 );
			vPos = pLastPositionKey->Position + (pThisPositionKey->Position - pLastPositionKey->Position) * s;
		}
		else if( pThisPositionKey != NULL )
		{
			vPos = pThisPositionKey->Position; // before the first key
		}
		else if( pLastPositionKey != NULL )
		{
			vPos = pLastPositionKey->Position; // past the last key
		}

		// Same search for the adjacent rotation keys.
		const msRotationKey *pLastRotationKey = NULL, *pThisRotationKey = NULL;
		for( size_t j = 0; j < pBone->RotationKeys.size(); ++j )
		{
			const msRotationKey *pRotationKey = &pBone->RotationKeys[j];
			if( pRotationKey->fTime >= fFrame )
			{
				pThisRotationKey = pRotationKey;
				break;
			}
			pLastRotationKey = pRotationKey;
		}

		// Identity quaternion by default (covers "no rotation keys at all",
		// which previously dereferenced NULL, symmetric to the position case).
		RageVector4 vRot( 0, 0, 0, 1 );
		if( pLastRotationKey != NULL && pThisRotationKey != NULL )
		{
			const float s = SCALE( fFrame, pLastRotationKey->fTime, pThisRotationKey->fTime, 0, 1 );
			RageQuatSlerp( &vRot, pLastRotationKey->Rotation, pThisRotationKey->Rotation, s );
		}
		else if( pThisRotationKey != NULL )
		{
			vRot = pThisRotationKey->Rotation;
		}
		else if( pLastRotationKey != NULL )
		{
			vRot = pLastRotationKey->Rotation;
		}

		// Build the keyframe transform: rotation from the quaternion, then
		// the translation in the matrix's last row. (RageMatrixFromQuat
		// writes every cell, so no separate identity init is needed.)
		RageMatrix m;
		RageMatrixFromQuat( &m, vRot );
		m.m[3][0] = vPos[0];
		m.m[3][1] = vPos[1];
		m.m[3][2] = vPos[2];

		// Apply the keyframe transform on top of the bone's rest pose...
		RageMatrix RelativeFinal;
		RageMatrixMultiply( &RelativeFinal, &vpBones[i].m_Relative, &m );

		// ...and concatenate with the parent's final matrix, if any.
		int iParentBone = pAnimation->FindBoneByName( pBone->sParentName );
		if( iParentBone == -1 )
			vpBones[i].m_Final = RelativeFinal;
		else
			RageMatrixMultiply( &vpBones[i].m_Final, &vpBones[iParentBone].m_Final, &RelativeFinal );
	}
}
/* Re-skin the model: transform each original (bone-local) vertex by its
 * bone's current final matrix into the temp meshes, then upload the result
 * to the graphics card. No-op when there is no geometry or no temp buffer
 * (i.e. the model is not animated). */
void Model::UpdateTempGeometry()
{
	if( m_pGeometry == NULL || m_pTempGeometry == NULL )
		return;
	for( unsigned i = 0; i < m_pGeometry->m_Meshes.size(); ++i )
	{
		const msMesh &origMesh = m_pGeometry->m_Meshes[i];
		msMesh &tempMesh = m_vTempMeshes[i];
		const vector<RageModelVertex> &origVertices = origMesh.Vertices;
		vector<RageModelVertex> &tempVertices = tempMesh.Vertices;
		for( unsigned j = 0; j < origVertices.size(); j++ )
		{
			RageVector3 &tempPos = tempVertices[j].p;
			RageVector3 &tempNormal = tempVertices[j].n;
			const RageVector3 &originalPos = origVertices[j].p;
			const RageVector3 &originalNormal = origVertices[j].n;
			int8_t bone = origVertices[j].bone;
			if( bone == -1 )
			{
				// Unskinned vertex: copy through unchanged.
				tempNormal = originalNormal;
				tempPos = originalPos;
			}
			else
			{
				// Normals use the rotation-only transform; positions get the
				// full transform including translation.
				RageVec3TransformNormal( &tempNormal, &originalNormal, &m_vpBones[bone].m_Final );
				RageVec3TransformCoord( &tempPos, &originalPos, &m_vpBones[bone].m_Final );
			}
		}
	}
	// send the new vertices to the graphics card
	m_pTempGeometry->Change( m_vTempMeshes );
}
void Model::Update( float fDelta )
{
Actor::Update( fDelta );
AdvanceFrame( fDelta );
for( unsigned i = 0; i < m_Materials.size(); ++i )
{
m_Materials[i].diffuse.Update( fDelta );
m_Materials[i].alpha.Update( fDelta );
}
}
int Model::GetNumStates() const
{
int iMaxStates = 0;
FOREACH_CONST( msMaterial, m_Materials, m )
iMaxStates = max( iMaxStates, m->diffuse.GetNumStates() );
return iMaxStates;
}
/* Set every material's diffuse and alpha maps to the given state index. */
void Model::SetState( int iNewState )
{
	for( unsigned i = 0; i < m_Materials.size(); ++i )
	{
		m_Materials[i].diffuse.SetState( iNewState );
		m_Materials[i].alpha.SetState( iNewState );
	}
}
void Model::RecalcAnimationLengthSeconds()
{
m_animation_length_seconds= 0;
FOREACH_CONST(msMaterial, m_Materials, m)
{
m_animation_length_seconds= max(m_animation_length_seconds,
m->diffuse.GetAnimationLengthSeconds());
}
}
/* Seek every material's texture animation to the given time. */
void Model::SetSecondsIntoAnimation( float fSeconds )
{
	for( unsigned i = 0; i < m_Materials.size(); ++i )
	{
		m_Materials[i].diffuse.SetSecondsIntoAnimation( fSeconds );
		m_Materials[i].alpha.SetSecondsIntoAnimation( fSeconds );
	}
}
/* True if any material requires vertex normals (e.g. for lighting). */
bool Model::MaterialsNeedNormals() const
{
	for( unsigned i = 0; i < m_Materials.size(); ++i )
	{
		if( m_Materials[i].NeedsNormals() )
			return true;
	}
	return false;
}
// lua start
#include "LuaBinding.h"
/** @brief Allow Lua to have access to the Model. */
class LunaModel: public Luna<Model>
{
public:
	// Seek to a time (seconds) in the current animation.
	static int position( T* p, lua_State *L ) { p->SetPosition( FArg(1) ); COMMON_RETURN_SELF; }
	// Play a named animation at the given rate.
	static int playanimation( T* p, lua_State *L ) { p->PlayAnimation(SArg(1),FArg(2)); COMMON_RETURN_SELF; }
	// Set the fallback animation (name, rate) used when another one ends.
	static int SetDefaultAnimation( T* p, lua_State *L ) { p->SetDefaultAnimation(SArg(1),FArg(2)); COMMON_RETURN_SELF; }
	static int GetDefaultAnimation( T* p, lua_State *L ) { lua_pushstring( L, p->GetDefaultAnimation() ); return 1; }
	static int loop( T* p, lua_State *L ) { p->SetLoop(BArg(1)); COMMON_RETURN_SELF; }
	static int rate( T* p, lua_State *L ) { p->SetRate(FArg(1)); COMMON_RETURN_SELF; }
	static int GetNumStates( T* p, lua_State *L ) { lua_pushnumber( L, p->GetNumStates() ); return 1; }
	//static int CelShading( T* p, lua_State *L ) { p->SetCelShading(BArg(1)); COMMON_RETURN_SELF; }
	LunaModel()
	{
		ADD_METHOD( position );
		ADD_METHOD( playanimation );
		ADD_METHOD( SetDefaultAnimation );
		ADD_METHOD( GetDefaultAnimation );
		ADD_METHOD( loop );
		ADD_METHOD( rate );
		// sm-ssc adds:
		ADD_METHOD( GetNumStates );
		//ADD_METHOD( CelShading );
		// LoadMilkshapeAsciiBones?
	}
};
LUA_REGISTER_DERIVED_CLASS( Model, Actor )
// lua end
/*
* (c) 2003-2004 <NAME>
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, and/or sell copies of the Software, and to permit persons to
* whom the Software is furnished to do so, provided that the above
* copyright notice(s) and this permission notice appear in all copies of
* the Software and that both the above copyright notice(s) and this
* permission notice appear in supporting documentation.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF
* THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS
* INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT
* OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
* OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
* OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
* PERFORMANCE OF THIS SOFTWARE.
*/
|
<filename>pecado-ims/pecado-ims-web/src/main/java/me/batizhao/ims/mapper/UserRoleMapper.java
package me.batizhao.ims.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import me.batizhao.ims.api.domain.UserRole;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis-Plus mapper for the user-role relation entity {@link UserRole}.
 * <p>
 * All standard CRUD operations are inherited from {@link BaseMapper};
 * no custom SQL methods are defined yet.
 *
 * @author batizhao
 * @since 2020-09-14
 */
@Mapper
public interface UserRoleMapper extends BaseMapper<UserRole> {
}
|
use Symfony\Component\DependencyInjection\Compiler\CompilerPassInterface;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\DependencyInjection\Reference;
/**
 * Compiler pass that wires a dependency into every service tagged with
 * "your_specific_tag": each tagged definition gets an extra method call
 * passing a reference to "your_dependency".
 */
class StoragePass implements CompilerPassInterface
{
    public function process(ContainerBuilder $container)
    {
        // Only the service ids matter here; the tag attributes are unused.
        $taggedServiceIds = array_keys($container->findTaggedServiceIds('your_specific_tag'));

        foreach ($taggedServiceIds as $serviceId) {
            $container
                ->getDefinition($serviceId)
                ->addMethodCall('your_specific_method', [new Reference('your_dependency')]);
        }
    }
}
package io.opensphere.myplaces.importer;
import java.awt.Color;
import java.io.Serializable;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import io.opensphere.core.control.action.MenuOption;
import io.opensphere.core.export.ExportException;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.util.AwesomeIconSolid;
import io.opensphere.core.util.swing.GenericFontIcon;
import io.opensphere.mantle.MantleToolbox;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.MapVisualizationType;
import io.opensphere.mantle.data.element.DataElement;
import io.opensphere.mantle.data.element.MapDataElement;
import io.opensphere.mantle.data.geom.MapLocationGeometrySupport;
import io.opensphere.mantle.data.geom.style.VisualizationStyle;
import io.opensphere.mantle.data.geom.style.impl.AbstractFeatureVisualizationStyle;
import io.opensphere.myplaces.models.MyPlacesEditListener;
import io.opensphere.myplaces.specific.factory.TypeControllerFactory;
import io.opensphere.tracktool.model.TrackNode;
import io.opensphere.tracktool.model.impl.DefaultTrackNode;
import io.opensphere.tracktool.registry.TrackRegistry;
/**
* An exporter that creates tracks from {@link DataElement}s.
*/
public class DataElementTrackExporter extends AbstractDataElementExporter
{
    /** The name of the command / menu option to create track from selection. */
    private static final String CREATE_TRACK_COMMAND = "Create Track From Selection";

    @Override
    public MenuOption getMenuOption()
    {
        return new MenuOption(CREATE_TRACK_COMMAND, CREATE_TRACK_COMMAND,
                "Creates a new track by linking selected features in time order",
                new GenericFontIcon(AwesomeIconSolid.SHARE_ALT, Color.WHITE));
    }

    /**
     * Builds a track from the selected elements: keeps only point features,
     * sorts them by time span, converts each to a track node (applying the
     * style's altitude column when enabled), and registers the new track.
     *
     * @throws ExportException if no selected element is a point feature
     */
    @Override
    protected void export(DataTypeInfo dataType, Collection<? extends MapDataElement> elements) throws ExportException
    {
        final AbstractFeatureVisualizationStyle style = getStyle(dataType);
        List<TrackNode> trackNodes = elements.stream()
                .filter(e -> e.getMapGeometrySupport() instanceof MapLocationGeometrySupport).sorted(new TimeComparator())
                .map(e -> createTrackNode(e, style)).collect(Collectors.toList());
        if (!trackNodes.isEmpty())
        {
            // NOTE(review): assumes the USER_TRACK_ELEMENTS controller also
            // implements MyPlacesEditListener -- confirm in TypeControllerFactory.
            MyPlacesEditListener listener = TypeControllerFactory.getInstance()
                    .getController(MapVisualizationType.USER_TRACK_ELEMENTS);
            TrackRegistry.getInstance().createNewTrackFromNodes(trackNodes, getToolbox(), getParentGroup(), listener);
        }
        else
        {
            throw new ExportException("The selected data do not contain any points.");
        }
    }

    /**
     * Gets the style for the data type.
     *
     * @param dataType the data type
     * @return the style, or null if no feature style is registered for the type
     */
    private AbstractFeatureVisualizationStyle getStyle(DataTypeInfo dataType)
    {
        AbstractFeatureVisualizationStyle style = null;
        MantleToolbox mantleToolbox = getToolbox().getPluginToolboxRegistry().getPluginToolbox(MantleToolbox.class);
        VisualizationStyle visStyle = mantleToolbox.getVisualizationStyleRegistry().getStyle(MapLocationGeometrySupport.class,
                dataType.getTypeKey(), true);
        if (visStyle instanceof AbstractFeatureVisualizationStyle)
        {
            style = (AbstractFeatureVisualizationStyle)visStyle;
        }
        return style;
    }

    /**
     * Creates a track node from the data element. When the style uses an
     * altitude column, the element's location is rebuilt with that altitude
     * (defaulting to 0 when the column value is missing).
     *
     * @param dataElement the data element
     * @param style the style (may be null; altitude is then ignored)
     * @return the track node
     */
    private static DefaultTrackNode createTrackNode(MapDataElement dataElement, AbstractFeatureVisualizationStyle style)
    {
        LatLonAlt loc = ((MapLocationGeometrySupport)dataElement.getMapGeometrySupport()).getLocation();
        if (style != null && style.isUseAltitude())
        {
            double alt = 0.;
            Double altitudeColumnValueM = style.getAltitudeColumnValueM(dataElement.getMetaData());
            if (altitudeColumnValueM != null)
            {
                alt = altitudeColumnValueM.doubleValue();
            }
            loc = LatLonAlt.createFromDegreesMeters(loc.getLatD(), loc.getLonD(), alt, loc.getAltitudeReference());
        }
        return new DefaultTrackNode(loc, dataElement.getTimeSpan(), Long.valueOf(dataElement.getIdInCache()));
    }

    /** A comparator for sorting data elements by time (null time spans last). */
    private static class TimeComparator implements Comparator<DataElement>, Serializable
    {
        /** Serial. */
        private static final long serialVersionUID = 1L;

        @Override
        public int compare(DataElement o1, DataElement o2)
        {
            if (o1.getTimeSpan() != null)
            {
                return o2.getTimeSpan() != null ? o1.getTimeSpan().compareTo(o2.getTimeSpan()) : -1;
            }
            else if (o2.getTimeSpan() != null)
            {
                return 1;
            }
            // Both time spans null: fall back to an arbitrary-but-stable order
            // based on identity hash so the sort stays deterministic in-run.
            return o1.hashCode() < o2.hashCode() ? -1 : o1.hashCode() > o2.hashCode() ? 1 : 0;
        }
    }
}
|
package com.example.wesense_wearos.networkClasses;
/**
 * Placeholder for a TCP client in the Wear OS networking layer.
 * <p>
 * NOTE(review): this class is currently empty -- either implement it or
 * remove it; an empty public class suggests unfinished work.
 */
public class TCP_Client {
}
|
#!/bin/bash
# Run the CEGIS control-synthesis benchmark in the given regression directory,
# sweeping the fixed-point word width (8..64, step 2) until synthesis succeeds.
#
# Usage: $0 <benchmark_dir>   (relative to the regression base directory)
# Exit:  0 on the first successful synthesis, 10 if no width works,
#        1 on usage/setup errors.

# Prefer the control-synthesis build of CBMC over the stock one on PATH.
export PATH=${PATH//cbmc-5190/cbmc-trunk-diffblue-control-synthesis}
export PATH=${PATH}:/users/pkesseli/software/cpp/cbmc/cbmc-trunk-diffblue-control-synthesis/src/cegis:/users/pkesseli/software/cpp/cbmc/cbmc-trunk-diffblue-control-synthesis-analyzer/src/goto-analyzer

benchmark_basedir='/users/pkesseli/software/cpp/cbmc/cbmc-trunk-diffblue-control-synthesis/regression'

benchmark_dir=$1
if [ -z "${benchmark_dir}" ]; then
  echo "usage: $0 <benchmark_dir>" >&2
  exit 1
fi

# Fail loudly if either directory is missing instead of running cegis in the
# wrong place (the original script ignored cd failures).
cd "${benchmark_basedir}" || exit 1
cd "${benchmark_dir}" || exit 1

for word_width in {8..64..2}; do
  echo "width: ${word_width}"
  # Stop at the first word width for which synthesis succeeds.
  if cegis -D _FIXEDBV -D _CONTROL_FLOAT_WIDTH=${word_width} --fixedbv --cegis-control --cegis-statistics --cegis-max-size 1 *.c >/dev/null 2>&1; then
    exit 0
  fi
done
exit 10
|
<gh_stars>0
/// <reference types="jquery" />
export function Json(url: string, method: string, data: any): JQueryXHR {
// TODO does not work reliably: cache set to false for all controller actions by default (reason: ipad bug where function was httpGET originally and cached response was used for same function as httpPOST)
let request: JQueryXHR;
if (data !== null) {
request = jQuery.ajax(url, { method: method, data: JSON.stringify(data), contentType: "application/json; charset=utf-8" });
}
else {
request = jQuery.ajax(url, { method: method });
}
request.fail((data: JQueryXHR): void => {
if ((data.responseJSON !== undefined) && (data.responseJSON.StatusText !== undefined))
{
console.log(data.responseJSON.StatusText);
}
});
return request; // TODO multiple queues, running, done, fail, reschedule; compare revision and
// TODO pause when detecting holes
};
export function Action(url: string, method: string, data: any): JQueryXHR {
// TODO does not work reliably: cache set to false for all controller actions by default (reason: ipad bug where function was httpGET originally and cached response was used for same function as httpPOST)
let request: JQueryXHR;
if (data !== null) {
request = jQuery.ajax(url, { method: method, data: JSON.stringify(data), contentType: "application/json; charset=utf-8" });
}
else {
request = jQuery.ajax(url, { method: method });
}
request.fail((data: JQueryXHR): void => {
if (data.statusText !== undefined) {
console.log(data.statusText);
}
});
return request;
}; |
<gh_stars>0
from app import app
from flask import Blueprint, jsonify
from flask_swagger import swagger
from flask_swagger_ui import get_swaggerui_blueprint
# Path where the interactive Swagger UI is mounted, and the spec URL it reads.
# NOTE(review): API_URL hard-codes localhost:5000 -- confirm this matches the
# deployment host/port, or derive it from the request context instead.
SWAGGER_URL = "/api/docs"
API_URL = "http://localhost:5000/api/spec"
# Blueprint exposing the machine-readable Swagger/OpenAPI spec.
swag = Blueprint('swagger', __name__)
@swag.route("/spec")
def spec():
    # Introspect the Flask app's routes/docstrings into a Swagger JSON document.
    return jsonify(swagger(app))
# Pre-built blueprint serving the Swagger UI at SWAGGER_URL, pointed at API_URL.
swaggerui_blueprint = get_swaggerui_blueprint(
    SWAGGER_URL,
    API_URL,
    config={
        "app_name": "Fidelio"
    }
)
|
#! /bin/sh
# Start up mongo-orchestration (a server to spawn mongodb clusters) and set up a cluster.
#
# Specify the following environment variables:
#
# MONGODB_VERSION: latest, 4.2, 4.0, 3.6
# TOPOLOGY: server, replica_set, sharded_cluster
# AUTH: auth, noauth
# AUTHSOURCE
# IPV4_ONLY: off, on
# SSL: openssl, darwinssl, winssl, nossl
# ORCHESTRATION_FILE: <file name in orchestration_configs/ without .json>
# If this is not set, the file name is constructed from other options.
# OCSP: off, on
# REQUIRE_API_VERSION: set to a non-empty string to set the requireApiVersion parameter
# This is currently only supported for standalone servers
#
# This script may be run locally.
#
set -o errexit # Exit the script with error if any of the commands fail
DIR=$(dirname $0)
# Functions to fetch MongoDB binaries
. $DIR/download-mongodb.sh
get_distro
# get_mongodb_download_url_for presumably sets MONGODB_DOWNLOAD_URL (and
# EXTRACT) as side effects -- the generic linux URL is captured first so we
# can detect when only the non-SSL build is available for this distro.
GENERIC_LINUX_URL=$(get_mongodb_download_url_for "linux-x86_64" "$MONGODB_VERSION")
get_mongodb_download_url_for "$DISTRO" "$MONGODB_VERSION"
if [ "$MONGODB_DOWNLOAD_URL" = "$GENERIC_LINUX_URL" -a ! "$SSL" = "nossl" ]; then
  echo "Requested a version of MongoDB with SSL, but only generic (non-SSL) Linux version available"
  exit 1;
fi
DRIVERS_TOOLS=./ download_and_extract "$MONGODB_DOWNLOAD_URL" "$EXTRACT"
OS=$(uname -s | tr '[:upper:]' '[:lower:]')
# Defaults for unset options (accepted values are listed in the file header).
AUTH=${AUTH:-noauth}
SSL=${SSL:-nossl}
TOPOLOGY=${TOPOLOGY:-server}
OCSP=${OCSP:-off}
REQUIRE_API_VERSION=${REQUIRE_API_VERSION}
# If caller of script specifies an ORCHESTRATION_FILE, do not attempt to modify it. Otherwise construct it.
if [ -z "$ORCHESTRATION_FILE" ]; then
  ORCHESTRATION_FILE="basic"
  if [ "$AUTH" = "auth" ]; then
    ORCHESTRATION_FILE="auth"
  fi
  if [ "$IPV4_ONLY" = "on" ]; then
    ORCHESTRATION_FILE="${ORCHESTRATION_FILE}-ipv4-only"
  fi
  if [ -n "$AUTHSOURCE" ]; then
    ORCHESTRATION_FILE="${ORCHESTRATION_FILE}-${AUTHSOURCE}"
  fi
  if [ "$SSL" != "nossl" ]; then
    ORCHESTRATION_FILE="${ORCHESTRATION_FILE}-ssl"
  fi
fi
# Set up mongo orchestration home.
case "$OS" in
  cygwin*)
    export MONGO_ORCHESTRATION_HOME="c:/data/MO"
    # mongo-orchestration on Windows needs a Windows-style absolute path.
    FULL_PATH=$(cygpath -m -a .)
    ;;
  *)
    export MONGO_ORCHESTRATION_HOME=$(pwd)"/MO"
    FULL_PATH=$(pwd)
    ;;
esac
# Start from a clean orchestration home on every run.
rm -rf "$MONGO_ORCHESTRATION_HOME"
mkdir -p "$MONGO_ORCHESTRATION_HOME/lib"
mkdir -p "$MONGO_ORCHESTRATION_HOME/db"
# Replace ABSOLUTE_PATH_REPLACEMENT_TOKEN with path to mongo-c-driver.
find orchestration_configs -name \*.json | xargs perl -p -i -e "s|ABSOLUTE_PATH_REPLACEMENT_TOKEN|$FULL_PATH|g"
# mongo-orchestration expects client.pem to be in MONGO_ORCHESTRATION_HOME. So always copy it.
cp -f src/libmongoc/tests/x509gen/* $MONGO_ORCHESTRATION_HOME/lib/
# find print0 and xargs -0 not available on Solaris. Lets hope for good paths
find orchestration_configs -name \*.json | xargs perl -p -i -e "s|/tmp/orchestration-home|$MONGO_ORCHESTRATION_HOME/lib|g"
# Resolve the final config path/URL for the chosen topology.
export ORCHESTRATION_FILE="orchestration_configs/${TOPOLOGY}s/${ORCHESTRATION_FILE}.json"
export ORCHESTRATION_URL="http://localhost:8889/v1/${TOPOLOGY}s"
# Keep mongod temp files inside the orchestration home.
export TMPDIR=$MONGO_ORCHESTRATION_HOME/db
echo From shell `date` > $MONGO_ORCHESTRATION_HOME/server.log
# Install the latest mongo-orchestration into a fresh virtualenv and launch it
# in the background; the cygwin branch differs only in paths/python binary.
case "$OS" in
  cygwin*)
    PYTHON=python.exe
    # Python has problems with unix style paths in cygwin. Must use c:\\ paths
    rm -rf /cygdrive/c/mongodb
    cp -r mongodb /cygdrive/c/mongodb
    echo "{ \"releases\": { \"default\": \"c:\\\\mongodb\\\\bin\" }}" > orchestration.config
    # Make sure MO is running latest version
    $PYTHON -m virtualenv venv
    cd venv
    . Scripts/activate
    rm -rf mongo-orchestration
    git clone --depth 1 git@github.com:10gen/mongo-orchestration.git
    cd mongo-orchestration
    pip install .
    cd ../..
    ls `pwd`/mongodb/bin/mongo* || true
    nohup mongo-orchestration -f orchestration.config -e default --socket-timeout-ms=60000 --bind=127.0.0.1 --enable-majority-read-concern -s wsgiref start > $MONGO_ORCHESTRATION_HOME/out.log 2> $MONGO_ORCHESTRATION_HOME/err.log < /dev/null &
    ;;
  *)
    echo "{ \"releases\": { \"default\": \"`pwd`/mongodb/bin\" } }" > orchestration.config
    if [ -f /opt/python/2.7/bin/python ]; then
      # Python toolchain installation.
      PYTHON=/opt/python/2.7/bin/python
    elif [ "x$(lsb_release -cs)" = "xtrusty" -a -f /opt/mongodbtoolchain/v2/bin/python ]; then
      # Python toolchain installation.
      PYTHON=/opt/mongodbtoolchain/v2/bin/python
    else
      PYTHON=python
    fi
    PIP=pip
    # Fall back to python3/pip3 when the chosen python lacks virtualenv.
    if ! $PYTHON -c "import virtualenv" ; then
      PYTHON=python3
      PIP=pip3
    fi
    $PYTHON -m virtualenv venv
    cd venv
    . bin/activate
    rm -rf mongo-orchestration
    # Make sure MO is running latest version
    git clone --depth 1 git@github.com:10gen/mongo-orchestration.git
    cd mongo-orchestration
    # Our zSeries machines are static-provisioned, cache corruptions persist.
    if [ $(uname -m) = "s390x" ]; then
      echo "Disabling pip cache"
      PIP_PARAM="--no-cache-dir"
    fi
    $PIP $PIP_PARAM install .
    cd ../..
    mongo-orchestration -f orchestration.config -e default --socket-timeout-ms=60000 --bind=127.0.0.1 --enable-majority-read-concern start > $MONGO_ORCHESTRATION_HOME/out.log 2> $MONGO_ORCHESTRATION_HOME/err.log < /dev/null &
    ;;
esac
# Give the orchestration server time to come up, then verify it responds
# before posting the cluster configuration.
sleep 15
echo "Checking that mongo-orchestration is running"
curl http://localhost:8889/ -sS --max-time 120 --fail | python -m json.tool
sleep 5
pwd
# POST the cluster configuration; the request blocks until the cluster is up.
curl -sS --data @"$ORCHESTRATION_FILE" "$ORCHESTRATION_URL" --max-time 300 --fail | python -m json.tool
sleep 15
# Build mongo-shell flags that match the cluster we just configured.
if [ "$AUTH" = "auth" ]; then
  MONGO_SHELL_CONNECTION_FLAGS="-ubob -ppwd123"
fi
if [ -n "$AUTHSOURCE" ]; then
  MONGO_SHELL_CONNECTION_FLAGS="${MONGO_SHELL_CONNECTION_FLAGS} --authenticationDatabase ${AUTHSOURCE}"
fi
# OCSP uses --tls with relaxed cert checks; plain SSL pins the test CA/cert.
if [ "$OCSP" != "off" ]; then
  MONGO_SHELL_CONNECTION_FLAGS="${MONGO_SHELL_CONNECTION_FLAGS} --host localhost --tls --tlsAllowInvalidCertificates"
elif [ "$SSL" != "nossl" ]; then
  MONGO_SHELL_CONNECTION_FLAGS="${MONGO_SHELL_CONNECTION_FLAGS} --host localhost --ssl --sslCAFile=$MONGO_ORCHESTRATION_HOME/lib/ca.pem --sslPEMKeyFile=$MONGO_ORCHESTRATION_HOME/lib/client.pem"
fi
if [ ! -z "$REQUIRE_API_VERSION" ]; then
  MONGO_SHELL_CONNECTION_FLAGS="${MONGO_SHELL_CONNECTION_FLAGS} --apiVersion=1"
  # Set the requireApiVersion parameter.
  `pwd`/mongodb/bin/mongo $MONGO_SHELL_CONNECTION_FLAGS $DIR/require-api-version.js
fi
echo $MONGO_SHELL_CONNECTION_FLAGS
# Smoke-test the connection with the flags we just built.
`pwd`/mongodb/bin/mongo $MONGO_SHELL_CONNECTION_FLAGS --eval 'printjson(db.serverBuildInfo())' admin
`pwd`/mongodb/bin/mongo $MONGO_SHELL_CONNECTION_FLAGS --eval 'printjson(db.isMaster())' admin
|
<reponame>Mammut-FE/vscode-hive-languageservice
import {
Expr,
getPath,
ICol,
Keyword,
Node,
NodeType,
Program,
Select,
SubSelect,
TableName,
Use
} from '@mammut-fe/hive-parser';
import {
CompletionItem,
CompletionItemKind,
CompletionList,
InsertTextFormat,
Position,
Range,
TextDocument,
TextEdit
} from 'vscode-languageserver-types';
import { ICompletionParticipant, IDatabase } from '../hiveLanguageTypes';
import { updateDatabase } from './databaseService';
import * as languageFacts from './languageFacts';
const SnippetFormat = InsertTextFormat.Snippet;
export class HiveCompletion {
position: Position;
offset: number;
currentWord: string;
textDocument: TextDocument;
program: Program;
defaultReplaceRange: Range;
nodePath: Node[];
completionParticipants: ICompletionParticipant[] = [];
    /**
     * Computes the document range a completion item should replace for the
     * node under the cursor. For dotted names ("db.table") only the part
     * after the '.' is targeted (an empty range right after it); otherwise
     * the range runs from just past the node start to the node end (or the
     * cursor, for unterminated nodes whose end === -1). Falls back to
     * defaultReplaceRange when there is no usable node.
     */
    private getCompletionRangeFromNode(existingNode: Node): Range {
        if (existingNode && existingNode.offset <= this.offset) {
            const index = existingNode.getText().indexOf('.');
            let end, start;
            if (index > -1) {
                start = this.textDocument.positionAt(existingNode.offset + index + 1);
                end = start;
            } else {
                // NOTE(review): start skips the node's first character
                // (offset + 1) -- presumably intentional; verify against the
                // parser's token offsets.
                start = this.textDocument.positionAt(existingNode.offset + 1);
                end = existingNode.end !== -1 ? this.textDocument.positionAt(existingNode.end) : this.position;
            }
            return Range.create(start, end);
        }
        return this.defaultReplaceRange;
    }
private findBeforeExprNode(node: Expr): Node {
let offset = node.offset - 1;
let prev = this.program.findChildAtOffset(offset, true);
while (prev.type === NodeType.Expr) {
offset = prev.offset - 1;
prev = this.program.findChildAtOffset(offset, true);
}
return prev;
}
private findBeforeSemicolonExpr(node: Node): Node {
let offset = node.offset - 1;
let prev = this.program.findChildAtOffset(offset, true);
while (prev.type !== NodeType.Expr) {
offset = prev.offset - 1;
prev = this.program.findChildAtOffset(offset, true);
}
return prev;
}
private getCurrentDatabase(offset: number): string {
const block = this.program.getBlockNode();
const useNodes = block.getChildren().filter(node => {
return node.type === NodeType.Use;
});
for (let i = useNodes.length - 1; i >= 0; i--) {
let useNode = useNodes[i];
if (offset > useNode.end) {
return (useNode as Use).getUseDbName();
}
}
return null;
}
    /**
     * Resolves a "db.table" (or bare "table") reference for column
     * completion. When the first segment names a CTE in the enclosing
     * SELECT, the CTE's origin query supplies the columns and -- when it
     * reads from exactly one table -- the underlying db/table. A bare table
     * name is qualified with the database from the closest preceding USE.
     */
    private getCurrentTableInfo(node: Node, str: string) {
        let [dbName, tableName] = str.split('.');
        let columns: ICol[] = [];
        const select = node.findParent(NodeType.Select) as Select;
        const cteTables = select.getCteTables();
        if (cteTables.length > 0) {
            // For "cte.col" the segment before the '.' is the CTE name.
            const rawTale = cteTables.filter(cteTable => {
                return cteTable.name === dbName;
            })[0];
            if (rawTale) {
                const { origin } = rawTale;
                const fromTables = origin.getFromTables();
                if (fromTables.length === 1) {
                    [dbName, tableName] = (fromTables[0].rawTable as string).split('.');
                }
                columns = origin.getSelectCols();
            }
        }
        if (tableName === undefined) {
            // No '.' in the input: treat it as a table in the current database.
            tableName = dbName;
            dbName = this.getCurrentDatabase(node.findParent(NodeType.Select).offset);
        }
        return {
            dbName,
            tableName,
            columns
        };
    }
    /**
     * Entry point: computes completions for the given cursor position.
     * Captures request state on the instance, walks the AST path from the
     * innermost node at the cursor outwards dispatching to the per-node
     * handlers, and clears all state in `finally` so nothing leaks between
     * requests.
     */
    public doComplete(
        document: TextDocument,
        position: Position,
        program: Program,
        databases?: IDatabase[]
    ): CompletionList {
        this.offset = document.offsetAt(position);
        this.position = position;
        this.currentWord = getCurrentWord(document, this.offset);
        // Default range: replace the word fragment just before the cursor.
        this.defaultReplaceRange = Range.create(
            Position.create(this.position.line, this.position.character - this.currentWord.length),
            this.position
        );
        this.textDocument = document;
        this.program = program;
        if (databases) {
            updateDatabase(databases);
        }
        try {
            let result: CompletionList = { isIncomplete: false, items: [] };
            this.nodePath = getPath(this.program, this.offset);
            // Innermost node first; stop at the first handler that produced
            // items (or once we've passed the node containing the cursor).
            for (let i = this.nodePath.length - 1; i >= 0; i--) {
                let node = this.nodePath[i];
                // NOTE(review): the Use branch is intentionally empty --
                // presumably USE statements get no proposals here; confirm.
                if (node.type === NodeType.Use) {
                } else if (node.type === NodeType.SubSelect) {
                    this.getCompletionsForSelectList(node, result);
                } else if (node.type === NodeType.SelectList) {
                    this.getCompletionsForSelectList(node.findParent(NodeType.SubSelect), result);
                } else if (node.type === NodeType.TableName) {
                    this.getCompletionsForFrom(node, result);
                } else if (node.type === NodeType.Expr) {
                    this.getCompletionsForExpr(node as Expr, result);
                } else if (node.type === NodeType.Keyword) {
                    this.getCompletionsForKeywords(node as Keyword, result);
                } else if (node.type === NodeType.Semicolon) {
                    this.getCompletionsSemicolon(node, result);
                } else if (node.parent === null) {
                    this.getCompletionsForTopLevel(result);
                } else {
                    continue;
                }
                if (result.items.length > 0 || this.offset > node.offset) {
                    return this.finalize(result);
                }
            }
            this.getCompletionForProgram(result);
            return this.finalize(result);
        } finally {
            // don't hold on any state, clear symbolContext
            this.position = null;
            this.currentWord = null;
            this.textDocument = null;
            this.program = null;
            this.defaultReplaceRange = null;
            this.nodePath = null;
        }
    }
private finalize(result: CompletionList): CompletionList {
let needSortText = result.items.some(i => !!i.sortText);
if (needSortText) {
for (let i of result.items) {
if (!i.sortText) {
i.sortText = 'd';
}
}
}
return result;
}
    /**
     * Adds one completion item per enumerated value of `entry`. Values whose
     * names end in "()" are converted to snippets with the cursor placed
     * inside the parentheses. Items replace the range derived from
     * `existingNode` and sort after normal proposals (sortText 'e').
     */
    public getValueEnumProposals(
        entry: languageFacts.IEntry,
        existingNode: Node,
        result: CompletionList
    ): CompletionList {
        if (entry.values) {
            for (let value of entry.values) {
                let insertString = value.name;
                let insertTextFormat;
                if (insertString.endsWith(')')) {
                    // Function-like value: turn "name(...)" into the snippet
                    // "name($1)" so the caret lands between the parens.
                    let from = insertString.lastIndexOf('(');
                    if (from !== -1) {
                        insertString = insertString.substr(0, from) + '($1)';
                        insertTextFormat = SnippetFormat;
                    }
                }
                let item: CompletionItem = {
                    label: value.name,
                    documentation: languageFacts.getEntryDescription(value),
                    textEdit: TextEdit.replace(this.getCompletionRangeFromNode(existingNode), insertString),
                    kind: value.kind,
                    insertTextFormat,
                    sortText: 'e',
                    detail: value.detail
                };
                result.items.push(item);
            }
        }
        return result;
    }
    /**
     * Fallback used when the node-path walk produced nothing: descend to the
     * deepest node before the cursor and complete based on its kind, or fall
     * back to top-level proposals when the program is empty.
     */
    public getCompletionForProgram(result: CompletionList): CompletionList {
        if (this.program === null) {
            return this.getCompletionsForTopLevel(result);
        }
        let node = this.program.findFirstChildBeforeOffset(this.offset);
        let prev: Node;
        if (!node) {
            return this.getCompletionsForTopLevel(result);
        }
        // Descend to the deepest child that still starts before the cursor.
        while (node) {
            prev = node;
            node = node.findFirstChildBeforeOffset(this.offset);
        }
        if (prev instanceof Expr) {
            this.getCompletionsForExpr(prev, result);
        }
        if (prev.type === NodeType.ID) {
            this.getCompletionsForIdentifier(prev, result);
        }
        return result;
    }
    /**
     * Top-level proposals. A trailing '.' in the current word means
     * "db." -- offer that database's tables, inserted after the dot.
     * Otherwise offer keywords, functions and databases.
     */
    public getCompletionsForTopLevel(result: CompletionList): CompletionList {
        if (this.currentWord.endsWith('.')) {
            const [dbName] = this.currentWord.split('.');
            for (let entry of languageFacts.getTableEntryList(dbName)) {
                result.items.push({
                    label: entry.name,
                    documentation: languageFacts.getEntryDescription(entry),
                    kind: CompletionItemKind.Text,
                    // Insert at the cursor (empty range) rather than replacing the "db." prefix.
                    textEdit: TextEdit.replace(Range.create(this.defaultReplaceRange.end, this.defaultReplaceRange.end), entry.name),
                    detail: entry.detail
                });
            }
        } else {
            this.getCompletionsForKeywords(null, result);
            this.getCompletionsForFunctions(result);
            this.getCompletionsForDatabase(null, result, 'z');
        }
        return result;
    }
public getCompletionsForFunctions(result: CompletionList): CompletionList {
for (let entry of languageFacts.getFunctionsEntryList()) {
result.items.push({
label: entry.name,
documentation: languageFacts.getEntryDescription(entry),
kind: CompletionItemKind.Function,
detail: 'function',
sortText: 'g'
});
}
return result;
}
    /**
     * Keyword-position proposals. Given a concrete keyword node, only
     * "select" gets special handling (delegates to select-list completion);
     * with node === null, every known keyword is proposed (sortText 'f').
     */
    public getCompletionsForKeywords(node: Node, result: CompletionList): CompletionList {
        if (node) {
            switch (node.getText().toLowerCase()) {
                case 'select':
                    this.getCompletionsForSelectList(node, result);
                    break;
            }
        } else {
            for (let entry of languageFacts.getKeywordEntryList()) {
                result.items.push({
                    label: entry.name,
                    documentation: languageFacts.getEntryDescription(entry),
                    kind: CompletionItemKind.Keyword,
                    detail: 'keyword',
                    sortText: 'f'
                });
            }
        }
        return result;
    }
    /**
     * Dispatches completion for a generic expression node. Dotted text is
     * treated as "alias.col" in a select list or "db.table" elsewhere;
     * otherwise the expression's own keyword text ("use"/"from"/"select"/
     * "join") picks the handler, falling back to top-level proposals.
     */
    public getCompletionsForExpr(node: Expr, result: CompletionList): CompletionList {
        const exprText = node.getText().toLowerCase();
        if (exprText.indexOf('.') !== -1) {
            if (node.parent.type === NodeType.SelectListItem) {
                // "alias.<cursor>" in a select list: complete columns.
                const [aliasName] = exprText.split('.');
                this.getCompletionsForSelectList(node, result, aliasName);
            } else {
                // "db.<cursor>" elsewhere: complete tables of that database.
                const [dbName] = exprText.split('.');
                this.getCompletionsForTable(node, result, dbName, 'a');
            }
        } else {
            switch (node.getText().toLowerCase()) {
                case 'use':
                    this.getCompletionsForUse(node, result);
                    break;
                case 'from':
                    this.getCompletionsForFrom(node, result);
                    break;
                case 'select':
                    this.getCompletionsForSelectList(node, result);
                    break;
                case 'join':
                    this.getCompletionsForJoin(node, result);
                    break;
                default:
                    this.getCompletionsForTopLevel(result);
            }
        }
        return result;
    }
/**
 * Provides completions after the USE keyword: statement templates first,
 * then every known database (sortText 'a' ranks them at the top).
 * @param node unused; kept for signature parity with the other providers
 * @param result the completion list to append to
 * @returns the same completion list, for chaining
 */
public getCompletionsForUse(node: Node, result: CompletionList): CompletionList {
	const useEntries = languageFacts.getUseStmtEntryList();
	useEntries.forEach(entry => this.getValueEnumProposals(entry, null, result));
	this.getCompletionsForDatabase(null, result, 'a');
	return result;
}
/**
 * Provides completions after the FROM keyword.
 * @param node the node under the cursor (a TableName or a plain Expr)
 * @param result the completion list to append to
 * @returns the same completion list, for chaining
 */
public getCompletionsForFrom(node: Node, result: CompletionList): CompletionList {
	if (node.type === NodeType.TableName) {
		/**
		 * select * from db.|
		 * Prefer suggesting the tables that belong to `db`.
		 */
		let [db] = (node as TableName).getTableName().split('.');
		this.getCompletionsForTable(node, result, db, 'a');
	} else if (node.type === NodeType.Expr) {
		/**
		 * select * from |
		 * Suggest, in order: CTE tables, the database selected via USE,
		 * then all databases.
		 */
		const prev = this.findBeforeExprNode(node);
		const selectNode = prev.findParent(NodeType.Select) as Select;
		if (selectNode) {
			this.getTableCompletionList(selectNode, result);
		}
	}
	return result;
}
/**
 * Provides completions inside a SELECT list.
 * For an `alias.` expression, proposes the columns of the table the alias
 * resolves to; for a keyword/sub-select node, proposes columns of the FROM
 * table. Select-statement templates are always appended for expressions.
 * @param node the node under the cursor
 * @param result the completion list to append to
 * @param tableAliasName optional table alias qualifying the column lookup
 * @returns the same completion list, for chaining
 */
public getCompletionsForSelectList(node: Node, result: CompletionList, tableAliasName?: string): CompletionList {
	if (node instanceof Expr) {
		if (tableAliasName) {
			const subSelect = node.findParent(NodeType.SubSelect) as SubSelect;
			// Guard: the node may not sit inside a sub-select at all.
			if (subSelect) {
				const fromTables = subSelect.getFromTables();
				// Fixed: the original destructured filter(...)[0] directly and
				// crashed with a TypeError when the alias matched no FROM table.
				const matched = fromTables.filter(({ rawTable, aliasName }) => {
					return aliasName ? aliasName === tableAliasName : rawTable === tableAliasName;
				})[0];
				if (matched && matched.rawTable) {
					const { dbName, tableName, columns } = this.getCurrentTableInfo(node, matched.rawTable as string);
					for (let entry of languageFacts.getColumnEntryList(dbName, tableName, columns)) {
						result.items.push({
							label: entry.name,
							documentation: languageFacts.getEntryDescription(entry),
							kind: CompletionItemKind.Text,
							textEdit: TextEdit.replace(this.getCompletionRangeFromNode(node), entry.name),
							detail: entry.detail,
							sortText: 'a'
						});
					}
				}
			}
		}
		for (let entry of languageFacts.getSelectStmtEntryList()) {
			this.getValueEnumProposals(entry, null, result);
		}
	} else if (node instanceof Keyword || node instanceof SubSelect) {
		const subSelect = node.findParent(NodeType.SubSelect) as SubSelect;
		// Guard: findParent may not locate an enclosing sub-select.
		if (!subSelect) {
			return result;
		}
		const selectCols = subSelect.getSelectCols();
		const fromTable = selectCols.filter(col => {
			return col.name.toLowerCase() === 'from';
		});
		if (fromTable.length > 0) {
			const { dbName, tableName, columns } = this.getCurrentTableInfo(node, fromTable[0].aliasName);
			for (let entry of languageFacts.getColumnEntryList(dbName, tableName, columns)) {
				result.items.push({
					label: entry.name,
					documentation: languageFacts.getEntryDescription(entry),
					kind: CompletionItemKind.Text,
					textEdit: TextEdit.replace(this.getCompletionRangeFromNode(null), entry.name),
					detail: entry.detail,
					sortText: 'a'
				});
			}
		}
	}
	return result;
}
/**
 * Provides completions for a bare identifier; inside a FROM clause it is
 * treated like a join target.
 * @param node the identifier node under the cursor
 * @param result the completion list to append to
 * @returns the same completion list, for chaining
 */
public getCompletionsForIdentifier(node: Node, result: CompletionList): CompletionList {
	const fromClause = node.findParent(NodeType.FromClause);
	if (fromClause) {
		this.getCompletionsForJoin(node, result);
	}
	return result;
}
/**
 * Provides table completions after JOIN. The enclosing SELECT is located
 * either from the identifier itself or from the expression preceding it.
 * @param node the node under the cursor (an ID or an Expr)
 * @param result the completion list to append to
 * @returns the same completion list, for chaining
 */
public getCompletionsForJoin(node: Node, result: CompletionList): CompletionList {
	let enclosingSelect;
	if (node.type === NodeType.ID) {
		enclosingSelect = node.findParent(NodeType.Select) as Select;
	} else if (node.type === NodeType.Expr) {
		const previous = this.findBeforeExprNode(node);
		enclosingSelect = previous.findParent(NodeType.Select) as Select;
	}
	if (enclosingSelect) {
		this.getTableCompletionList(enclosingSelect, result);
	}
	return result;
}
/**
 * Appends table-like completions for a SELECT, in ranking order:
 * CTE tables ('a'), tables of the current database ('b'), all databases ('c').
 * @param node the enclosing SELECT node
 * @param result the completion list to append to
 * @returns the same completion list, for chaining
 */
public getTableCompletionList(node: Select, result: CompletionList): CompletionList {
	const cteTables = node.getCteTables();
	languageFacts.getCteTableEntryList(cteTables).forEach(entry => {
		result.items.push({
			label: entry.name,
			documentation: languageFacts.getEntryDescription(entry),
			kind: CompletionItemKind.Variable,
			textEdit: TextEdit.replace(this.getCompletionRangeFromNode(null), entry.name),
			detail: entry.detail,
			sortText: 'a'
		});
	});
	const currentDatabase = this.getCurrentDatabase(node.offset);
	if (currentDatabase) {
		this.getCompletionsForTable(null, result, currentDatabase, 'b');
	}
	this.getCompletionsForDatabase(null, result, 'c');
	return result;
}
/**
 * Completions when the cursor sits on a semicolon: delegates to the
 * expression that precedes it.
 * @param node the semicolon node
 * @param result the completion list to append to
 * @returns the same completion list, for chaining
 */
private getCompletionsSemicolon(node: Node, result: CompletionList): CompletionList {
	const precedingExpr = this.findBeforeSemicolonExpr(node);
	this.getCompletionsForExpr(precedingExpr as Expr, result);
	return result;
}
/**
 * Appends every known database to the completion list.
 * @param node node whose range the inserted text replaces (may be null)
 * @param result the completion list to append to
 * @param sortText ranking bucket for the entries (default 'd')
 */
private getCompletionsForDatabase(node: Node, result: CompletionList, sortText = 'd') {
	const databases = languageFacts.getDatabaseEntryList();
	databases.forEach(entry => {
		result.items.push({
			label: entry.name,
			documentation: languageFacts.getEntryDescription(entry),
			kind: CompletionItemKind.Text,
			textEdit: TextEdit.replace(this.getCompletionRangeFromNode(node), entry.name),
			detail: entry.detail,
			sortText: sortText
		});
	});
}
/**
 * Appends every table of the given database to the completion list.
 * @param node node whose range the inserted text replaces (may be null)
 * @param result the completion list to append to
 * @param databaseName database whose tables are proposed
 * @param sortText ranking bucket for the entries (default 'd')
 */
private getCompletionsForTable(node: Node, result: CompletionList, databaseName: string, sortText = 'd') {
	const tables = languageFacts.getTableEntryList(databaseName);
	tables.forEach(entry => {
		result.items.push({
			label: entry.name,
			documentation: languageFacts.getEntryDescription(entry),
			kind: CompletionItemKind.Text,
			textEdit: TextEdit.replace(this.getCompletionRangeFromNode(node), entry.name),
			detail: entry.detail,
			sortText
		});
	});
}
}
/**
 * Returns the word immediately before `offset`, scanning backwards until a
 * delimiter character (whitespace, quotes, brackets, comma, `*`, `>`, `+`)
 * or the start of the document is reached.
 */
function getCurrentWord(document: TextDocument, offset: number) {
	const text = document.getText();
	let start = offset;
	while (start > 0 && ' \t\n\r":{[()]},*>+'.indexOf(text.charAt(start - 1)) === -1) {
		start--;
	}
	return text.substring(start, offset);
}
|
//
// Copyright 2020 IBM Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package staticroute
import (
"errors"
"net"
"testing"
iksv1 "github.com/IBM/staticroute-operator/pkg/apis/iks/v1"
"github.com/IBM/staticroute-operator/pkg/routemanager"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"sigs.k8s.io/controller-runtime/pkg/client/fake"
)
// TestReconcileImpl covers the happy path: a valid, registered CR in the
// configured zone reconciles to finished without error.
func TestReconcileImpl(t *testing.T) {
	ctx, _ := getReconcileContextForAddFlow(nil, true)
	got, gotErr := reconcileImpl(*ctx)
	if got != finished {
		t.Error("Result must be finished")
	}
	if gotErr != nil {
		t.Errorf("Error must be nil: %s", gotErr.Error())
	}
}
// TestReconcileImplCRGetFatalError forces a client Get failure by swapping in
// a fake client without the CRD registered; reconcile must report crGetError
// and surface the error.
func TestReconcileImplCRGetFatalError(t *testing.T) {
	//err "no kind is registered for the type v1."" because fake client doesn't have CRD
	params, _ := getReconcileContextForAddFlow(nil, true)
	params.client = fake.NewFakeClient()
	res, err := reconcileImpl(*params)
	if res != crGetError {
		// Fixed failure message: the expected result is crGetError, not notFound.
		t.Error("Result must be crGetError")
	}
	if err == nil {
		t.Error("Error must be not nil")
	}
}
// TestReconcileImplCRGetNotFound: an empty CR is treated as not found, with no
// error returned.
func TestReconcileImplCRGetNotFound(t *testing.T) {
	emptyRoute := &iksv1.StaticRoute{}
	ctx, _ := getReconcileContextForAddFlow(emptyRoute, true)
	got, gotErr := reconcileImpl(*ctx)
	if got != crNotFound {
		t.Error("Result must be crNotFound")
	}
	if gotErr != nil {
		t.Errorf("Error must be nil: %s", gotErr.Error())
	}
}
// TestReconcileImplNotSameZone: a CR belonging to a different zone is skipped
// with result notSameZone and no error.
func TestReconcileImplNotSameZone(t *testing.T) {
	ctx, _ := getReconcileContextForAddFlow(nil, true)
	ctx.options.Zone = "a"
	got, gotErr := reconcileImpl(*ctx)
	if got != notSameZone {
		t.Error("Result must be notSameZone")
	}
	if gotErr != nil {
		t.Errorf("Error must be nil: %s", gotErr.Error())
	}
}
// TestReconcileImplNotDeleted: a deletion-timestamped CR built via
// newStaticRouteWithValues(true, false) reconciles to alreadyDeleted.
func TestReconcileImplNotDeleted(t *testing.T) {
	cr := newStaticRouteWithValues(true, false)
	ctx, clientMock := getReconcileContextForAddFlow(cr, true)
	clientMock.postfixGet = func(o runtime.Object) {
		o.(*iksv1.StaticRoute).SetDeletionTimestamp(&v1.Time{})
	}
	got, gotErr := reconcileImpl(*ctx)
	if got != alreadyDeleted {
		t.Error("Result must be alreadyDeleted")
	}
	if gotErr != nil {
		t.Errorf("Error must be nil: %s", gotErr.Error())
	}
}
// TestReconcileImplUpdated: a CR whose recorded node status differs from its
// spec reconciles to updateFinished.
func TestReconcileImplUpdated(t *testing.T) {
	cr := newStaticRouteWithValues(true, false)
	cr.Status = iksv1.StaticRouteStatus{
		NodeStatus: []iksv1.StaticRouteNodeStatus{
			{
				Hostname: "hostname",
				State: iksv1.StaticRouteSpec{
					Subnet:  "192.168.3.11/16",
					Gateway: "10.0.0.1",
				},
			},
		},
	}
	ctx, _ := getReconcileContextForAddFlow(cr, true)
	got, gotErr := reconcileImpl(*ctx)
	if got != updateFinished {
		t.Error("Result must be updateFinished")
	}
	if gotErr != nil {
		t.Errorf("Error must be nil: %s", gotErr.Error())
	}
}
// TestReconcileImplDeleted: a CR carrying a deletion timestamp completes the
// deletion flow with deletionFinished.
func TestReconcileImplDeleted(t *testing.T) {
	ctx, clientMock := getReconcileContextForAddFlow(nil, true)
	clientMock.postfixGet = func(o runtime.Object) {
		o.(*iksv1.StaticRoute).SetDeletionTimestamp(&v1.Time{})
	}
	got, gotErr := reconcileImpl(*ctx)
	if got != deletionFinished {
		t.Error("Result must be deletionFinished")
	}
	if gotErr != nil {
		t.Errorf("Error must be nil: %s", gotErr.Error())
	}
}
// TestReconcileImplDeletedIfRouteNotFound: an ErrNotFound from the route
// manager during deregistration is tolerated — deletion still finishes.
func TestReconcileImplDeletedIfRouteNotFound(t *testing.T) {
	ctx, clientMock := getReconcileContextForAddFlow(nil, true)
	ctx.options.RouteManager = routeManagerMock{
		deRegisterRouteErr: routemanager.ErrNotFound,
	}
	clientMock.postfixGet = func(o runtime.Object) {
		o.(*iksv1.StaticRoute).SetDeletionTimestamp(&v1.Time{})
	}
	got, gotErr := reconcileImpl(*ctx)
	if got != deletionFinished {
		t.Error("Result must be deletionFinished")
	}
	if gotErr != nil {
		t.Errorf("Error must be nil: %s", gotErr.Error())
	}
}
// TestReconcileImplDeletedButCantDeregister: any deregistration failure other
// than ErrNotFound aborts the deletion flow with deRegisterError.
func TestReconcileImplDeletedButCantDeregister(t *testing.T) {
	ctx, clientMock := getReconcileContextForAddFlow(nil, true)
	ctx.options.RouteManager = routeManagerMock{
		deRegisterRouteErr: errors.New("Couldn't deregister route"),
	}
	clientMock.postfixGet = func(o runtime.Object) {
		o.(*iksv1.StaticRoute).SetDeletionTimestamp(&v1.Time{})
	}
	got, gotErr := reconcileImpl(*ctx)
	if got != deRegisterError {
		t.Error("Result must be deRegisterError")
	}
	if gotErr == nil {
		t.Error("Error must be not nil")
	}
}
// TestReconcileImplDeletedButCantDeleteStatus: a status-update failure during
// deletion yields delStatusUpdateError with a non-nil error.
func TestReconcileImplDeletedButCantDeleteStatus(t *testing.T) {
	ctx, clientMock := getReconcileContextForAddFlow(nil, true)
	clientMock.postfixGet = func(o runtime.Object) {
		o.(*iksv1.StaticRoute).SetDeletionTimestamp(&v1.Time{})
	}
	clientMock.statusWriteMock = statusWriterMock{
		updateErr: errors.New("Couldn't update status"),
	}
	got, gotErr := reconcileImpl(*ctx)
	if got != delStatusUpdateError {
		t.Error("Result must be delStatusUpdateError")
	}
	if gotErr == nil {
		t.Error("Error must be not nil")
	}
}
// TestReconcileImplDeletedButCantEmptyFinalizers: a client Update failure while
// clearing finalizers yields emptyFinalizerError with a non-nil error.
func TestReconcileImplDeletedButCantEmptyFinalizers(t *testing.T) {
	ctx, clientMock := getReconcileContextForAddFlow(nil, true)
	clientMock.postfixGet = func(o runtime.Object) {
		o.(*iksv1.StaticRoute).SetDeletionTimestamp(&v1.Time{})
	}
	clientMock.updateErr = errors.New("Couldn't empty finalizers")
	got, gotErr := reconcileImpl(*ctx)
	if got != emptyFinalizerError {
		t.Error("Result must be emptyFinalizerError")
	}
	if gotErr == nil {
		t.Error("Error must be not nil")
	}
}
// TestReconcileImplIsNewButCantSetFinalizers: failing to persist finalizers on
// a new CR yields setFinalizerError with a non-nil error.
func TestReconcileImplIsNewButCantSetFinalizers(t *testing.T) {
	ctx, clientMock := getReconcileContextForAddFlow(nil, true)
	clientMock.updateErr = errors.New("Couldn't fill finalizers")
	got, gotErr := reconcileImpl(*ctx)
	if got != setFinalizerError {
		t.Error("Result must be setFinalizerError")
	}
	if gotErr == nil {
		t.Error("Error must be not nil")
	}
}
// TestReconcileImplIsNotRegisteredButCantParseSubnet: an unparseable subnet in
// the spec is reported as parseSubnetError without an error return.
func TestReconcileImplIsNotRegisteredButCantParseSubnet(t *testing.T) {
	cr := newStaticRouteWithValues(true, false)
	cr.Spec.Subnet = "invalid-subnet"
	ctx, _ := getReconcileContextForAddFlow(cr, false)
	got, gotErr := reconcileImpl(*ctx)
	if got != parseSubnetError {
		t.Error("Result must be parseSubnetError")
	}
	if gotErr != nil {
		t.Errorf("Error must be nil: %s", gotErr.Error())
	}
}
// TestReconcileImplIsNotRegisteredButCantRegister: a route-manager registration
// failure yields registerRouteError with a non-nil error.
func TestReconcileImplIsNotRegisteredButCantRegister(t *testing.T) {
	ctx, _ := getReconcileContextForAddFlow(nil, true)
	ctx.options.RouteManager = routeManagerMock{
		registerRouteErr: errors.New("Couldn't register route"),
	}
	got, gotErr := reconcileImpl(*ctx)
	if got != registerRouteError {
		t.Error("Result must be registerRouteError")
	}
	if gotErr == nil {
		t.Error("Error must be not nil")
	}
}
// TestReconcileImplIsRegisteredButCantAddStatus: a status-update failure while
// adding this node's status yields addStatusUpdateError with a non-nil error.
func TestReconcileImplIsRegisteredButCantAddStatus(t *testing.T) {
	ctx, clientMock := getReconcileContextForAddFlow(nil, true)
	ctx.options.Hostname = "hostname2"
	clientMock.statusWriteMock = statusWriterMock{
		updateErr: errors.New("Couldn't update status"),
	}
	got, gotErr := reconcileImpl(*ctx)
	if got != addStatusUpdateError {
		t.Error("Result must be addStatusUpdateError")
	}
	if gotErr == nil {
		t.Error("Error must be not nil")
	}
}
// TestReconcileImplInvalidGateway: a malformed gateway in the spec yields
// invalidGatewayError without an error return.
func TestReconcileImplInvalidGateway(t *testing.T) {
	cr := newStaticRouteWithValues(true, true)
	cr.Spec.Gateway = "invalid-gateway"
	ctx, _ := getReconcileContextForAddFlow(cr, true)
	got, gotErr := reconcileImpl(*ctx)
	if got != invalidGatewayError {
		t.Error("Result must be invalidGatewayError")
	}
	if gotErr != nil {
		t.Errorf("Error must be nil: %s", gotErr.Error())
	}
}
// TestReconcileImplCantDetermineGateway: with an empty gateway, reconcile asks
// RouteGet for a default route; its failure yields routeGetError.
func TestReconcileImplCantDetermineGateway(t *testing.T) {
	cr := newStaticRouteWithValues(true, true)
	cr.Spec.Gateway = ""
	ctx, _ := getReconcileContextForAddFlow(cr, true)
	ctx.options.RouteGet = func() (net.IP, error) {
		return nil, errors.New("Can't determine gateway")
	}
	got, gotErr := reconcileImpl(*ctx)
	if got != routeGetError {
		t.Error("Result must be routeGetError")
	}
	if gotErr == nil {
		t.Error("Error must be not nil")
	}
}
// TestReconcileImplDetermineGateway: with an empty gateway, the address that
// RouteGet returns must be the one passed to the route registration callback.
func TestReconcileImplDetermineGateway(t *testing.T) {
	var registeredGateway string
	cr := newStaticRouteWithValues(true, true)
	cr.Spec.Gateway = ""
	ctx, _ := getReconcileContextForAddFlow(cr, false)
	ctx.options.RouteGet = func() (net.IP, error) {
		return net.IP{10, 0, 0, 1}, nil
	}
	ctx.options.RouteManager = routeManagerMock{
		isRegistered: false,
		registeredCallback: func(n string, r routemanager.Route) error {
			registeredGateway = r.Gw.String()
			return nil
		},
	}
	//nolint:errcheck
	reconcileImpl(*ctx)
	if registeredGateway != "10.0.0.1" {
		t.Errorf("Wrong gateway selected: %s", registeredGateway)
	}
}
// getReconcileContextForAddFlow builds reconcile parameters plus the mock
// client wired for the "add route" flow. A nil route defaults to a fully
// populated, valid CR; isRegistered presets the route-manager mock state.
func getReconcileContextForAddFlow(route *iksv1.StaticRoute, isRegistered bool) (*reconcileImplParams, *reconcileImplClientMock) {
	if route == nil {
		route = newStaticRouteWithValues(true, true)
	}
	clientMock := reconcileImplClientMock{
		client: newFakeClient(route),
	}
	p := newReconcileImplParams(&clientMock)
	p.options.Zone = "zone"
	p.options.Hostname = "hostname"
	p.options.RouteManager = routeManagerMock{
		isRegistered: isRegistered,
	}
	return p, &clientMock
}
|
// src/EditLexers/stlProbes.c
#include "EditLexer.h"
#include "EditStyleX.h"
// Keyword list for the Nmap service-probes lexer; only flow-control words.
static KEYWORDLIST Keywords_Probes = { { "break","continue" } };
// Style table mapping Nmap service-probe lexer states (SCE_/SCP_NMAPSP_*) to
// Notepad2 display styles. NOTE(review): "DEMILITER" mirrors the spelling of
// the upstream lexer constants — presumably a typo for "DELIMITER" there.
static EDITSTYLE Styles_Probes[] = {
	EDITSTYLE_DEFAULT,
	{ SCE_NMAPSP_COMMENT_CASE, NP2StyleX_TypeKeyword, L"fore:#0000FF" },
	{ SCE_NMAPSP_COMMENT_CASE_CONTENT, NP2StyleX_Preprocessor, L"fore:#FF8000" },
	{ SCE_NMAPSP_PROBE_SCOPE_BEGIN, NP2StyleX_Type, L"fore:#007F7F" },
	{ SCE_NMAPSP_PROBE, NP2StyleX_Class, L"fore:#0000FF" },
	{ SCE_NMAPSP_PROBE_TYPE, NP2StyleX_Structure, L"fore:#0080FF" },
	{ SCE_NMAPSP_PROBE_BAD, NP2StyleX_Union, L"fore:#EEEEEE" },
	{ SCE_NMAPSP_PROBE_NAME, NP2StyleX_Interface, L"bold; fore:#1E90FF" },
	{ SCE_NMAPSP_PROBE_QUERY, NP2StyleX_Function, L"fore:#A46000" },
	{ SCP_NMAPSP_TEMPLATE_KEY, NP2StyleX_Function, L"fore:#A46000" },
	{ SCE_NMAPSP_PROBE_QUERY_CONTENT, NP2StyleX_Enumeration, L"fore:#FF8000" },
	{ SCP_NMAPSP_TEMPLATE, NP2StyleX_Enumeration, L"fore:#FF8000" },
	{ SCP_NMAPSP_TEMPLATE_FLAG, NP2StyleX_Field, L"fore:#FF80FF" },
	{ SCE_NMAPSP_DEMILITER, NP2StyleX_Constant, L"fore:#B000B0" },
	{ SCP_NMAPSP_TEMPLATE_DEMILITER, NP2StyleX_Constant, L"fore:#B000B0" },
	{ SCE_NMAPSP_COMMENTLINE, NP2StyleX_Comment, L"fore:#608060" },
	{ SCE_NMAPSP_PROBE_QUERY_END, NP2StyleX_DocComment, L"fore:#408040" },
	{ SCP_NMAPSP_KEY, NP2StyleX_Class, L"fore:#0000FF" },
	{ SCP_NMAPSP_MATCH, NP2StyleX_Class, L"fore:#0000FF" },
	{ SCP_NMAPSP_VALUE_BAD, NP2StyleX_Union, L"fore:#EEEEEE" },
	{ SCP_NMAPSP_TEMPLATE_BAD, NP2StyleX_Union, L"fore:#EEEEEE" },
	{ SCP_NMAPSP_VALUE, NP2StyleX_Value, L"bold; fore:#FF0E0E" },
	{ SCP_NMAPSP_SERVICE, NP2StyleX_Value, L"bold; fore:#FF0E0E" },
};
// Lexer registration for Nmap "service probes" files (*.probes), tying the
// Scintilla lexer id to its display name, keywords and style table.
EDITLEXER lexProbes = {
	SCLEX_NMAP_SERVICE_PROBE, NP2LEX_PROBES,
	EDITLEXER_HOLE(L"Nmap Service Probes", Styles_Probes),
	L"probes",
	&Keywords_Probes,
	Styles_Probes
};
|
/*
* Copyright 2015 Textocat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.textocat.textokit.postagger;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.textocat.textokit.commons.cas.FSUtils;
import com.textocat.textokit.morph.dictionary.resource.GramModel;
import com.textocat.textokit.morph.dictionary.resource.MorphDictionary;
import com.textocat.textokit.morph.dictionary.resource.MorphDictionaryHolder;
import com.textocat.textokit.morph.fs.Word;
import com.textocat.textokit.morph.model.Wordform;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.uima.UimaContext;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.fit.component.JCasAnnotator_ImplBase;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.fit.descriptor.ExternalResource;
import org.apache.uima.fit.descriptor.OperationalProperties;
import org.apache.uima.fit.util.JCasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.ResourceInitializationException;
import java.io.*;
import java.util.BitSet;
import java.util.List;
import java.util.Set;
import static com.textocat.textokit.morph.dictionary.WordUtils.normalizeToDictionaryForm;
import static com.textocat.textokit.morph.dictionary.resource.MorphDictionaryUtils.toGramBits;
import static com.textocat.textokit.morph.model.Wordform.allGramBitsFunction;
/**
 * UIMA annotator that checks how well corpus morphological annotations comply
 * with a morphological dictionary. For every {@code Word} it compares the
 * grammeme set of the (single) corpus wordform against the closest dictionary
 * entries and writes mismatches to a TSV report with columns
 * {@code Word, Grams_diff, Corpus_grams, Dict_grams}. Aggregate counters are
 * logged in {@link #collectionProcessComplete()}. Multiple deployment is
 * forbidden because counters and the writer are instance state.
 *
 * @author <NAME>
 */
@OperationalProperties(multipleDeploymentAllowed = false)
public class DictionaryComplianceChecker extends JCasAnnotator_ImplBase {
    public static final String PARAM_OUT_FILE = "outFile";
    public static final String PARAM_TARGET_POS_CATEGORIES = "targetPosCategories";
    public static final String RESOURCE_DICTIONARY = "MorphDictionary";
    // config fields
    @ExternalResource(key = RESOURCE_DICTIONARY, mandatory = true)
    private MorphDictionaryHolder dictHolder;
    private MorphDictionary dict;
    private GramModel gramModel;
    // destination of the TSV report
    @ConfigurationParameter(name = PARAM_OUT_FILE, mandatory = true)
    private File outFile;
    // POS categories the comparison is restricted to
    @ConfigurationParameter(name = PARAM_TARGET_POS_CATEGORIES, mandatory = true)
    private Set<String> targetPosCategories;
    private PosTrimmer posTrimmer;
    // state fields
    private PrintWriter out;       // report writer, opened in initialize()
    private int notDictNum;        // words absent from the dictionary
    private int matchedNum;        // words matching a dictionary entry exactly
    private int notMatchedNum;     // words with at least one grammeme difference

    /**
     * Resolves the dictionary resource, builds the POS trimmer and opens the
     * UTF-8 report writer, emitting the TSV header line.
     */
    @Override
    public void initialize(UimaContext ctx) throws ResourceInitializationException {
        super.initialize(ctx);
        dict = dictHolder.getDictionary();
        gramModel = dict.getGramModel();
        posTrimmer = new PosTrimmer(dict.getGramModel(), targetPosCategories);
        try {
            FileOutputStream os = FileUtils.openOutputStream(outFile);
            out = new PrintWriter(new BufferedWriter(
                    new OutputStreamWriter(os, "utf-8")),
                    true);
            // write header
            out.println("Word\tGrams_diff\tCorpus_grams\tDict_grams");
        } catch (IOException e) {
            throw new ResourceInitializationException(e);
        }
    }

    /**
     * For each word: normalize it, look it up in the dictionary, trim both
     * gram bitsets to the target POS categories, keep only the dictionary
     * entries closest to the corpus grams (minimal XOR distance) and write a
     * report line for any remaining difference.
     */
    @Override
    public void process(JCas jCas) throws AnalysisEngineProcessException {
        for (Word word : JCasUtil.select(jCas, Word.class)) {
            com.textocat.textokit.morph.fs.Wordform docWf = MorphCasUtils.getOnlyWordform(word);
            if (docWf == null) {
                continue;
            }
            String wordTxt = normalizeToDictionaryForm(word.getCoveredText());
            List<Wordform> dictWfs = dict.getEntries(wordTxt);
            if (dictWfs == null || dictWfs.isEmpty()) {
                notDictNum++;
                continue;
            }
            // convert to BitSet
            BitSet docBits = toGramBits(gramModel, FSUtils.toSet(docWf.getGrammems()));
            posTrimmer.trimInPlace(docBits);
            List<BitSet> _dictBitSets = Lists.transform(dictWfs, allGramBitsFunction(dict));
            Set<BitSet> dictBitSets = posTrimmer.trimAndMerge(_dictBitSets);
            dictBitSets = selectClosest(docBits, dictBitSets);
            // distance 0 to any of the closest entries means a full match
            if (calcDistance(docBits, dictBitSets.iterator().next()) == 0) {
                matchedNum++;
            } else {
                notMatchedNum++;
                StringBuilder record = new StringBuilder(wordTxt);
                record.append('\t');
                // '+' marks grams present in the corpus but not the dictionary,
                // '-' marks grams present in the dictionary but not the corpus
                List<String> gramDiffs = Lists.newLinkedList();
                for (BitSet dictBits : dictBitSets) {
                    List<String> grams = Lists.newLinkedList();
                    {
                        BitSet positiveBits = (BitSet) docBits.clone();
                        positiveBits.andNot(dictBits);
                        grams.addAll(Lists.transform(gramModel.toGramSet(positiveBits),
                                positiveGramFunc));
                    }
                    {
                        BitSet negativeBits = (BitSet) dictBits.clone();
                        negativeBits.andNot(docBits);
                        grams.addAll(Lists.transform(gramModel.toGramSet(negativeBits),
                                negativeGramFunc));
                    }
                    gramDiffs.add(gramJoiner.join(grams));
                }
                gramSetJoiner.appendTo(record, gramDiffs);
                // write corpus grams
                record.append('\t');
                gramJoiner.appendTo(record, gramModel.toGramSet(docBits));
                // write dict grams
                record.append('\t');
                gramSetJoiner.appendTo(record,
                        Collections2.transform(dictBitSets, gramBitsToString));
                out.println(record);
            }
        }
    }

    private static final Joiner gramJoiner = Joiner.on(',');
    private static final Joiner gramSetJoiner = Joiner.on("||");

    /**
     * Returns the bitsets from {@code srcBitSets} with the minimal XOR
     * distance to {@code targetBits}; all ties are kept.
     */
    private static Set<BitSet> selectClosest(final BitSet targetBits, Iterable<BitSet> srcBitSets) {
        Set<BitSet> result = Sets.newHashSet();
        int minDistance = Integer.MAX_VALUE;
        for (final BitSet srcBits : srcBitSets) {
            int curDistance = calcDistance(targetBits, srcBits);
            if (curDistance < minDistance) {
                result.clear();
                result.add(srcBits);
                minDistance = curDistance;
            } else if (curDistance == minDistance) {
                result.add(srcBits);
            }
            // else curDistance > minDistance => do nothing
        }
        return result;
    }

    /**
     * Hamming distance between two bitsets: the cardinality of their XOR.
     */
    private static int calcDistance(final BitSet xArg, final BitSet yArg) {
        BitSet x = (BitSet) xArg.clone();
        x.xor(yArg);
        return x.cardinality();
    }

    /**
     * Returns a function that prepends {@code prefix} to its argument.
     */
    private static Function<String, String> prefixFunction(final String prefix) {
        return new Function<String, String>() {
            @Override
            public String apply(String arg) {
                return prefix + arg;
            }
        };
    }

    private static Function<String, String> positiveGramFunc = prefixFunction("+");
    private static Function<String, String> negativeGramFunc = prefixFunction("-");

    // renders a gram bitset as a comma-separated gram list
    private Function<BitSet, String> gramBitsToString = new Function<BitSet, String>() {
        @Override
        public String apply(BitSet bits) {
            return gramJoiner.join(gramModel.toGramSet(bits));
        }
    };

    /**
     * Closes the report writer and logs the aggregate counters.
     */
    @Override
    public void collectionProcessComplete() throws AnalysisEngineProcessException {
        super.collectionProcessComplete();
        IOUtils.closeQuietly(out);
        getLogger().info(String.format("%s report:\n" +
                "not dictionary words: %s\n" +
                "match dictionary tags: %s\n" +
                "do not match dictionary tags:%s",
                getClass().getSimpleName(),
                notDictNum, matchedNum, notMatchedNum));
    }
}
public static boolean validateIp(String address) {
String[] nums = address.split("\\.");
if (nums.length != 4)
return false;
for (String num : nums) {
int n = Integer.parseInt(num);
if (n < 0 || n > 255)
return false;
}
return true;
} |
// yskszk63/cancel-workflow-run
package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"net/http/httptest"
"net/url"
"strings"
"testing"
"time"
"github.com/google/go-github/v35/github"
"github.com/labstack/echo/v4"
)
// testEnv embeds the real env and overrides the external endpoints so tests
// can point both storage and GitHub API calls at a local httptest server.
type testEnv struct {
	env
	conTemplate string // container URL template: "<server>/%s/%s"
	ghurl       string // base URL for the stubbed GitHub API
}
// newTestEnv builds a testEnv whose container template and GitHub base URL
// both point at the given stub-server URL.
func newTestEnv(url string) env {
	te := testEnv{
		env:         newEnv(),
		conTemplate: url + "/%s/%s",
		ghurl:       url,
	}
	return &te
}
// storageConnectionString returns a fixed dummy Azure storage connection
// string (account key is "pass" base64-encoded).
func (*testEnv) storageConnectionString() string {
	return "AccountName=myaccount;AccountKey=cGFzcw=="
}
// gitHubBaseUrl points GitHub API calls at the test stub server.
func (e *testEnv) gitHubBaseUrl() *string {
	return &e.ghurl
}
// containerTemplate returns the stubbed storage-container URL template.
func (e *testEnv) containerTemplate() *string {
	return &e.conTemplate
}
// appId returns a fixed dummy GitHub App id.
func (e *testEnv) appId() int64 {
	return 0xcafe
}
// secret returns the App's private key; the key body is a redacted
// placeholder in this fixture.
func (e *testEnv) secret() []byte {
	return []byte(`-----BEGIN RSA PRIVATE KEY-----
<KEY>`)
}
// now pins the clock to the Unix epoch so generated SAS signatures are stable.
func (e *testEnv) now() time.Time {
	return time.Unix(0, 0)
}
// testRenderer renders any template simply as JSON so tests can unmarshal and
// inspect the data passed to the renderer.
type testRenderer struct{}

// Render implements echo.Renderer by JSON-encoding data into w; name is ignored.
func (testRenderer) Render(w io.Writer, name string, data interface{}, c echo.Context) error {
	return json.NewEncoder(w).Encode(data)
}
// TestHello checks that the root handler responds with HTTP 200.
func TestHello(t *testing.T) {
	server := echo.New()
	server.GET("/", hello)
	request := httptest.NewRequest("GET", "/", nil)
	recorder := httptest.NewRecorder()
	server.ServeHTTP(recorder, request)
	if recorder.Result().StatusCode != http.StatusOK {
		t.Fail()
	}
}
// TestSetupGitHubApp drives the GET / setup flow against a stubbed Azure blob
// service and asserts both the generated GitHub App manifest and the
// SAS-signed state URL (stable because testEnv pins the clock to the epoch).
func TestSetupGitHubApp(t *testing.T) {
	cases := []struct {
		name         string
		wantManifest string
		wantState    string
	}{
		{
			name:         "ok",
			wantManifest: `{"name":"CancelWorkflowRun","url":"/","hook_attributes":{"url":"/api/webhook"},"redirect_url":"/","default_events":["workflow_run"],"default_permissions":{"actions":"write","metadata":"read","pull_requests":"write"}}`,
			wantState:    `http://xxx/myaccount/setup/azuredeploy.json?se=1970-01-01T00%3A15%3A00Z&sig=dUIFrvS7Hccv5e8zaDZrUtfsQCJeFH9WKmFbucK03IA%3D&sp=w&spr=https&sr=b&sv=2019-12-12`,
		},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			// stub of the Azure blob endpoints touched during setup
			dummy := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
				switch r.URL.Path {
				case "/myaccount/setup":
					if r.URL.Query().Get("restype") == "container" {
						// container creation: pretend it already exists
						w.Header().Add("x-ms-error-code", "ContainerAlreadyExists")
						w.WriteHeader(404)
					}
				case "/myaccount/setup/azuredeploy.json":
					// template blob does not exist yet
					w.Header().Add("x-ms-error-code", "BlobNotFound")
					w.WriteHeader(404)
				default:
					// unexpected calls fail loudly
					fmt.Printf("%s\n", r.URL)
					w.WriteHeader(501)
				}
			}))
			defer dummy.Close()
			e := echo.New()
			e.Debug = true
			e.Use(injectEnv(newTestEnv(dummy.URL)))
			e.Renderer = testRenderer{}
			e.GET("/", setupGitHubApp)
			req := httptest.NewRequest("GET", "/", nil)
			res := httptest.NewRecorder()
			e.ServeHTTP(res, req)
			if res.Result().StatusCode != http.StatusOK {
				t.Fatalf("%d %s", res.Result().StatusCode, res.Body.String())
			}
			// testRenderer emits the template data as JSON; decode and compare
			body := struct {
				Manifest string
				State    string
			}{}
			if err := json.Unmarshal(res.Body.Bytes(), &body); err != nil {
				t.Fatal(err)
			}
			if body.Manifest != c.wantManifest {
				t.Fatal(body.Manifest)
			}
			s, err := url.Parse(body.State)
			if err != nil {
				t.Fatal(err)
			}
			// normalize the random host:port of the stub server before comparing
			s.Host = "xxx"
			if s.String() != c.wantState {
				t.Fatal(s)
			}
		})
	}
}
// TestPostSetupGitHubApp exercises the manifest-conversion callback: GET /
// with code and state query parameters should upload the deploy template,
// convert the manifest via the stubbed GitHub API and answer with a 302.
// NOTE(review): the handler is registered on GET despite the test name —
// presumably the conversion callback arrives as a GET; confirm upstream.
func TestPostSetupGitHubApp(t *testing.T) {
	cases := []struct {
		name           string
		locationStarts string
		locationEnd    string
	}{
		{
			name:           "ok",
			locationStarts: "",
			locationEnd:    "",
		},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			// stubs: blob upload succeeds; manifest conversion succeeds
			dummy := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
				switch r.URL.Path {
				case "/myaccount/setup/azuredeploy.json":
					w.WriteHeader(200)
				case "/api/v3/app-manifests/xxx/conversions":
					w.WriteHeader(200)
				default:
					fmt.Printf("%s\n", r.URL)
					w.WriteHeader(501)
				}
			}))
			defer dummy.Close()
			e := echo.New()
			e.Debug = true
			e.Use(injectEnv(newTestEnv(dummy.URL)))
			e.Renderer = testRenderer{}
			e.GET("/", setupGitHubApp)
			req := httptest.NewRequest("GET", "/?code=xxx&state="+dummy.URL+"/myaccount/setup/azuredeploy.json", nil)
			res := httptest.NewRecorder()
			e.ServeHTTP(res, req)
			if res.Result().StatusCode != http.StatusFound {
				t.Fatalf("%d %s", res.Result().StatusCode, res.Body.String())
			}
			// both prefix and suffix are empty in this case, so these checks
			// currently only require a Location header to be present
			location := res.Header().Get("Location")
			if !strings.HasPrefix(location, c.locationStarts) {
				t.Fatal(location)
			}
			if !strings.HasSuffix(location, c.locationEnd) {
				t.Fatal(location)
			}
		})
	}
}
// TestWebHook posts GitHub webhook payloads to the handler and checks the HTTP
// status per event type. A middleware inspects the "Outputs" context entry to
// verify that only workflow_run events produce a queue message ("msg" output).
func TestWebHook(t *testing.T) {
	cases := []struct {
		name      string
		eventName string
		payload   string
		status    int
		hasOutput bool
	}{
		{
			name:      "ping",
			eventName: "ping",
			payload:   "{}",
			status:    http.StatusNoContent,
		},
		{
			name:      "installation",
			eventName: "installation",
			payload:   "{}",
			status:    http.StatusNoContent,
		},
		{
			name:      "workflow_run",
			eventName: "workflow_run",
			payload: `{
"workflow_run": {
"pull_requests": [
{
"number": 0
}
]
}
}`,
			status:    http.StatusAccepted,
			hasOutput: true,
		},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			// no outbound HTTP calls are expected for any of these events
			dummy := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
				switch r.URL.Path {
				default:
					fmt.Printf("%s\n", r.URL)
					w.WriteHeader(501)
				}
			}))
			defer dummy.Close()
			e := echo.New()
			e.Debug = true
			e.Use(injectEnv(newTestEnv(dummy.URL)))
			// middleware asserting whether the handler stored an "Outputs" map
			// containing a "msg" entry, as expected by the test case
			e.Use(func(next echo.HandlerFunc) echo.HandlerFunc {
				return func(ctx echo.Context) error {
					if err := next(ctx); err != nil {
						return err
					}
					outputs, exists := ctx.Get("Outputs").(map[string]interface{})
					if exists != c.hasOutput {
						t.Fail()
					}
					_, exists = outputs["msg"]
					if exists != c.hasOutput {
						t.Fail()
					}
					return nil
				}
			})
			e.Renderer = testRenderer{}
			e.POST("/", webhook)
			req := httptest.NewRequest("POST", "/", bytes.NewBufferString(c.payload))
			req.Header.Set("X-GitHub-Event", c.eventName)
			res := httptest.NewRecorder()
			e.ServeHTTP(res, req)
			if res.Result().StatusCode != c.status {
				t.Fatalf("%d %s", res.Result().StatusCode, res.Body.String())
			}
		})
	}
}
// TestProcess feeds an Event Grid invocation (wrapping a queueMessage with one
// pull-request number) to the process handler and stubs every GitHub API call
// of the cancellation flow: installation-token creation, run/workflow lookup,
// PR lookup and changed-files listing, run cancellation and comment creation.
func TestProcess(t *testing.T) {
	cases := []struct {
		name string
	}{
		{
			name: "ok",
		},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			dummy := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
				switch r.URL.Path {
				case "/app/installations/0/access_tokens":
					// installation token for the GitHub App client
					token := github.InstallationToken{}
					w.WriteHeader(200)
					encoder := json.NewEncoder(w)
					encoder.Encode(token)
				case "/api/v3/repos///actions/runs/0":
					w.WriteHeader(200)
				case "/api/v3/repos///actions/workflows/0":
					// the workflow file path must match a changed file below
					filename := "ok.txt"
					data := github.Workflow{
						Path: &filename,
					}
					w.WriteHeader(200)
					encoder := json.NewEncoder(w)
					encoder.Encode(data)
				case "/api/v3/repos///pulls/0":
					w.WriteHeader(200)
				case "/api/v3/repos///pulls/0/files":
					filename := "ok.txt"
					status := "added"
					data := []github.CommitFile{
						{
							Filename: &filename,
							Status:   &status,
						},
					}
					w.WriteHeader(200)
					encoder := json.NewEncoder(w)
					encoder.Encode(data)
				case "/api/v3/repos///actions/runs/0/cancel":
					w.WriteHeader(200)
				case "/api/v3/repos///issues/0/comments":
					w.WriteHeader(200)
				default:
					fmt.Printf("%s\n", r.URL)
					w.WriteHeader(501)
				}
			}))
			defer dummy.Close()
			e := echo.New()
			e.Debug = true
			e.Use(injectEnv(newTestEnv(dummy.URL)))
			e.Renderer = testRenderer{}
			e.POST("/", process)
			// build the Azure Functions invokeRequest carrying the event
			msg := queueMessage{
				PullRequestNums: []int{0},
			}
			payload, err := newEventGridEvent("subject", "event", "version", msg)
			if err != nil {
				t.Fatal(err)
			}
			j, err := json.Marshal(payload)
			if err != nil {
				t.Fatal(err)
			}
			body, err := json.Marshal(invokeRequest{
				Data: map[string]json.RawMessage{
					"event": j,
				},
			})
			if err != nil {
				t.Fatal(err)
			}
			req := httptest.NewRequest("POST", "/", bytes.NewBuffer(body))
			req.Header.Set("Content-Type", "application/json")
			res := httptest.NewRecorder()
			e.ServeHTTP(res, req)
			if res.Result().StatusCode != http.StatusOK {
				t.Fatalf("%d %s", res.Result().StatusCode, res.Body.String())
			}
			if strings.TrimSpace(res.Body.String()) != `{
"Outputs": null
}` {
				t.Fatalf("%s", res.Body.String())
			}
		})
	}
}
|
// src/components/article-structure/horizontal-stack-display.ts
/**
 * Signature/interface for a `HorizontalStackDisplay` object.
 * The `type` discriminant is always the literal "horizontal_stack".
 * @see https://developer.apple.com/documentation/apple_news/horizontalstackdisplay
 */
export interface HorizontalStackDisplay {
  type: "horizontal_stack";
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2857-2
#
# Security announcement date: 2016-01-05 00:00:00 UTC
# Script generation date: 2017-01-01 21:05:05 UTC
#
# Operating System: Ubuntu 14.04 LTS
# Architecture: x86_64
#
# CVE List:
# - CVE-2015-8660
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Every affected kernel flavour is fixed at the same package version,
# so upgrade each one in turn with apt pinned to that exact version.
fixed_version="3.19.0-43.49~14.04.1"

for package in \
    linux-image-3.19.0-43-powerpc64-emb \
    linux-image-3.19.0-43-lowlatency \
    linux-image-3.19.0-43-powerpc64-smp \
    linux-image-3.19.0-43-generic-lpae \
    linux-image-3.19.0-43-powerpc-smp \
    linux-image-3.19.0-43-generic \
    linux-image-3.19.0-43-powerpc-e500mc
do
    sudo apt-get install --only-upgrade "${package}=${fixed_version}" -y
done
|
#!/bin/bash
# Submit a worker job via sbatch and record its job id in Redis sets
# for later accounting.
cd ~/rlmolecule/redis_integration_example
# Use consistent redis settings
# NOTE(review): redis.config presumably also defines job_tag and redis_cmd,
# which are used below but never set in this script -- confirm.
. ./redis.config
# username is set but not referenced anywhere in this script.
username=dduplyak
# Slurm account (-A) to charge the job against.
allocation=rlmolecule
# Submit sbatch script that will do actual work
# --parsable makes sbatch print just the job id, captured into $jobid.
jobid=$(sbatch --parsable -A ${allocation} -J ${job_tag} --export=job_tag="${job_tag}" test_jobscript.sbatch)
echo "Submitted job's ID: $jobid"
# Job accounting using Redis
# Track every submitted job id in the <job_tag>_ALL set.
key="${job_tag}_ALL"
$redis_cmd SADD ${key} $jobid
echo "Recorded this job in Redis, under set: ${key}"
# Also track the job as pending/running until something removes it.
key="${job_tag}_PENDING_AND_RUNNING"
$redis_cmd SADD ${key} $jobid
echo "Recorded this job in Redis, under set: ${key}"
|
package reporter
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"sort"
"sync"
"time"
"github.com/onsi/ginkgo/config"
"github.com/onsi/ginkgo/types"
v1 "k8s.io/api/core/v1"
v12 "k8s.io/apimachinery/pkg/apis/meta/v1"
"kubevirt.io/kubevirt/tests"
"kubevirt.io/client-go/kubecli"
"kubevirt.io/client-go/log"
)
// KubernetesReporter is a ginkgo reporter that dumps cluster state (events,
// nodes, pods, VMIs, domain XMLs and pod logs) into artifactsDir whenever a
// spec fails.
type KubernetesReporter struct {
	failureCount int        // failed specs seen so far; also used as the dump-file name prefix
	artifactsDir string     // destination directory for dumps; empty disables collection
	mux          sync.Mutex // serializes SpecDidComplete across parallel spec completions
}
// NewKubernetesReporter builds a reporter that writes failure artifacts
// into artifactsDir.
func NewKubernetesReporter(artifactsDir string) *KubernetesReporter {
	// failureCount starts at its zero value; only the directory needs setting.
	reporter := &KubernetesReporter{artifactsDir: artifactsDir}
	return reporter
}
// SpecSuiteWillBegin is part of the ginkgo Reporter interface; this reporter
// needs no per-suite setup, so it is a no-op.
func (r *KubernetesReporter) SpecSuiteWillBegin(config config.GinkgoConfigType, summary *types.SuiteSummary) {
}
// BeforeSuiteDidRun wipes any artifacts left over from a previous run, so
// the directory only ever holds dumps from the current suite.
func (r *KubernetesReporter) BeforeSuiteDidRun(setupSummary *types.SetupSummary) {
	// clean up artifacts from previous run
	if r.artifactsDir != "" {
		os.RemoveAll(r.artifactsDir)
	}
}
// SpecWillRun is part of the ginkgo Reporter interface; nothing to do before
// a spec starts, so it is a no-op.
func (r *KubernetesReporter) SpecWillRun(specSummary *types.SpecSummary) {
}
// SpecDidComplete dumps cluster state for failed specs, capping collection
// at roughly ten failures to bound artifact size.
func (r *KubernetesReporter) SpecDidComplete(specSummary *types.SpecSummary) {
	r.mux.Lock()
	defer r.mux.Unlock()

	// Stop collecting once enough failures have already been dumped.
	if r.failureCount > 10 {
		return
	}
	// Passing specs are not interesting.
	if !specSummary.HasFailureState() {
		return
	}
	r.failureCount++

	// With no artifacts directory configured there is nowhere to dump to.
	if r.artifactsDir == "" {
		return
	}

	virtCli, err := kubecli.GetKubevirtClient()
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to get client: %v\n", err)
		return
	}

	if err := os.MkdirAll(r.artifactsDir, 0777); err != nil {
		fmt.Fprintf(os.Stderr, "failed to create directory: %v\n", err)
		return
	}

	r.logEvents(virtCli, specSummary)
	r.logNodes(virtCli, specSummary)
	r.logPods(virtCli, specSummary)
	r.logVMIs(virtCli, specSummary)
	r.logDomainXMLs(virtCli, specSummary)
	r.logLogs(virtCli, specSummary)
}
// logDomainXMLs appends the libvirt domain XML of every non-final VMI to
// <artifactsDir>/<failureCount>_domains.log.
func (r *KubernetesReporter) logDomainXMLs(virtCli kubecli.KubevirtClient, specSummary *types.SpecSummary) {
	logFile, err := os.OpenFile(filepath.Join(r.artifactsDir, fmt.Sprintf("%d_domains.log", r.failureCount)),
		os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to open the file: %v\n", err)
		return
	}
	defer logFile.Close()

	vmiList, err := virtCli.VirtualMachineInstance(v1.NamespaceAll).List(&v12.ListOptions{})
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to fetch vmis, can't collect domain XMLs: %v\n", err)
		return
	}

	for _, vmi := range vmiList.Items {
		// Finished VMIs no longer have a running domain to query.
		if vmi.IsFinal() {
			continue
		}
		xml, xmlErr := tests.GetRunningVirtualMachineInstanceDomainXML(virtCli, &vmi)
		if xmlErr == nil {
			fmt.Fprintln(logFile, xml)
		}
	}
}
// logVMIs dumps every VMI in the cluster as indented JSON to
// <artifactsDir>/<failureCount>_vmis.log.
func (r *KubernetesReporter) logVMIs(virtCli kubecli.KubevirtClient, specSummary *types.SpecSummary) {
	logFile, err := os.OpenFile(filepath.Join(r.artifactsDir, fmt.Sprintf("%d_vmis.log", r.failureCount)),
		os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to open the file: %v\n", err)
		return
	}
	defer logFile.Close()

	vmiList, err := virtCli.VirtualMachineInstance(v1.NamespaceAll).List(&v12.ListOptions{})
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to fetch vmis: %v\n", err)
		return
	}

	marshalled, err := json.MarshalIndent(vmiList, "", " ")
	if err != nil {
		log.DefaultLogger().Reason(err).Errorf("Failed to marshal vmis")
		return
	}
	fmt.Fprintln(logFile, string(marshalled))
}
// logPods dumps every pod in the cluster as indented JSON to
// <artifactsDir>/<failureCount>_pods.log.
func (r *KubernetesReporter) logPods(virtCli kubecli.KubevirtClient, specSummary *types.SpecSummary) {
	f, err := os.OpenFile(filepath.Join(r.artifactsDir, fmt.Sprintf("%d_pods.log", r.failureCount)),
		os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		// Fixed: this message was missing the trailing newline used by every
		// other error path in this file, gluing it to subsequent output.
		fmt.Fprintf(os.Stderr, "failed to open the file: %v\n", err)
		return
	}
	defer f.Close()

	pods, err := virtCli.CoreV1().Pods(v1.NamespaceAll).List(v12.ListOptions{})
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to fetch pods: %v\n", err)
		return
	}

	j, err := json.MarshalIndent(pods, "", " ")
	if err != nil {
		log.DefaultLogger().Reason(err).Errorf("Failed to marshal pods")
		return
	}
	fmt.Fprintln(f, string(j))
}
// logNodes dumps every node in the cluster as indented JSON to
// <artifactsDir>/<failureCount>_nodes.log.
func (r *KubernetesReporter) logNodes(virtCli kubecli.KubevirtClient, specSummary *types.SpecSummary) {
	logFile, err := os.OpenFile(filepath.Join(r.artifactsDir, fmt.Sprintf("%d_nodes.log", r.failureCount)),
		os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to open the file: %v\n", err)
		return
	}
	defer logFile.Close()

	nodeList, err := virtCli.CoreV1().Nodes().List(v12.ListOptions{})
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to fetch nodes: %v\n", err)
		return
	}

	marshalled, err := json.MarshalIndent(nodeList, "", " ")
	if err != nil {
		log.DefaultLogger().Reason(err).Errorf("Failed to marshal nodes")
		return
	}
	fmt.Fprintln(logFile, string(marshalled))
}
// logLogs writes the current and previous container logs (since spec start,
// minus a 5s safety margin) of every pod into <artifactsDir>/pods/.
func (r *KubernetesReporter) logLogs(virtCli kubecli.KubevirtClient, specSummary *types.SpecSummary) {
	logsdir := filepath.Join(r.artifactsDir, "pods")
	if err := os.MkdirAll(logsdir, 0777); err != nil {
		fmt.Fprintf(os.Stderr, "failed to create directory: %v\n", err)
		return
	}

	startTime := time.Now().Add(-specSummary.RunTime).Add(-5 * time.Second)

	pods, err := virtCli.CoreV1().Pods(v1.NamespaceAll).List(v12.ListOptions{})
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to fetch pods: %v\n", err)
		return
	}

	// dump fetches one container's log stream and appends it to fileName.
	// Doing this in a helper means the file is closed as soon as the
	// container is done; the original deferred every Close() to function
	// exit, holding two descriptors open per container for the whole dump.
	dump := func(fileName string, pod v1.Pod, containerName string, previous bool) {
		f, err := os.OpenFile(filepath.Join(logsdir, fileName),
			os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
		if err != nil {
			fmt.Fprintf(os.Stderr, "failed to open the file: %v\n", err)
			return
		}
		defer f.Close()
		logStart := v12.NewTime(startTime)
		logs, err := virtCli.CoreV1().Pods(pod.Namespace).GetLogs(pod.Name,
			&v1.PodLogOptions{SinceTime: &logStart, Container: containerName, Previous: previous}).DoRaw()
		if err == nil {
			fmt.Fprintln(f, string(logs))
		}
	}

	for _, pod := range pods.Items {
		for _, container := range pod.Spec.Containers {
			dump(fmt.Sprintf("%d_%s_%s-%s.log", r.failureCount, pod.Namespace, pod.Name, container.Name), pod, container.Name, false)
			dump(fmt.Sprintf("%d_%s_%s-%s_previous.log", r.failureCount, pod.Namespace, pod.Name, container.Name), pod, container.Name, true)
		}
	}
}
// logEvents writes the cluster events that occurred while the spec ran
// (plus a 5s margin), newest first, to <artifactsDir>/<failureCount>_events.log.
func (r *KubernetesReporter) logEvents(virtCli kubecli.KubevirtClient, specSummary *types.SpecSummary) {
	logFile, err := os.OpenFile(filepath.Join(r.artifactsDir, fmt.Sprintf("%d_events.log", r.failureCount)),
		os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to open the file: %v\n", err)
		return
	}
	defer logFile.Close()

	// Anything older than the spec's start (with a small margin) is noise.
	startTime := time.Now().Add(-specSummary.RunTime).Add(-5 * time.Second)

	events, err := virtCli.CoreV1().Events(v1.NamespaceAll).List(v12.ListOptions{})
	if err != nil {
		log.DefaultLogger().Reason(err).Errorf("Failed to fetch events")
		return
	}

	items := events.Items
	sort.Slice(items, func(i, j int) bool {
		return items[i].LastTimestamp.After(items[j].LastTimestamp.Time)
	})

	recent := v1.EventList{}
	for _, event := range items {
		if event.LastTimestamp.Time.After(startTime) {
			recent.Items = append(recent.Items, event)
		}
	}

	marshalled, err := json.MarshalIndent(recent, "", " ")
	if err != nil {
		log.DefaultLogger().Reason(err).Errorf("Failed to marshal events")
		return
	}
	fmt.Fprintln(logFile, string(marshalled))
}
// AfterSuiteDidRun is part of the ginkgo Reporter interface; no per-suite
// teardown is needed, so it is a no-op.
func (r *KubernetesReporter) AfterSuiteDidRun(setupSummary *types.SetupSummary) {
}

// SpecSuiteDidEnd is part of the ginkgo Reporter interface; nothing to do at
// suite end, so it is a no-op.
func (r *KubernetesReporter) SpecSuiteDidEnd(summary *types.SuiteSummary) {
}
|
import { FromObservable } from './FromObservable';
/**
 * Re-export of the static `FromObservable.create` factory under the public
 * name `from`. (Generated-style `@type {?}` annotation preserved.)
 */
export const /** @type {?} */ from = FromObservable.create;
|
#!/usr/bin/env bash
# Copyright 2018 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Quote $0 and directory expansions so the script survives paths containing
# spaces (the original relied on unquoted word splitting).
readonly ROOT_DIR="$(dirname "$0")/.."
source "${ROOT_DIR}/vendor/github.com/knative/test-infra/scripts/library.sh"

set -o errexit
set -o nounset
set -o pipefail

cd "${ROOT_DIR}"

# Ensure we have everything we need under vendor/
dep ensure

# The license for this is embedded in the readme.
sed -n '11,41p' vendor/bitbucket.org/ww/goautoneg/README.txt > vendor/bitbucket.org/ww/goautoneg/LICENSE

# find -exec handles file names with spaces, unlike `rm -rf $(find ...)`,
# which word-splits the command substitution.
find vendor/ -name 'OWNERS' -exec rm -rf {} +
find vendor/ -name '*_test.go' -exec rm -rf {} +

update_licenses third_party/VENDOR-LICENSE "./cmd/*"

# Patch k8s.io/client-go/tools/cache and k8s.io/kbernetes/pkg/credentialprovider
# to make k8schain work with ECR. This is a workaround for:
# https://github.com/google/go-containerregistry/issues/355
#
# Once we're on 1.15 we can drop this patch, but we will have to be careful when
# bumping kubernetes dependencies.
#
# TODO(#4549): Drop this patch.
# NOTE(review): REPO_ROOT_DIR is presumably exported by library.sh (this
# script only defines ROOT_DIR) -- confirm before renaming either variable.
git apply "${REPO_ROOT_DIR}/hack/1996.patch"

remove_broken_symlinks ./vendor
|
# Import the zipfile and os modules
import zipfile
import os
import time
# List of files within the Passwords folder
passwordfilesList = []
# Create a function that will allow users to set which password file they would like to use
def collectPasswords(file_to_use, passwords_dir="./Resources/Passwords", animate=True):
    """Load a newline-separated password dictionary and return it as a list.

    Args:
        file_to_use: File name of the dictionary inside ``passwords_dir``.
        passwords_dir: Directory holding the password files. Defaults to the
            previously hard-coded location, so existing callers are unaffected.
        animate: When True, print the cosmetic "CrAcKiNG..." animation.
            Disable for non-interactive use and tests.

    Returns:
        List of passwords, one per line. ``splitlines()`` is used instead of
        the original ``split("\\n")``, which produced a bogus trailing empty
        password for files ending in a newline.
    """
    if animate:
        # Cracking animation loop just to make the program look better.
        from time import sleep
        cracking = 'CrAcKiNG...'
        for ch in cracking:
            print(ch, sep=' ', end=' ', flush=True)
            sleep(0.5)
    # Create the file path that points to the password dictionary.
    file_path = os.path.join(passwords_dir, file_to_use)
    # Context manager closes the handle; the original leaked the open file.
    with open(file_path) as words_file:
        return words_file.read().splitlines()
# Create a function that will be used to crack the zip folders
def crackZips(passwords, zips_dir="./Resources/ZippedFiles"):
    """Try each candidate password against every .zip file under ``zips_dir``.

    Extracts into the current working directory on success and prints the
    password that worked.

    Args:
        passwords: List of candidate passwords (strings).
        zips_dir: Root directory to scan. Defaults to the previously
            hard-coded location, so existing callers are unaffected.
    """
    # Walk the tree so zips in subdirectories are found too.
    for root, dirs, files in os.walk(zips_dir):
        for one_file in files:
            # endswith() replaces the original substring test, which also
            # matched names like "archive.zip.bak".
            if not one_file.endswith(".zip"):
                continue
            # Join with the directory being walked; the original joined with
            # the top-level dir, which broke for zips in nested folders.
            current_path = os.path.join(root, one_file)
            # Context manager closes the archive (the original leaked it).
            with zipfile.ZipFile(current_path) as zip_file:
                for password in passwords:
                    try:
                        zip_file.extractall(pwd=password.encode())
                        print("Success! Password Found." + "\n" + "Password: " + password + " Location: " + one_file + "\n")
                        # Stop after the first working password; the original
                        # kept re-trying the rest of the list pointlessly.
                        break
                    except Exception:
                        # Wrong password (or unreadable member): try the next
                        # candidate. Narrowed from the original bare except so
                        # KeyboardInterrupt/SystemExit still propagate.
                        continue
# Loop through the password-dictionary directory and keep only .txt files.
for onefile in os.listdir("./Resources/Passwords"):
    if ".txt" in onefile:
        passwordfilesList.append(onefile)

# Print a header statement to give the user a simple set of operations to navigate
print("""
_|_|_|_|_| _|_|_| _|_|_| _|_|_| _|_|_| _|_| _|_|_| _| _| _|_|_|_| _|_|_|
_| _| _| _| _| _| _| _| _| _| _| _| _| _| _|
_| _| _|_|_| _| _|_|_| _|_|_|_| _| _|_| _|_|_| _|_|_|
_| _| _| _| _| _| _| _| _| _| _| _| _| _|
_|_|_|_|_| _|_|_| _| _|_|_| _| _| _| _| _|_|_| _| _| _|_|_|_| _| _|
""")
print("This program will crack your zip" + "\n")
print("Please choose from list below:" + "\n")
userimput = input(str(passwordfilesList) + ": ")
passwordlist = collectPasswords(userimput)
# output.txt is still created/truncated for compatibility with the original,
# even though nothing currently writes to it (the prints were commented out).
with open("output.txt", "w") as f:
    crackZips(passwordlist)

# Ask the user if they would like to crack another
while True:
    answer = input("More Cracking? (y/n): ")
    if answer not in ('y', 'n'):
        # Fixed: the original `break` here terminated the program on any
        # typo; re-prompt instead.
        print("Invalid input.")
        continue
    if answer == 'y':
        print("Please choose from list below:" + "\n")
        userimput = input(str(passwordfilesList) + ": ")
        passwordlist = collectPasswords(userimput)
        with open("output.txt", "w") as f:
            crackZips(passwordlist)
        print("\n")
    else:
        print("Done Cracking. Goodbye" + "\n")
        break
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2013:1754
#
# Security announcement date: 2013-11-21 05:56:19 UTC
# Script generation date: 2017-01-01 21:14:59 UTC
#
# Operating System: Red Hat 6
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - qemu-img-rhev.x86_64:0.12.1.2-2.415.el6
# - qemu-kvm-rhev.x86_64:0.12.1.2-2.415.el6
# - qemu-kvm-rhev-debuginfo.x86_64:0.12.1.2-2.415.el6
# - qemu-kvm-rhev-tools.x86_64:0.12.1.2-2.415.el6
#
# CVE List:
# - CVE-2013-4344
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Install the same fixed build of every qemu-kvm-rhev package in turn.
for package in \
    qemu-img-rhev.x86_64-0.12.1.2 \
    qemu-kvm-rhev.x86_64-0.12.1.2 \
    qemu-kvm-rhev-debuginfo.x86_64-0.12.1.2 \
    qemu-kvm-rhev-tools.x86_64-0.12.1.2
do
    sudo yum install "${package}" -y
done
|
<gh_stars>0
// Descriptor for the "departure" pictogram: a 32x32-viewBox SVG rendered at
// 24x24 with a single path.
// NOTE(review): the elem/attrs/content shape is presumably consumed by an
// icon-building helper elsewhere -- confirm against the renderer.
export default {
  elem: 'svg',
  attrs: {
    xmlns: 'http://www.w3.org/2000/svg',
    viewBox: '0 0 32 32',
    width: 24,
    height: 24,
  },
  content: [
    {
      elem: 'path',
      attrs: {
        d:
          'M2 28h28v2H2zM13.48 5.15l5.69 8.14.8 1.15 1.35-.36 4.52-1.21a2.13 2.13 0 0 1 1.12 0A1.84 1.84 0 0 1 28.23 14 1.87 1.87 0 0 1 27 16.45l-18.71 5a1.78 1.78 0 0 1-1 0A1.87 1.87 0 0 1 6 20.12l-.25-.93L5 16.38l-.74-2.74 1.44-.39 2.3 3.1.81 1.08 1.31-.35 2.8-.75 1.94-.52-.52-1.93-2.26-8.36 1.4-.37M14.16 3a1.06 1.06 0 0 0-.5 0l-3.09.83A1 1 0 0 0 9.89 5l2.5 9.36-2.81.75-2.73-3.61a1 1 0 0 0-.52-.36 1.06 1.06 0 0 0-.5 0L2.72 12A1 1 0 0 0 2 13.16l1 3.74.75 2.8.25.94a3.87 3.87 0 0 0 2.74 2.74 3.91 3.91 0 0 0 2 0l18.7-5a3.84 3.84 0 0 0 0-7.44 4.27 4.27 0 0 0-2.16 0l-4.51 1.21-6.07-8.74a1 1 0 0 0-.54-.41z',
      },
    },
  ],
  name: 'departure',
  size: 24,
};
|
// Barrel file: re-export both tab components from one entry point.
export { default as LeftTab } from "./LeftTab";
export { default as RightTab } from "./RightTab";
|
<filename>src/main/java/mekanism/client/model/MekanismModelCache.java
package mekanism.client.model;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;

import mekanism.common.Mekanism;
import mekanism.common.tile.qio.TileEntityQIODriveArray.DriveStatus;

import net.minecraftforge.client.event.ModelBakeEvent;
public class MekanismModelCache extends BaseModelCache {

    public static final MekanismModelCache INSTANCE = new MekanismModelCache();

    /**
     * Reload callbacks, run after every model bake. LinkedHashSet keeps the
     * add-once semantics of the original HashSet while making the callback
     * execution order deterministic (registration order); HashSet iteration
     * order is unspecified.
     */
    private final Set<Runnable> callbacks = new LinkedHashSet<>();

    public final OBJModelData MEKASUIT = registerOBJ(Mekanism.rl("models/entity/mekasuit.obj"));
    public final OBJModelData MEKASUIT_MODULES = registerOBJ(Mekanism.rl("models/entity/mekasuit_modules.obj"));
    public final OBJModelData MEKATOOL = registerOBJ(Mekanism.rl("models/entity/mekatool.obj"));
    /** Drive models indexed by {@code DriveStatus.ordinal()}; NONE registers no model and stays null. */
    public final JSONModelData[] QIO_DRIVES = new JSONModelData[DriveStatus.STATUSES.length];

    public MekanismModelCache() {
        for (DriveStatus status : DriveStatus.STATUSES) {
            // NONE has no model to register (its slot stays null).
            if (status != DriveStatus.NONE) {
                QIO_DRIVES[status.ordinal()] = registerJSON(status.getModel());
            }
        }
    }

    @Override
    public void onBake(ModelBakeEvent evt) {
        super.onBake(evt);
        // Let consumers refresh anything they cached from the previous bake.
        callbacks.forEach(Runnable::run);
    }

    /** Registers a callback to run after each model bake. */
    public void reloadCallback(Runnable callback) {
        callbacks.add(callback);
    }
}
|
' Returns the arithmetic mean of x and y.
' Note: "/" performs floating-point division, so avg(1, 2) yields 1.5.
Function avg(x, y)
    avg = (x + y) / 2
End Function
var asyncAdd = (a,b) => {
return new Promise((resolve, reject) => {
setTimeout(() => {
if(typeof a === 'number' && typeof b === 'number') {
resolve(a + b);
} else {
reject('Invalid input. Input must be number.');
}
}, 1500);
});
};
// Demo: add 1+1, feed the result into a second addition of +10, and log the
// final value (or the rejection message). The synchronous log below prints
// first because both additions are deferred by setTimeout.
(async () => {
  try {
    const firstSum = await asyncAdd(1, 1);
    const secondSum = await asyncAdd(firstSum, 10);
    console.log(secondSum);
  } catch (error) {
    console.log(error);
  }
})();
console.log('hello there');
// Copyright 2021 <NAME>
// Author: <NAME> <<EMAIL>>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "data_model/documentation/bus_structure/bus_submodule.h"
// Deep equality: two submodules are equal when all scalar fields match and
// their children compare equal element-wise (recursively via this operator).
bool operator==(const bus_submodule &lhs, const bus_submodule &rhs) {
    bool ret = true;
    ret &= lhs.name == rhs.name;
    ret &= lhs.instance_type == rhs.instance_type;
    ret &= lhs.offset == rhs.offset;
    ret &= lhs.children.size() == rhs.children.size();
    if (ret) {
        // decltype keeps the index type equal to the container's size_type,
        // fixing the signed/unsigned comparison in the original (int vs
        // size()), which overflows for very large containers and triggers
        // -Wsign-compare.
        for (decltype(lhs.children.size()) i = 0; i < lhs.children.size(); ++i) {
            ret &= lhs.children[i] == rhs.children[i];
        }
    }
    return ret;
}
// Sets the instance/module type string of this submodule.
// NOTE(review): `t` is taken by value, which copies the string; switching to
// a const reference would avoid the copy but also requires changing the
// declaration in the header, which is not visible here.
void bus_submodule::set_module_type(const std::string t) {
    instance_type = t;
}
|
const { NotFoundException } = require('../../infra/exception');
module.exports = ({ batatinhaRepository }) => ({
execute: async ({ batatinha_header, batatinha_id }) => {
const foundBatatinha = await batatinhaRepository.get({ batatinha_header, batatinha_id });
if (!foundBatatinha)
throw new NotFoundException({
details: [
{
error_code: 'Batatinha not found',
error_message: 'Batatinha was not found with header and id informed'
}
]
});
return foundBatatinha;
}
});
|
####################################################################
# 1. Test cosmic annotations
####################################################################
# Announce the test case. NOTE(review): "\c" suppresses the newline only in
# some echo implementations -- presumably the harness runs under a shell
# where this works; confirm.
echo " cosmic.t01...\c"
# Expected output: chrom/start/end plus the comma-separated COSMIC ids.
echo "chr1 10378 10379 None
chr1 120612001 120612005 COSM674994,COSM132830
chr5 112175900 112175904 COSM33672,COSM27546,COSM1432432,COSM25817,COSM19698,COSM177897
chr9 139277993 139278022 COSM247490" > exp
# Query the annotated test database and compare with the expectation.
gemini query -q "select chrom, start, end, cosmic_ids from variants" test.cosmic.db > obs
# NOTE(review): `check` is presumably defined by the surrounding test
# harness (compares obs against exp) -- not visible in this file.
check obs exp
rm obs exp
import datetime
import secrets
from peewee import (
JOIN,
BigIntegerField,
BooleanField,
CharField,
DateField,
DateTimeField,
ForeignKeyField,
IntegerField,
TextField,
fn,
)
from playhouse.shortcuts import model_to_dict
from src.model import db
def get_exclude():
    """Fields stripped from every default ``to_json()`` dump (credentials and
    internal storage details)."""
    return [User.password, User.email, Content.user, Content.path]
class DatabaseInfo(db.db_wrapper.Model):
    """Holds the database schema version number."""
    version = IntegerField()
class Content(db.db_wrapper.Model):
    """Uploaded file; ``user`` and ``path`` are internal (see get_exclude())."""

    user = IntegerField()  # uploader's user id (plain int, not a FK)
    path = CharField()  # storage path, never exposed via to_json()
    mime = CharField()
    size = BigIntegerField()  # size in bytes

    def to_json(self):
        content_dict = model_to_dict(self, exclude=get_exclude())
        return content_dict

    @property
    def is_image(self):
        return "image/" in self.mime

    @property
    def is_small_image(self):
        # 1024 * 300 bytes = 300 KB (the original "25 kb?" comment was wrong).
        return self.is_image and self.size < 1024 * 300

    @classmethod
    def get_user_files(cls, user):
        """All content rows uploaded by ``user``."""
        return cls.select().where(Content.user == user)
class User(db.db_wrapper.Model):
    """Registered account. ``password`` and ``email`` never appear in the
    default JSON dumps (see get_exclude())."""

    username = CharField(unique=True)
    # Restored: the original line was corrupted by a secrets-scrubbing pass
    # ("password = <PASSWORD>Field()"). A plain CharField matches how the
    # field is referenced by get_exclude() and to_json_with_email().
    password = CharField()
    email = CharField()
    registration_date = DateTimeField()
    last_active_date = DateTimeField()
    name = CharField(null=True)  # optional display name
    birthday = DateField(null=True)
    about = TextField(null=True)
    is_admin = BooleanField(default=False)
    avatar = ForeignKeyField(model=Content, backref="avatar", null=True)

    @classmethod
    def get_admins(cls):
        """All users flagged as administrators."""
        return cls.select().where(User.is_admin == True)  # noqa E712

    @classmethod
    def get_users_sorted_by_active_date(cls):
        """All users, most recently active first."""
        return cls.select().order_by(User.last_active_date.desc())

    @property
    def visible_name(self):
        """Display name: the optional real name when set, else the username."""
        if self.name is not None and len(self.name) > 0:
            return self.name
        return self.username

    def to_json(self):
        user_dict = model_to_dict(self, exclude=get_exclude())
        return user_dict

    def to_json_with_email(self):
        """Like to_json() but keeps the email (password still excluded)."""
        user_dict = model_to_dict(self, exclude=[User.password, Content.user])
        return user_dict
class Token(db.db_wrapper.Model):
    """Opaque hex token (access / refresh / recover) tied to a user."""

    user = ForeignKeyField(model=User, backref="tokens")
    token = CharField()
    valid_until = DateTimeField()
    is_refresh_token = BooleanField()  # deprecated
    token_type = CharField()

    @classmethod
    def _generate(cls, user, token_type, lifetime_days, is_refresh_token=False):
        """Shared factory behind the generate_* helpers: the original three
        methods duplicated this create() call verbatim."""
        return cls.create(
            user=user,
            token=secrets.token_hex(),
            token_type=token_type,
            is_refresh_token=is_refresh_token,
            valid_until=datetime.datetime.now()
            + datetime.timedelta(days=lifetime_days),
        )

    @classmethod
    def generate_access_token(cls, user):
        """30-day access token."""
        return cls._generate(user, "access", 30)

    @classmethod
    def generate_refresh_token(cls, user):
        """90-day refresh token (also sets the deprecated boolean flag)."""
        return cls._generate(user, "refresh", 90, is_refresh_token=True)

    @classmethod
    def generate_recover_token(cls, user):
        """1-day password-recovery token."""
        return cls._generate(user, "recover", 1)
class Feedback(db.db_wrapper.Model):
    """User-submitted feedback message with a resolution flag."""

    user = ForeignKeyField(model=User, backref="feedbacks")
    text = TextField()
    is_resolved = BooleanField(default=False)

    def to_json(self):
        feedback_dict = model_to_dict(self, exclude=get_exclude())
        return feedback_dict
class Blog(db.db_wrapper.Model):
    """A blog whose visibility/participation rules follow ``blog_type``."""

    image = ForeignKeyField(model=Content, backref="blogs", null=True)
    created_date = DateTimeField()
    updated_date = DateTimeField()
    description = TextField(null=True)
    title = TextField(null=True)
    url = CharField(null=True, unique=True)
    blog_type = IntegerField(
        choices=[
            (1, "open"),  # Visible in list. Everyone can join and write.
            (2, "close"),  # Visible in list.
            # Writers can join only if invited by other user. Everyone can read
            (3, "hidden"),  # Not visible in list.
            # Read and write can only invited users.
        ],
        default=1,
    )
    creator = ForeignKeyField(model=User, backref="blogs")

    @classmethod
    def get_public_blogs(cls):
        """Non-hidden blogs annotated with reader counts, most read first."""
        readers = fn.COUNT(BlogParticipiation.id)
        return (
            cls.select(Blog, readers.alias("readers"))
            .join(BlogParticipiation, JOIN.LEFT_OUTER)
            .where(Blog.blog_type != 3)
            .group_by(Blog.id)
            .order_by(readers.desc())
        )

    @classmethod
    def get_readers_count(cls, blog):
        return cls.get_readers(blog).count()

    @classmethod
    def get_readers(cls, blog):
        """Users participating in ``blog`` (any role)."""
        return (
            User.select()
            .join(BlogParticipiation)
            .where(BlogParticipiation.blog == blog)
        )

    @classmethod
    def get_blogs_for_user(cls, user):
        """Non-hidden blogs the user participates in, most read first."""
        readers = fn.COUNT(BlogParticipiation.id)
        return (
            cls.select(Blog, readers.alias("readers"))
            .join(BlogParticipiation, JOIN.LEFT_OUTER)
            .where((BlogParticipiation.user == user) & (Blog.blog_type != 3))
            .group_by(Blog.id)
            .order_by(readers.desc())
        )

    @classmethod
    def get_user_role(cls, blog, user):
        """The user's role id in the blog, or None when not a participant."""
        query = BlogParticipiation.select().where(
            (BlogParticipiation.user == user) & (BlogParticipiation.blog == blog)
        )
        if query.count() == 0:
            return None
        participiation = query.get()
        return participiation.role

    @classmethod
    def has_access(cls, blog, user):
        """Whether ``user`` may read ``blog``: admins always can; hidden
        blogs (type 3) additionally require participation."""
        if user is not None and user.is_admin:
            return True
        if blog.blog_type == 3:
            if user is None:
                return False
            role = Blog.get_user_role(blog, user)
            if role is None:
                return False
        return True

    def to_json(self):
        blog_dict = model_to_dict(self, exclude=get_exclude())
        # Reader count is computed, not stored, so attach it explicitly.
        blog_dict["readers"] = Blog.get_readers_count(self)
        return blog_dict
class BlogParticipiation(db.db_wrapper.Model):
    """Membership of a user in a blog with a role. (The class-name
    misspelling is kept: it is referenced throughout this module.)"""

    blog = ForeignKeyField(model=Blog)
    user = ForeignKeyField(model=User)
    role = IntegerField(choices=[(1, "owner"), (2, "writer"), (3, "reader")], default=1)
class BlogInvite(db.db_wrapper.Model):
    """Invitation of ``user_to`` into a blog by ``user_from`` with a
    proposed role; ``is_accepted`` tracks whether it was taken."""

    blog = ForeignKeyField(model=Blog)
    user_from = ForeignKeyField(model=User)
    user_to = ForeignKeyField(model=User)
    is_accepted = BooleanField(default=False)
    role = IntegerField(choices=[(1, "owner"), (2, "writer"), (3, "reader")], default=1)
class Post(db.db_wrapper.Model):
    """Blog post or draft; public visibility follows the owning blog's type."""

    blog = ForeignKeyField(model=Blog, null=True)
    creator = ForeignKeyField(model=User)
    created_date = DateTimeField()
    updated_date = DateTimeField()
    title = TextField(null=True)
    cut_text = TextField(null=True)  # teaser text shown before the cut
    text = TextField(null=True)
    is_draft = BooleanField(default=True)
    is_on_main = BooleanField(default=False)
    reads = IntegerField(default=0)
    url = CharField(null=True, unique=True)
    has_cut = BooleanField(default=False)
    cut_name = CharField(default=None, null=True)

    @classmethod
    def get_public_posts(cls):
        """Published posts from non-hidden blogs, newest first."""
        return (
            cls.select()
            .join(Blog)
            .where(
                # (Post.is_on_main) &
                (Post.is_draft == False)  # noqa: E712
                & (Blog.blog_type != 3)  # noqa: E712
            )
            .order_by(Post.created_date.desc())
        )

    @classmethod
    def get_posts_for_blog(cls, blog):
        """Published posts belonging to ``blog``, newest first."""
        return (
            cls.select()
            .where((Post.is_draft == False) & (Post.blog == blog))  # noqa: E712
            .order_by(Post.created_date.desc())
        )

    @classmethod
    def get_user_posts(cls, user):
        """Published posts authored by ``user``, newest first."""
        return (
            cls.select()
            .where((Post.is_draft == False) & (Post.creator == user))  # noqa: E712
            .order_by(Post.created_date.desc())
        )

    @classmethod
    def get_user_drafts(cls, user):
        """Unpublished drafts authored by ``user``, newest first."""
        return (
            cls.select()
            .where((Post.is_draft == True) & (Post.creator == user))  # noqa: E712
            .order_by(Post.created_date.desc())
        )

    @classmethod
    def get_public_posts_with_tag(cls, tag):
        """Published, non-hidden posts carrying ``tag``, newest first."""
        return (
            cls.select()
            .join(TagMark)
            .switch(Post)
            .join(Blog)
            .where(
                (Post.is_draft == False)  # noqa: E712
                & (TagMark.tag == tag)
                & (Blog.blog_type != 3)
            )
            .order_by(Post.created_date.desc())
        )

    def to_json(self):
        post_dict = model_to_dict(self, exclude=get_exclude())
        # Comment count and tags are derived, so attach them explicitly.
        post_dict["comments"] = Comment.get_comments_count_for_post(self)
        post_dict["tags"] = [t.to_json() for t in Tag.get_for_post(self)]
        return post_dict
class Comment(db.db_wrapper.Model):
    """Threaded comment attached to an arbitrary object, addressed by the
    (``object_type``, ``object_id``) pair."""

    # post = ForeignKeyField(model=Post, default=None, null=True)  # deprecated
    object_type = CharField(null=True)
    object_id = IntegerField(default=0)
    creator = ForeignKeyField(model=User)
    parent = ForeignKeyField(model="self", default=None, null=True)
    level = IntegerField(default=0)  # nesting depth within the thread
    created_date = DateTimeField()
    updated_date = DateTimeField()
    text = TextField()

    @classmethod
    def get_comments_for(cls, type, object_id):
        """All comments for the given target, newest first.

        ``type`` shadows the builtin but is kept for caller compatibility.
        """
        return (
            cls.select()
            .where((Comment.object_type == type) & (Comment.object_id == object_id))
            .order_by(Comment.created_date.desc())
        )

    @classmethod
    def get_comments_for_post(cls, post):
        """Comments on ``post``, newest first.

        Delegates to the generic lookup instead of duplicating the same
        where/order_by clause inline as the original did.
        """
        return cls.get_comments_for("post", post.id)

    @classmethod
    def get_comments_count_for_post(cls, post):
        return cls.get_comments_for_post(post).count()

    def to_json(self):
        comment_dict = model_to_dict(self, exclude=get_exclude())
        return comment_dict
class Tag(db.db_wrapper.Model):
    """Post tag; usage counts are derived from TagMark rows."""

    title = TextField()
    created_date = DateTimeField()

    @classmethod
    def get_tags(cls):
        """All tags annotated with a usage ``count``, most used first."""
        ntags = fn.COUNT(TagMark.id)
        return (
            cls.select(Tag, ntags.alias("count"))
            .join(TagMark, JOIN.LEFT_OUTER)
            .group_by(Tag.id)
            .order_by(ntags.desc())
        )

    @classmethod
    def get_for_post(cls, post):
        """Tags attached to ``post``."""
        return (
            cls.select(Tag).join(TagMark, JOIN.LEFT_OUTER).where(TagMark.post == post)
        )

    def to_json(self):
        tag_dict = model_to_dict(self, exclude=get_exclude())
        # "count" only exists on rows produced by get_tags().
        if hasattr(self, "count"):
            tag_dict["count"] = self.count
        return tag_dict
class TagMark(db.db_wrapper.Model):
    """Association row linking a Tag to a Post."""
    tag = ForeignKeyField(model=Tag)
    post = ForeignKeyField(model=Post)
class Conversation(db.db_wrapper.Model):
    """Titled conversation; members live in ConversationParticipiant."""
    creator = ForeignKeyField(model=User)
    created_date = DateTimeField()
    title = TextField()
class ConversationParticipiant(db.db_wrapper.Model):
    """Membership of a user in a conversation. (Misspelled name kept for
    compatibility with existing references.)"""
    user = ForeignKeyField(model=User)
    conversation = ForeignKeyField(model=Conversation)
class Message(db.db_wrapper.Model):
    """Message within a conversation; ``parent``/``level`` support threading."""

    conversation = ForeignKeyField(model=Conversation)
    creator = ForeignKeyField(model=User)
    # NOTE(review): unlike Comment.parent, this FK has default=None but no
    # null=True -- confirm top-level messages can actually be saved.
    parent = ForeignKeyField(model="self", default=None)
    level = IntegerField(default=0)
    created_date = DateTimeField()
    updated_date = DateTimeField()
    text = TextField()
class Notification(db.db_wrapper.Model):
    """Per-user notification; ``is_new`` marks it unread. ``object_type`` /
    ``object_id`` point at the related entity."""

    user = ForeignKeyField(model=User)
    text = TextField()
    object_type = TextField(default="")
    object_id = IntegerField(default=0)
    created_date = DateTimeField()
    is_new = BooleanField(default=True)

    @classmethod
    def get_user_notifications(cls, user):
        """All notifications for ``user``, newest first."""
        return (
            cls.select()
            .where((Notification.user == user))
            .order_by(Notification.created_date.desc())
        )

    @classmethod
    def get_user_unread_notifications(cls, user):
        """Unread notifications for ``user``, newest first."""
        return (
            cls.select()
            .where(
                (Notification.user == user) & (Notification.is_new == True)  # noqa E712
            )
            .order_by(Notification.created_date.desc())
        )

    @classmethod
    def mark_notification_as_readed(cls, user, notification_id):
        """Mark one of the user's notifications as read; no-op when the id
        does not belong to ``user``. (Misspelled name kept: callers rely on
        it.)"""
        notification = cls.get_or_none(
            (Notification.user == user) & (Notification.id == notification_id)
        )
        if notification is not None:
            notification.is_new = False
            notification.save()

    def to_json(self):
        # Also drop the owning user: the recipient already knows who they are.
        not_dict = model_to_dict(self, exclude=get_exclude() + [Notification.user])
        return not_dict
class Sticker(db.db_wrapper.Model):
    """Named sticker backed by an uploaded Content file."""
    name = TextField()
    file = ForeignKeyField(model=Content)

    def to_json(self):
        return model_to_dict(self, exclude=get_exclude())
class Vote(db.db_wrapper.Model):
    """A user's vote on a user/blog/post/comment, addressed by the
    (``target_type``, ``target_id``) pair."""

    target_id = IntegerField()
    target_type = IntegerField(
        choices=[(1, "user"), (2, "blog"), (3, "post"), (4, "comment")], default=3
    )
    voter = ForeignKeyField(model=User)
    vote_value = IntegerField()

    @classmethod
    def add_votes_info(cls, model_dict, type, user):
        """Attach "rating" (sum of all vote values for the target) and
        "user_voted" (the given user's own vote, 0 when none) to
        ``model_dict`` and return it. ``type`` shadows the builtin but is
        kept for caller compatibility."""
        model_dict["rating"] = (
            Vote.select(fn.SUM(Vote.vote_value).alias("rating"))
            .where(
                (Vote.target_id == model_dict["id"])
                & (Vote.target_type == type)  # noqa: E712
            )
            .first()
            .rating
            or 0
        )
        if user is not None:
            user_vote = Vote.get_or_none(
                (Vote.target_id == model_dict["id"])
                & (Vote.target_type == type)  # noqa: E712
                & (Vote.voter == user)  # noqa: E712
            )
            model_dict["user_voted"] = user_vote.vote_value if user_vote else 0
        else:
            model_dict["user_voted"] = 0
        return model_dict

    def to_json(self):
        return model_to_dict(self, exclude=get_exclude())
class Jam(db.db_wrapper.Model):
    """A game-jam event hosted inside a blog.

    ``status`` drives the query helpers below; judging by the helper
    names, 0 appears to mean upcoming, 1 running and 2 closed — confirm
    against the writers of this field.
    """

    creator = ForeignKeyField(model=User)
    blog = ForeignKeyField(model=Blog)
    title = TextField(null=True, default=None)
    url = CharField(null=True, unique=True)
    short_description = TextField(null=True, default=None)
    description = TextField(null=True, default=None)
    created_date = DateTimeField()
    start_date = DateTimeField(null=True, default=None)
    end_date = DateTimeField(null=True, default=None)
    logo = ForeignKeyField(model=Content, backref="logo", null=True)
    status = IntegerField(default=0)

    @classmethod
    def get_current_jams(cls):
        """Jams with status == 1, latest start date first."""
        return cls.select().where(Jam.status == 1).order_by(Jam.start_date.desc())

    @classmethod
    def get_closest_jams(cls):
        """Jams with status == 0, latest start date first."""
        return cls.select().where(Jam.status == 0).order_by(Jam.start_date.desc())

    @classmethod
    def get_closed_jams(cls):
        """Jams with status == 2, latest start date first."""
        return cls.select().where(Jam.status == 2).order_by(Jam.start_date.desc())

    @classmethod
    def get_jams_organized_by_user(cls, user):
        """Jams created by ``user``, newest first."""
        return cls.select().where(Jam.creator == user).order_by(Jam.created_date.desc())

    def to_json(self):
        """Serialize the jam, embedding its list of judging criteria."""
        jam_dict = model_to_dict(self, exclude=get_exclude())
        criterias = JamCriteria.select().where(JamCriteria.jam == self)
        jam_dict["criterias"] = [c.to_json() for c in criterias]
        return jam_dict
class JamCriteria(db.db_wrapper.Model):
    """A single judging criterion of a jam, displayed in ``order``."""

    jam = ForeignKeyField(model=Jam)
    title = TextField(null=True, default=None)
    order = IntegerField(default=0)

    def to_json(self):
        """Serialize without recursing into the jam relation."""
        return model_to_dict(self, exclude=get_exclude(), recurse=False)
class JamEntry(db.db_wrapper.Model):
    """A participant's entry in a jam.

    ``is_archived`` soft-hides an entry from the listing helpers without
    deleting it.
    """

    creator = ForeignKeyField(model=User)
    jam = ForeignKeyField(model=Jam, default=None)
    title = TextField(null=True, default=None)
    url = CharField(null=True, unique=True)
    short_info = TextField(null=True, default=None)
    info = TextField(null=True, default=None)
    created_date = DateTimeField()
    logo = ForeignKeyField(model=Content, backref="logo", null=True)
    is_archived = BooleanField(default=False)

    @classmethod
    def get_user_entries(cls, user):
        """Non-archived entries created by ``user``, newest first."""
        return (
            cls.select()
            .where(
                (JamEntry.is_archived == False)  # noqa: E712
                & (JamEntry.creator == user)
            )
            .order_by(JamEntry.created_date.desc())
        )

    @classmethod
    def get_entries_for_post(cls, post):
        """Non-archived entries linked to ``post`` via JamEntryPost."""
        return (
            cls.select()
            .join(JamEntryPost)
            .where(
                (JamEntry.is_archived == False)  # noqa: E712
                & (JamEntryPost.post == post)
            )
            .order_by(JamEntry.created_date.desc())
        )

    def to_json(self):
        """Serialize the entry, embedding its links and non-draft posts."""
        entry_dict = model_to_dict(self, exclude=get_exclude())
        links = JamEntryLink.select().where(JamEntryLink.entry == self)
        entry_dict["links"] = [c.to_json() for c in links]
        # Only published (non-draft) posts are exposed, newest first.
        posts = (
            Post.select(Post.id, Post.title, Post.url)
            .join(JamEntryPost)
            .where(
                (Post.is_draft == False) & (JamEntryPost.entry == self)  # noqa: E712
            )
            .order_by(Post.created_date.desc())
        )
        entry_dict["posts"] = [p.to_json() for p in posts]
        return entry_dict
class JamEntryPost(db.db_wrapper.Model):
    """Join table linking a jam entry to a blog post."""

    entry = ForeignKeyField(model=JamEntry)
    post = ForeignKeyField(model=Post)

    def to_json(self):
        """Serialize without recursing into the related models."""
        return model_to_dict(self, exclude=get_exclude(), recurse=False)
class JamEntryLink(db.db_wrapper.Model):
    """An external link attached to a jam entry, displayed in ``order``."""

    entry = ForeignKeyField(model=JamEntry)
    title = TextField(null=True, default=None)
    href = CharField(null=True)
    order = IntegerField(default=0)

    def to_json(self):
        """Serialize without recursing into the entry relation."""
        return model_to_dict(self, exclude=get_exclude(), recurse=False)
class JamEntryVote(db.db_wrapper.Model):
    """A juror's score for one entry on one criterion."""

    entry = ForeignKeyField(model=JamEntry)
    voter = ForeignKeyField(model=User)
    criteria = ForeignKeyField(model=JamCriteria)
    vote = IntegerField(default=0)

    def to_json(self):
        """Serialize, hiding the entry, the voter and the criterion's jam."""
        return model_to_dict(
            self,
            exclude=get_exclude()
            + [JamEntryVote.entry, JamEntryVote.voter, JamCriteria.jam],
        )
class JamEntryFeedback(db.db_wrapper.Model):
    """Free-form feedback a voter leaves on a jam entry."""

    entry = ForeignKeyField(model=JamEntry)
    voter = ForeignKeyField(model=User)
    feedback = TextField()

    def to_json(self):
        """Serialize without recursing into the related models."""
        return model_to_dict(self, exclude=get_exclude(), recurse=False)
class Achievement(db.db_wrapper.Model):
    """A site-wide achievement that can be granted to users."""

    title = TextField(null=True, default=None)
    image = ForeignKeyField(model=Content, backref="image", null=True)

    @classmethod
    def add_achievements(cls, user_dict):
        """Attach the user's achievements (plus per-user comment) to
        ``user_dict`` under the ``"achievements"`` key and return it."""
        granted = (
            Achievement.select(Achievement, AchievementUser.comment)
            .join(AchievementUser, JOIN.LEFT_OUTER)
            .where(AchievementUser.user == user_dict["id"])
        )
        # Each serialized achievement carries the grant's comment alongside
        # its own fields.
        user_dict["achievements"] = [
            dict(row.to_json(), comment=row.achievementuser.comment)
            for row in granted
        ]
        return user_dict

    def to_json(self):
        """Serialize the achievement, omitting globally excluded fields."""
        return model_to_dict(self, exclude=get_exclude())
class AchievementUser(db.db_wrapper.Model):
    """Join table granting an achievement to a user, with an optional note."""

    achievement = ForeignKeyField(model=Achievement)
    user = ForeignKeyField(model=User)
    comment = TextField(null=True, default=None)

    def to_json(self):
        """Serialize the grant, omitting globally excluded fields."""
        return model_to_dict(self, exclude=get_exclude())
|
#!/bin/bash
# Install gdock's third-party dependencies under the gdock root given as $1.
# Downloads/clones and builds: dcomplex, fcc, pdb-tools, haddock-tools and
# ProFit, then rewrites the hard-coded path in etc/gdock.ini.
set -euo pipefail

# Fail early with a usage message instead of operating on an empty path.
GDOCK_PATH="${1:?usage: $0 <gdock-path>}"
export GDOCK_PATH

cd "$GDOCK_PATH"
mkdir -p "$GDOCK_PATH/src"
cd "$GDOCK_PATH/src"

# dcomplex
wget http://servers.sparks-lab.org/downloads/dcomplex2.tar.gz
tar zxfv dcomplex2.tar.gz
rm dcomplex2.tar.gz
cd "$GDOCK_PATH/src/dcomplex_single_file"
# Raise the hard-coded atom limit and make the data-file paths absolute
# so dcomplex can run from any working directory.
sed -i "s|5400|54000|g" dcomplex.c
sed -i "s|charge_inp.dat|$GDOCK_PATH/src/dcomplex_single_file/charge_inp.dat|g" dcomplex.c
sed -i "s|\"fort.21_alla\"|\"$GDOCK_PATH/src/dcomplex_single_file/fort.21_alla\"|g" dcomplex.c
g++ -o dcomplex dcomplex.c
cd "$GDOCK_PATH/src"

# fcc
git clone https://github.com/haddocking/fcc.git
cd fcc
git checkout python3
cd src
make
cd "$GDOCK_PATH/src"

# pdb-tools (pure python, nothing to build)
git clone https://github.com/haddocking/pdb-tools

# haddock-tools
git clone https://github.com/haddocking/haddock-tools
# Fix: contact-chainID.cpp lives inside the freshly cloned haddock-tools
# directory; the original compiled from src/, where the file does not exist.
cd haddock-tools
g++ -O2 -o contact-chainID contact-chainID.cpp
cd "$GDOCK_PATH/src"

# profit
wget http://www.bioinf.org.uk/software/profit/235216/profit.tar.gz
tar zxvf profit.tar.gz
rm profit.tar.gz
cd ProFit_V3.3/src
make
cd "$GDOCK_PATH/src"

# edit paths (replace the author's development path with this install)
cd "$GDOCK_PATH"
sed -i "s|/Users/rodrigo/repos/gdock|$GDOCK_PATH|g" etc/gdock.ini
# A source record with associations into the variant/disease data model.
class Source < ActiveRecord::Base
  # Join rows connecting this source to variants (per disease).
  has_many :disease_source_variants
  # Variants reached through those join rows.
  has_many :variants, through: :disease_source_variants
  has_many :drug_interactions
end
|
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import java.util.Arrays;
import java.util.concurrent.ThreadLocalRandom;
@State(Scope.Benchmark)
public class SortingBenchmark {
@Param({"1000", "10000", "100000"})
private int size;
private int[] data;
public static void main(String[] args) throws RunnerException {
Options opt = new OptionsBuilder()
.include(SortingBenchmark.class.getSimpleName())
.forks(1)
.warmupIterations(3)
.measurementIterations(5)
.build();
new Runner(opt).run();
}
@Setup
public void setup() {
data = ThreadLocalRandom.current().ints(size).toArray();
}
@Benchmark
public void bubbleSortBenchmark() {
int[] arr = Arrays.copyOf(data, data.length);
bubbleSort(arr);
}
@Benchmark
public void quickSortBenchmark() {
int[] arr = Arrays.copyOf(data, data.length);
quickSort(arr, 0, arr.length - 1);
}
private void bubbleSort(int[] arr) {
int n = arr.length;
for (int i = 0; i < n - 1; i++) {
for (int j = 0; j < n - i - 1; j++) {
if (arr[j] > arr[j + 1]) {
int temp = arr[j];
arr[j] = arr[j + 1];
arr[j + 1] = temp;
}
}
}
}
private void quickSort(int[] arr, int low, int high) {
if (low < high) {
int pi = partition(arr, low, high);
quickSort(arr, low, pi - 1);
quickSort(arr, pi + 1, high);
}
}
private int partition(int[] arr, int low, int high) {
int pivot = arr[high];
int i = (low - 1);
for (int j = low; j < high; j++) {
if (arr[j] < pivot) {
i++;
int temp = arr[i];
arr[i] = arr[j];
arr[j] = temp;
}
}
int temp = arr[i + 1];
arr[i + 1] = arr[high];
arr[high] = temp;
return i + 1;
}
} |
#!/bin/sh
# Root directory of the in-RAM filesystem built for sysupgrade.
RAM_ROOT=/tmp/root

# Fallback ldd: the dynamic loader itself prints dependencies when
# LD_TRACE_LOADED_OBJECTS is set.
[ -x /usr/bin/ldd ] || ldd() { LD_TRACE_LOADED_OBJECTS=1 $*; }
# Print just the filesystem paths of the shared libraries a binary needs.
libs() { ldd $* 2>/dev/null | sed -r 's/(.* => )?(.*) .*/\2/'; }
# Copy each existing regular file into $RAM_ROOT, preserving its path,
# unless a copy is already there.
install_file() { # <file> [ <file> ... ]
    for file in "$@"; do
        dest="$RAM_ROOT/$file"
        [ -f $file -a ! -f $dest ] && {
            dir="$(dirname $dest)"
            mkdir -p "$dir"
            cp $file $dest
        }
    done
}
# Copy an executable and the shared libraries it links against (via libs)
# into $RAM_ROOT, then create any requested symlinks pointing at it.
install_bin() { # <file> [ <symlink> ... ]
    src=$1
    files=$1
    # Only resolve libraries for files that are actually executable.
    [ -x "$src" ] && files="$src $(libs $src)"
    install_file $files
    shift
    for link in "$@"; do {
        dest="$RAM_ROOT/$link"
        dir="$(dirname $dest)"
        mkdir -p "$dir"
        [ -f "$dest" ] || ln -s $src $dest
    }; done
}
# Pivot the root filesystem into <new_root>, parking the old root at
# <new_root><old_root>. Bind-mounts <new_root> onto itself if it is not
# already a mount point, moves /proc across before pivoting, then moves
# sys/dev/tmp/overlay back under the new root. Returns 1 on failure.
supivot() { # <new_root> <old_root>
    /bin/mount | grep "on $1 type" 2>&- 1>&- || /bin/mount -o bind $1 $1
    mkdir -p $1$2 $1/proc $1/sys $1/dev $1/tmp $1/overlay && \
    /bin/mount -o noatime,move /proc $1/proc && \
    pivot_root $1 $1$2 || {
        /bin/umount -l $1 $1
        return 1
    }

    /bin/mount -o noatime,move $2/sys /sys
    /bin/mount -o noatime,move $2/dev /dev
    /bin/mount -o noatime,move $2/tmp /tmp
    # overlay may legitimately be absent; ignore the error output.
    /bin/mount -o noatime,move $2/overlay /overlay 2>&-
    return 0
}
# Build a minimal root filesystem in RAM (busybox, flash/ubi tooling,
# caller-requested binaries/data), pivot into it, detach the old root and
# overlay, then exec the given command inside the ramfs.
run_ramfs() { # <command> [...]
    install_bin /bin/busybox /bin/ash /bin/sh /bin/mount /bin/umount \
        /sbin/pivot_root /sbin/reboot /bin/sync /bin/dd /bin/grep \
        /bin/cp /bin/mv /bin/tar /usr/bin/md5sum "/usr/bin/[" /bin/dd \
        /bin/vi /bin/ls /bin/cat /usr/bin/awk /usr/bin/hexdump \
        /bin/sleep /bin/zcat /usr/bin/bzcat /usr/bin/printf /usr/bin/wc \
        /bin/cut /usr/bin/printf /bin/sync /bin/mkdir /bin/rmdir \
        /bin/rm /usr/bin/basename /bin/kill /bin/chmod
    install_bin /bin/uclient-fetch /bin/wget
    install_bin /sbin/mtd
    install_bin /sbin/mount_root
    install_bin /sbin/snapshot
    install_bin /sbin/snapshot_tool
    install_bin /usr/sbin/ubiupdatevol
    install_bin /usr/sbin/ubiattach
    install_bin /usr/sbin/ubiblock
    install_bin /usr/sbin/ubiformat
    install_bin /usr/sbin/ubidetach
    install_bin /usr/sbin/ubirsvol
    install_bin /usr/sbin/ubirmvol
    install_bin /usr/sbin/ubimkvol
    install_bin /usr/sbin/partx
    # Platform code may request extra binaries; "a:b" entries become
    # "a b" (binary plus symlink names).
    for file in $RAMFS_COPY_BIN; do
        install_bin ${file//:/ }
    done
    install_file /etc/resolv.conf /lib/*.sh /lib/functions/*.sh /lib/upgrade/*.sh $RAMFS_COPY_DATA

    [ -L "/lib64" ] && ln -s /lib $RAM_ROOT/lib64

    supivot $RAM_ROOT /mnt || {
        echo "Failed to switch over to ramfs. Please reboot."
        exit 1
    }

    # Old root is now at /mnt; make it read-only and detach it.
    /bin/mount -o remount,ro /mnt
    /bin/umount -l /mnt

    grep /overlay /proc/mounts > /dev/null && {
        /bin/mount -o noatime,remount,ro /overlay
        /bin/umount -l /overlay
    }

    # spawn a new shell from ramdisk to reduce the probability of cache issues
    exec /bin/busybox ash -c "$*"
}
# Send <signal> (default TERM) to every non-essential userspace process
# before switching to the ramfs. When running as (or directly under an
# upgraded) init, everything except this process and its parent is killed;
# otherwise essential services are skipped by name.
kill_remaining() { # [ <signal> ]
    local sig="${1:-TERM}"

    echo -n "Sending $sig to remaining processes ... "

    local my_pid=$$
    local my_ppid=$(cut -d' ' -f4 /proc/$my_pid/stat)
    local my_ppisupgraded=
    grep -q upgraded /proc/$my_ppid/cmdline >/dev/null && {
        local my_ppisupgraded=1
    }

    local stat
    for stat in /proc/[0-9]*/stat; do
        [ -f "$stat" ] || continue

        local pid name state ppid rest
        read pid name state ppid rest < $stat
        name="${name#(}"; name="${name%)}"

        local cmdline
        read cmdline < /proc/$pid/cmdline

        # Skip kernel threads
        [ -n "$cmdline" ] || continue

        # Fix: '||' and '&&' have equal precedence in shell, so the original
        # "A || B && C" parsed as "(A || B) && C" — a process that really is
        # init (A true) was still gated on C. Group B && C explicitly.
        if [ $$ -eq 1 ] || { [ $my_ppid -eq 1 ] && [ -n "$my_ppisupgraded" ]; }; then
            # Running as init process, kill everything except me
            if [ $pid -ne $$ ] && [ $pid -ne $my_ppid ]; then
                echo -n "$name "
                kill -$sig $pid 2>/dev/null
            fi
        else
            case "$name" in
                # Skip essential services
                *procd*|*ash*|*init*|*watchdog*|*ssh*|*dropbear*|*telnet*|*login*|*hostapd*|*wpa_supplicant*|*nas*) : ;;

                # Killable process
                *)
                    if [ $pid -ne $$ ] && [ $ppid -ne $$ ]; then
                        echo -n "$name "
                        kill -$sig $pid 2>/dev/null
                    fi
                ;;
            esac
        fi
    done
    echo ""
}
# Invoke each named hook function in turn, passing <arg> to it.
run_hooks() { # <arg> [ <func> ... ]
    local arg="$1"; shift
    for func in "$@"; do
        eval "$func $arg"
    done
}
# Yes/no question helper. In interactive mode ($INTERACTIVE=1) prompt the
# user; otherwise (or on unrecognized input) use <default> (0 or 1).
# Exit status: success means "yes".
ask_bool() { # <default> <prompt...>
    local default="$1"; shift;
    local answer="$default"

    [ "$INTERACTIVE" -eq 1 ] && {
        case "$default" in
            0) echo -n "$* (y/N): ";;
            *) echo -n "$* (Y/n): ";;
        esac
        read answer
        case "$answer" in
            y*) answer=1;;
            n*) answer=0;;
            *) answer="$default";;
        esac
    }

    [ "$answer" -gt 0 ]
}
# Print a message only when $VERBOSE >= 1.
# (Exit status is non-zero when quiet — callers rely on the current shape.)
v() {
    [ "$VERBOSE" -ge 1 ] && echo "$@"
}
# Print the filesystem type mounted on /, skipping the "rootfs" pseudo
# entry; prints nothing when only rootfs is mounted there.
rootfs_type() {
    /bin/mount | awk '($3 ~ /^\/$/) && ($5 !~ /rootfs/) { print $5 }'
}
# Stream an image from a local file or http/ftp URL to stdout. When no
# decompressor <command> is given, sniff the first two bytes and pipe
# through zcat (1f8b = gzip) or bzcat (425a = bzip2) as appropriate.
get_image() { # <source> [ <command> ]
    local from="$1"
    local conc="$2"
    local cmd

    case "$from" in
        http://*|ftp://*) cmd="wget -O- -q";;
        *) cmd="cat";;
    esac
    if [ -z "$conc" ]; then
        # Peek at the magic bytes; this fetches the start of the image once.
        local magic="$(eval $cmd \"$from\" 2>/dev/null | dd bs=2 count=1 2>/dev/null | hexdump -n 2 -e '1/1 "%02x"')"
        case "$magic" in
            1f8b) conc="zcat";;
            425a) conc="bzcat";;
        esac
    fi

    eval "$cmd \"$from\" 2>/dev/null ${conc:+| $conc}"
}
# Print the first 2 bytes of the image as lowercase hex.
get_magic_word() {
    (get_image "$@" | dd bs=2 count=1 | hexdump -v -n 2 -e '1/1 "%02x"') 2>/dev/null
}

# Print the first 4 bytes of the image as lowercase hex.
get_magic_long() {
    (get_image "$@" | dd bs=4 count=1 | hexdump -v -n 4 -e '1/1 "%02x"') 2>/dev/null
}
# Write the saved configuration tarball ($CONF_TAR) into the jffs2 area:
# the dedicated rootfs_data partition when present, the rootfs otherwise.
jffs2_copy_config() {
    if grep rootfs_data /proc/mtd >/dev/null; then
        # squashfs+jffs2
        mtd -e rootfs_data jffs2write "$CONF_TAR" rootfs_data
    else
        # jffs2
        mtd jffs2write "$CONF_TAR" rootfs
    fi
}
# Flash firmware to MTD partition
#
# $(1): path to image
# $(2): (optional) pipe command to extract firmware, e.g. dd bs=n skip=m
# With $SAVE_CONFIG=1 the config tarball ($CONF_TAR) is appended to the
# jffs2 area via mtd -j; target partition defaults to "image".
default_do_upgrade() {
    sync
    if [ "$SAVE_CONFIG" -eq 1 ]; then
        get_image "$1" "$2" | mtd $MTD_CONFIG_ARGS -j "$CONF_TAR" write - "${PART_NAME:-image}"
    else
        get_image "$1" "$2" | mtd write - "${PART_NAME:-image}"
    fi
}
# Run the upgrade: prefer a platform-specific platform_do_upgrade when one
# is defined, fall back to default_do_upgrade; optionally copy config via
# platform_copy_config, then (after $DELAY and an optional prompt) reboot,
# falling back to sysrq 'b' if reboot -f does not take effect.
do_upgrade() {
    v "Performing system upgrade..."
    if type 'platform_do_upgrade' >/dev/null 2>/dev/null; then
        platform_do_upgrade "$ARGV"
    else
        default_do_upgrade "$ARGV"
    fi

    if [ "$SAVE_CONFIG" -eq 1 ] && type 'platform_copy_config' >/dev/null 2>/dev/null; then
        platform_copy_config
    fi

    v "Upgrade completed"
    [ -n "$DELAY" ] && sleep "$DELAY"
    ask_bool 1 "Reboot" && {
        v "Rebooting system..."
        reboot -f
        sleep 5
        # Last resort: force an immediate reboot through sysrq.
        echo b 2>/dev/null >/proc/sysrq-trigger
    }
}
|
#!/usr/bin/env bash

# this should have effect globally, for all tests
# https://github.com/bats-core/bats-core/blob/master/docs/source/warnings/BW02.rst
bats_require_minimum_version 1.5.0

# Print the commands that re-attach stdout/stderr to fd 3 (the terminal
# under bats); intended to be eval'd by the caller, see below.
debug() {
    echo 'exec 1<&-; exec 2<&-; exec 1>&3; exec 2>&1'
}
export -f debug

# redirects stdout and stderr to &3 otherwise the errors in setup, teardown would
# go unreported.
# BUT - don't do this in test functions. Everything written to stdout and
# stderr after this line will go to the terminal, but in the tests, these
# are supposed to be collected and shown only in case of test failure
# (see options --print-output-on-failure and --show-output-of-passing-tests)
eval "$(debug)"

# Allow tests to use relative paths for helper scripts.
# shellcheck disable=SC2164
cd "${TEST_DIR}"

# complain if there's a crowdsec running system-wide or leftover from a previous test
./assert-crowdsec-not-running

# we can use the filename in test descriptions
FILE="$(basename "${BATS_TEST_FILENAME}" .bats):"
export FILE

# the variables exported here can be seen in other setup/teardown/test functions
# MYVAR=something
# export MYVAR

# functions too
# Wrapper so tests invoke the binary pointed at by $CSCLI as "cscli".
cscli() {
    "${CSCLI}" "$@"
}
export -f cscli

# Evaluate a yq expression against the crowdsec config file ($CONFIG_YAML).
config_yq() {
    yq e "$@" - <"${CONFIG_YAML}"
}
export -f config_yq
# We use these functions like this:
#    somecommand <(stderr)
# to provide a standard input to "somecommand".
# The alternatives echo "$stderr" or <<<"$stderr"
# ("here string" in bash jargon)
# are worse because they add a newline,
# even if the variable is empty.

# Print the captured $stderr variable without a trailing newline.
# shellcheck disable=SC2154
stderr() {
    printf '%s' "${stderr}"
}
export -f stderr

# Print the captured $output variable without a trailing newline.
# shellcheck disable=SC2154
output() {
    printf '%s' "${output}"
}
export -f output
# True when the configured DB backend is exactly "postgres" or "pgx".
# Fix: the original pattern '^postgres|pgx$' parses as '(^postgres)|(pgx$)'
# because ERE alternation binds loosely, so any value starting with
# "postgres" or ending with "pgx" matched; group the alternation.
is_db_postgres() {
    [[ "${DB_BACKEND}" =~ ^(postgres|pgx)$ ]]
}
export -f is_db_postgres
# True when the configured DB backend is mysql.
is_db_mysql() {
    [[ "${DB_BACKEND}" == "mysql" ]]
}
export -f is_db_mysql

# True when the configured DB backend is sqlite.
is_db_sqlite() {
    [[ "${DB_BACKEND}" == "sqlite" ]]
}
export -f is_db_sqlite
|
from abc import ABC, abstractmethod
class DataStructure(ABC):
    """Abstract base for data structures that expose point-in-time snapshots."""

    @abstractmethod
    def snapshot(self):
        """Return a snapshot of the structure's current state."""
        raise NotImplementedError("snapshot method must be implemented")


class CustomDataStructure(DataStructure):
    """A minimal data structure wrapping an arbitrary container."""

    def __init__(self, data):
        self.data = data

    def snapshot(self):
        """Return a shallow copy of the current state.

        Fix: the original returned ``self.data`` itself, so mutating the
        structure after taking a "snapshot" silently changed the snapshot
        too. A shallow copy makes each snapshot stable.
        """
        import copy  # local import keeps the module's import block unchanged

        return copy.copy(self.data)
# Example usage (runs at import time; demonstrates the snapshot contract).
custom_ds = CustomDataStructure([1, 2, 3, 4])
print(custom_ds.snapshot())  # Output: [1, 2, 3, 4]
// repo: chylex/Hardcore-Ender-Expansion
package chylex.hee.packets.client;
import io.netty.buffer.ByteBuf;
import net.minecraft.client.entity.EntityClientPlayerMP;
import net.minecraft.entity.Entity;
import chylex.hee.entity.fx.FXHandler;
import chylex.hee.entity.fx.FXType;
import chylex.hee.packets.AbstractClientPacket;
import chylex.hee.system.abstractions.Pos;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Client-bound packet describing a line particle effect between two points.
 * The payload is the effect type ordinal followed by both endpoints.
 */
public class C22EffectLine extends AbstractClientPacket{
    private FXType.Line type;
    private double x1, y1, z1, x2, y2, z2;

    public C22EffectLine(){}

    public C22EffectLine(FXType.Line type, double x1, double y1, double z1, double x2, double y2, double z2){
        this.type = type;
        this.x1 = x1;
        this.y1 = y1;
        this.z1 = z1;
        this.x2 = x2;
        this.y2 = y2;
        this.z2 = z2;
    }

    /** Line between two block positions, anchored at the block centers. */
    public C22EffectLine(FXType.Line type, Pos pos1, Pos pos2){
        this.type = type;
        this.x1 = pos1.getX()+0.5D;
        this.y1 = pos1.getY()+0.5D;
        this.z1 = pos1.getZ()+0.5D;
        this.x2 = pos2.getX()+0.5D;
        this.y2 = pos2.getY()+0.5D;
        this.z2 = pos2.getZ()+0.5D;
    }

    /**
     * Line between two entities, anchored at each entity's vertical middle.
     * Fix: the first endpoint previously used {@code entity2.height} when
     * computing entity1's mid-height (copy-paste error).
     */
    public C22EffectLine(FXType.Line type, Entity entity1, Entity entity2){
        this(type, entity1.posX, entity1.posY+entity1.height*0.5F, entity1.posZ, entity2.posX, entity2.posY+entity2.height*0.5F, entity2.posZ);
    }

    @Override
    public void write(ByteBuf buffer){
        buffer.writeByte(type.ordinal()).writeDouble(x1).writeDouble(y1).writeDouble(z1).writeDouble(x2).writeDouble(y2).writeDouble(z2);
    }

    @Override
    public void read(ByteBuf buffer){
        // Guard against out-of-range ordinals from a corrupt packet; 'type'
        // stays null and handle() becomes a no-op.
        byte fxType = buffer.readByte();

        if (fxType >= 0 && fxType < FXType.Line.values.length){
            type = FXType.Line.values[fxType];
            x1 = buffer.readDouble();
            y1 = buffer.readDouble();
            z1 = buffer.readDouble();
            x2 = buffer.readDouble();
            y2 = buffer.readDouble();
            z2 = buffer.readDouble();
        }
    }

    @Override
    @SideOnly(Side.CLIENT)
    protected void handle(EntityClientPlayerMP player){
        if (type != null)FXHandler.handleLine(player.worldObj, player, type, x1, y1, z1, x2, y2, z2);
    }
}
|
// file: test-data/comp-changes/new/src/main/classNowCheckedException/ClassNowCheckedException.java
package main.classNowCheckedException;
/**
 * Test fixture: in this "new" revision the class extends {@link Exception},
 * i.e. it has become a checked exception (compared against the old revision
 * by the compatibility-comparison tooling — see the test-data path).
 */
public class ClassNowCheckedException extends Exception {
    private static final long serialVersionUID = 1L;
}
|
/*
* Copyright 2011-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.lettuce.core;
import static io.lettuce.core.protocol.CommandType.*;
import java.nio.charset.Charset;
import java.time.Duration;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import io.lettuce.core.GeoArgs.Unit;
import io.lettuce.core.api.StatefulConnection;
import io.lettuce.core.api.async.*;
import io.lettuce.core.cluster.api.async.RedisClusterAsyncCommands;
import io.lettuce.core.codec.RedisCodec;
import io.lettuce.core.internal.LettuceAssert;
import io.lettuce.core.output.*;
import io.lettuce.core.protocol.*;
/**
* An asynchronous and thread-safe API for a Redis connection.
*
* @param <K> Key type.
* @param <V> Value type.
* @author <NAME>
* @author <NAME>
* @author <NAME>
*/
@SuppressWarnings("unchecked")
public abstract class AbstractRedisAsyncCommands<K, V> implements RedisHashAsyncCommands<K, V>, RedisKeyAsyncCommands<K, V>,
RedisStringAsyncCommands<K, V>, RedisListAsyncCommands<K, V>, RedisSetAsyncCommands<K, V>,
RedisSortedSetAsyncCommands<K, V>, RedisScriptingAsyncCommands<K, V>, RedisServerAsyncCommands<K, V>,
RedisHLLAsyncCommands<K, V>, BaseRedisAsyncCommands<K, V>, RedisTransactionalAsyncCommands<K, V>,
RedisGeoAsyncCommands<K, V>, RedisClusterAsyncCommands<K, V> {
private final StatefulConnection<K, V> connection;
private final RedisCommandBuilder<K, V> commandBuilder;
    /**
     * Initialize a new instance.
     *
     * @param connection the connection to operate on
     * @param codec the codec for command encoding
     */
    public AbstractRedisAsyncCommands(StatefulConnection<K, V> connection, RedisCodec<K, V> codec) {
        this.connection = connection;
        // Commands are assembled by a codec-aware builder, then dispatched
        // on the underlying stateful connection.
        this.commandBuilder = new RedisCommandBuilder<>(codec);
    }
@Override
public RedisFuture<Long> append(K key, V value) {
return dispatch(commandBuilder.append(key, value));
}
@Override
public RedisFuture<String> asking() {
return dispatch(commandBuilder.asking());
}
    @Override
    public RedisFuture<String> auth(CharSequence password) {
        LettuceAssert.notNull(password, "Password must not be null");
        return dispatch(commandBuilder.auth(password));
    }

    /**
     * AUTH with a {@code char[]} password; carries no {@code @Override},
     * so it appears to be an additional convenience overload beyond the
     * implemented interfaces.
     */
    public RedisFuture<String> auth(char[] password) {
        LettuceAssert.notNull(password, "Password must not be null");
        return dispatch(commandBuilder.auth(password));
    }

    @Override
    public RedisFuture<String> auth(String username, CharSequence password) {
        LettuceAssert.notNull(username, "Username must not be null");
        LettuceAssert.notNull(password, "Password must not be null");
        return dispatch(commandBuilder.auth(username, password));
    }

    /** AUTH with username and {@code char[]} password (convenience overload). */
    public RedisFuture<String> auth(String username, char[] password) {
        LettuceAssert.notNull(username, "Username must not be null");
        LettuceAssert.notNull(password, "Password must not be null");
        return dispatch(commandBuilder.auth(username, password));
    }
@Override
public RedisFuture<String> bgrewriteaof() {
return dispatch(commandBuilder.bgrewriteaof());
}
@Override
public RedisFuture<String> bgsave() {
return dispatch(commandBuilder.bgsave());
}
@Override
public RedisFuture<Long> bitcount(K key) {
return dispatch(commandBuilder.bitcount(key));
}
@Override
public RedisFuture<Long> bitcount(K key, long start, long end) {
return dispatch(commandBuilder.bitcount(key, start, end));
}
@Override
public RedisFuture<List<Long>> bitfield(K key, BitFieldArgs bitFieldArgs) {
return dispatch(commandBuilder.bitfield(key, bitFieldArgs));
}
@Override
public RedisFuture<Long> bitopAnd(K destination, K... keys) {
return dispatch(commandBuilder.bitopAnd(destination, keys));
}
@Override
public RedisFuture<Long> bitopNot(K destination, K source) {
return dispatch(commandBuilder.bitopNot(destination, source));
}
@Override
public RedisFuture<Long> bitopOr(K destination, K... keys) {
return dispatch(commandBuilder.bitopOr(destination, keys));
}
@Override
public RedisFuture<Long> bitopXor(K destination, K... keys) {
return dispatch(commandBuilder.bitopXor(destination, keys));
}
@Override
public RedisFuture<Long> bitpos(K key, boolean state) {
return dispatch(commandBuilder.bitpos(key, state));
}
@Override
public RedisFuture<Long> bitpos(K key, boolean state, long start) {
return dispatch(commandBuilder.bitpos(key, state, start));
}
@Override
public RedisFuture<Long> bitpos(K key, boolean state, long start, long end) {
return dispatch(commandBuilder.bitpos(key, state, start, end));
}
@Override
public RedisFuture<KeyValue<K, V>> blpop(long timeout, K... keys) {
return dispatch(commandBuilder.blpop(timeout, keys));
}
@Override
public RedisFuture<KeyValue<K, V>> brpop(long timeout, K... keys) {
return dispatch(commandBuilder.brpop(timeout, keys));
}
@Override
public RedisFuture<V> brpoplpush(long timeout, K source, K destination) {
return dispatch(commandBuilder.brpoplpush(timeout, source, destination));
}
@Override
public RedisFuture<String> clientCaching(boolean enabled) {
return dispatch(commandBuilder.clientCaching(enabled));
}
@Override
public RedisFuture<K> clientGetname() {
return dispatch(commandBuilder.clientGetname());
}
@Override
public RedisFuture<Long> clientGetredir() {
return dispatch(commandBuilder.clientGetredir());
}
@Override
public RedisFuture<String> clientKill(String addr) {
return dispatch(commandBuilder.clientKill(addr));
}
@Override
public RedisFuture<Long> clientKill(KillArgs killArgs) {
return dispatch(commandBuilder.clientKill(killArgs));
}
@Override
public RedisFuture<String> clientList() {
return dispatch(commandBuilder.clientList());
}
@Override
public RedisFuture<Long> clientId() {
return dispatch(commandBuilder.clientId());
}
@Override
public RedisFuture<String> clientPause(long timeout) {
return dispatch(commandBuilder.clientPause(timeout));
}
@Override
public RedisFuture<String> clientSetname(K name) {
return dispatch(commandBuilder.clientSetname(name));
}
@Override
public RedisFuture<String> clientTracking(TrackingArgs args) {
return dispatch(commandBuilder.clientTracking(args));
}
@Override
public RedisFuture<Long> clientUnblock(long id, UnblockType type) {
return dispatch(commandBuilder.clientUnblock(id, type));
}
@Override
public RedisFuture<String> clusterAddSlots(int... slots) {
return dispatch(commandBuilder.clusterAddslots(slots));
}
@Override
public RedisFuture<String> clusterBumpepoch() {
return dispatch(commandBuilder.clusterBumpepoch());
}
@Override
public RedisFuture<Long> clusterCountFailureReports(String nodeId) {
return dispatch(commandBuilder.clusterCountFailureReports(nodeId));
}
@Override
public RedisFuture<Long> clusterCountKeysInSlot(int slot) {
return dispatch(commandBuilder.clusterCountKeysInSlot(slot));
}
@Override
public RedisFuture<String> clusterDelSlots(int... slots) {
return dispatch(commandBuilder.clusterDelslots(slots));
}
@Override
public RedisFuture<String> clusterFailover(boolean force) {
return dispatch(commandBuilder.clusterFailover(force));
}
@Override
public RedisFuture<String> clusterFlushslots() {
return dispatch(commandBuilder.clusterFlushslots());
}
@Override
public RedisFuture<String> clusterForget(String nodeId) {
return dispatch(commandBuilder.clusterForget(nodeId));
}
@Override
public RedisFuture<List<K>> clusterGetKeysInSlot(int slot, int count) {
return dispatch(commandBuilder.clusterGetKeysInSlot(slot, count));
}
@Override
public RedisFuture<String> clusterInfo() {
return dispatch(commandBuilder.clusterInfo());
}
@Override
public RedisFuture<Long> clusterKeyslot(K key) {
return dispatch(commandBuilder.clusterKeyslot(key));
}
@Override
public RedisFuture<String> clusterMeet(String ip, int port) {
return dispatch(commandBuilder.clusterMeet(ip, port));
}
@Override
public RedisFuture<String> clusterMyId() {
return dispatch(commandBuilder.clusterMyId());
}
@Override
public RedisFuture<String> clusterNodes() {
return dispatch(commandBuilder.clusterNodes());
}
@Override
public RedisFuture<String> clusterReplicate(String nodeId) {
return dispatch(commandBuilder.clusterReplicate(nodeId));
}
@Override
public RedisFuture<String> clusterReset(boolean hard) {
return dispatch(commandBuilder.clusterReset(hard));
}
@Override
public RedisFuture<String> clusterSaveconfig() {
return dispatch(commandBuilder.clusterSaveconfig());
}
@Override
public RedisFuture<String> clusterSetConfigEpoch(long configEpoch) {
return dispatch(commandBuilder.clusterSetConfigEpoch(configEpoch));
}
@Override
public RedisFuture<String> clusterSetSlotImporting(int slot, String nodeId) {
return dispatch(commandBuilder.clusterSetSlotImporting(slot, nodeId));
}
@Override
public RedisFuture<String> clusterSetSlotMigrating(int slot, String nodeId) {
return dispatch(commandBuilder.clusterSetSlotMigrating(slot, nodeId));
}
@Override
public RedisFuture<String> clusterSetSlotNode(int slot, String nodeId) {
return dispatch(commandBuilder.clusterSetSlotNode(slot, nodeId));
}
@Override
public RedisFuture<String> clusterSetSlotStable(int slot) {
return dispatch(commandBuilder.clusterSetSlotStable(slot));
}
@Override
public RedisFuture<List<String>> clusterSlaves(String nodeId) {
return dispatch(commandBuilder.clusterSlaves(nodeId));
}
@Override
public RedisFuture<List<Object>> clusterSlots() {
return dispatch(commandBuilder.clusterSlots());
}
@Override
public RedisFuture<List<Object>> command() {
return dispatch(commandBuilder.command());
}
@Override
public RedisFuture<Long> commandCount() {
return dispatch(commandBuilder.commandCount());
}
@Override
public RedisFuture<List<Object>> commandInfo(String... commands) {
return dispatch(commandBuilder.commandInfo(commands));
}
@Override
public RedisFuture<List<Object>> commandInfo(CommandType... commands) {
String[] stringCommands = new String[commands.length];
for (int i = 0; i < commands.length; i++) {
stringCommands[i] = commands[i].name();
}
return commandInfo(stringCommands);
}
@Override
public RedisFuture<Map<String, String>> configGet(String parameter) {
return dispatch(commandBuilder.configGet(parameter));
}
@Override
public RedisFuture<String> configResetstat() {
return dispatch(commandBuilder.configResetstat());
}
@Override
public RedisFuture<String> configRewrite() {
return dispatch(commandBuilder.configRewrite());
}
@Override
public RedisFuture<String> configSet(String parameter, String value) {
return dispatch(commandBuilder.configSet(parameter, value));
}
@Override
public RedisFuture<Long> dbsize() {
return dispatch(commandBuilder.dbsize());
}
@Override
public RedisFuture<String> debugCrashAndRecover(Long delay) {
return dispatch(commandBuilder.debugCrashAndRecover(delay));
}
@Override
public RedisFuture<String> debugHtstats(int db) {
return dispatch(commandBuilder.debugHtstats(db));
}
@Override
public RedisFuture<String> debugObject(K key) {
return dispatch(commandBuilder.debugObject(key));
}
@Override
public void debugOom() {
dispatch(commandBuilder.debugOom());
}
@Override
public RedisFuture<String> debugReload() {
return dispatch(commandBuilder.debugReload());
}
@Override
public RedisFuture<String> debugRestart(Long delay) {
return dispatch(commandBuilder.debugRestart(delay));
}
@Override
public RedisFuture<String> debugSdslen(K key) {
return dispatch(commandBuilder.debugSdslen(key));
}
@Override
public void debugSegfault() {
dispatch(commandBuilder.debugSegfault());
}
@Override
public RedisFuture<Long> decr(K key) {
return dispatch(commandBuilder.decr(key));
}
@Override
public RedisFuture<Long> decrby(K key, long amount) {
return dispatch(commandBuilder.decrby(key, amount));
}
@Override
public RedisFuture<Long> del(K... keys) {
return dispatch(commandBuilder.del(keys));
}
public RedisFuture<Long> del(Iterable<K> keys) {
return dispatch(commandBuilder.del(keys));
}
    @Override
    public String digest(String script) {
        // encodeScript (defined elsewhere in this class) converts the script
        // text to bytes before hashing.
        return digest(encodeScript(script));
    }

    /** Compute the script digest (as used by EVALSHA) for raw script bytes. */
    @Override
    public String digest(byte[] script) {
        return LettuceStrings.digest(script);
    }
@Override
public RedisFuture<String> discard() {
return dispatch(commandBuilder.discard());
}
    @Override
    public <T> RedisFuture<T> dispatch(ProtocolKeyword type, CommandOutput<K, V, T> output) {
        LettuceAssert.notNull(type, "Command type must not be null");
        LettuceAssert.notNull(output, "CommandOutput type must not be null");

        return dispatch(new AsyncCommand<>(new Command<>(type, output)));
    }

    @Override
    public <T> RedisFuture<T> dispatch(ProtocolKeyword type, CommandOutput<K, V, T> output, CommandArgs<K, V> args) {
        LettuceAssert.notNull(type, "Command type must not be null");
        LettuceAssert.notNull(output, "CommandOutput type must not be null");
        LettuceAssert.notNull(args, "CommandArgs type must not be null");

        return dispatch(new AsyncCommand<>(new Command<>(type, output, args)));
    }

    // Internal variants without null-checks, used by the typed command methods.
    protected <T> RedisFuture<T> dispatch(CommandType type, CommandOutput<K, V, T> output) {
        return dispatch(type, output, null);
    }

    protected <T> RedisFuture<T> dispatch(CommandType type, CommandOutput<K, V, T> output, CommandArgs<K, V> args) {
        return dispatch(new AsyncCommand<>(new Command<>(type, output, args)));
    }

    /**
     * Wrap the command in an {@link AsyncCommand} and dispatch it on the
     * connection; if the connection hands back an AsyncCommand (e.g. a
     * decorated instance), prefer that one so callers observe its completion.
     */
    public <T> AsyncCommand<K, V, T> dispatch(RedisCommand<K, V, T> cmd) {
        AsyncCommand<K, V, T> asyncCommand = new AsyncCommand<>(cmd);
        RedisCommand<K, V, T> dispatched = connection.dispatch(asyncCommand);
        if (dispatched instanceof AsyncCommand) {
            return (AsyncCommand<K, V, T>) dispatched;
        }
        return asyncCommand;
    }
@Override
public RedisFuture<byte[]> dump(K key) {
return dispatch(commandBuilder.dump(key));
}
@Override
public RedisFuture<V> echo(V msg) {
return dispatch(commandBuilder.echo(msg));
}
@Override
@SuppressWarnings("unchecked")
public <T> RedisFuture<T> eval(String script, ScriptOutputType type, K... keys) {
return eval(encodeScript(script), type, keys);
}
/**
 * Executes a Lua script (EVAL) against the given keys.
 *
 * @param script binary script source
 * @param type expected reply type used to decode the script result
 * @param keys keys the script may access
 * @param <T> decoded result type
 * @return future holding the script's decoded result
 */
@Override
@SuppressWarnings("unchecked") // eval builder yields a wildcard-typed future; cast matches the declared output type
public <T> RedisFuture<T> eval(byte[] script, ScriptOutputType type, K... keys) {
return (RedisFuture<T>) dispatch(commandBuilder.eval(script, type, keys));
}
@Override
@SuppressWarnings("unchecked")
public <T> RedisFuture<T> eval(String script, ScriptOutputType type, K[] keys, V... values) {
return eval(encodeScript(script), type, keys, values);
}
/**
 * Executes a Lua script (EVAL) against the given keys with additional argument values.
 *
 * @param script binary script source
 * @param type expected reply type used to decode the script result
 * @param keys keys the script may access
 * @param values values passed to the script as ARGV
 * @param <T> decoded result type
 * @return future holding the script's decoded result
 */
@Override
@SuppressWarnings("unchecked") // eval builder yields a wildcard-typed future; cast matches the declared output type
public <T> RedisFuture<T> eval(byte[] script, ScriptOutputType type, K[] keys, V... values) {
return (RedisFuture<T>) dispatch(commandBuilder.eval(script, type, keys, values));
}
@Override
@SuppressWarnings("unchecked")
public <T> RedisFuture<T> evalsha(String digest, ScriptOutputType type, K... keys) {
return (RedisFuture<T>) dispatch(commandBuilder.evalsha(digest, type, keys));
}
@Override
@SuppressWarnings("unchecked")
public <T> RedisFuture<T> evalsha(String digest, ScriptOutputType type, K[] keys, V... values) {
return (RedisFuture<T>) dispatch(commandBuilder.evalsha(digest, type, keys, values));
}
/**
 * Executes a queued MULTI/EXEC transaction.
 * <p>
 * Dispatches the {@code EXEC} keyword through the protected
 * {@code dispatch(CommandType, CommandOutput)} overload with a {@code null} output.
 * NOTE(review): a null CommandOutput would make an ordinary command undecodable, so EXEC is
 * presumably intercepted and completed by the transaction machinery elsewhere — confirm.
 */
@Override
public RedisFuture<TransactionResult> exec() {
return dispatch(EXEC, null);
}
@Override
public RedisFuture<Long> exists(K... keys) {
return dispatch(commandBuilder.exists(keys));
}
public RedisFuture<Long> exists(Iterable<K> keys) {
return dispatch(commandBuilder.exists(keys));
}
@Override
public RedisFuture<Boolean> expire(K key, long seconds) {
return dispatch(commandBuilder.expire(key, seconds));
}
/**
 * Sets an absolute expiry on {@code key} from a {@link Date}.
 * <p>
 * Converts the date's epoch milliseconds to epoch seconds (integer division) and delegates to
 * the seconds-based overload.
 */
@Override
public RedisFuture<Boolean> expireat(K key, Date timestamp) {
long epochSeconds = timestamp.getTime() / 1000;
return expireat(key, epochSeconds);
}
@Override
public RedisFuture<Boolean> expireat(K key, long timestamp) {
return dispatch(commandBuilder.expireat(key, timestamp));
}
@Override
public void flushCommands() {
connection.flushCommands();
}
@Override
public RedisFuture<String> flushall() {
return dispatch(commandBuilder.flushall());
}
@Override
public RedisFuture<String> flushallAsync() {
return dispatch(commandBuilder.flushallAsync());
}
@Override
public RedisFuture<String> flushdb() {
return dispatch(commandBuilder.flushdb());
}
@Override
public RedisFuture<String> flushdbAsync() {
return dispatch(commandBuilder.flushdbAsync());
}
@Override
public RedisFuture<Long> geoadd(K key, double longitude, double latitude, V member) {
return dispatch(commandBuilder.geoadd(key, longitude, latitude, member));
}
@Override
public RedisFuture<Long> geoadd(K key, Object... lngLatMember) {
return dispatch(commandBuilder.geoadd(key, lngLatMember));
}
@Override
public RedisFuture<Double> geodist(K key, V from, V to, GeoArgs.Unit unit) {
return dispatch(commandBuilder.geodist(key, from, to, unit));
}
@Override
public RedisFuture<List<Value<String>>> geohash(K key, V... members) {
return dispatch(commandBuilder.geohash(key, members));
}
@Override
public RedisFuture<List<GeoCoordinates>> geopos(K key, V... members) {
return dispatch(commandBuilder.geopos(key, members));
}
@Override
public RedisFuture<Set<V>> georadius(K key, double longitude, double latitude, double distance, GeoArgs.Unit unit) {
return dispatch(commandBuilder.georadius(GEORADIUS, key, longitude, latitude, distance, unit.name()));
}
@Override
public RedisFuture<List<GeoWithin<V>>> georadius(K key, double longitude, double latitude, double distance,
GeoArgs.Unit unit, GeoArgs geoArgs) {
return dispatch(commandBuilder.georadius(GEORADIUS, key, longitude, latitude, distance, unit.name(), geoArgs));
}
@Override
public RedisFuture<Long> georadius(K key, double longitude, double latitude, double distance, Unit unit,
GeoRadiusStoreArgs<K> geoRadiusStoreArgs) {
return dispatch(commandBuilder.georadius(key, longitude, latitude, distance, unit.name(), geoRadiusStoreArgs));
}
protected RedisFuture<Set<V>> georadius_ro(K key, double longitude, double latitude, double distance, GeoArgs.Unit unit) {
return dispatch(commandBuilder.georadius(GEORADIUS_RO, key, longitude, latitude, distance, unit.name()));
}
protected RedisFuture<List<GeoWithin<V>>> georadius_ro(K key, double longitude, double latitude, double distance,
GeoArgs.Unit unit, GeoArgs geoArgs) {
return dispatch(commandBuilder.georadius(GEORADIUS_RO, key, longitude, latitude, distance, unit.name(), geoArgs));
}
@Override
public RedisFuture<Set<V>> georadiusbymember(K key, V member, double distance, GeoArgs.Unit unit) {
return dispatch(commandBuilder.georadiusbymember(GEORADIUSBYMEMBER, key, member, distance, unit.name()));
}
@Override
public RedisFuture<List<GeoWithin<V>>> georadiusbymember(K key, V member, double distance, GeoArgs.Unit unit,
GeoArgs geoArgs) {
return dispatch(commandBuilder.georadiusbymember(GEORADIUSBYMEMBER, key, member, distance, unit.name(), geoArgs));
}
@Override
public RedisFuture<Long> georadiusbymember(K key, V member, double distance, Unit unit,
GeoRadiusStoreArgs<K> geoRadiusStoreArgs) {
return dispatch(commandBuilder.georadiusbymember(key, member, distance, unit.name(), geoRadiusStoreArgs));
}
protected RedisFuture<Set<V>> georadiusbymember_ro(K key, V member, double distance, GeoArgs.Unit unit) {
return dispatch(commandBuilder.georadiusbymember(GEORADIUSBYMEMBER_RO, key, member, distance, unit.name()));
}
protected RedisFuture<List<GeoWithin<V>>> georadiusbymember_ro(K key, V member, double distance, GeoArgs.Unit unit,
GeoArgs geoArgs) {
return dispatch(commandBuilder.georadiusbymember(GEORADIUSBYMEMBER_RO, key, member, distance, unit.name(), geoArgs));
}
@Override
public RedisFuture<V> get(K key) {
return dispatch(commandBuilder.get(key));
}
public StatefulConnection<K, V> getConnection() {
return connection;
}
@Override
public RedisFuture<Long> getbit(K key, long offset) {
return dispatch(commandBuilder.getbit(key, offset));
}
@Override
public RedisFuture<V> getrange(K key, long start, long end) {
return dispatch(commandBuilder.getrange(key, start, end));
}
@Override
public RedisFuture<V> getset(K key, V value) {
return dispatch(commandBuilder.getset(key, value));
}
@Override
public RedisFuture<Long> hdel(K key, K... fields) {
return dispatch(commandBuilder.hdel(key, fields));
}
@Override
public RedisFuture<Boolean> hexists(K key, K field) {
return dispatch(commandBuilder.hexists(key, field));
}
@Override
public RedisFuture<V> hget(K key, K field) {
return dispatch(commandBuilder.hget(key, field));
}
@Override
public RedisFuture<Map<K, V>> hgetall(K key) {
return dispatch(commandBuilder.hgetall(key));
}
@Override
public RedisFuture<Long> hgetall(KeyValueStreamingChannel<K, V> channel, K key) {
return dispatch(commandBuilder.hgetall(channel, key));
}
@Override
public RedisFuture<Long> hincrby(K key, K field, long amount) {
return dispatch(commandBuilder.hincrby(key, field, amount));
}
@Override
public RedisFuture<Double> hincrbyfloat(K key, K field, double amount) {
return dispatch(commandBuilder.hincrbyfloat(key, field, amount));
}
@Override
public RedisFuture<List<K>> hkeys(K key) {
return dispatch(commandBuilder.hkeys(key));
}
@Override
public RedisFuture<Long> hkeys(KeyStreamingChannel<K> channel, K key) {
return dispatch(commandBuilder.hkeys(channel, key));
}
@Override
public RedisFuture<Long> hlen(K key) {
return dispatch(commandBuilder.hlen(key));
}
@Override
public RedisFuture<List<KeyValue<K, V>>> hmget(K key, K... fields) {
return dispatch(commandBuilder.hmgetKeyValue(key, fields));
}
@Override
public RedisFuture<Long> hmget(KeyValueStreamingChannel<K, V> channel, K key, K... fields) {
return dispatch(commandBuilder.hmget(channel, key, fields));
}
@Override
public RedisFuture<String> hmset(K key, Map<K, V> map) {
return dispatch(commandBuilder.hmset(key, map));
}
@Override
public RedisFuture<MapScanCursor<K, V>> hscan(K key) {
return dispatch(commandBuilder.hscan(key));
}
@Override
public RedisFuture<MapScanCursor<K, V>> hscan(K key, ScanArgs scanArgs) {
return dispatch(commandBuilder.hscan(key, scanArgs));
}
@Override
public RedisFuture<MapScanCursor<K, V>> hscan(K key, ScanCursor scanCursor, ScanArgs scanArgs) {
return dispatch(commandBuilder.hscan(key, scanCursor, scanArgs));
}
@Override
public RedisFuture<MapScanCursor<K, V>> hscan(K key, ScanCursor scanCursor) {
return dispatch(commandBuilder.hscan(key, scanCursor));
}
@Override
public RedisFuture<StreamScanCursor> hscan(KeyValueStreamingChannel<K, V> channel, K key) {
return dispatch(commandBuilder.hscanStreaming(channel, key));
}
@Override
public RedisFuture<StreamScanCursor> hscan(KeyValueStreamingChannel<K, V> channel, K key, ScanArgs scanArgs) {
return dispatch(commandBuilder.hscanStreaming(channel, key, scanArgs));
}
@Override
public RedisFuture<StreamScanCursor> hscan(KeyValueStreamingChannel<K, V> channel, K key, ScanCursor scanCursor,
ScanArgs scanArgs) {
return dispatch(commandBuilder.hscanStreaming(channel, key, scanCursor, scanArgs));
}
@Override
public RedisFuture<StreamScanCursor> hscan(KeyValueStreamingChannel<K, V> channel, K key, ScanCursor scanCursor) {
return dispatch(commandBuilder.hscanStreaming(channel, key, scanCursor));
}
@Override
public RedisFuture<Boolean> hset(K key, K field, V value) {
return dispatch(commandBuilder.hset(key, field, value));
}
@Override
public RedisFuture<Long> hset(K key, Map<K, V> map) {
return dispatch(commandBuilder.hset(key, map));
}
@Override
public RedisFuture<Boolean> hsetnx(K key, K field, V value) {
return dispatch(commandBuilder.hsetnx(key, field, value));
}
@Override
public RedisFuture<Long> hstrlen(K key, K field) {
return dispatch(commandBuilder.hstrlen(key, field));
}
@Override
public RedisFuture<List<V>> hvals(K key) {
return dispatch(commandBuilder.hvals(key));
}
@Override
public RedisFuture<Long> hvals(ValueStreamingChannel<V> channel, K key) {
return dispatch(commandBuilder.hvals(channel, key));
}
@Override
public RedisFuture<Long> incr(K key) {
return dispatch(commandBuilder.incr(key));
}
@Override
public RedisFuture<Long> incrby(K key, long amount) {
return dispatch(commandBuilder.incrby(key, amount));
}
@Override
public RedisFuture<Double> incrbyfloat(K key, double amount) {
return dispatch(commandBuilder.incrbyfloat(key, amount));
}
@Override
public RedisFuture<String> info() {
return dispatch(commandBuilder.info());
}
@Override
public RedisFuture<String> info(String section) {
return dispatch(commandBuilder.info(section));
}
@Override
public boolean isOpen() {
return connection.isOpen();
}
@Override
public RedisFuture<List<K>> keys(K pattern) {
return dispatch(commandBuilder.keys(pattern));
}
@Override
public RedisFuture<Long> keys(KeyStreamingChannel<K> channel, K pattern) {
return dispatch(commandBuilder.keys(channel, pattern));
}
@Override
public RedisFuture<Date> lastsave() {
return dispatch(commandBuilder.lastsave());
}
@Override
public RedisFuture<V> lindex(K key, long index) {
return dispatch(commandBuilder.lindex(key, index));
}
@Override
public RedisFuture<Long> linsert(K key, boolean before, V pivot, V value) {
return dispatch(commandBuilder.linsert(key, before, pivot, value));
}
@Override
public RedisFuture<Long> llen(K key) {
return dispatch(commandBuilder.llen(key));
}
@Override
public RedisFuture<V> lpop(K key) {
return dispatch(commandBuilder.lpop(key));
}
@Override
public RedisFuture<Long> lpush(K key, V... values) {
return dispatch(commandBuilder.lpush(key, values));
}
@Override
public RedisFuture<Long> lpushx(K key, V... values) {
return dispatch(commandBuilder.lpushx(key, values));
}
@Override
public RedisFuture<List<V>> lrange(K key, long start, long stop) {
return dispatch(commandBuilder.lrange(key, start, stop));
}
@Override
public RedisFuture<Long> lrange(ValueStreamingChannel<V> channel, K key, long start, long stop) {
return dispatch(commandBuilder.lrange(channel, key, start, stop));
}
@Override
public RedisFuture<Long> lrem(K key, long count, V value) {
return dispatch(commandBuilder.lrem(key, count, value));
}
@Override
public RedisFuture<String> lset(K key, long index, V value) {
return dispatch(commandBuilder.lset(key, index, value));
}
@Override
public RedisFuture<String> ltrim(K key, long start, long stop) {
return dispatch(commandBuilder.ltrim(key, start, stop));
}
@Override
public RedisFuture<Long> memoryUsage(K key) {
return dispatch(commandBuilder.memoryUsage(key));
}
@Override
public RedisFuture<List<KeyValue<K, V>>> mget(K... keys) {
return dispatch(commandBuilder.mgetKeyValue(keys));
}
public RedisFuture<List<KeyValue<K, V>>> mget(Iterable<K> keys) {
return dispatch(commandBuilder.mgetKeyValue(keys));
}
@Override
public RedisFuture<Long> mget(KeyValueStreamingChannel<K, V> channel, K... keys) {
return dispatch(commandBuilder.mget(channel, keys));
}
public RedisFuture<Long> mget(KeyValueStreamingChannel<K, V> channel, Iterable<K> keys) {
return dispatch(commandBuilder.mget(channel, keys));
}
@Override
public RedisFuture<String> migrate(String host, int port, K key, int db, long timeout) {
return dispatch(commandBuilder.migrate(host, port, key, db, timeout));
}
@Override
public RedisFuture<String> migrate(String host, int port, int db, long timeout, MigrateArgs<K> migrateArgs) {
return dispatch(commandBuilder.migrate(host, port, db, timeout, migrateArgs));
}
@Override
public RedisFuture<Boolean> move(K key, int db) {
return dispatch(commandBuilder.move(key, db));
}
@Override
public RedisFuture<String> mset(Map<K, V> map) {
return dispatch(commandBuilder.mset(map));
}
@Override
public RedisFuture<Boolean> msetnx(Map<K, V> map) {
return dispatch(commandBuilder.msetnx(map));
}
@Override
public RedisFuture<String> multi() {
return dispatch(commandBuilder.multi());
}
@Override
public RedisFuture<String> objectEncoding(K key) {
return dispatch(commandBuilder.objectEncoding(key));
}
@Override
public RedisFuture<Long> objectIdletime(K key) {
return dispatch(commandBuilder.objectIdletime(key));
}
@Override
public RedisFuture<Long> objectRefcount(K key) {
return dispatch(commandBuilder.objectRefcount(key));
}
@Override
public RedisFuture<Boolean> persist(K key) {
return dispatch(commandBuilder.persist(key));
}
@Override
public RedisFuture<Boolean> pexpire(K key, long milliseconds) {
return dispatch(commandBuilder.pexpire(key, milliseconds));
}
/**
 * Sets an absolute millisecond-precision expiry on {@code key} from a {@link Date}.
 * <p>
 * Uses the date's epoch milliseconds directly and delegates to the long-based overload.
 */
@Override
public RedisFuture<Boolean> pexpireat(K key, Date timestamp) {
long epochMillis = timestamp.getTime();
return pexpireat(key, epochMillis);
}
@Override
public RedisFuture<Boolean> pexpireat(K key, long timestamp) {
return dispatch(commandBuilder.pexpireat(key, timestamp));
}
@Override
public RedisFuture<Long> pfadd(K key, V... values) {
return dispatch(commandBuilder.pfadd(key, values));
}
@Override
public RedisFuture<Long> pfcount(K... keys) {
return dispatch(commandBuilder.pfcount(keys));
}
@Override
public RedisFuture<String> pfmerge(K destkey, K... sourcekeys) {
return dispatch(commandBuilder.pfmerge(destkey, sourcekeys));
}
@Override
public RedisFuture<String> ping() {
return dispatch(commandBuilder.ping());
}
@Override
public RedisFuture<String> psetex(K key, long milliseconds, V value) {
return dispatch(commandBuilder.psetex(key, milliseconds, value));
}
@Override
public RedisFuture<Long> pttl(K key) {
return dispatch(commandBuilder.pttl(key));
}
@Override
public RedisFuture<Long> publish(K channel, V message) {
return dispatch(commandBuilder.publish(channel, message));
}
@Override
public RedisFuture<List<K>> pubsubChannels() {
return dispatch(commandBuilder.pubsubChannels());
}
@Override
public RedisFuture<List<K>> pubsubChannels(K channel) {
return dispatch(commandBuilder.pubsubChannels(channel));
}
@Override
public RedisFuture<Long> pubsubNumpat() {
return dispatch(commandBuilder.pubsubNumpat());
}
@Override
public RedisFuture<Map<K, Long>> pubsubNumsub(K... channels) {
return dispatch(commandBuilder.pubsubNumsub(channels));
}
@Override
public RedisFuture<String> quit() {
return dispatch(commandBuilder.quit());
}
@Override
public RedisFuture<K> randomkey() {
return dispatch(commandBuilder.randomkey());
}
@Override
public RedisFuture<String> readOnly() {
return dispatch(commandBuilder.readOnly());
}
@Override
public RedisFuture<String> readWrite() {
return dispatch(commandBuilder.readWrite());
}
@Override
public RedisFuture<String> rename(K key, K newKey) {
return dispatch(commandBuilder.rename(key, newKey));
}
@Override
public RedisFuture<Boolean> renamenx(K key, K newKey) {
return dispatch(commandBuilder.renamenx(key, newKey));
}
/**
 * Resets the underlying connection's state.
 * <p>
 * Goes through {@link #getConnection()} rather than reading the {@code connection} field
 * directly, so subclass overrides of {@code getConnection()} are honored.
 */
@Override
public void reset() {
getConnection().reset();
}
/**
 * Restores a key from a serialized (DUMP) value with a time-to-live.
 * <p>
 * Note the builder signature is {@code restore(key, value, args)}: the {@code ttl} is not passed
 * positionally but wrapped into {@code RestoreArgs} via {@code RestoreArgs.Builder.ttl(ttl)}.
 *
 * @param key the key to restore
 * @param ttl time-to-live for the restored key
 * @param value serialized value as produced by DUMP
 * @return future holding the status reply
 */
@Override
public RedisFuture<String> restore(K key, long ttl, byte[] value) {
return dispatch(commandBuilder.restore(key, value, RestoreArgs.Builder.ttl(ttl)));
}
@Override
public RedisFuture<String> restore(K key, byte[] value, RestoreArgs args) {
return dispatch(commandBuilder.restore(key, value, args));
}
@Override
public RedisFuture<List<Object>> role() {
return dispatch(commandBuilder.role());
}
@Override
public RedisFuture<V> rpop(K key) {
return dispatch(commandBuilder.rpop(key));
}
@Override
public RedisFuture<V> rpoplpush(K source, K destination) {
return dispatch(commandBuilder.rpoplpush(source, destination));
}
@Override
public RedisFuture<Long> rpush(K key, V... values) {
return dispatch(commandBuilder.rpush(key, values));
}
@Override
public RedisFuture<Long> rpushx(K key, V... values) {
return dispatch(commandBuilder.rpushx(key, values));
}
@Override
public RedisFuture<Long> sadd(K key, V... members) {
return dispatch(commandBuilder.sadd(key, members));
}
@Override
public RedisFuture<String> save() {
return dispatch(commandBuilder.save());
}
@Override
public RedisFuture<KeyScanCursor<K>> scan() {
return dispatch(commandBuilder.scan());
}
@Override
public RedisFuture<KeyScanCursor<K>> scan(ScanArgs scanArgs) {
return dispatch(commandBuilder.scan(scanArgs));
}
@Override
public RedisFuture<KeyScanCursor<K>> scan(ScanCursor scanCursor, ScanArgs scanArgs) {
return dispatch(commandBuilder.scan(scanCursor, scanArgs));
}
@Override
public RedisFuture<KeyScanCursor<K>> scan(ScanCursor scanCursor) {
return dispatch(commandBuilder.scan(scanCursor));
}
@Override
public RedisFuture<StreamScanCursor> scan(KeyStreamingChannel<K> channel) {
return dispatch(commandBuilder.scanStreaming(channel));
}
@Override
public RedisFuture<StreamScanCursor> scan(KeyStreamingChannel<K> channel, ScanArgs scanArgs) {
return dispatch(commandBuilder.scanStreaming(channel, scanArgs));
}
@Override
public RedisFuture<StreamScanCursor> scan(KeyStreamingChannel<K> channel, ScanCursor scanCursor, ScanArgs scanArgs) {
return dispatch(commandBuilder.scanStreaming(channel, scanCursor, scanArgs));
}
@Override
public RedisFuture<StreamScanCursor> scan(KeyStreamingChannel<K> channel, ScanCursor scanCursor) {
return dispatch(commandBuilder.scanStreaming(channel, scanCursor));
}
@Override
public RedisFuture<Long> scard(K key) {
return dispatch(commandBuilder.scard(key));
}
@Override
public RedisFuture<List<Boolean>> scriptExists(String... digests) {
return dispatch(commandBuilder.scriptExists(digests));
}
@Override
public RedisFuture<String> scriptFlush() {
return dispatch(commandBuilder.scriptFlush());
}
@Override
public RedisFuture<String> scriptKill() {
return dispatch(commandBuilder.scriptKill());
}
@Override
public RedisFuture<String> scriptLoad(String script) {
return scriptLoad(encodeScript(script));
}
@Override
public RedisFuture<String> scriptLoad(byte[] script) {
return dispatch(commandBuilder.scriptLoad(script));
}
@Override
public RedisFuture<Set<V>> sdiff(K... keys) {
return dispatch(commandBuilder.sdiff(keys));
}
@Override
public RedisFuture<Long> sdiff(ValueStreamingChannel<V> channel, K... keys) {
return dispatch(commandBuilder.sdiff(channel, keys));
}
@Override
public RedisFuture<Long> sdiffstore(K destination, K... keys) {
return dispatch(commandBuilder.sdiffstore(destination, keys));
}
public RedisFuture<String> select(int db) {
return dispatch(commandBuilder.select(db));
}
@Override
public RedisFuture<String> set(K key, V value) {
return dispatch(commandBuilder.set(key, value));
}
@Override
public RedisFuture<String> set(K key, V value, SetArgs setArgs) {
return dispatch(commandBuilder.set(key, value, setArgs));
}
@Override
public void setAutoFlushCommands(boolean autoFlush) {
connection.setAutoFlushCommands(autoFlush);
}
public void setTimeout(Duration timeout) {
connection.setTimeout(timeout);
}
@Override
public RedisFuture<Long> setbit(K key, long offset, int value) {
return dispatch(commandBuilder.setbit(key, offset, value));
}
@Override
public RedisFuture<String> setex(K key, long seconds, V value) {
return dispatch(commandBuilder.setex(key, seconds, value));
}
@Override
public RedisFuture<Boolean> setnx(K key, V value) {
return dispatch(commandBuilder.setnx(key, value));
}
@Override
public RedisFuture<Long> setrange(K key, long offset, V value) {
return dispatch(commandBuilder.setrange(key, offset, value));
}
@Override
public void shutdown(boolean save) {
dispatch(commandBuilder.shutdown(save));
}
@Override
public RedisFuture<Set<V>> sinter(K... keys) {
return dispatch(commandBuilder.sinter(keys));
}
@Override
public RedisFuture<Long> sinter(ValueStreamingChannel<V> channel, K... keys) {
return dispatch(commandBuilder.sinter(channel, keys));
}
@Override
public RedisFuture<Long> sinterstore(K destination, K... keys) {
return dispatch(commandBuilder.sinterstore(destination, keys));
}
@Override
public RedisFuture<Boolean> sismember(K key, V member) {
return dispatch(commandBuilder.sismember(key, member));
}
@Override
public RedisFuture<String> slaveof(String host, int port) {
return dispatch(commandBuilder.slaveof(host, port));
}
@Override
public RedisFuture<String> slaveofNoOne() {
return dispatch(commandBuilder.slaveofNoOne());
}
@Override
public RedisFuture<List<Object>> slowlogGet() {
return dispatch(commandBuilder.slowlogGet());
}
@Override
public RedisFuture<List<Object>> slowlogGet(int count) {
return dispatch(commandBuilder.slowlogGet(count));
}
@Override
public RedisFuture<Long> slowlogLen() {
return dispatch(commandBuilder.slowlogLen());
}
@Override
public RedisFuture<String> slowlogReset() {
return dispatch(commandBuilder.slowlogReset());
}
@Override
public RedisFuture<Set<V>> smembers(K key) {
return dispatch(commandBuilder.smembers(key));
}
@Override
public RedisFuture<Long> smembers(ValueStreamingChannel<V> channel, K key) {
return dispatch(commandBuilder.smembers(channel, key));
}
@Override
public RedisFuture<Boolean> smove(K source, K destination, V member) {
return dispatch(commandBuilder.smove(source, destination, member));
}
@Override
public RedisFuture<List<V>> sort(K key) {
return dispatch(commandBuilder.sort(key));
}
@Override
public RedisFuture<Long> sort(ValueStreamingChannel<V> channel, K key) {
return dispatch(commandBuilder.sort(channel, key));
}
@Override
public RedisFuture<List<V>> sort(K key, SortArgs sortArgs) {
return dispatch(commandBuilder.sort(key, sortArgs));
}
@Override
public RedisFuture<Long> sort(ValueStreamingChannel<V> channel, K key, SortArgs sortArgs) {
return dispatch(commandBuilder.sort(channel, key, sortArgs));
}
@Override
public RedisFuture<Long> sortStore(K key, SortArgs sortArgs, K destination) {
return dispatch(commandBuilder.sortStore(key, sortArgs, destination));
}
@Override
public RedisFuture<V> spop(K key) {
return dispatch(commandBuilder.spop(key));
}
@Override
public RedisFuture<Set<V>> spop(K key, long count) {
return dispatch(commandBuilder.spop(key, count));
}
@Override
public RedisFuture<V> srandmember(K key) {
return dispatch(commandBuilder.srandmember(key));
}
@Override
public RedisFuture<List<V>> srandmember(K key, long count) {
return dispatch(commandBuilder.srandmember(key, count));
}
@Override
public RedisFuture<Long> srandmember(ValueStreamingChannel<V> channel, K key, long count) {
return dispatch(commandBuilder.srandmember(channel, key, count));
}
@Override
public RedisFuture<Long> srem(K key, V... members) {
return dispatch(commandBuilder.srem(key, members));
}
@Override
public RedisFuture<ValueScanCursor<V>> sscan(K key) {
return dispatch(commandBuilder.sscan(key));
}
@Override
public RedisFuture<ValueScanCursor<V>> sscan(K key, ScanArgs scanArgs) {
return dispatch(commandBuilder.sscan(key, scanArgs));
}
@Override
public RedisFuture<ValueScanCursor<V>> sscan(K key, ScanCursor scanCursor, ScanArgs scanArgs) {
return dispatch(commandBuilder.sscan(key, scanCursor, scanArgs));
}
@Override
public RedisFuture<ValueScanCursor<V>> sscan(K key, ScanCursor scanCursor) {
return dispatch(commandBuilder.sscan(key, scanCursor));
}
@Override
public RedisFuture<StreamScanCursor> sscan(ValueStreamingChannel<V> channel, K key) {
return dispatch(commandBuilder.sscanStreaming(channel, key));
}
@Override
public RedisFuture<StreamScanCursor> sscan(ValueStreamingChannel<V> channel, K key, ScanArgs scanArgs) {
return dispatch(commandBuilder.sscanStreaming(channel, key, scanArgs));
}
@Override
public RedisFuture<StreamScanCursor> sscan(ValueStreamingChannel<V> channel, K key, ScanCursor scanCursor,
ScanArgs scanArgs) {
return dispatch(commandBuilder.sscanStreaming(channel, key, scanCursor, scanArgs));
}
@Override
public RedisFuture<StreamScanCursor> sscan(ValueStreamingChannel<V> channel, K key, ScanCursor scanCursor) {
return dispatch(commandBuilder.sscanStreaming(channel, key, scanCursor));
}
@Override
public RedisFuture<Long> strlen(K key) {
return dispatch(commandBuilder.strlen(key));
}
@Override
public RedisFuture<Set<V>> sunion(K... keys) {
return dispatch(commandBuilder.sunion(keys));
}
@Override
public RedisFuture<Long> sunion(ValueStreamingChannel<V> channel, K... keys) {
return dispatch(commandBuilder.sunion(channel, keys));
}
@Override
public RedisFuture<Long> sunionstore(K destination, K... keys) {
return dispatch(commandBuilder.sunionstore(destination, keys));
}
public RedisFuture<String> swapdb(int db1, int db2) {
return dispatch(commandBuilder.swapdb(db1, db2));
}
@Override
public RedisFuture<List<V>> time() {
return dispatch(commandBuilder.time());
}
@Override
public RedisFuture<Long> touch(K... keys) {
return dispatch(commandBuilder.touch(keys));
}
public RedisFuture<Long> touch(Iterable<K> keys) {
return dispatch(commandBuilder.touch(keys));
}
@Override
public RedisFuture<Long> ttl(K key) {
return dispatch(commandBuilder.ttl(key));
}
@Override
public RedisFuture<String> type(K key) {
return dispatch(commandBuilder.type(key));
}
@Override
public RedisFuture<Long> unlink(K... keys) {
return dispatch(commandBuilder.unlink(keys));
}
public RedisFuture<Long> unlink(Iterable<K> keys) {
return dispatch(commandBuilder.unlink(keys));
}
@Override
public RedisFuture<String> unwatch() {
return dispatch(commandBuilder.unwatch());
}
@Override
public RedisFuture<Long> waitForReplication(int replicas, long timeout) {
return dispatch(commandBuilder.wait(replicas, timeout));
}
@Override
public RedisFuture<String> watch(K... keys) {
return dispatch(commandBuilder.watch(keys));
}
@Override
public RedisFuture<Long> xack(K key, K group, String... messageIds) {
return dispatch(commandBuilder.xack(key, group, messageIds));
}
@Override
public RedisFuture<String> xadd(K key, Map<K, V> body) {
return dispatch(commandBuilder.xadd(key, null, body));
}
@Override
public RedisFuture<String> xadd(K key, XAddArgs args, Map<K, V> body) {
return dispatch(commandBuilder.xadd(key, args, body));
}
@Override
public RedisFuture<String> xadd(K key, Object... keysAndValues) {
return dispatch(commandBuilder.xadd(key, null, keysAndValues));
}
@Override
public RedisFuture<String> xadd(K key, XAddArgs args, Object... keysAndValues) {
return dispatch(commandBuilder.xadd(key, args, keysAndValues));
}
@Override
public RedisFuture<List<StreamMessage<K, V>>> xclaim(K key, Consumer<K> consumer, long minIdleTime, String... messageIds) {
return dispatch(commandBuilder.xclaim(key, consumer, XClaimArgs.Builder.minIdleTime(minIdleTime), messageIds));
}
@Override
public RedisFuture<List<StreamMessage<K, V>>> xclaim(K key, Consumer<K> consumer, XClaimArgs args, String... messageIds) {
return dispatch(commandBuilder.xclaim(key, consumer, args, messageIds));
}
@Override
public RedisFuture<Long> xdel(K key, String... messageIds) {
return dispatch(commandBuilder.xdel(key, messageIds));
}
@Override
public RedisFuture<String> xgroupCreate(XReadArgs.StreamOffset<K> offset, K group) {
return dispatch(commandBuilder.xgroupCreate(offset, group, null));
}
@Override
public RedisFuture<String> xgroupCreate(XReadArgs.StreamOffset<K> offset, K group, XGroupCreateArgs args) {
return dispatch(commandBuilder.xgroupCreate(offset, group, args));
}
@Override
public RedisFuture<Boolean> xgroupDelconsumer(K key, Consumer<K> consumer) {
return dispatch(commandBuilder.xgroupDelconsumer(key, consumer));
}
@Override
public RedisFuture<Boolean> xgroupDestroy(K key, K group) {
return dispatch(commandBuilder.xgroupDestroy(key, group));
}
@Override
public RedisFuture<String> xgroupSetid(XReadArgs.StreamOffset<K> offset, K group) {
return dispatch(commandBuilder.xgroupSetid(offset, group));
}
@Override
public RedisFuture<List<Object>> xinfoStream(K key) {
return dispatch(commandBuilder.xinfoStream(key));
}
@Override
public RedisFuture<List<Object>> xinfoGroups(K key) {
return dispatch(commandBuilder.xinfoGroups(key));
}
@Override
public RedisFuture<List<Object>> xinfoConsumers(K key, K group) {
return dispatch(commandBuilder.xinfoConsumers(key, group));
}
@Override
public RedisFuture<Long> xlen(K key) {
return dispatch(commandBuilder.xlen(key));
}
@Override
public RedisFuture<List<Object>> xpending(K key, K group) {
return dispatch(commandBuilder.xpending(key, group, Range.unbounded(), Limit.unlimited()));
}
@Override
public RedisFuture<List<Object>> xpending(K key, K group, Range<String> range, Limit limit) {
return dispatch(commandBuilder.xpending(key, group, range, limit));
}
@Override
public RedisFuture<List<Object>> xpending(K key, Consumer<K> consumer, Range<String> range, Limit limit) {
return dispatch(commandBuilder.xpending(key, consumer, range, limit));
}
@Override
public RedisFuture<List<StreamMessage<K, V>>> xrange(K key, Range<String> range) {
return dispatch(commandBuilder.xrange(key, range, Limit.unlimited()));
}
@Override
public RedisFuture<List<StreamMessage<K, V>>> xrange(K key, Range<String> range, Limit limit) {
return dispatch(commandBuilder.xrange(key, range, limit));
}
@Override
public RedisFuture<List<StreamMessage<K, V>>> xread(XReadArgs.StreamOffset<K>... streams) {
return dispatch(commandBuilder.xread(null, streams));
}
@Override
public RedisFuture<List<StreamMessage<K, V>>> xread(XReadArgs args, XReadArgs.StreamOffset<K>... streams) {
return dispatch(commandBuilder.xread(args, streams));
}
@Override
public RedisFuture<List<StreamMessage<K, V>>> xreadgroup(Consumer<K> consumer, XReadArgs.StreamOffset<K>... streams) {
return dispatch(commandBuilder.xreadgroup(consumer, null, streams));
}
@Override
public RedisFuture<List<StreamMessage<K, V>>> xreadgroup(Consumer<K> consumer, XReadArgs args,
XReadArgs.StreamOffset<K>... streams) {
return dispatch(commandBuilder.xreadgroup(consumer, args, streams));
}
@Override
public RedisFuture<List<StreamMessage<K, V>>> xrevrange(K key, Range<String> range) {
return dispatch(commandBuilder.xrevrange(key, range, Limit.unlimited()));
}
@Override
public RedisFuture<List<StreamMessage<K, V>>> xrevrange(K key, Range<String> range, Limit limit) {
return dispatch(commandBuilder.xrevrange(key, range, limit));
}
/**
 * Trims the stream at {@code key} down to {@code count} entries using exact trimming.
 * Delegates to the three-argument overload with approximate trimming disabled.
 */
@Override
public RedisFuture<Long> xtrim(K key, long count) {
boolean approximateTrimming = false;
return xtrim(key, approximateTrimming, count);
}
@Override
public RedisFuture<Long> xtrim(K key, boolean approximateTrimming, long count) {
return dispatch(commandBuilder.xtrim(key, approximateTrimming, count));
}
@Override
public RedisFuture<KeyValue<K, ScoredValue<V>>> bzpopmin(long timeout, K... keys) {
return dispatch(commandBuilder.bzpopmin(timeout, keys));
}
@Override
public RedisFuture<KeyValue<K, ScoredValue<V>>> bzpopmax(long timeout, K... keys) {
return dispatch(commandBuilder.bzpopmax(timeout, keys));
}
// ZADD family: overloads without a ZAddArgs pass null so the builder emits a
// plain ZADD with no additional modifiers.
@Override
public RedisFuture<Long> zadd(K key, double score, V member) {
return dispatch(commandBuilder.zadd(key, null, score, member));
}
@Override
public RedisFuture<Long> zadd(K key, Object... scoresAndValues) {
return dispatch(commandBuilder.zadd(key, null, scoresAndValues));
}
@Override
public RedisFuture<Long> zadd(K key, ScoredValue<V>... scoredValues) {
// (Object[]) cast spreads the array as varargs instead of passing it as a single element
return dispatch(commandBuilder.zadd(key, null, (Object[]) scoredValues));
}
@Override
public RedisFuture<Long> zadd(K key, ZAddArgs zAddArgs, double score, V member) {
return dispatch(commandBuilder.zadd(key, zAddArgs, score, member));
}
@Override
public RedisFuture<Long> zadd(K key, ZAddArgs zAddArgs, Object... scoresAndValues) {
return dispatch(commandBuilder.zadd(key, zAddArgs, scoresAndValues));
}
@Override
public RedisFuture<Long> zadd(K key, ZAddArgs zAddArgs, ScoredValue<V>... scoredValues) {
// same varargs-spreading cast as the no-args overload above
return dispatch(commandBuilder.zadd(key, zAddArgs, (Object[]) scoredValues));
}
// ZADD INCR variant: returns the member's updated score rather than a count.
@Override
public RedisFuture<Double> zaddincr(K key, double score, V member) {
return dispatch(commandBuilder.zaddincr(key, null, score, member));
}
@Override
public RedisFuture<Double> zaddincr(K key, ZAddArgs zAddArgs, double score, V member) {
return dispatch(commandBuilder.zaddincr(key, zAddArgs, score, member));
}
// Sorted-set cardinality/count/score-increment/store commands: each method is a
// direct delegate to the corresponding command-builder call.
@Override
public RedisFuture<Long> zcard(K key) {
return dispatch(commandBuilder.zcard(key));
}
@Override
public RedisFuture<Long> zcount(K key, double min, double max) {
return dispatch(commandBuilder.zcount(key, min, max));
}
@Override
public RedisFuture<Long> zcount(K key, String min, String max) {
return dispatch(commandBuilder.zcount(key, min, max));
}
@Override
public RedisFuture<Long> zcount(K key, Range<? extends Number> range) {
return dispatch(commandBuilder.zcount(key, range));
}
@Override
public RedisFuture<Double> zincrby(K key, double amount, V member) {
return dispatch(commandBuilder.zincrby(key, amount, member));
}
@Override
public RedisFuture<Long> zinterstore(K destination, K... keys) {
return dispatch(commandBuilder.zinterstore(destination, keys));
}
@Override
public RedisFuture<Long> zinterstore(K destination, ZStoreArgs storeArgs, K... keys) {
return dispatch(commandBuilder.zinterstore(destination, storeArgs, keys));
}
@Override
public RedisFuture<Long> zlexcount(K key, String min, String max) {
return dispatch(commandBuilder.zlexcount(key, min, max));
}
@Override
public RedisFuture<Long> zlexcount(K key, Range<? extends V> range) {
return dispatch(commandBuilder.zlexcount(key, range));
}
// ZPOPMIN/ZPOPMAX: pop the lowest-/highest-scored member(s); the count overloads
// return a list instead of a single scored value.
@Override
public RedisFuture<ScoredValue<V>> zpopmin(K key) {
return dispatch(commandBuilder.zpopmin(key));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zpopmin(K key, long count) {
return dispatch(commandBuilder.zpopmin(key, count));
}
@Override
public RedisFuture<ScoredValue<V>> zpopmax(K key) {
return dispatch(commandBuilder.zpopmax(key));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zpopmax(K key, long count) {
return dispatch(commandBuilder.zpopmax(key, count));
}
// ZRANGE / ZRANGEBYLEX: index-based and lexicographic range queries. Channel
// overloads stream results into the supplied streaming channel instead of
// collecting them into a list; Range overloads without an explicit Limit
// default to Limit.unlimited().
@Override
public RedisFuture<List<V>> zrange(K key, long start, long stop) {
return dispatch(commandBuilder.zrange(key, start, stop));
}
@Override
public RedisFuture<Long> zrange(ValueStreamingChannel<V> channel, K key, long start, long stop) {
return dispatch(commandBuilder.zrange(channel, key, start, stop));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrangeWithScores(K key, long start, long stop) {
return dispatch(commandBuilder.zrangeWithScores(key, start, stop));
}
@Override
public RedisFuture<Long> zrangeWithScores(ScoredValueStreamingChannel<V> channel, K key, long start, long stop) {
return dispatch(commandBuilder.zrangeWithScores(channel, key, start, stop));
}
@Override
public RedisFuture<List<V>> zrangebylex(K key, String min, String max) {
return dispatch(commandBuilder.zrangebylex(key, min, max));
}
@Override
public RedisFuture<List<V>> zrangebylex(K key, Range<? extends V> range) {
return dispatch(commandBuilder.zrangebylex(key, range, Limit.unlimited()));
}
@Override
public RedisFuture<List<V>> zrangebylex(K key, String min, String max, long offset, long count) {
return dispatch(commandBuilder.zrangebylex(key, min, max, offset, count));
}
@Override
public RedisFuture<List<V>> zrangebylex(K key, Range<? extends V> range, Limit limit) {
return dispatch(commandBuilder.zrangebylex(key, range, limit));
}
// ZRANGEBYSCORE: score-based range queries. Overloads accept numeric bounds,
// string bounds, or a Range object; Range overloads without an explicit Limit
// default to Limit.unlimited(). Channel overloads stream values to the caller.
@Override
public RedisFuture<List<V>> zrangebyscore(K key, double min, double max) {
return dispatch(commandBuilder.zrangebyscore(key, min, max));
}
@Override
public RedisFuture<List<V>> zrangebyscore(K key, String min, String max) {
return dispatch(commandBuilder.zrangebyscore(key, min, max));
}
@Override
public RedisFuture<List<V>> zrangebyscore(K key, Range<? extends Number> range) {
return dispatch(commandBuilder.zrangebyscore(key, range, Limit.unlimited()));
}
@Override
public RedisFuture<List<V>> zrangebyscore(K key, double min, double max, long offset, long count) {
return dispatch(commandBuilder.zrangebyscore(key, min, max, offset, count));
}
@Override
public RedisFuture<List<V>> zrangebyscore(K key, String min, String max, long offset, long count) {
return dispatch(commandBuilder.zrangebyscore(key, min, max, offset, count));
}
@Override
public RedisFuture<List<V>> zrangebyscore(K key, Range<? extends Number> range, Limit limit) {
return dispatch(commandBuilder.zrangebyscore(key, range, limit));
}
@Override
public RedisFuture<Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, double min, double max) {
return dispatch(commandBuilder.zrangebyscore(channel, key, min, max));
}
@Override
public RedisFuture<Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, String min, String max) {
return dispatch(commandBuilder.zrangebyscore(channel, key, min, max));
}
@Override
public RedisFuture<Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, Range<? extends Number> range) {
return dispatch(commandBuilder.zrangebyscore(channel, key, range, Limit.unlimited()));
}
@Override
public RedisFuture<Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, double min, double max, long offset,
long count) {
return dispatch(commandBuilder.zrangebyscore(channel, key, min, max, offset, count));
}
@Override
public RedisFuture<Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, String min, String max, long offset,
long count) {
return dispatch(commandBuilder.zrangebyscore(channel, key, min, max, offset, count));
}
@Override
public RedisFuture<Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, Range<? extends Number> range,
Limit limit) {
return dispatch(commandBuilder.zrangebyscore(channel, key, range, limit));
}
// ZRANGEBYSCORE WITHSCORES: same overload structure as zrangebyscore above but
// returning (or streaming) ScoredValue pairs instead of bare values.
@Override
public RedisFuture<List<ScoredValue<V>>> zrangebyscoreWithScores(K key, double min, double max) {
return dispatch(commandBuilder.zrangebyscoreWithScores(key, min, max));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrangebyscoreWithScores(K key, String min, String max) {
return dispatch(commandBuilder.zrangebyscoreWithScores(key, min, max));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrangebyscoreWithScores(K key, Range<? extends Number> range) {
// no limit supplied -> whole range
return dispatch(commandBuilder.zrangebyscoreWithScores(key, range, Limit.unlimited()));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrangebyscoreWithScores(K key, double min, double max, long offset, long count) {
return dispatch(commandBuilder.zrangebyscoreWithScores(key, min, max, offset, count));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrangebyscoreWithScores(K key, String min, String max, long offset, long count) {
return dispatch(commandBuilder.zrangebyscoreWithScores(key, min, max, offset, count));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrangebyscoreWithScores(K key, Range<? extends Number> range, Limit limit) {
return dispatch(commandBuilder.zrangebyscoreWithScores(key, range, limit));
}
@Override
public RedisFuture<Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, double min, double max) {
return dispatch(commandBuilder.zrangebyscoreWithScores(channel, key, min, max));
}
@Override
public RedisFuture<Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, String min, String max) {
return dispatch(commandBuilder.zrangebyscoreWithScores(channel, key, min, max));
}
@Override
public RedisFuture<Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key,
Range<? extends Number> range) {
return dispatch(commandBuilder.zrangebyscoreWithScores(channel, key, range, Limit.unlimited()));
}
@Override
public RedisFuture<Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, double min, double max,
long offset, long count) {
return dispatch(commandBuilder.zrangebyscoreWithScores(channel, key, min, max, offset, count));
}
@Override
public RedisFuture<Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, String min, String max,
long offset, long count) {
return dispatch(commandBuilder.zrangebyscoreWithScores(channel, key, min, max, offset, count));
}
@Override
public RedisFuture<Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key,
Range<? extends Number> range, Limit limit) {
return dispatch(commandBuilder.zrangebyscoreWithScores(channel, key, range, limit));
}
// Rank lookup and removal commands: direct delegates to the command builder.
@Override
public RedisFuture<Long> zrank(K key, V member) {
return dispatch(commandBuilder.zrank(key, member));
}
@Override
public RedisFuture<Long> zrem(K key, V... members) {
return dispatch(commandBuilder.zrem(key, members));
}
@Override
public RedisFuture<Long> zremrangebylex(K key, String min, String max) {
return dispatch(commandBuilder.zremrangebylex(key, min, max));
}
@Override
public RedisFuture<Long> zremrangebylex(K key, Range<? extends V> range) {
return dispatch(commandBuilder.zremrangebylex(key, range));
}
@Override
public RedisFuture<Long> zremrangebyrank(K key, long start, long stop) {
return dispatch(commandBuilder.zremrangebyrank(key, start, stop));
}
@Override
public RedisFuture<Long> zremrangebyscore(K key, double min, double max) {
return dispatch(commandBuilder.zremrangebyscore(key, min, max));
}
@Override
public RedisFuture<Long> zremrangebyscore(K key, String min, String max) {
return dispatch(commandBuilder.zremrangebyscore(key, min, max));
}
@Override
public RedisFuture<Long> zremrangebyscore(K key, Range<? extends Number> range) {
return dispatch(commandBuilder.zremrangebyscore(key, range));
}
// ZREVRANGE / ZREVRANGEBYLEX: reverse-order index and lexicographic ranges.
// Channel overloads stream results; the Range-only zrevrangebylex overload
// defaults to Limit.unlimited().
@Override
public RedisFuture<List<V>> zrevrange(K key, long start, long stop) {
return dispatch(commandBuilder.zrevrange(key, start, stop));
}
@Override
public RedisFuture<Long> zrevrange(ValueStreamingChannel<V> channel, K key, long start, long stop) {
return dispatch(commandBuilder.zrevrange(channel, key, start, stop));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrevrangeWithScores(K key, long start, long stop) {
return dispatch(commandBuilder.zrevrangeWithScores(key, start, stop));
}
@Override
public RedisFuture<Long> zrevrangeWithScores(ScoredValueStreamingChannel<V> channel, K key, long start, long stop) {
return dispatch(commandBuilder.zrevrangeWithScores(channel, key, start, stop));
}
@Override
public RedisFuture<List<V>> zrevrangebylex(K key, Range<? extends V> range) {
return dispatch(commandBuilder.zrevrangebylex(key, range, Limit.unlimited()));
}
@Override
public RedisFuture<List<V>> zrevrangebylex(K key, Range<? extends V> range, Limit limit) {
return dispatch(commandBuilder.zrevrangebylex(key, range, limit));
}
// ZREVRANGEBYSCORE: reverse score-range queries. Note the (max, min) parameter
// order on the numeric/string overloads, matching the command's argument order.
// Range overloads without an explicit Limit default to Limit.unlimited().
@Override
public RedisFuture<List<V>> zrevrangebyscore(K key, double max, double min) {
return dispatch(commandBuilder.zrevrangebyscore(key, max, min));
}
@Override
public RedisFuture<List<V>> zrevrangebyscore(K key, String max, String min) {
return dispatch(commandBuilder.zrevrangebyscore(key, max, min));
}
@Override
public RedisFuture<List<V>> zrevrangebyscore(K key, Range<? extends Number> range) {
return dispatch(commandBuilder.zrevrangebyscore(key, range, Limit.unlimited()));
}
@Override
public RedisFuture<List<V>> zrevrangebyscore(K key, double max, double min, long offset, long count) {
return dispatch(commandBuilder.zrevrangebyscore(key, max, min, offset, count));
}
@Override
public RedisFuture<List<V>> zrevrangebyscore(K key, String max, String min, long offset, long count) {
return dispatch(commandBuilder.zrevrangebyscore(key, max, min, offset, count));
}
@Override
public RedisFuture<List<V>> zrevrangebyscore(K key, Range<? extends Number> range, Limit limit) {
return dispatch(commandBuilder.zrevrangebyscore(key, range, limit));
}
@Override
public RedisFuture<Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, double max, double min) {
return dispatch(commandBuilder.zrevrangebyscore(channel, key, max, min));
}
@Override
public RedisFuture<Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, String max, String min) {
return dispatch(commandBuilder.zrevrangebyscore(channel, key, max, min));
}
@Override
public RedisFuture<Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, Range<? extends Number> range) {
return dispatch(commandBuilder.zrevrangebyscore(channel, key, range, Limit.unlimited()));
}
@Override
public RedisFuture<Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, double max, double min, long offset,
long count) {
return dispatch(commandBuilder.zrevrangebyscore(channel, key, max, min, offset, count));
}
@Override
public RedisFuture<Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, String max, String min, long offset,
long count) {
return dispatch(commandBuilder.zrevrangebyscore(channel, key, max, min, offset, count));
}
@Override
public RedisFuture<Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, Range<? extends Number> range,
Limit limit) {
return dispatch(commandBuilder.zrevrangebyscore(channel, key, range, limit));
}
// ZREVRANGEBYSCORE WITHSCORES: same overload structure as zrevrangebyscore but
// returning (or streaming) ScoredValue pairs. Parameter order is (max, min).
@Override
public RedisFuture<List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, double max, double min) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(key, max, min));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, String max, String min) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(key, max, min));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, Range<? extends Number> range) {
// no limit supplied -> whole range
return dispatch(commandBuilder.zrevrangebyscoreWithScores(key, range, Limit.unlimited()));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, double max, double min, long offset,
long count) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(key, max, min, offset, count));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, String max, String min, long offset,
long count) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(key, max, min, offset, count));
}
@Override
public RedisFuture<List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, Range<? extends Number> range, Limit limit) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(key, range, limit));
}
@Override
public RedisFuture<Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, double max, double min) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(channel, key, max, min));
}
@Override
public RedisFuture<Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, String max, String min) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(channel, key, max, min));
}
@Override
public RedisFuture<Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key,
Range<? extends Number> range) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(channel, key, range, Limit.unlimited()));
}
@Override
public RedisFuture<Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, double max, double min,
long offset, long count) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(channel, key, max, min, offset, count));
}
@Override
public RedisFuture<Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, String max, String min,
long offset, long count) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(channel, key, max, min, offset, count));
}
@Override
public RedisFuture<Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key,
Range<? extends Number> range, Limit limit) {
return dispatch(commandBuilder.zrevrangebyscoreWithScores(channel, key, range, limit));
}
// Reverse rank, cursor-based scans, score lookup, and union-store. The
// streaming zscan overloads route through zscanStreaming, which pushes
// results into the channel instead of materializing a cursor result set.
@Override
public RedisFuture<Long> zrevrank(K key, V member) {
return dispatch(commandBuilder.zrevrank(key, member));
}
@Override
public RedisFuture<ScoredValueScanCursor<V>> zscan(K key) {
return dispatch(commandBuilder.zscan(key));
}
@Override
public RedisFuture<ScoredValueScanCursor<V>> zscan(K key, ScanArgs scanArgs) {
return dispatch(commandBuilder.zscan(key, scanArgs));
}
@Override
public RedisFuture<ScoredValueScanCursor<V>> zscan(K key, ScanCursor scanCursor, ScanArgs scanArgs) {
return dispatch(commandBuilder.zscan(key, scanCursor, scanArgs));
}
@Override
public RedisFuture<ScoredValueScanCursor<V>> zscan(K key, ScanCursor scanCursor) {
return dispatch(commandBuilder.zscan(key, scanCursor));
}
@Override
public RedisFuture<StreamScanCursor> zscan(ScoredValueStreamingChannel<V> channel, K key) {
return dispatch(commandBuilder.zscanStreaming(channel, key));
}
@Override
public RedisFuture<StreamScanCursor> zscan(ScoredValueStreamingChannel<V> channel, K key, ScanArgs scanArgs) {
return dispatch(commandBuilder.zscanStreaming(channel, key, scanArgs));
}
@Override
public RedisFuture<StreamScanCursor> zscan(ScoredValueStreamingChannel<V> channel, K key, ScanCursor scanCursor,
ScanArgs scanArgs) {
return dispatch(commandBuilder.zscanStreaming(channel, key, scanCursor, scanArgs));
}
@Override
public RedisFuture<StreamScanCursor> zscan(ScoredValueStreamingChannel<V> channel, K key, ScanCursor scanCursor) {
return dispatch(commandBuilder.zscanStreaming(channel, key, scanCursor));
}
@Override
public RedisFuture<Double> zscore(K key, V member) {
return dispatch(commandBuilder.zscore(key, member));
}
@Override
public RedisFuture<Long> zunionstore(K destination, K... keys) {
return dispatch(commandBuilder.zunionstore(destination, keys));
}
@Override
public RedisFuture<Long> zunionstore(K destination, ZStoreArgs storeArgs, K... keys) {
return dispatch(commandBuilder.zunionstore(destination, storeArgs, keys));
}
/**
 * Encodes a Lua script to bytes using the script charset configured on the
 * connection's options.
 *
 * @param script the Lua script source; must not be {@code null} or empty
 * @return the encoded script bytes
 */
private byte[] encodeScript(String script) {
LettuceAssert.notNull(script, "Lua script must not be null");
LettuceAssert.notEmpty(script, "Lua script must not be empty");
return script.getBytes(getConnection().getOptions().getScriptCharset());
}
}
|
import java.sql.SQLException;
/**
 * UI-facing helper for book CRUD operations driven by form text fields.
 *
 * NOTE(review): all three methods are unimplemented stubs; the inline
 * "Example:" comments sketch the intended behavior but no DAO field exists
 * on this class yet — presumably a BookDao collaborator is to be injected.
 */
public class BookManager {
// Resets the text fields for book information
// NOTE(review): declaring DaoException/SQLException on a pure UI reset looks
// unnecessary — confirm whether these can be dropped once implemented.
public void reset(TextField textField_id, TextField textField_title, TextField textField_author, TextField textField_publisher, TextField textField_price) throws DaoException, SQLException {
// Implementation to reset text fields
// Example: textField_id.setText("");
// Example: textField_title.setText("");
// Example: textField_author.setText("");
// Example: textField_publisher.setText("");
// Example: textField_price.setText("");
}
// Adds a new book with the provided information
// NOTE(review): the example parses price with Double.parseDouble — unvalidated
// user input will throw NumberFormatException; add validation when implementing.
public void addBook(TextField textField_id, TextField textField_title, TextField textField_author, TextField textField_publisher, TextField textField_price) throws DaoException {
// Implementation to add a new book
// Example: Book newBook = new Book(textField_id.getText(), textField_title.getText(), textField_author.getText(), textField_publisher.getText(), Double.parseDouble(textField_price.getText()));
// Example: bookDao.addBook(newBook);
}
// Updates the information of an existing book
public void updateBookInfo(TextField textField_id, TextField textField_title, TextField textField_author, TextField textField_publisher, TextField textField_price) throws DaoException {
// Implementation to update book information
// Example: Book updatedBook = new Book(textField_id.getText(), textField_title.getText(), textField_author.getText(), textField_publisher.getText(), Double.parseDouble(textField_price.getText()));
// Example: bookDao.updateBook(updatedBook);
}
}
#!/bin/bash
## Driver that produces one 2D "met_vs_time" plot per configured input sample.
## Relies on variables defined by common_variables.sh: the "inputs" array (each
## element names a variable holding a whitespace-separated record of
## "label infile insigfile sel varwgtmap") plus skimdir, cutconfigdir,
## varwgtconfigdir, plotconfigdir, miscconfigdir, inTextExt and MainEra.
source scripts/common_variables.sh

## config
outdir=${1:-"plots2D"}   # optional first CLI argument overrides the output directory
plot="met_vs_time"
misc="empty"

## main loop over the configured samples
for input in "${inputs[@]}"
do
    ## Quote the indirect expansion: unquoted, the record would undergo pathname
    ## globbing and whitespace collapsing before "read" gets to split it.
    echo "${!input}" | while read -r label infile insigfile sel varwgtmap
    do
        ## per-sample output file name
        outfile="${plot}_${label}"
        ## run the 2D tree plotter with the sample's skims and config files
        ./scripts/runTreePlotter2D.sh "${skimdir}/${infile}.root" "${skimdir}/${insigfile}.root" "${cutconfigdir}/${sel}.${inTextExt}" "${varwgtconfigdir}/${varwgtmap}.${inTextExt}" "${plotconfigdir}/${plot}.${inTextExt}" "${miscconfigdir}/${misc}.${inTextExt}" "${MainEra}" "${outfile}" "${outdir}/${label}"
    done
done

## Final message
echo "Finished Making2DPlots"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.