text stringlengths 1 1.05M |
|---|
<reponame>jjhlzn/ContractApp_Android
package com.jinjunhang.contract.controller;
import android.support.v4.app.Fragment;
/**
 * Activity hosting the approval-list screen inside the single-fragment
 * scaffold provided by {@link SingleFragmentActivity}.
 *
 * Created by lzn on 16/4/6.
 */
public class ApprovalListActivity extends SingleFragmentActivity {
    /** Title shown for this screen (Chinese: "approval list"). */
    @Override
    protected String getActivityTitle() {
        return "审批列表";
    }
    // NOTE(review): per the hook name this presumably enables pull-down-to-refresh
    // in the base activity — confirm against SingleFragmentActivity.
    @Override
    protected boolean isNeedPushDownFresh() {
        return true;
    }
    /** Supplies the fragment the base activity embeds as its single content view. */
    @Override
    protected Fragment createFragment() {
        return new ApprovalListFragment();
    }
}
|
<gh_stars>0
'use strict';

/**
 * A dialog for editing publishing settings for Content nodes
 *
 * @memberof HashBrown.Client.EntityView.Modal
 */
class ContentPublishingSettings extends HashBrown.Entity.View.Modal.ModalBase {
    /**
     * Constructor
     *
     * @param {Object} params Options forwarded to ModalBase
     */
    constructor(params) {
        super(params);

        this.template = require('template/modal/contentPublishingSettings');
    }

    /**
     * Fetches the model: ensures the settings containers exist, then loads the
     * 'publishing' settings, the localized title, and the available connections
     * into this.state.
     *
     * NOTE(review): when settings are governed by a parent node, this throws an
     * Error whose message is an HTML link — presumably ModalBase surfaces fetch
     * errors as the dialog body; confirm before changing this control flow.
     */
    async fetch() {
        if(!this.model.settings) { this.model.settings = {}; }
        if(!this.model.settings.publishing) { this.model.settings.publishing = {}; }

        this.state.value = await this.model.getSettings('publishing') || {};
        this.state.title = this.model.prop('title', HashBrown.Context.language) || this.model.id;

        if(this.state.value.governedBy) {
            let content = await HashBrown.Service.ContentService.getContentById(this.state.value.governedBy);
            throw new Error(`(Settings inherited from <a href="#/content/${content.id}">${content.prop('title', HashBrown.Context.language) || content.id}</a>)`);
        }

        // Map of connection title -> connection id for the picker.
        let connections = await HashBrown.Service.ConnectionService.getAllConnections();
        this.state.connections = {};
        for(let connection of connections) {
            this.state.connections[connection.title] = connection.id;
        }
    }

    /**
     * Event: Change apply to children checkbox
     */
    onToggleApplyToChildren(apply) {
        this.model.settings.publishing.applyToChildren = apply;
    }

    /**
     * Event: Change connection
     */
    onChangeConnection(connectionId) {
        this.model.settings.publishing.connectionId = connectionId;
    }

    /**
     * Event: Click OK — persists the model, notifies listeners, closes the dialog.
     * Failures are shown via the modal's error state instead of closing.
     */
    async onClickOK() {
        try {
            await HashBrown.Service.ResourceService.set('content', this.model.id, this.model);

            HashBrown.Service.EventService.trigger('settings', this.model.id);

            this.close();

        } catch(e) {
            this.setErrorState(e);

        }
    }
}

module.exports = ContentPublishingSettings;
|
#!/bin/bash

# This file is accessible as https://install.direct/go.sh
# Original source is located at github.com/v2ray/v2ray-core/release/install-release.sh

# If not specify, default meaning of return value:
# 0: Success
# 1: System error
# 2: Application error
# 3: Network error

# CLI arguments
PROXY=''
HELP=''
FORCE=''
CHECK=''
REMOVE=''
VERSION=''
VSRC_ROOT='/tmp/v2ray'
EXTRACT_ONLY=''
LOCAL=''
LOCAL_INSTALL=''
DIST_SRC='github'
ERROR_IF_UPTODATE=''

CUR_VER=""
NEW_VER=""
VDIS=''
ZIPFILE="/tmp/v2ray/v2ray.zip"
V2RAY_RUNNING=0

CMD_INSTALL=""
CMD_UPDATE=""
SOFTWARE_UPDATED=0

# Detect service managers once; empty when absent.
SYSTEMCTL_CMD=$(command -v systemctl 2>/dev/null)
SERVICE_CMD=$(command -v service 2>/dev/null)

#######color code########
RED="31m"      # Error message
GREEN="32m"    # Success message
YELLOW="33m"   # Warning message
BLUE="36m"     # Info message
#########################

# Parse CLI arguments.
# BUGFIX: `[[ $# > 0 ]]` performs a lexicographic string comparison inside
# [[ ]]; use the arithmetic operator -gt for a numeric test.
while [[ $# -gt 0 ]]; do
    case "$1" in
        -p|--proxy)
        PROXY="-x ${2}"
        shift # past argument
        ;;
        -h|--help)
        HELP="1"
        ;;
        -f|--force)
        FORCE="1"
        ;;
        -c|--check)
        CHECK="1"
        ;;
        --remove)
        REMOVE="1"
        ;;
        --version)
        VERSION="$2"
        shift
        ;;
        --extract)
        VSRC_ROOT="$2"
        shift
        ;;
        --extractonly)
        EXTRACT_ONLY="1"
        ;;
        -l|--local)
        LOCAL="$2"
        LOCAL_INSTALL="1"
        shift
        ;;
        --source)
        DIST_SRC="$2"
        shift
        ;;
        --errifuptodate)
        ERROR_IF_UPTODATE="1"
        ;;
        *)
        # unknown option
        ;;
    esac
    shift # past argument or value
done
###############################
colorEcho(){
    # Print a color-wrapped message to stderr: $1 = ANSI color code suffix
    # (e.g. "31m"), remaining args = message text.
    local color="$1"
    shift
    echo -e "\033[${color}$*\033[0m" >&2
}
archAffix(){
    # Map a machine architecture string ($1, default `uname -m`) to the suffix
    # used in v2ray release archive names (v2ray-linux-<suffix>.zip).
    # Returns 1 for unsupported architectures.
    # NOTE: glob order is significant — *mips64le* must be tested before
    # *mips64*, and *mipsle* before *mips*.
    case "${1:-"$(uname -m)"}" in
        i686|i386)
            echo '32'
            ;;
        x86_64|amd64)
            echo '64'
            ;;
        *armv7*|armv6l)
            echo 'arm'
            ;;
        *armv8*|aarch64)
            echo 'arm64'
            ;;
        *mips64le*)
            echo 'mips64le'
            ;;
        *mips64*)
            echo 'mips64'
            ;;
        *mipsle*)
            echo 'mipsle'
            ;;
        *mips*)
            echo 'mips'
            ;;
        *s390x*)
            echo 's390x'
            ;;
        ppc64le)
            echo 'ppc64le'
            ;;
        ppc64)
            echo 'ppc64'
            ;;
        *)
            return 1
            ;;
    esac

    return 0
}
zipRoot() {
    # Print the longest common path prefix shared by every entry of zip
    # archive $1 — i.e. the single root directory the archive unpacks into,
    # or "" when entries share no prefix. Column 4 of `unzip -lqq` is the
    # entry path.
    # NOTE(review): `awk -e` is a GNU awk extension; a plain quoted program
    # would be more portable. `nextfile` is presumably intended to stop
    # scanning once no common prefix remains — confirm, since `exit` is the
    # usual way to abort a piped awk program.
    unzip -lqq "$1" | awk -e '
        NR == 1 {
            prefix = $4;
        }
        NR != 1 {
            prefix_len = length(prefix);
            cur_len = length($4);
            for (len = prefix_len < cur_len ? prefix_len : cur_len; len >= 1; len -= 1) {
                sub_prefix = substr(prefix, 1, len);
                sub_cur = substr($4, 1, len);
                if (sub_prefix == sub_cur) {
                    prefix = sub_prefix;
                    break;
                }
            }
            if (len == 0) {
                prefix = "";
                nextfile;
            }
        }
        END {
            print prefix;
        }
    '
}
downloadV2Ray(){
    # Download the v2ray release zip for $VDIS / $NEW_VER into $ZIPFILE.
    # Returns 3 (network error) when curl fails.
    rm -rf /tmp/v2ray
    mkdir -p /tmp/v2ray
    # The jsdelivr URL carries no version component, so it fetches whatever is
    # currently in the dist repo; the GitHub URL pins the exact release tag.
    if [[ "${DIST_SRC}" == "jsdelivr" ]]; then
        DOWNLOAD_LINK="https://cdn.jsdelivr.net/gh/v2ray/dist/v2ray-linux-${VDIS}.zip"
    else
        DOWNLOAD_LINK="https://github.com/v2ray/v2ray-core/releases/download/${NEW_VER}/v2ray-linux-${VDIS}.zip"
    fi
    colorEcho ${BLUE} "Downloading V2Ray: ${DOWNLOAD_LINK}"
    # $PROXY is either empty or "-x <proxy>", injected unquoted on purpose.
    curl ${PROXY} -L -H "Cache-Control: no-cache" -o ${ZIPFILE} ${DOWNLOAD_LINK}
    if [ $? != 0 ];then
        colorEcho ${RED} "Failed to download! Please check your network or try again."
        return 3
    fi
    return 0
}
installSoftware(){
    # Ensure the command $1 exists, installing it via the detected package
    # manager when missing. Returns 1 when no supported manager is available
    # or the install fails.
    COMPONENT=$1
    if [[ -n `command -v $COMPONENT` ]]; then
        return 0
    fi

    getPMT
    if [[ $? -eq 1 ]]; then
        colorEcho ${RED} "The system package manager tool isn't APT or YUM, please install ${COMPONENT} manually."
        return 1
    fi
    # Refresh the package index at most once per script run.
    if [[ $SOFTWARE_UPDATED -eq 0 ]]; then
        colorEcho ${BLUE} "Updating software repo"
        $CMD_UPDATE
        SOFTWARE_UPDATED=1
    fi

    colorEcho ${BLUE} "Installing ${COMPONENT}"
    $CMD_INSTALL $COMPONENT
    if [[ $? -ne 0 ]]; then
        colorEcho ${RED} "Failed to install ${COMPONENT}. Please install it manually."
        return 1
    fi
    return 0
}
# Detect the system package manager and populate CMD_INSTALL / CMD_UPDATE.
# return 1: not apt, yum, or zypper
getPMT(){
    if [[ -n `command -v apt-get` ]];then
        CMD_INSTALL="apt-get -y -qq install"
        CMD_UPDATE="apt-get -qq update"
    elif [[ -n `command -v yum` ]]; then
        CMD_INSTALL="yum -y -q install"
        CMD_UPDATE="yum -q makecache"
    elif [[ -n `command -v zypper` ]]; then
        # BUGFIX: zypper has no global `-y` option, so `zypper -y install`
        # exits with a usage error; `--non-interactive` is the documented
        # global flag that auto-confirms prompts.
        CMD_INSTALL="zypper --non-interactive install"
        CMD_UPDATE="zypper ref"
    else
        return 1
    fi
    return 0
}
normalizeVersion() {
    # Normalize a version string to start with a single "v" prefix
    # ("1.2" -> "v1.2", "v1.2" -> "v1.2"); an empty input yields an empty line.
    [ -n "$1" ] || { echo ""; return; }
    echo "v${1#v}"
}
# Determine the installed and latest versions, setting CUR_VER / NEW_VER.
# 1: new V2Ray. 0: no. 2: not installed. 3: check failed. 4: don't check.
getVersion(){
    # An explicit --version always wins; skip all checks.
    if [[ -n "$VERSION" ]]; then
        NEW_VER="$(normalizeVersion "$VERSION")"
        return 4
    else
        # Installed version; RETVAL is non-zero when the binary is absent.
        VER="$(/usr/bin/v2ray/v2ray -version 2>/dev/null)"
        RETVAL=$?
        CUR_VER="$(normalizeVersion "$(echo "$VER" | head -n 1 | cut -d " " -f2)")"
        # Latest release tag from the GitHub API.
        TAG_URL="https://api.github.com/repos/v2ray/v2ray-core/releases/latest"
        NEW_VER="$(normalizeVersion "$(curl ${PROXY} -H "Accept: application/json" -H "User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:74.0) Gecko/20100101 Firefox/74.0" -s "${TAG_URL}" --connect-timeout 10| grep 'tag_name' | cut -d\" -f4)")"

        # NOTE(review): this $? tests the command substitution assignment above,
        # not curl itself — the empty-string check is what actually catches a
        # failed fetch.
        if [[ $? -ne 0 ]] || [[ $NEW_VER == "" ]]; then
            colorEcho ${RED} "Failed to fetch release information. Please check your network or try again."
            return 3
        elif [[ $RETVAL -ne 0 ]];then
            return 2
        elif [[ $NEW_VER != $CUR_VER ]];then
            return 1
        fi
        return 0
    fi
}
stopV2ray(){
    # Stop the v2ray service via systemd when available, falling back to SysV
    # init. Returns 2 when the stop command fails.
    colorEcho ${BLUE} "Shutting down V2Ray service."
    # BUGFIX: the original used `||` between the command check and the unit-file
    # checks, so an empty SYSTEMCTL_CMD with an existing unit file executed a
    # bare `stop v2ray` (command not found). Use `&&` like startV2ray does.
    if [[ -n "${SYSTEMCTL_CMD}" ]] && [[ -f "/lib/systemd/system/v2ray.service" || -f "/etc/systemd/system/v2ray.service" ]]; then
        ${SYSTEMCTL_CMD} stop v2ray
    elif [[ -n "${SERVICE_CMD}" ]] && [[ -f "/etc/init.d/v2ray" ]]; then
        ${SERVICE_CMD} v2ray stop
    fi
    if [[ $? -ne 0 ]]; then
        colorEcho ${YELLOW} "Failed to shutdown V2Ray service."
        return 2
    fi
    return 0
}
startV2ray(){
    # Start the v2ray service via systemd when a unit file is present, else via
    # SysV init. Returns 2 when the start command fails.
    if [ -n "${SYSTEMCTL_CMD}" ] && [[ -f "/lib/systemd/system/v2ray.service" || -f "/etc/systemd/system/v2ray.service" ]]; then
        ${SYSTEMCTL_CMD} start v2ray
    elif [ -n "${SERVICE_CMD}" ] && [ -f "/etc/init.d/v2ray" ]; then
        ${SERVICE_CMD} v2ray start
    fi
    if [[ $? -ne 0 ]]; then
        colorEcho ${YELLOW} "Failed to start V2Ray service."
        return 2
    fi
    return 0
}
installV2Ray(){
    # Install V2Ray binary to /usr/bin/v2ray.
    # $1 = zip file path, $2 = archive root prefix ("" or "somedir/") as
    # reported by zipRoot.
    mkdir -p '/etc/v2ray' '/var/log/v2ray' && \
        unzip -oj "$1" "$2v2ray" "$2v2ctl" "$2geoip.dat" "$2geosite.dat" -d '/usr/bin/v2ray' && \
        chmod +x '/usr/bin/v2ray/v2ray' '/usr/bin/v2ray/v2ctl' || {
        colorEcho ${RED} "Failed to copy V2Ray binary and resources."
        return 1
    }

    # Install V2Ray server config to /etc/v2ray — only when none exists yet.
    if [ ! -f '/etc/v2ray/config.json' ]; then
        # $RANDOM is 0..32767, so PORT lands in 10000..42767.
        local PORT="$(($RANDOM + 10000))"
        local UUID="$(cat '/proc/sys/kernel/random/uuid')"

        # Patch the bundled sample config's default port and UUID in-stream.
        unzip -pq "$1" "$2vpoint_vmess_freedom.json" | \
            sed -e "s/10086/${PORT}/g; s/23ad6b10-8d1a-40f7-8ad0-e3e35cd38297/${UUID}/g;" - > \
            '/etc/v2ray/config.json' || {
            colorEcho ${YELLOW} "Failed to create V2Ray configuration file. Please create it manually."
            return 1
        }

        colorEcho ${BLUE} "PORT:${PORT}"
        colorEcho ${BLUE} "UUID:${UUID}"
    fi
}
installInitScript(){
    # Install and enable a service definition from the archive.
    # $1 = zip file path, $2 = archive root prefix.
    # systemd hosts get the unit file; SysV hosts get the init script (which
    # needs the `daemon` helper installed first).
    if [[ -n "${SYSTEMCTL_CMD}" ]]; then
        # Only install when no unit file exists yet, to avoid clobbering edits.
        if [[ ! -f "/etc/systemd/system/v2ray.service" && ! -f "/lib/systemd/system/v2ray.service" ]]; then
            unzip -oj "$1" "$2systemd/v2ray.service" -d '/etc/systemd/system' && \
                systemctl enable v2ray.service
        fi
    elif [[ -n "${SERVICE_CMD}" ]] && [[ ! -f "/etc/init.d/v2ray" ]]; then
        installSoftware 'daemon' && \
            unzip -oj "$1" "$2systemv/v2ray" -d '/etc/init.d' && \
            chmod +x '/etc/init.d/v2ray' && \
            update-rc.d v2ray defaults
    fi
}
Help(){
    # Print usage information to stderr.
    cat >&2 <<'EOF'
./install-release.sh [-h] [-c] [--remove] [-p proxy] [-f] [--version vx.y.z] [-l file]
  -h, --help            Show help
  -p, --proxy           To download through a proxy server, use -p socks5://127.0.0.1:1080 or -p http://127.0.0.1:3128 etc
  -f, --force           Force install
      --version         Install a particular version, use --version v3.15
  -l, --local           Install from a local file
      --remove          Remove installed V2Ray
  -c, --check           Check for update
EOF
}
# Stop v2ray first when it is running.
_stopV2rayIfRunning(){
    if pgrep "v2ray" > /dev/null ; then
        stopV2ray
    fi
}

# Report the outcome of the preceding `rm`; $1 is its exit status.
# Always returns 0 to preserve the original script's behavior.
_reportRemoval(){
    if [[ $1 -ne 0 ]]; then
        colorEcho ${RED} "Failed to remove V2Ray."
    else
        colorEcho ${GREEN} "Removed V2Ray successfully."
        colorEcho ${BLUE} "If necessary, please remove configuration file and log file manually."
    fi
    return 0
}

# Disable the systemd unit at $1 and delete the binaries plus the unit file.
_removeSystemdV2ray(){
    _stopV2rayIfRunning
    systemctl disable v2ray.service
    rm -rf "/usr/bin/v2ray" "$1"
    _reportRemoval $?
}

# Uninstall V2Ray: binaries plus whichever service definition is present.
# The three original copy-pasted branches are collapsed into helpers; return
# codes (always 0) are unchanged.
remove(){
    if [[ -n "${SYSTEMCTL_CMD}" ]] && [[ -f "/etc/systemd/system/v2ray.service" ]];then
        _removeSystemdV2ray "/etc/systemd/system/v2ray.service"
    elif [[ -n "${SYSTEMCTL_CMD}" ]] && [[ -f "/lib/systemd/system/v2ray.service" ]];then
        _removeSystemdV2ray "/lib/systemd/system/v2ray.service"
    elif [[ -n "${SERVICE_CMD}" ]] && [[ -f "/etc/init.d/v2ray" ]]; then
        _stopV2rayIfRunning
        rm -rf "/usr/bin/v2ray" "/etc/init.d/v2ray"
        _reportRemoval $?
    else
        colorEcho ${YELLOW} "V2Ray not found."
        return 0
    fi
}
checkUpdate(){
    # Query the latest release (via getVersion) and report whether an update
    # is available for the installed v2ray. Always returns 0.
    echo "Checking for update."
    VERSION=""
    getVersion
    RETVAL="$?"
    case "$RETVAL" in
        1)
            colorEcho ${BLUE} "Found new version ${NEW_VER} for V2Ray.(Current version:$CUR_VER)"
            ;;
        0)
            colorEcho ${BLUE} "No new version. Current version is ${NEW_VER}."
            ;;
        2)
            colorEcho ${YELLOW} "No V2Ray installed."
            colorEcho ${BLUE} "The newest version for V2Ray is ${NEW_VER}."
            ;;
    esac
    return 0
}
main(){
    # Entry point: dispatch one-shot modes, then install/update V2Ray.
    #helping information
    [[ "$HELP" == "1" ]] && Help && return
    [[ "$CHECK" == "1" ]] && checkUpdate && return
    [[ "$REMOVE" == "1" ]] && remove && return

    local ARCH=$(uname -m)
    VDIS="$(archAffix)"

    # extract local file
    if [[ $LOCAL_INSTALL -eq 1 ]]; then
        colorEcho ${YELLOW} "Installing V2Ray via local file. Please make sure the file is a valid V2Ray package, as we are not able to determine that."
        NEW_VER=local
        rm -rf /tmp/v2ray
        ZIPFILE="$LOCAL"
        #FILEVDIS=`ls /tmp/v2ray |grep v2ray-v |cut -d "-" -f4`
        #SYSTEM=`ls /tmp/v2ray |grep v2ray-v |cut -d "-" -f3`
        #if [[ ${SYSTEM} != "linux" ]]; then
        #    colorEcho ${RED} "The local V2Ray can not be installed in linux."
        #    return 1
        #elif [[ ${FILEVDIS} != ${VDIS} ]]; then
        #    colorEcho ${RED} "The local V2Ray can not be installed in ${ARCH} system."
        #    return 1
        #else
        #    NEW_VER=`ls /tmp/v2ray |grep v2ray-v |cut -d "-" -f2`
        #fi
    else
        # download via network and extract
        installSoftware "curl" || return $?
        getVersion
        RETVAL="$?"
        if [[ $RETVAL == 0 ]] && [[ "$FORCE" != "1" ]]; then
            colorEcho ${BLUE} "Latest version ${CUR_VER} is already installed."
            # --errifuptodate callers use 10 to distinguish "nothing to do".
            if [ -n "${ERROR_IF_UPTODATE}" ]; then
                return 10
            fi
            return
        elif [[ $RETVAL == 3 ]]; then
            return 3
        else
            colorEcho ${BLUE} "Installing V2Ray ${NEW_VER} on ${ARCH}"
            downloadV2Ray || return $?
        fi
    fi

    # Archives may or may not contain a single top-level directory.
    local ZIPROOT="$(zipRoot "${ZIPFILE}")"
    installSoftware unzip || return $?

    # --extractonly: unpack to $VSRC_ROOT and stop.
    if [ -n "${EXTRACT_ONLY}" ]; then
        colorEcho ${BLUE} "Extracting V2Ray package to ${VSRC_ROOT}."

        if unzip -o "${ZIPFILE}" -d ${VSRC_ROOT}; then
            colorEcho ${GREEN} "V2Ray extracted to ${VSRC_ROOT%/}${ZIPROOT:+/${ZIPROOT%/}}, and exiting..."
            return 0
        else
            colorEcho ${RED} "Failed to extract V2Ray."
            return 2
        fi
    fi

    # Stop a running instance before overwriting binaries; restart it after.
    if pgrep "v2ray" > /dev/null ; then
        V2RAY_RUNNING=1
        stopV2ray
    fi
    installV2Ray "${ZIPFILE}" "${ZIPROOT}" || return $?
    installInitScript "${ZIPFILE}" "${ZIPROOT}" || return $?
    if [[ ${V2RAY_RUNNING} -eq 1 ]];then
        colorEcho ${BLUE} "Restarting V2Ray service."
        startV2ray
    fi
    colorEcho ${GREEN} "V2Ray ${NEW_VER} is installed."
    rm -rf /tmp/v2ray
    return 0
}
main |
#!/bin/bash
# Apply every SQL migration in ./scripts against the server configured under
# the 'migrate' login path (see `mysql_config_editor set --login-path=migrate`).
# Fail fast: previously a failed script was silently ignored and the loop
# continued, which can leave the schema half-migrated.
set -e
for script in scripts/*.sql; do
    echo "Executing $script..."
    # Quote the path so filenames with spaces survive word splitting.
    mysql --login-path=migrate < "$script"
done
|
import time


class OperationPoller:
    """Polls a long-running operation until it completes or a timeout expires."""

    def __init__(self, logger, operation_polling_time_sleep_secs=5):
        """
        :param logger: object exposing a ``debug(msg)`` method.
        :param operation_polling_time_sleep_secs: seconds to sleep between polls.
        """
        self.logger = logger
        self.operation_polling_time_sleep_secs = operation_polling_time_sleep_secs

    def poll_operation_status(self, operation, max_timeout_mins):
        """Block until ``operation`` reaches status ``'DONE'`` or the timeout elapses.

        :param operation: mapping with at least ``'status'`` and ``'selfLink'`` keys.
        :param max_timeout_mins: maximum total wait, in minutes.
        :return: the last observed status string. (Backward-compatible addition:
            the original returned ``None`` implicitly; existing callers that
            ignore the return value are unaffected.)
        """
        operation_status = operation['status']
        self.logger.debug('Beginning to poll for operation')
        # Compute the deadline once instead of re-deriving it on every iteration.
        deadline = time.time() + max_timeout_mins * 60
        while operation_status != 'DONE' and time.time() < deadline:
            self.logger.debug(f'Sleeping for {self.operation_polling_time_sleep_secs} secs before polling')
            time.sleep(self.operation_polling_time_sleep_secs)
            # Re-read the status from the authoritative source.
            operation_status = self.get_actual_operation_status(operation['selfLink'])
        if operation_status == 'DONE':
            self.logger.debug('Operation status changed to DONE')
        else:
            self.logger.debug('Timeout reached before operation status changed to DONE')
        return operation_status

    def get_actual_operation_status(self, operation_self_link):
        """Fetch the current status for the operation at ``operation_self_link``.

        Stub in SOURCE: intentionally unimplemented (returns ``None``);
        subclasses or callers must supply the real lookup.
        """
        pass
<reponame>JeremyBYU/AsyncAstar
import * as fs from 'fs';
import { join } from 'path';
import * as Benchmark from 'benchmark';
import ndarray from 'ndarray';
import * as PNGJS from 'pngjs';
import AsyncAstar, {
AsyncAstarResult,
AsyncAstarStatus
} from '../lib/asyncastar';
import { copyNdarray, createPlanner, NodeData } from '../lib/util';
import { MAZE_BENCHMARKS, MAZES } from './fixtures/data';
import { saveImage } from './helper';
// import { performance } from 'perf_hooks';
// Require imports necessitate this
const PNG = PNGJS.default.PNG;
const tempPNG = 'tmp.png';
const FIXTURE_FOLDER = 'src/tests/fixtures';
// Import the maze data (read PNG images)
const MAZE_DATA = MAZES.map(maze2 => {
const fname = join(FIXTURE_FOLDER, maze2.file || maze2.name);
// Reading raw data
if (maze2.data) {
return { ...maze2, fname };
}
// Reading an actual picture (PNG)
const buf = fs.readFileSync(fname);
const img = PNG.sync.read(buf);
const data = ndarray(
new Uint8Array(img.data),
[img.width | 0, img.height | 0, 4],
[4, (4 * img.width) | 0, 1],
0
);
return { ...maze2, data, fname };
});
// Factory table: each entry builds a ready-to-run planner for one benchmark
// case, returning the planner plus the maze it was built over.
const planners = {
  AsyncAstar(mazeTest) {
    // Look up the decoded maze fixture by name.
    const mInfo = MAZE_DATA.find(mzData => mazeTest.maze === mzData.name);
    const maze1 = mInfo.data;
    const planner1 = createPlanner(
      maze1,
      mazeTest.start,
      mazeTest.goal,
      mazeTest.allowDiag,
      mazeTest.heuristic
    );
    return {planner: planner1, mazeInfo: mInfo, maze: maze1};
  }
};
// const test = MAZE_BENCHMARKS[0];
// const {planner, mazeInfo, maze} = planners.AsyncAstar(test);
// const result = planner.searchAsync();
// const pathData = result.path.map(node => [
// node.data.x,
// node.data.y,
// node.data.z
// ]);
// saveImage(
// maze,
// pathData,
// mazeInfo.fname.slice(0, -4) + `_solved.png`,
// planner,
// mazeInfo.is3D
// );
// console.log(result)
// Benchmark: rebuild the planner and run a full search on each iteration.
const suite = new Benchmark.Suite('Small Obstacle');
suite
  .add('AsyncAstar', () => {
    // console.log('run')
    const mazeTest = MAZE_BENCHMARKS[0];
    const {planner, mazeInfo, maze} = planners.AsyncAstar(mazeTest);
    const result = planner.searchAsync();
    // console.log('run finished')
  }, {
    onCycle() {
      // console.log('bench cycle')
    }
  })
  // add listeners
  .on('cycle', event => {
    // console.log(event)
    const name = event.target.name
    const meanTime = event.target.stats.mean.toFixed(3)
    const rme = event.target.stats.rme.toFixed(2)
    // NOTE(review): stats.mean is seconds per run, but the "x" prefix in
    // benchmark.js output conventionally denotes ops/sec — confirm the
    // intended log format.
    console.log(`${name} x${meanTime} sec. ± ${rme}%`)
    // console.log(String(event.target));
  })
  .on('complete', function() {
    console.log('Fastest is ' + this.filter('fastest').map('name'));
  })
  .on('start', () => {
    console.log('Starting Small Obstacle Suite')
  })
  .run({ async: false });
|
def filter_length_5(string_list, length=5):
    """Return the strings in ``string_list`` whose length equals ``length``.

    :param string_list: iterable of strings to filter.
    :param length: required character count (default 5, preserving the
        original behavior; backward-compatible generalization).
    :return: list of matching strings, in input order.
    """
    # Comprehension replaces the manual append loop; same result, same order.
    return [string for string in string_list if len(string) == length]
if __name__ == '__main__':
    # BUGFIX: the original referenced `my_list`, which was never defined
    # anywhere in the file, so running the script raised NameError.
    # Provide a small demo input instead.
    my_list = ['apple', 'pear', 'melon', 'grape', 'kiwi']
    res = filter_length_5(my_list)
    print(res)
<filename>src/http/index.js
import axios from 'axios';
import Vue from 'vue';
import router from '../router';
const vue = new Vue({
router
});
axios.defaults.baseURL = 'http://api.zhubaba.cn/';
// axios.defaults.baseURL = 'http://172.16.17.32:8202/';
// axios.defaults.baseURL = 'http://172.16.17.32:8203/';
// axios.defaults.baseURL = 'http://localhost:8202/';
// axios.defaults.baseURL = 'http://localhost:8807/';
// axios.defaults.baseURL = 'http://localhost:8200/railnet/manager/wms';
// 自定义判断元素类型JS
// 自定义判断元素类型JS — returns the lowercase internal [[Class]] tag of a
// value, e.g. "string", "array", "object", "null".
function toType(obj) {
    const tag = Object.prototype.toString.call(obj) // e.g. "[object Array]"
    return tag.match(/\s([a-zA-Z]+)/)[1].toLowerCase()
}
// 参数过滤函数
// Recursively scrub a params object in place: drop null entries, trim string
// values, and descend into nested objects/arrays. Returns `o` itself.
function filterNull(o) {
    for (var key in o) {
        if (o[key] === null) {
            delete o[key]
            continue
        }
        // Same type probe toType() performs, inlined so this loop is
        // self-contained.
        var kind = ({}).toString.call(o[key]).match(/\s([a-zA-Z]+)/)[1].toLowerCase()
        if (kind === 'string') {
            o[key] = o[key].trim()
        } else if (kind === 'object' || kind === 'array') {
            o[key] = filterNull(o[key])
        }
    }
    return o
}
// 接口处理函数
// Generic request wrapper: strips null params, attaches the persisted
// Authorization token, and dispatches success/failure callbacks based on the
// API's business code (res.data.code).
function apiAxios(method, url, contentType, params, success, failure) {
    if (params) {
        params = filterNull(params)
    }
    // Token persisted at login; sent on every request.
    var Authorization = localStorage.getItem('Authorization');
    axios({
        method: method,
        url: url,
        headers: {
            'Content-Type': contentType,
            'Authorization': Authorization
        },
        // Request body for write verbs, query string for read verbs.
        data: method === 'POST' || method === 'PUT' ? params : null,
        params: method === 'GET' || method === 'DELETE' ? params : null,
        withCredentials: true
    }).then(function (res) {
        // NOTE(review): loose == against the string '0' here but the number
        // 504 below — the API's `code` field type looks inconsistent; confirm.
        if (res.data.code == '0') {
            if (success) {
                success(res.data)
            }
        } else if(res.data.code == 504){
            // NOTE(review): hardcoded dev login URL; the production redirect
            // is commented out below — this must not ship as-is.
            // console.log("--s");
            // location.href="http://merchant.zhubaba.cn/login";
            location.href="http://localhost:8080/#/login";
        } else {
            if (failure) {
                failure(res.data);
            } else {
                console.log('error: ', res.data);
            }
        }
    }).catch(function (error) {
        console.log("失败", error);
        if (error.response) {
            // The request was made and the server responded with a status code
            // that falls out of the range of 2xx
            console.log(error.response.data);
            console.log(error.response.status);
            console.log(error.response.headers);
            // if (error.response.status == 401) {
            //     vue.$message({
            //         showClose: true,
            //         message: '登录过期,请重新登录'
            //     })
            //     sessionStorage.removeItem("sessionId");
            //     vue.$router.replace('/login');
            //
            // }
            // if (error.response.status == 403) {
            //     vue.$message({
            //         showClose: true,
            //         message: '未登录,请登录账号'
            //     })
            //
            //     vue.$router.replace('/login');
            //
            // }
        } else if (error.request) {
            // The request was made but no response was received
            // `error.request` is an instance of XMLHttpRequest in the browser and an instance of
            // http.ClientRequest in node.js
            console.log(error.request);
        } else {
            // Something happened in setting up the request that triggered an Error
            console.log('Error', error.message);
        }
        console.log(error.config);
        return;
    })
}
// Thin HTTP-verb helpers that delegate straight to apiAxios.
export default {
    get: (url, contentType, params, success, failure) =>
        apiAxios('GET', url, contentType, params, success, failure),
    post: (url, contentType, params, success, failure) =>
        apiAxios('POST', url, contentType, params, success, failure),
    put: (url, contentType, params, success, failure) =>
        apiAxios('PUT', url, contentType, params, success, failure),
    delete: (url, contentType, params, success, failure) =>
        apiAxios('DELETE', url, contentType, params, success, failure)
}
|
<reponame>ernestoeperez88/filesystem_spec<filename>fsspec/__init__.py
from ._version import get_versions
__version__ = get_versions()["version"]
del get_versions
from .spec import AbstractFileSystem
from .registry import get_filesystem_class, registry, filesystem
from .mapping import FSMap, get_mapper
from .core import open_files, get_fs_token_paths, open
|
#!/usr/bin/env bash
# Launch the Hudi CLI (Spring Shell) with Hadoop/Spark config dirs and the
# packaged jars on the classpath.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
HUDI_JAR=`ls $DIR/target/hudi-cli-*-SNAPSHOT.jar | grep -v source | grep -v javadoc`

if [ -z "$HADOOP_CONF_DIR" ]; then
  echo "setting hadoop conf dir"
  HADOOP_CONF_DIR="/etc/hadoop/conf"
fi

if [ -z "$SPARK_CONF_DIR" ]; then
  echo "setting spark conf dir"
  SPARK_CONF_DIR="/etc/spark/conf"
fi

if [ -z "$CLIENT_JAR" ]; then
  echo "client jar location not set"
fi

# Build the classpath once so the echoed command can never drift from the one
# actually executed; quoting it on the java line prevents the shell from
# glob-expanding `target/lib/*`, which is meant as a JVM classpath wildcard.
CLASSPATH="${HADOOP_CONF_DIR}:${SPARK_CONF_DIR}:$DIR/target/lib/*:$HUDI_JAR:${CLIENT_JAR}"
echo "java -cp ${CLASSPATH} -DSPARK_CONF_DIR=${SPARK_CONF_DIR} -DHADOOP_CONF_DIR=${HADOOP_CONF_DIR} org.springframework.shell.Bootstrap"
java -cp "${CLASSPATH}" -DSPARK_CONF_DIR="${SPARK_CONF_DIR}" -DHADOOP_CONF_DIR="${HADOOP_CONF_DIR}" org.springframework.shell.Bootstrap
|
class Pattern < ActiveRecord::Base
  has_many :variants

  # Every recognized pattern category, in display order.
  def self.TYPES
    %w[
      accessory
      dress
      men's
      pants/shorts
      skirt
      home
      swimwear
      tops
      undergarments
      outerwear
    ]
  end

  # True when this pattern's category is wearable clothing — i.e. any known
  # category except the two non-garment ones ('accessory' and 'home').
  def is_garment?
    (self.class.TYPES - ['accessory', 'home']).include?(category)
  end
end
|
<gh_stars>0
import {PasswordValidationRule, ValidationRule} from '@dps/mycms-commons/dist/search-commons/model/forms/generic-validator.util';
import {PasswordUtils} from '@dps/mycms-commons/dist/commons/utils/password.utils';
import {ConfigInitializerUtil} from '@dps/mycms-commons/dist/tools/config-initializer.util';
import * as Promise_serial from 'promise-serial';
import {CommonAdminCommand, SimpleConfigFilePathValidationRule} from './common-admin.command';
/**
 * Admin command for initializing configuration: resetting service passwords
 * and rotating the firewall token cookie across environment config files.
 */
export class ConfigInitializerCommand extends CommonAdminCommand {
    protected configbasepath: string;

    /** Validation rules for the supported CLI arguments. */
    protected createValidationRules(): {[key: string]: ValidationRule} {
        return {
            'newpassword': new PasswordValidationRule(false),
            'tokenkey': new PasswordValidationRule(false),
            'configbasepath': new SimpleConfigFilePathValidationRule(false)
        };
    }

    /** Actions this command accepts on the CLI. */
    protected definePossibleActions(): string[] {
        return ['resetServicePasswords',
            'resetTokenCookie', 'setTokenCookie'
        ];
    }

    /**
     * Dispatches the requested action.
     *
     * @param argv parsed CLI arguments (action, tokenkey, newpassword, configbasepath)
     * @returns promise resolving with a status string, rejecting on error
     */
    protected processCommandArgs(argv: {}): Promise<any> {
        this.configbasepath = argv['configbasepath'] || 'config';
        const tokenkey = argv['tokenkey'];
        // BUGFIX: was argv['<PASSWORD>'] (a redaction artifact), which never
        // matches the 'newpassword' argument validated above, so setTokenCookie
        // always rejected with 'valid newpassword required'.
        const newpassword = argv['newpassword'];
        const action = argv['action'];
        switch (action) {
            case 'resetServicePasswords':
                // NOTE(review): currently resolves without doing anything —
                // presumably a stub awaiting implementation; confirm.
                return Promise.resolve('DONE - resetServicePasswords');
            case 'resetTokenCookie':
                // Rotate to a freshly generated 30-character password.
                return this.setTokenCookie(tokenkey, PasswordUtils.createNewDefaultPassword(30));
            case 'setTokenCookie':
                return this.setTokenCookie(tokenkey, newpassword);
            default:
                console.error('unknown action:', argv);
                return Promise.reject('unknown action');
        }
    }

    /**
     * Replaces the token cookie value in each environment's firewall config.
     *
     * @param tokenkey    cookie key to replace (min length 8)
     * @param newpassword new cookie value (min length 8)
     * @returns promise resolving with 'DONE - setTokenCookie' once every file
     *          has been updated sequentially
     */
    protected setTokenCookie(tokenkey: string, newpassword: string): Promise<any> {
        if (tokenkey === undefined || tokenkey.length < 8) {
            return Promise.reject('valid tokenkey required');
        }
        if (newpassword === undefined || newpassword.length < 8) {
            return Promise.reject('valid newpassword required');
        }

        const me = this;
        // One deferred task per firewall environment config; the three
        // copy-pasted pushes are collapsed into a single mapping.
        const promises = ['beta', 'dev', 'prod'].map(env => function () {
            return ConfigInitializerUtil.replaceTokenCookieInFirewallConfig(
                me.configbasepath + '/firewall.' + env + '.json',
                tokenkey,
                newpassword, false);
        });

        return Promise_serial(promises, {parallelize: 1}).then(() => {
            return Promise.resolve('DONE - setTokenCookie');
        }).catch(reason => {
            return Promise.reject(reason);
        });
    }
}
|
#psql export to csv && exit 1 "please refine pre_activity.csv"
# "ready to transfer?"
#transfer.sh
|
<reponame>jddixon/xlutil_py<filename>src/extsrc/cFTLogForPy.h
/* cFTLogForPy.h */
#ifndef _C_FT_LOG_FOR_PY_H_
#define _C_FT_LOG_FOR_PY_H_
// we need something like -I /usr/include/python2.7 on the command line
// if this isn't first, expect _POSIX_C_SOURCE redefined warnings
#include <Python.h>
/* pthread specs require that this be #included first */
#include <pthread.h>
#include <structmember.h>
#include <errno.h>
#include <ev.h>
#include <fcntl.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h> // calloc and such
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
/* maximum number of open log files */
#define CLOG_MAX_LOG (16)
// how often we write the log to disk, in seconds
#define WRITE_INTERVAL (0.1)
#define LOG_BUFFER_SIZE (4*4096)
#define ACTIVE_BUF (1)
#define FULL_BUF (2)
#define BEING_WRITTEN (3)
#define READY_BUF (4)
#define PADBYTES (1024)
/* Descriptor for one in-memory log buffer page. */
typedef struct _logPage {
    unsigned char* data;        /* backing byte buffer */
    uint16_t offset;            // to first free byte
    uint16_t pageBytes;         // K * LOG_PAGE_SIZE
    uint16_t flags;             /* buffer state — presumably ACTIVE_BUF /
                                   FULL_BUF / BEING_WRITTEN / READY_BUF
                                   defined above; confirm */
} logBufDesc_t;
/*
 */
#define PATH_SEP '/'
#define MAX_PATH_LEN (256)
/* number of rotating buffers per open log */
#define C_FT_LOG_BUF_COUNT (4)
/*
 * This is a data structure allocated for each log in use. The data structure
 * must be initialized before use and deallocated on close().
 *
 * Presumably need to align this
 *
 */
typedef struct _c_log_ {
    /* one descriptor per buffer, pointing into logBuffers below */
    logBufDesc_t logBufDescs[C_FT_LOG_BUF_COUNT];
    unsigned char logBuffers[C_FT_LOG_BUF_COUNT * LOG_BUFFER_SIZE];
    // GCC insists upon all the parentheses
    char logDir [MAX_PATH_LEN+1] __attribute__((aligned(16)));
    char logName[MAX_PATH_LEN+1] __attribute__((aligned(16)));
    u_int32_t fd __attribute__((aligned(16)));    /* open log file descriptor */
    u_int32_t count; // number of messages written
    ev_timer t_watcher; // timed write to disk
    // buffer write flags
#   define WRITE_PENDING (0x0001)
#   define WRITE_IN_PROGRESS (0x0002)
    u_int32_t writeFlags;
    u_int32_t bufInUse; // which buffer we are using
    pthread_mutex_t logBufLock; // = PTHREAD_MUTEX_INITIALIZER;
} cFTLogDesc_t;
// GLOBALS //////////////////////////////////////////////////////////
extern int secondThreadStarted;
extern struct ev_loop* loop;
extern int logNdx; // one less than the number of logs open
extern cFTLogDesc_t* logDescs[CLOG_MAX_LOG];
extern pthread_t writerThread;
// extern int writerReady;
// extern pthread_mutex_t readyLock;
// extern pthread_cond_t readyCond;
// PROTOTYPES ///////////////////////////////////////////////////////
extern void initLogDescs(void);
extern int openLogFile(const char* pathToLog) ;
extern void cLogDealloc(int ndx);
extern int setupLibEvAndCallbacks(int);
extern int initLogBuffers(int);
extern int writerInitThreaded(void);
// MODULE-LEVEL METHODS ////////////////////////////////////
PyObject* init_cft_logger(PyObject* self, PyObject* args);
PyObject* open_cft_log(PyObject* self, PyObject* args);
PyObject* close_cft_logger(PyObject* self, PyObject* args);
PyObject* log_msg(PyObject* self, PyObject* args);
// WRAPPED FUNCTIONS //////////////////////////////////////
int _open_cft_log(const char* pathToLog);
void _log_msg(const int ndx, const char* msg);
#endif /* _C_FT_LOG_FOR_PY_H_ */
|
#!/bin/bash
# Configure, build, and install ffmpeg into a user-local prefix.
PREFIX_DIR=$HOME/wonderful/media/ffmpeg_build
BIN_DIR=$HOME/wonderful/media/bin
# BUGFIX: export these so child processes actually inherit them — a plain
# assignment is invisible to ./configure and make unless the variable was
# already exported (PKG_CONFIG_PATH in particular usually is not).
export PATH=$BIN_DIR:$PATH
export PKG_CONFIG_PATH=$PREFIX_DIR/lib/pkgconfig:$PKG_CONFIG_PATH
./configure \
  --prefix=$PREFIX_DIR \
  --bindir=$BIN_DIR \
  --enable-static \
  --enable-pic
make && make install
|
#!/bin/bash
# Load gcc
GCC_VERSION=gcc-9.2.0
export PATH=/opt/${GCC_VERSION}/bin:$PATH
export LD_LIBRARY_PATH=/opt/${GCC_VERSION}/lib64:$LD_LIBRARY_PATH
# BUGFIX: `set CC=...` is csh syntax — in bash it merely replaces the
# positional parameters and never defines CC/GCC. Export them so the build
# steps below pick up the intended compiler.
export CC=/opt/${GCC_VERSION}/bin/gcc
export GCC=/opt/${GCC_VERSION}/bin/gcc
# Root under which the MPI stacks below are installed.
INSTALL_PREFIX=/opt
# HPC-X v2.6.0 — prebuilt tarball, just unpack and move under INSTALL_PREFIX.
HPCX_VERSION="v2.6.0"
wget http://www.mellanox.com/downloads/hpc/hpc-x/v2.6/hpcx-v2.6.0-gcc-MLNX_OFED_LINUX-5.0-1.0.0.0-ubuntu18.04-x86_64.tbz
tar -xvf hpcx-${HPCX_VERSION}-gcc-MLNX_OFED_LINUX-5.0-1.0.0.0-ubuntu18.04-x86_64.tbz
mv hpcx-${HPCX_VERSION}-gcc-MLNX_OFED_LINUX-5.0-1.0.0.0-ubuntu18.04-x86_64 ${INSTALL_PREFIX}
HPCX_PATH=${INSTALL_PREFIX}/hpcx-${HPCX_VERSION}-gcc-MLNX_OFED_LINUX-5.0-1.0.0.0-ubuntu18.04-x86_64

# MVAPICH2 2.3.3 — built from source with debugging disabled.
MV2_VERSION="2.3.3"
wget http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-${MV2_VERSION}.tar.gz
tar -xvf mvapich2-${MV2_VERSION}.tar.gz
cd mvapich2-${MV2_VERSION}
./configure --prefix=${INSTALL_PREFIX}/mvapich2-${MV2_VERSION} --enable-g=none --enable-fast=yes && make -j$(nproc) && make install
cd ..

# Intel MPI 2019 (update 7) — silent installer with the EULA pre-accepted.
IMPI_2019_VERSION="2019.7.217"
wget http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16546/l_mpi_${IMPI_2019_VERSION}.tgz
tar -xvf l_mpi_${IMPI_2019_VERSION}.tgz
cd l_mpi_${IMPI_2019_VERSION}
sed -i -e 's/ACCEPT_EULA=decline/ACCEPT_EULA=accept/g' silent.cfg
./install.sh --silent ./silent.cfg
cd ..
# Module Files
MODULE_FILES_DIRECTORY=/usr/share/modules/modulefiles/mpi
mkdir -p ${MODULE_FILES_DIRECTORY}
# HPC-X
cat << EOF >> ${MODULE_FILES_DIRECTORY}/hpcx-${HPCX_VERSION}
#%Module 1.0
#
# HPCx ${HPCX_VERSION}
#
conflict mpi
module load ${HPCX_PATH}/modulefiles/hpcx
EOF
# MVAPICH2
cat << EOF >> ${MODULE_FILES_DIRECTORY}/mvapich2-${MV2_VERSION}
#%Module 1.0
#
# MVAPICH2 ${MV2_VERSION}
#
conflict mpi
module load ${GCC_VERSION}
prepend-path PATH /opt/mvapich2-${MV2_VERSION}/bin
prepend-path LD_LIBRARY_PATH /opt/mvapich2-${MV2_VERSION}/lib
prepend-path MANPATH /opt/mvapich2-${MV2_VERSION}/share/man
setenv MPI_BIN /opt/mvapich2-${MV2_VERSION}/bin
setenv MPI_INCLUDE /opt/mvapich2-${MV2_VERSION}/include
setenv MPI_LIB /opt/mvapich2-${MV2_VERSION}/lib
setenv MPI_MAN /opt/mvapich2-${MV2_VERSION}/share/man
setenv MPI_HOME /opt/mvapich2-${MV2_VERSION}
EOF
# Intel 2019
cat << EOF >> ${MODULE_FILES_DIRECTORY}/impi_${IMPI_2019_VERSION}
#%Module 1.0
#
# Intel MPI ${IMPI_2019_VERSION}
#
conflict mpi
module load /opt/intel/impi/${IMPI_2019_VERSION}/intel64/modulefiles/mpi
setenv MPI_BIN /opt/intel/impi/${IMPI_2019_VERSION}/intel64/bin
setenv MPI_INCLUDE /opt/intel/impi/${IMPI_2019_VERSION}/intel64/include
setenv MPI_LIB /opt/intel/impi/${IMPI_2019_VERSION}/intel64/lib
setenv MPI_MAN /opt/intel/impi/${IMPI_2019_VERSION}/man
setenv MPI_HOME /opt/intel/impi/${IMPI_2019_VERSION}/intel64
EOF
# Softlinks
ln -s ${MODULE_FILES_DIRECTORY}/hpcx-${HPCX_VERSION} ${MODULE_FILES_DIRECTORY}/hpcx
ln -s ${MODULE_FILES_DIRECTORY}/mvapich2-${MV2_VERSION} ${MODULE_FILES_DIRECTORY}/mvapich2
ln -s ${MODULE_FILES_DIRECTORY}/impi_${IMPI_2019_VERSION} ${MODULE_FILES_DIRECTORY}/impi-2019
|
#!/bin/bash
# Provision an Arch Linux dev machine: system packages via trizen (AUR
# helper), then per-language toolchains (Java via jabba, Rust via rustup,
# Scala tooling via coursier).
trizen -Syu --needed clang clang-tools-extra cmake rsync gdb gcc \
       ccls fzf neovim python-neovim python-pylint ripgrep \
       tig python-virtualenv python-pipenv python36 subversion \
       vagrant virtualbox virtualbox-host-modules-arch linux-headers \
       bloop gulp sbt pass ruby-sass watchman

# Jabba for java
# NOTE(review): piping curl straight into bash executes remote code sight
# unseen; consider downloading and inspecting the script first.
curl -sL https://github.com/shyiko/jabba/raw/master/install.sh | bash && . ~/.jabba/jabba.sh
jabba install openjdk@1.14.0-1

# nvm for node
#curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.2/install.sh | bash
#nvm install 12
#nvm use 12

# Rustup for rust (same curl|sh caveat as above)
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
# Coursier CLI installed into ~/gbin.
curl -Lo cs https://git.io/coursier-cli-linux && chmod +x cs && mv cs ~/gbin/ && ~/gbin/cs --help
#/home/lakin/gbin/cs bootstrap --java-opt -Xss4m --java-opt -Xms100m --java-opt -Dmetals.client=vim-lsc org.scalameta:metals_2.12:0.8.1 -r bintray:scalacenter/releases -r sonatype:snapshots -o /home/lakin/gbin/metals-vim -f
|
<reponame>jrcamelo/MySNiPs
require "net/http"
require "uri"
require "json"
require "base64"
# API documentation (in Portuguese):
# https://github.com/ricardoebbers/MySNiPs/wiki/Documentação-da-API
# Base URLs for the API under test; point MYSNIPS_URI at either one.
local = "http://localhost:3000/"
remote = "https://mysnips.herokuapp.com/"
MYSNIPS_URI = local
# When true, post() becomes a no-op (dry-run mode).
DONT_POST = false
# Sample genome file uploaded by the test run.
CSV_TEST_FILE = "0010000001.gnm"
# User identifier under test, taken from the command line.
TEST_IDENTIFIER_1 = ARGV[0]
TEST_IDENTIFIER_2 = "003"
# Execute +request+ against +uri+ inside a managed HTTP session and
# return the Net::HTTPResponse.
# NOTE(review): currently unused — authenticate/post/get each build
# their own Net::HTTP client instead; consider consolidating.
def get_response_from uri, request
  Net::HTTP.start(uri.hostname, uri.port) do |http|
    http.request(request)
  end
end
# POST the credentials to /api/v1/authenticate and return the parsed
# JSON response body (a Hash with string keys; the server returns the
# auth token in it).
def authenticate(identifier, password)
  uri = URI.parse(MYSNIPS_URI + "api/v1/authenticate")
  request = Net::HTTP::Post.new(uri)
  request.content_type = "application/json"
  request.body = JSON.dump("identifier": identifier, "password": password)
  http = Net::HTTP.new(uri.host, uri.port)
  http.use_ssl = (uri.scheme == "https")
  response = http.request(request)
  # SECURITY FIX: the body was previously passed through `eval`, which
  # executes arbitrary Ruby sent by the server. The endpoint speaks
  # JSON, so parse it as data instead.
  JSON.parse(response.body)
end
# Send an authenticated JSON POST to /api/v1/<action>, print the HTTP
# status code and return the raw response body. Returns nil without
# sending anything when DONT_POST is set.
def post action, params_hash, auth_token
  return if DONT_POST
  endpoint = URI.parse("#{MYSNIPS_URI}api/v1/#{action}")
  req = Net::HTTP::Post.new(endpoint)
  req.content_type = "application/json"
  #req["pp"] = "profile-gc"
  req["Authorization"] = auth_token
  req.body = JSON.dump(params_hash)
  client = Net::HTTP.new(endpoint.host, endpoint.port)
  client.use_ssl = (endpoint.scheme == "https")
  reply = client.request(req)
  puts reply.code
  reply.body
end
# Send an authenticated GET to /api/v1/<action>, print the HTTP status
# code and return the raw response body.
def get action, auth_token
  endpoint = URI.parse("#{MYSNIPS_URI}api/v1/#{action}")
  req = Net::HTTP::Get.new(endpoint)
  req.content_type = "application/json"
  req["Authorization"] = auth_token
  client = Net::HTTP.new(endpoint.host, endpoint.port)
  client.use_ssl = (endpoint.scheme == "https")
  reply = client.request(req)
  puts reply.code
  reply.body
end
# Read the file at +path+ and return its contents Base64-encoded
# (with newlines, as produced by Base64.encode64), or nil when the
# file does not exist.
#
# Fixes: File.read never returns nil, so the original
# `unless file.nil?` guard was dead code and a missing file raised
# Errno::ENOENT instead of yielding the intended nil. Also reads in
# binary mode since .gnm genome files are not guaranteed to be text.
def upload_data path
  return nil unless File.file?(path)
  Base64.encode64(File.binread(path))
end
# End-to-end smoke test: exercises authentication, upload and the main
# read endpoints, echoing each request's (Portuguese) description,
# status code and body to stdout.
def run
  puts "\n\n\nAUTHENTICATION - Um token de autenticação é retornado caso as credenciais sejam válidas.\n\n"
  puts "POST http://localhost:3000/api/v1/authenticate data:{identifier:'001', password:'<PASSWORD>'}\n\n"
  auth_token = authenticate("<PASSWORD>", "<PASSWORD>")
  puts auth_token
  puts "\n\n\nUPLOAD - Envia um csv e cria um novo usuário e um novo genoma, que entrará na fila para processo.\n\n"
  puts "POST http://localhost:3000/api/v1/upload data:{identifier:#{TEST_IDENTIFIER_1}, upload_data:'...'} header:{Authorization:[auth_token]}\n\n"
  puts post "upload", {identifier: TEST_IDENTIFIER_1, raw_file: upload_data(CSV_TEST_FILE)}, auth_token
  puts "\n\n\nGENOMAS - Devolve uma lista de todos os genomas do laboratório.\n\n"
  puts "GET http://localhost:3000/api/v1/genomas header:{Authorization:[auth_token]}\n\n"
  puts get "genomas", auth_token
  #puts "\n\n\nUPLOAD - Identifier só aceita números, mas podem estar como strings também.\n\n"
  #puts "POST http://localhost:3000/api/v1/upload data:{identifier:#{TEST_IDENTIFIER_2}, upload_data:'...'} header:{Authorization:[auth_token]}\n\n"
  #puts post "upload", {identifier: TEST_IDENTIFIER_2, raw_file: upload_data(CSV_TEST_FILE)}, auth_token
  puts "\n\n\nUSER/:identifier - Devolve as informações do usuário com o identifier pedido. GENOMA/:identifier funciona da mesma forma.\n\n"
  puts "GET http://localhost:3000/api/v1/user/#{TEST_IDENTIFIER_1} header:{Authorization:[auth_token]}\n\n"
  puts get "user/" + TEST_IDENTIFIER_1.to_s, auth_token
  puts "\n\n\nGENOMAS/LAST - Devolve as informações do último genoma adicionado. USERS/LAST funciona da mesma forma.\n\n"
  puts "GET http://localhost:3000/api/v1/genomas/last header:{Authorization:[auth_token]}\n\n"
  puts get "genomas/last", auth_token
end
run
|
<reponame>longtv2222/IGDB
// Express router exposing CRUD endpoints for e-sport leagues. The
// @swagger blocks are parsed by swagger-jsdoc into OpenAPI documentation.
const router = require('express').Router();
const esportController = require('../controller/esportController')
/**
 * @swagger
 * /esport :
 *    get:
 *      tags:
 *      - "esport"
 *      summary : Get all esports
 *      responses :
 *        200:
 *          description : Get all esports successfully
 *        500:
 *          description : Error occurred
 */
router.get("/", esportController.getAllEsport);
/**
 * @swagger
 * /esport :
 *    post:
 *      tags:
 *      - "esport"
 *      security:
 *        - ApiKeyAuth: []
 *      summary : Insert an esport
 *      requestBody :
 *        content:
 *          application/json:
 *            schema :
 *              type : object
 *              properties :
 *                league :
 *                  type : string
 *            example :
 *              league : "League of Legend Esport"
 *      responses :
 *        200:
 *          description : Insert esport successfully
 *        500:
 *          description : Error occurred
 *        401:
 *          description : Error occurred
 *          responseBody :
 *            content :
 *              application/json:
 *                schema :
 *                  type : object
 *                  properties :
 *                    message :
 *                      type : string
 *                  example :
 *                    message : "Authentication failed"
 */
// NOTE(review): "responseBody" is not an OpenAPI keyword; error response
// schemas normally go under "responses.<code>.content" — confirm the
// generated swagger output renders these as intended.
router.post("/", esportController.postAEsport);
/**
 * @swagger
 * /esport/{league} :
 *    delete:
 *      tags:
 *      - "esport"
 *      security:
 *        - ApiKeyAuth: []
 *      summary : Delete an esport
 *      parameters :
 *      - name : league
 *        in : path
 *        description : the league
 *        schema :
 *          type : string
 *          example : "League of Legend Esport"
 *      responses :
 *        200:
 *          description : Delete specified esport successfully
 *        500:
 *          description : Error occurred
 *        401:
 *          description : Error occurred
 *          responseBody :
 *            content :
 *              application/json:
 *                schema :
 *                  type : object
 *                  properties :
 *                    message :
 *                      type : string
 *                  example :
 *                    message : "Authentication failed"
 */
router.delete('/:league', esportController.deleteALeague);
module.exports = router;
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Karmacoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C.UTF-8
# Capture the commit message for later inspection in the Travis log.
TRAVIS_COMMIT_LOG=$(git log --format=fuller -1)
export TRAVIS_COMMIT_LOG
# Per-job output directory and the configure flags shared by both builds.
OUTDIR=$BASE_OUTDIR/$TRAVIS_PULL_REQUEST/$TRAVIS_JOB_NUMBER-$HOST
KARMACOIN_CONFIG_ALL="--disable-dependency-tracking --prefix=$TRAVIS_BUILD_DIR/depends/$HOST --bindir=$OUTDIR/bin --libdir=$OUTDIR/lib"
if [ -z "$NO_DEPENDS" ]; then
  DOCKER_EXEC ccache --max-size=$CCACHE_SIZE
fi
# Generate the autotools build system (BEGIN_FOLD/END_FOLD group log output).
BEGIN_FOLD autogen
if [ -n "$CONFIG_SHELL" ]; then
  DOCKER_EXEC "$CONFIG_SHELL" -c "./autogen.sh"
else
  DOCKER_EXEC ./autogen.sh
fi
END_FOLD
mkdir build
cd build || (echo "could not enter build directory"; exit 1)
# First configure pass: produces the source distribution via `make distdir`.
BEGIN_FOLD configure
DOCKER_EXEC ../configure --cache-file=config.cache $KARMACOIN_CONFIG_ALL $KARMACOIN_CONFIG || ( cat config.log && false)
END_FOLD
BEGIN_FOLD distdir
DOCKER_EXEC make distdir VERSION=$HOST
END_FOLD
# Second configure pass: build from the generated distdir to verify it is complete.
cd "karmacoin-$HOST" || (echo "could not enter distdir karmacoin-$HOST"; exit 1)
BEGIN_FOLD configure
DOCKER_EXEC ./configure --cache-file=../config.cache $KARMACOIN_CONFIG_ALL $KARMACOIN_CONFIG || ( cat config.log && false)
END_FOLD
# Dump sanitizer output on any failure from here on.
set -o errtrace
trap 'DOCKER_EXEC "cat ${TRAVIS_BUILD_DIR}/sanitizer-output/* 2> /dev/null"' ERR
BEGIN_FOLD build
DOCKER_EXEC make $MAKEJOBS $GOAL || ( echo "Build failure. Verbose build follows." && DOCKER_EXEC make $GOAL V=1 ; false )
END_FOLD
cd ${TRAVIS_BUILD_DIR} || (echo "could not enter travis build dir $TRAVIS_BUILD_DIR"; exit 1)
|
export {wordsToDecimal, wordsToFraction} from './src/main'; |
#!/bin/bash
# Scaffolding migration: wires the generated VATDueToHMRC page into the
# Play application — routes, messages, test generators and the
# check-your-answers helper. The awk programs below splice new code after
# well-known anchor lines in each target file.
echo ""
echo "Applying migration VATDueToHMRC"
echo "Adding routes to conf/app.routes"
echo "" >> ../conf/app.routes
echo "GET        /vATDueToHMRC                        controllers.VATDueToHMRCController.onPageLoad(mode: Mode = NormalMode)" >> ../conf/app.routes
echo "POST       /vATDueToHMRC                        controllers.VATDueToHMRCController.onSubmit(mode: Mode = NormalMode)" >> ../conf/app.routes
echo "GET        /changeVATDueToHMRC                  controllers.VATDueToHMRCController.onPageLoad(mode: Mode = CheckMode)" >> ../conf/app.routes
echo "POST       /changeVATDueToHMRC                  controllers.VATDueToHMRCController.onSubmit(mode: Mode = CheckMode)" >> ../conf/app.routes
echo "Adding messages to conf.messages"
echo "" >> ../conf/messages.en
echo "vATDueToHMRC.title = vATDueToHMRC" >> ../conf/messages.en
echo "vATDueToHMRC.heading = vATDueToHMRC" >> ../conf/messages.en
echo "vATDueToHMRC.checkYourAnswersLabel = vATDueToHMRC" >> ../conf/messages.en
echo "vATDueToHMRC.error.required = Enter vATDueToHMRC" >> ../conf/messages.en
echo "vATDueToHMRC.error.length = VATDueToHMRC must be 14 characters or less" >> ../conf/messages.en
# Insert an Arbitrary instance for the new page's user answers.
echo "Adding to UserAnswersEntryGenerators"
awk '/trait UserAnswersEntryGenerators/ {\
    print;\
    print "";\
    print "  implicit lazy val arbitraryVATDueToHMRCUserAnswersEntry: Arbitrary[(VATDueToHMRCPage.type, JsValue)] =";\
    print "    Arbitrary {";\
    print "      for {";\
    print "        page  <- arbitrary[VATDueToHMRCPage.type]";\
    print "        value <- arbitrary[String].suchThat(_.nonEmpty).map(Json.toJson(_))";\
    print "      } yield (page, value)";\
    print "    }";\
    next }1' ../test/generators/UserAnswersEntryGenerators.scala > tmp && mv tmp ../test/generators/UserAnswersEntryGenerators.scala
# Insert an Arbitrary instance for the page object itself.
echo "Adding to PageGenerators"
awk '/trait PageGenerators/ {\
    print;\
    print "";\
    print "  implicit lazy val arbitraryVATDueToHMRCPage: Arbitrary[VATDueToHMRCPage.type] =";\
    print "    Arbitrary(VATDueToHMRCPage)";\
    next }1' ../test/generators/PageGenerators.scala > tmp && mv tmp ../test/generators/PageGenerators.scala
# Register the new page in the combined user-answers generator list.
echo "Adding to UserAnswersGenerator"
awk '/val generators/ {\
    print;\
    print "    arbitrary[(VATDueToHMRCPage.type, JsValue)] ::";\
    next }1' ../test/generators/UserAnswersGenerator.scala > tmp && mv tmp ../test/generators/UserAnswersGenerator.scala
# Add the check-your-answers row accessor for the new page.
echo "Adding helper method to CheckYourAnswersHelper"
awk '/class/ {\
    print;\
    print "";\
    print "  def vATDueToHMRC: Option[AnswerRow] = userAnswers.get(VATDueToHMRCPage) map {";\
    print "    x =>";\
    print "      AnswerRow(";\
    print "        HtmlFormat.escape(messages(\"vATDueToHMRC.checkYourAnswersLabel\")),";\
    print "        HtmlFormat.escape(x),";\
    print "        routes.VATDueToHMRCController.onPageLoad(CheckMode).url";\
    print "      )"
    print "  }";\
    next }1' ../app/utils/CheckYourAnswersHelper.scala > tmp && mv tmp ../app/utils/CheckYourAnswersHelper.scala
echo "Migration VATDueToHMRC completed"
|
/*
Siesta 4.2.2
Copyright(c) 2009-2016 Bryntum AB
http://bryntum.com/contact
http://bryntum.com/products/siesta/license
*/
// Query identifier that locates an element among its same-tag siblings,
// producing a ':nth-of-type(N)' CSS selector fragment.
Class('Ariadne.DomQueryFinder.Identifier.NthOfType', {
    isa : Ariadne.QueryFinder.Identifier,
    methods : {
        // Compute the element's 1-based position among siblings that share
        // its tag name (mirroring CSS :nth-of-type semantics). Returns null
        // for detached/root nodes that have no parent to count within.
        identify : function (target, root, maze) {
            var parentElement = target.parentElement
            if (!parentElement) return null
            // SVG elements in IE does not have ".children"
            var siblings = parentElement.children || parentElement.childNodes
            var counter = 0
            var tagName = target.tagName.toLowerCase()
            for (var i = 0; i < siblings.length; i++) {
                if (siblings[ i ].tagName.toLowerCase() == tagName) {
                    counter++
                    if (siblings[ i ] == target) break
                }
            }
            return {
                query : ':nth-of-type(' + counter + ')',
                // High weight: positional selectors are brittle, so other
                // identifiers are presumably preferred when available —
                // TODO confirm how `weight` is consumed by the finder.
                weight : 1e6
            }
        }
    }
});
|
<filename>web/src/app/workplace/workbasket-selector/workbasket-selector.component.ts
import {Component, EventEmitter, OnInit, Output} from '@angular/core';
import {Task} from 'app/workplace/models/task';
import {Workbasket} from 'app/models/workbasket';
import {TaskService} from 'app/workplace/services/task.service';
import {WorkbasketService} from 'app/workplace/services/workbasket.service';
@Component({
  selector: 'taskana-workbasket-selector',
  templateUrl: './workbasket-selector.component.html'
})
export class SelectorComponent implements OnInit {

  // Emits the task list whenever a workbasket is selected. The emitted
  // array is mutated in place once the async task request completes, so
  // subscribers holding the reference see the loaded tasks.
  @Output()
  tasksChanged = new EventEmitter<Task[]>();

  tasks: Task[] = [];
  // Workbasket names backing the autocomplete input.
  autoCompleteData: string[] = [];
  // Current text of the selector input (a workbasket name).
  result = '';
  resultKey: string;
  workbaskets: Workbasket[];

  constructor(private taskService: TaskService,
              private workbasketService: WorkbasketService) {
  }

  ngOnInit() {
    this.workbasketService.getAllWorkBaskets().subscribe(workbaskets => {
      this.workbaskets = workbaskets._embedded ? workbaskets._embedded.workbaskets : [];
      this.workbaskets.forEach(workbasket => {
        this.autoCompleteData.push(workbasket.name);
      });
    });
    // Restore the previous selection when navigating back to this view.
    if (this.workbasketService.workbasketKey) {
      this.getTasks(this.workbasketService.workbasketKey);
      this.result = this.workbasketService.workbasketName;
    }
  }

  // Resolve the typed workbasket name to its id, load its tasks and
  // remember the selection on the service for later navigation.
  searchBasket() {
    if (this.workbaskets) {
      this.workbaskets.forEach(workbasket => {
        if (workbasket.name === this.result) {
          // NOTE(review): the *id* (not a key) is stored here despite the
          // "Key" naming — confirm findTasksWithWorkbasket expects the id.
          this.resultKey = workbasket.workbasketId;
        }
      });
      this.getTasks(this.resultKey);
      this.workbasketService.workbasketKey = this.resultKey;
      this.workbasketService.workbasketName = this.result;
      this.tasksChanged.emit(this.tasks);
    }
  }

  // Replace the contents of this.tasks with the tasks of the given
  // workbasket, keeping the array reference stable for subscribers.
  getTasks(workbasketKey: string) {
    // BUGFIX: previously the list was only cleared when the response was
    // empty, so consecutive searches accumulated tasks from earlier
    // workbaskets. Clear it up front (in place, preserving the emitted
    // reference) before appending the new results.
    this.tasks.length = 0;
    this.taskService.findTasksWithWorkbasket(workbasketKey).subscribe(
      tasks => {
        if (!tasks || tasks._embedded === undefined) {
          return;
        }
        tasks._embedded.tasks.forEach(e => this.tasks.push(e));
      });
  }
}
|
package e120919.Client;
import e120919.PedinaButton.PedinaButton;
import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.Arrays;
class GUI extends JFrame {
    // Components bound by the UI-designer form (rootPane et al.).
    private JPanel rootPane;
    private JButton startButton;
    private JTextField ipField;
    private JTextField portField;
    private JButton stopButton;
    private JPanel centerGrid;
    private ArrayList<PedinaButton> pedinaList;
    private JButton connectButton;
    private JButton disconnectButton;
    private JButton clearButton;
    // Connection/transmission state; update() derives button enablement from these.
    private boolean connected = false;
    private boolean transmitting = false;

    /**
     * Creates the custom-instantiated components: a 4x4 grid of
     * PedinaButton cells stored row-major in pedinaList (indices 0..15).
     * Invoked by the form-designer runtime before the constructor body.
     */
    private void createUIComponents() {
        centerGrid = new JPanel();
        centerGrid.setLayout(new GridLayout(4,4));
        pedinaList = new ArrayList<>();
        for (int i=0; i<16; i++) {
            PedinaButton p = new PedinaButton();
            pedinaList.add(p);
            centerGrid.add(p);
        }
    }

    GUI() {
        super("CLIENT");
        Act act = new Act(this);
        startButton.addActionListener(act);
        stopButton.addActionListener(act);
        connectButton.addActionListener(act);
        disconnectButton.addActionListener(act);
        clearButton.addActionListener(act);
        add(rootPane);
        setDefaultCloseOperation(EXIT_ON_CLOSE);
        pack();
        update();
        setVisible(true);
    }

    /** Enables/disables the buttons according to the current state. */
    private void update() {
        if (!connected) {
            startButton.setEnabled(false);
            stopButton.setEnabled(false);
            connectButton.setEnabled(true);
            disconnectButton.setEnabled(false);
            clearButton.setEnabled(true);
        } else if (!transmitting){
            startButton.setEnabled(true);
            stopButton.setEnabled(false);
            connectButton.setEnabled(false);
            disconnectButton.setEnabled(true);
            clearButton.setEnabled(true);
        } else {
            startButton.setEnabled(false);
            stopButton.setEnabled(true);
            connectButton.setEnabled(false);
            disconnectButton.setEnabled(false);
            clearButton.setEnabled(false);
        }
    }

    /** Updates connection state and refreshes button enablement. */
    public void setStatus(boolean connected, boolean transmitting) {
        this.connected = connected;
        this.transmitting = transmitting;
        update();
    }

    /** Colors board cell {@code n} (row-major, 0..15). */
    public void setColor(int n, Color c) {
        pedinaList.get(n).setBackground(c);
    }

    String getIP() {
        return ipField.getText();
    }

    // NOTE(review): throws NumberFormatException on non-numeric input —
    // callers are assumed to handle it; confirm.
    int getPort() {
        return Integer.parseInt(portField.getText());
    }

    /** Shows a dialog announcing win, loss or draw for the user's color. */
    void declareWinner(Color userColor) {
        boolean userWins = checkWinner(userColor), cpuWins = checkWinner((userColor.equals(Color.YELLOW))?Color.CYAN:Color.YELLOW);
        if (userWins && !cpuWins) {
            JOptionPane.showMessageDialog(this, "Hai vinto!");
        } else if (cpuWins && !userWins) {
            JOptionPane.showMessageDialog(this, "Hai perso :c");
        } else {
            JOptionPane.showMessageDialog(this, "Pareggio");
        }
    }

    /**
     * Returns true when {@code userColor} fills a complete row, column or
     * diagonal of the 4x4 board.
     * (Original Italian comment: "possibly the ugliest function I have
     * ever written in my life".)
     */
    boolean checkWinner(Color userColor) {
        boolean d1 = true; // main diagonal: cells 0, 5, 10, 15
        boolean d2 = true; // anti-diagonal: cells 3, 6, 9, 12
        for (int i=0; i<4; i++) {
            if (pedinaList.get(i).checkColor(userColor)) {
                // Column i: cells i, i+4, i+8, i+12.
                boolean b = true;
                for (int ii=1; ii<4 && b; ii++) {
                    if (!pedinaList.get(ii*4+i).checkColor(userColor)) {
                        b = false;
                    }
                }
                if (b) return true;
            }
            if (pedinaList.get(i*4).checkColor(userColor)) {
                // Row i: cells i*4 .. i*4+3.
                boolean b = true;
                for (int ii=1; ii<4 && b; ii++) {
                    if (!pedinaList.get(i*4+ii).checkColor(userColor)) {
                        b = false;
                    }
                }
                if (b) return true;
            }
            // Main diagonal.
            if (d1 && !pedinaList.get(5*i).checkColor(userColor)) {
                d1 = false;
            }
            // BUGFIX: the anti-diagonal is cells 3, 6, 9, 12, i.e. 3*(i+1).
            // The previous 3*i checked cell 0 (a main-diagonal cell) and
            // never checked cell 12, so anti-diagonal wins were misdetected.
            if (d2 && !pedinaList.get(3*(i+1)).checkColor(userColor)) {
                d2 = false;
            }
        }
        return d1||d2;
    }
}
|
// Mocha/Chai specs for rotationPoint(words): the index at which a rotated,
// alphabetically sorted word list "wraps around" (-1 when not rotated).
var expect = chai.expect;
describe("rotationPoint", function() {
  it(`should return 1 if given the array ['cool', 'cat']`, function() {
    var words = ["cool", "cat"];
    var result = rotationPoint(words);
    expect(result).to.eql(1);
  });
  it(`should return 4 if given the array ['great', 'ostrich', 'panther', 'ruby', 'chosen', 'feathers']`, function() {
    var words = ["great", "ostrich", "panther", "ruby", "chosen", "feathers"];
    var result = rotationPoint(words);
    expect(result).to.eql(4);
  });
  it(`should return 5 if given the array ['problem', 'reactive', 'supper',
  'undesirable', 'xebra', 'alpha',
  'bank', 'banned', 'endanger',
  'fallout', 'yellow']`, function() {
    var words = [
      "problem",
      "reactive",
      "supper",
      "undesirable",
      "xebra",
      "alpha",
      "bank",
      "banned",
      "endanger",
      "fallout",
      "yellow"
    ];
    var result = rotationPoint(words);
    expect(result).to.eql(5);
  });
  // An already-sorted (unrotated) list has no rotation point.
  it(`should return -1 if given the array ['any', 'body', 'can', 'dance', 'to', 'violas']`, function() {
    var words = ["any", "body", "can", "dance", "to", "violas"];
    var result = rotationPoint(words);
    expect(result).to.eql(-1);
  });
});
|
<reponame>ONSdigital/sdc-int-cc-service
package uk.gov.ons.ctp.integration.contactcentresvc.event;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.time.ZoneOffset;
import java.util.UUID;
import javax.persistence.PersistenceException;
import ma.glasnost.orika.MapperFacade;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import uk.gov.ons.ctp.common.FixtureHelper;
import uk.gov.ons.ctp.common.event.model.CollectionExerciseUpdate;
import uk.gov.ons.ctp.common.event.model.CollectionExerciseUpdateEvent;
import uk.gov.ons.ctp.integration.contactcentresvc.CCSvcBeanMapper;
import uk.gov.ons.ctp.integration.contactcentresvc.model.CollectionExercise;
import uk.gov.ons.ctp.integration.contactcentresvc.repository.db.CollectionExerciseRepository;
@ExtendWith(MockitoExtension.class)
public class CollectionExerciseEventReceiverTest {
  // Repository is mocked; the Orika mapper is real (spied) so the test
  // exercises the actual event-to-entity mapping configuration.
  @Mock private CollectionExerciseRepository repo;
  @Spy private MapperFacade mapper = new CCSvcBeanMapper();
  @InjectMocks private CollectionExerciseEventReceiver target;
  @Captor private ArgumentCaptor<CollectionExercise> collExCaptor;

  // Sample event loaded from package-local JSON fixtures.
  private CollectionExerciseUpdateEvent event;

  @BeforeEach
  public void setup() {
    event = FixtureHelper.loadPackageFixtures(CollectionExerciseUpdateEvent[].class).get(0);
  }

  // Happy path: the event payload is mapped and persisted.
  @Test
  public void shouldReceiveSurveyUpdateEvent() {
    target.acceptEvent(event);
    verify(repo).saveAndFlush(collExCaptor.capture());
    CollectionExerciseUpdate payload = event.getPayload().getCollectionExerciseUpdate();
    CollectionExercise persistedCollEx = collExCaptor.getValue();
    verifyMapping(persistedCollEx, payload);
  }

  // Persistence failures must propagate, not be swallowed.
  @Test
  public void shouldRejectFailingSave() {
    when(repo.saveAndFlush(any())).thenThrow(PersistenceException.class);
    assertThrows(PersistenceException.class, () -> target.acceptEvent(event));
  }

  // Field-by-field comparison of the persisted entity against the payload
  // (dates compared as instants; payload dates carry a zone, entity dates
  // are stored as UTC LocalDateTime).
  private void verifyMapping(
      CollectionExercise persistedCollEx, CollectionExerciseUpdate collExPayload) {
    assertEquals(UUID.fromString(collExPayload.getSurveyId()), persistedCollEx.getSurvey().getId());
    assertEquals(UUID.fromString(collExPayload.getCollectionExerciseId()), persistedCollEx.getId());
    assertEquals(collExPayload.getName(), persistedCollEx.getName());
    assertEquals(collExPayload.getReference(), persistedCollEx.getReference());
    assertEquals(
        collExPayload.getStartDate().toInstant(),
        persistedCollEx.getStartDate().toInstant(ZoneOffset.UTC));
    assertEquals(
        collExPayload.getEndDate().toInstant(),
        persistedCollEx.getEndDate().toInstant(ZoneOffset.UTC));
    var meta = collExPayload.getMetadata();
    assertAll(
        () -> assertEquals(meta.getNumberOfWaves(), persistedCollEx.getNumberOfWaves()),
        () -> assertEquals(meta.getWaveLength(), persistedCollEx.getWaveLength()),
        () -> assertEquals(meta.getCohorts(), persistedCollEx.getCohorts()),
        () -> assertEquals(meta.getCohortSchedule(), persistedCollEx.getCohortSchedule()));
  }
}
|
<filename>src/core/scene/index.ts
export * from './Scene';
export * from './SceneNavigator';
export * from './SceneRouter';
export * from './SceneType';
|
<gh_stars>0
package com.professorvennie.bronzeage.core.network;
import cpw.mods.fml.common.network.simpleimpl.IMessage;
import io.netty.buffer.ByteBuf;
/**
* Created by ProfessorVennie on 12/14/2014 at 7:42 PM.
*/
// Network packet carrying a block coordinate triple. The byte order in
// fromBytes/toBytes (x, then y, then z) is the wire format and must match
// on both sides.
public class MessageCoords implements IMessage {
    public int x;
    public int y;
    public int z;

    // Required no-arg constructor: the FML SimpleImpl network layer
    // instantiates messages reflectively before calling fromBytes().
    public MessageCoords() {
    }

    public MessageCoords(int x, int y, int z) {
        this.x = x;
        this.y = y;
        this.z = z;
    }

    // Deserialize: read the three ints in the same order toBytes wrote them.
    @Override
    public void fromBytes(ByteBuf buf) {
        x = buf.readInt();
        y = buf.readInt();
        z = buf.readInt();
    }

    // Serialize: write x, y, z in fixed order.
    @Override
    public void toBytes(ByteBuf buf) {
        buf.writeInt(x);
        buf.writeInt(y);
        buf.writeInt(z);
    }
}
|
import numpy as np
def merge_rgb(image1_file, image2_file) -> dict:
    """Combine two images' colour data: 'red' from the first file,
    'green' and 'blue' from the second.

    Both files are loaded via ``load_rgb`` (defined elsewhere in this
    project), which presumably returns a mapping with 'red', 'green'
    and 'blue' entries — TODO confirm against its definition.
    """
    first = load_rgb(image1_file)
    second = load_rgb(image2_file)
    return {
        'red': first['red'],
        'green': second['green'],
        'blue': second['blue'],
    }
/*
* Copyright (c) 2013 <NAME> <<EMAIL>>
* See the file LICENSE for copying permission.
*/
package net.interdon.domaincheck;
import net.interdon.domaincheck.containers.Domain;
import net.interdon.domaincheck.parsers.IDomainParser;
import net.interdon.domaincheck.parsers.ParserFactory;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
public class DomainCheck {
    private List<Domain> domainsToCheck;
    // Filled by the Worker; drained on this thread after the Worker finishes.
    private Queue<Domain> parsingQueue;
    private IServerPool servers;

    public DomainCheck(String[] domainsToCheck, IServerPool servers) {
        setServerPool(servers);
        buildDomainsList(domainsToCheck);
        parsingQueue = new ConcurrentLinkedQueue<>();
    }

    public void setServerPool(IServerPool servers) {
        this.servers = servers;
    }

    // Wrap each raw domain string in a Domain container.
    private void buildDomainsList(String[] domainsToCheck) {
        this.domainsToCheck = new LinkedList<>();
        for(String item: domainsToCheck) {
            this.domainsToCheck.add(new Domain(item));
        }
    }

    /**
     * Runs the lookup worker to completion, then parses every queued
     * result, lazily creating one parser per TLD.
     */
    public void start() {
        Worker worker = new Worker(domainsToCheck, servers, parsingQueue);
        ParserFactory factory = new ParserFactory();
        HashMap<String, IDomainParser> parserPool = new HashMap<>();
        Domain tmp;
        // BUGFIX: the worker was launched with run(), which executes it
        // synchronously on this thread, making the join()/interrupt()
        // handling below meaningless. start() spawns the actual thread.
        // (Worker exposes join()/interrupt(), i.e. the Thread API —
        // confirm it extends Thread rather than merely mimicking it.)
        worker.start();
        try {
            worker.join();
        } catch (InterruptedException e) {
            worker.interrupt();
            // Preserve this thread's interrupted status for callers.
            Thread.currentThread().interrupt();
        }
        while(!parsingQueue.isEmpty()) {
            tmp = parsingQueue.poll();
            if(!parserPool.containsKey(tmp.getTld())) {
                parserPool.put(tmp.getTld(), factory.newParser(tmp.getTld()));
            }
            parserPool.get(tmp.getTld()).parse(tmp);
        }
    }

    public List<Domain> getDomains() {
        return domainsToCheck;
    }
}
|
package org.softuni.exodia.annotations.validation.composite;
import javax.validation.Constraint;
import javax.validation.Payload;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Composite Bean Validation constraint for a stored user password hash.
 * A valid value is non-null, exactly 75 characters long, and matches the
 * Argon2d encoded form "$argon2d$v=19$m=65536,t=3,p=4$" followed by a
 * 45-character salt+hash payload.
 */
@NotNull
@Size(min = 75, max = 75)
@Pattern(regexp = "^\\$argon2d\\$v=19\\$m=65536,t=3,p=4\\$.{45}$")
@Target({METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER})
@Retention(RUNTIME)
@Constraint(validatedBy = {})
@Documented
public @interface ValidUserHashedPassword {
    // Standard Bean Validation attributes (message/groups/payload).
    String message() default "";

    Class<?>[] groups() default {};

    Class<? extends Payload>[] payload() default {};
}
|
import { ReviewerEntity } from '@pimp-my-pr/server/repository/core/domain';
import { RepositoryStatisticsReadModel } from '../get-repository-statistics/repository-statistics.read-model';
// Read model flattening a reviewer entity plus their per-repository
// statistics for presentation.
export class ReviewerStatisticsReadModel {
  avatarUrl: string;
  contributions: number;
  id: number;
  name: string;
  repositories: RepositoryStatisticsReadModel[];

  constructor(user: ReviewerEntity, repositories: RepositoryStatisticsReadModel[]) {
    const { avatarUrl, contributions, id, name } = user;
    this.avatarUrl = avatarUrl;
    this.contributions = contributions;
    this.id = id;
    this.name = name;
    this.repositories = repositories;
  }
}
|
def sum_numbers(n):
    """Return the sum of the integers 1..n inclusive.

    Uses the closed-form n*(n+1)/2 instead of the original O(n) loop.
    For n < 1 the sum is empty, so 0 is returned — matching the loop's
    behavior. ``n`` is assumed to be an integer.
    """
    if n < 1:
        return 0
    return n * (n + 1) // 2
#!/bin/bash
# Author: Alex Maimescu
# Runs the ZappMerchantLib unit tests in the iOS simulator and produces an
# HTML code-coverage report via slather.
BUILD_OUTPUT="build"
# NOTE(review): "OTPUT" is a typo but is used consistently below; renaming
# would be a code change, so it is only flagged here.
REPORTS_OTPUT="$BUILD_OUTPUT/reports"
COVERAGE_OUTPUT="$REPORTS_OTPUT/coverage"
WORKSPACE="ZappMerchantLib.xcworkspace"
PROJECT="ZappMerchantLib.xcodeproj"
SCHEME="ZappMerchantLibTests"
# Required tools
# - xcodebuild: Xcode command line tool
# - xcpretty: Install ruby gem (sudo gem install xcpretty)
# - slather: Install ruby gem (sudo gem install slather)
REQUIRED_PROGRAMS_IN_PATH=(
"xcodebuild"
"xcpretty"
"slather"
)
# Fail fast with a clear error if any required tool is missing from PATH.
function validateTools() {
count=0
while [ "x${REQUIRED_PROGRAMS_IN_PATH[$count]}" != "x" ]
do
program=${REQUIRED_PROGRAMS_IN_PATH[$count]}
hash $program 2>/dev/null
if [ $? -eq 1 ]; then
echo >&2 "ERROR - $program is not installed or not in your PATH"; exit 1;
fi
count=$(( $count + 1 ))
done
}
# Validate toolset
echo "[VALIDATE TOOLSET]"
validateTools
# Run tests
echo "[RUN TESTS]"
xcodebuild test -workspace $WORKSPACE -scheme $SCHEME -configuration "Debug" -sdk "iphonesimulator" -derivedDataPath "$BUILD_OUTPUT" -destination "platform=iOS Simulator,name=iPhone 6s" -enableCodeCoverage YES | xcpretty -c -r html
# Generate detailed code coverage
echo "[GENERATE COVERAGE REPORT]"
slather coverage --build-directory "$BUILD_OUTPUT" -i "Pods/*" -i "ZappMerchantLibTests/*" -i "../*" --output-directory "$COVERAGE_OUTPUT" --html --show "$PROJECT"
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build-phase script.
# NOTE(review): bash arrays and [[ ]] appear below despite the /bin/sh
# shebang; Xcode runs script phases with bash, so it works — confirm
# before invoking standalone.
set -e
set -u
set -o pipefail
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# Resolves $1 against BUILT_PRODUCTS_DIR (then as a literal path), rsyncs it
# into the app's Frameworks folder, strips architectures not being built,
# re-signs it, and (for Xcode < 7) embeds the Swift runtime dylibs.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# Copies the dSYM bundle into the derived-files dir, strips architectures not
# being built, then moves it to DWARF_DSYM_FOLDER_PATH (or touches a stub so
# Xcode's input/output tracking stays consistent).
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
# No-op unless an expanded signing identity is set and signing is both
# required and allowed by the build settings. Signing runs in the
# background when COCOAPODS_PARALLEL_CODE_SIGN is true (see `wait` at EOF).
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Removes from the binary every architecture not in $ARCHS. Communicates the
# outcome through STRIP_BINARY_RETVAL: 1 on success, 0 when the binary shares
# no architecture with the current build.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the Pods frameworks for the active configuration. (CocoaPods emits
# one branch per configuration; here both install the same frameworks.)
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SwiftyJSON/SwiftyJSON.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SwiftyJSON/SwiftyJSON.framework"
fi
# Wait for any backgrounded parallel codesign jobs before finishing.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
/**
* Copyright (c) 2017, Oracle and/or its affiliates.
* The Universal Permissive License (UPL), Version 1.0
*/
#import <Foundation/Foundation.h>
/**
 * Static helper routines: directory/path name accessors plus a blocking
 * HTTP request helper.
 */
@interface OMUtilities : NSObject

/* Directory-name / path accessors. The actual string values are defined in
   the implementation file, not visible here. */
+ (NSString *)keystoreDirectoryName;
+ (NSString *)localAuthDirectoryName;
+ (NSString *)omaDirectoryPath;
+ (NSString *)secureDirectoryName;

/* Builds the full path for fileName inside the given directory.
   NOTE(review): presumably populates *error on failure — confirm in the
   implementation. */
+ (NSString *)filePathForfile:(NSString*)fileName inDirectory:(NSString*)directory
error:(NSError **)error;

/* Blocking HTTP helper whose signature mirrors the deprecated
   NSURLConnection +sendSynchronousRequest:returningResponse:error:. */
+ (NSData *)sendSynchronousRequest:(NSURLRequest *)request
returningResponse:(__autoreleasing NSURLResponse **)responsePtr
error:(__autoreleasing NSError **)errorPtr;
@end
|
#!/usr/bin/env -S bash ../.port_include.sh
# SerenityOS port script for SDL2_image. Driven by ../.port_include.sh,
# which supplies the `run` helper and the SERENITY_* environment variables.
port=SDL2_image
useconfigure=true
version=2.0.5
depends="SDL2 libpng libjpeg libtiff"
# download URL, local filename, sha256 checksum
files="https://www.libsdl.org/projects/SDL_image/release/SDL2_image-${version}.tar.gz SDL_image-${version}.tar.gz bdd5f6e026682f7d7e1be0b6051b209da2f402a2dd8bd1c4bd9c25ad263108d0"
auth_type=sha256
# Cross-compile configure; webp support is disabled and Serenity system
# libraries are linked explicitly.
configure() {
run ./configure \
--host="${SERENITY_ARCH}-pc-serenity" \
--with-sdl-prefix="${SERENITY_INSTALL_ROOT}/usr/local" \
--enable-webp=false --enable-webp-shared=false \
LDFLAGS="-lgui -lgfx -lipc -lcore -lm"
}
# -k: keep going past individual target failures.
build() {
run make -k
}
install() {
run make -k DESTDIR="${SERENITY_INSTALL_ROOT}" install
# Produce a shared library from the installed static archive, then drop
# the stale libtool archive.
${CC} -shared -o ${SERENITY_INSTALL_ROOT}/usr/local/lib/libSDL2_image.so -Wl,-soname,libSDL2_image.so -Wl,--whole-archive ${SERENITY_INSTALL_ROOT}/usr/local/lib/libSDL2_image.a -Wl,--no-whole-archive -lpng -ljpeg -ltiff
rm -f ${SERENITY_INSTALL_ROOT}/usr/local/lib/libSDL2_image.la
}
|
<reponame>szymon-owczarzak/cogboard
import React, { useMemo } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useToggle } from '../hooks';
import { createGetNotification } from '../selectors';
import { deleteNotification } from '../actions/actionCreators';
import Snackbar from '@material-ui/core/Snackbar/index';
import SnackbarVariantContent from './SnackbarVariantContent';
// Snackbar bound to one notification in the Redux store. The notification
// is looked up through a per-instance memoized selector and is deleted from
// the store once the closing transition has finished.
const SnackbarWithVariant = React.memo(
({ notificationId, vertical, horizontal, 'data-cy': dataCy }) => {
// One selector instance per component so memoization isn't shared
// between snackbars.
const getNotification = useMemo(createGetNotification, []);
const { type, message, action, duration } = useSelector(state =>
getNotification(state, notificationId)
);
const dispatch = useDispatch();
// Starts open; only the close setter from the toggle is used here.
const [isOpened, , closeSnackbar] = useToggle(true);
// Deliberately ignore every close reason except the auto-hide timeout
// (e.g. clickaway), so the message stays visible until it expires.
const handleClose = (event, reason) => {
if (reason !== 'timeout') {
return;
}
closeSnackbar();
};
// Remove the notification from the store after the exit transition ends.
const handleExited = () => dispatch(deleteNotification(notificationId));
return (
<Snackbar
onClose={handleClose}
onExited={handleExited}
anchorOrigin={{ vertical, horizontal }}
autoHideDuration={duration}
disableWindowBlurListener
open={isOpened}
data-cy={dataCy}
>
<SnackbarVariantContent
variant={type}
message={message}
action={action && action(closeSnackbar)}
/>
</Snackbar>
);
}
);
export default SnackbarWithVariant;
|
<gh_stars>1-10
#if 0
//
// Make this header file available as ESMFVersionDefine.h in order to build
// NEMS against an ESMF installation that contains a reference level NUOPC Layer.
//
#endif
/* This stub defines no version macros of its own; it only pulls in the
   shared convenience macros. */
#include "./ESMFConvenienceMacros.h"
|
#!/bin/bash
# usage:
#   ./train.sh --rank 0 --seed 1 "BeamRiderNoFrameskip-v4"
# batch training on multiple environments:
#   ./train.sh --rank 0 --seed 1 "SeaquestNoFrameskip-v4" \
#                                "BeamRiderNoFrameskip-v4" \
#                                "PongNoFrameskip-v4"

# Defaults; overridable with -r/--rank and -s/--seed.
rank=0
seed=1
env_ids=('BeamRiderNoFrameskip-v4')
exe=unstable_baselines.d.qrdqn.run

# Launch one QR-DQN training run.
#   $1 = rank, $2 = seed, $3 = environment id
# Bug fix: the python invocation previously ignored the function's
# parameters and read the globals instead (the echo used $1/$2/$3 while the
# command used $rank/$seed/$env_id). The parameters are now authoritative.
function train() {
    local rank="$1"
    local seed="$2"
    local env_id="$3"
    echo "Start training, rank=${rank}, seed=${seed}, env_id=${env_id}"
    # {env_id}/{rank} in --logdir are literal placeholders expanded by the
    # python program, not by the shell.
    python -m "$exe" --rank "$rank" --seed "$seed" \
        --logdir='./log/{env_id}/qrdqn/{rank}' \
        --logging='training.log' \
        --monitor_dir='monitor' \
        --tb_logdir='' \
        --model_dir='model' \
        --env_id="${env_id}" \
        --log_interval=1000 \
        --eval_interval=10000 \
        --eval_episodes=5 \
        --save_interval=10000 \
        --num_quantiles=200 \
        --num_envs=8 \
        --num_epochs=312500 \
        --num_steps=4 \
        --num_gradsteps=1 \
        --batch_size=256 \
        --buffer_size=1000000 \
        --min_buffer=50000 \
        --target_update=625 \
        --explore_rate=1.0 \
        --explore_final=0.01 \
        --explore_progress=0.1 \
        --verbose=2 \
        --record_video
}

# Formalize arguments (getopt normalizes and re-quotes them).
ARGS=$(getopt -o r:s: -l rank:,seed: -n "$0" -- "$@")
if [ $? -ne 0 ]; then
    echo "Terminating..." >&2
    exit 1
fi
eval set -- "$ARGS"

# Parse arguments
while true; do
    case "$1" in
        -r|--rank) rank="$2"; shift;;
        -s|--seed) seed="$2"; shift;;
        --) shift; break;;
        *)
            echo "Unknown args: $@"
            exit 1
    esac
    shift
done

# Remaining positional arguments, if any, replace the default env list.
if [[ $# -gt 0 ]]; then
    env_ids=("$@")
fi

# Start training
for env_id in "${env_ids[@]}"; do
    train "$rank" "$seed" "$env_id"
done
<gh_stars>1-10
# Validation helpers for project names and test types, mixed into classes
# that need them.
module Verifiable
  # True when the name contains none of the shell/filesystem-unsafe
  # characters in the pattern below.
  def valid_characters?(project_name)
    forbidden = %r{[\#%&{}\\<>*?\/ $!'":@+`|=]}
    !forbidden.match(project_name)
  end

  # True when the first character is not a space, dot, dash or underscore.
  def valid_initial_character?(project_name)
    !/[ .\-_]/.match(project_name[0])
  end

  # A project name is valid when present, composed of allowed characters,
  # starting with an allowed character, and at most 30 characters long.
  def valid_name?(project_name)
    return false if project_name.nil?

    valid_characters?(project_name) &&
      valid_initial_character?(project_name) &&
      project_name.length < 31
  end

  # A test type is valid when present and accepted by the including class's
  # correct_test_name?/flag? checks (defined by the includer, not here).
  def valid_test?(test_type)
    return false if test_type.nil?

    correct_test_name?(test_type) && flag?(test_type)
  end
end
|
#!/bin/sh
# Detects which OS and if it is Linux then it will detect which Linux Distribution.
OS=`uname -s`
REV=`uname -r`
MACH=`uname -m`
# Extracts a "VERSION = ..." value from the given file into $VERSION.
# NOTE(review): defined but never called anywhere below.
GetVersionFromFile()
{
VERSION=`cat $1 | tr "\n" ' ' | sed s/.*VERSION.*=\ // `
}
# Build a one-line OS description in $OSSTR per platform.
if [ "${OS}" = "SunOS" ] ; then
OS=Solaris
ARCH=`uname -p`
OSSTR="${OS} ${REV}(${ARCH} `uname -v`)"
elif [ "${OS}" = "AIX" ] ; then
OSSTR="${OS} `oslevel` (`oslevel -r`)"
elif [ "${OS}" = "Darwin" ] ; then
OSSTR="${OS} `sw_vers -productName` (`sw_vers -productVersion`) `sw_vers -buildVersion`"
elif [ "${OS}" = "Linux" ] ; then
KERNEL=`uname -r`
# Distribution detection via the classic /etc release files.
# ("PSUEDONAME" is a misspelling of PSEUDONAME; kept as-is.)
if [ -f /etc/redhat-release ] ; then
DIST='RedHat'
PSUEDONAME=`cat /etc/redhat-release | sed s/.*\(// | sed s/\)//`
REV=`cat /etc/redhat-release | sed s/.*release\ // | sed s/\ .*//`
elif [ -f /etc/SuSE-release ] ; then
DIST=`cat /etc/SuSE-release | tr "\n" ' '| sed s/VERSION.*//`
REV=`cat /etc/SuSE-release | tr "\n" ' ' | sed s/.*=\ //`
elif [ -f /etc/mandrake-release ] ; then
DIST='Mandrake'
PSUEDONAME=`cat /etc/mandrake-release | sed s/.*\(// | sed s/\)//`
REV=`cat /etc/mandrake-release | sed s/.*release\ // | sed s/\ .*//`
elif [ -f /etc/debian_version ] ; then
DIST="Debian `cat /etc/debian_version`"
REV=""
fi
# UnitedLinux layers on top of another distribution; append its name.
if [ -f /etc/UnitedLinux-release ] ; then
DIST="${DIST}[`cat /etc/UnitedLinux-release | tr "\n" ' ' | sed s/VERSION.*//`]"
fi
OSSTR="${OS} ${DIST} ${REV}(${PSUEDONAME} ${KERNEL} ${MACH})"
fi
echo ${OSSTR}
|
<reponame>jeeneee/realworld
package com.jeeneee.realworld.user.service;
import com.jeeneee.realworld.user.domain.User;
import com.jeeneee.realworld.user.domain.UserRepository;
import com.jeeneee.realworld.user.dto.ProfileResponse;
import com.jeeneee.realworld.user.exception.UserNotFoundException;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
 * Profile lookup and follow/unfollow operations. Read-only transactions by
 * default; the mutating methods opt into read-write transactions.
 */
@RequiredArgsConstructor
@Transactional(readOnly = true)
@Service
public class ProfileService {
private final UserRepository userRepository;
/**
 * Returns {@code username}'s profile as seen by {@code user} (the viewer).
 *
 * @throws UserNotFoundException if no such username exists
 */
public ProfileResponse find(String username, User user) {
User target = getUserByUsername(username);
return ProfileResponse.of(target, user);
}
/**
 * Makes {@code user} follow {@code username} and returns the updated
 * profile view. Persistence relies on the transactional dirty-checking of
 * the loaded entities.
 */
@Transactional
public ProfileResponse follow(String username, User user) {
User target = getUserByUsername(username);
user.follow(target);
return ProfileResponse.of(target, user);
}
/** Inverse of {@link #follow(String, User)}. */
@Transactional
public ProfileResponse unfollow(String username, User user) {
User target = getUserByUsername(username);
user.unfollow(target);
return ProfileResponse.of(target, user);
}
/** Looks up a user by username or throws {@link UserNotFoundException}. */
private User getUserByUsername(String username) {
return userRepository.findByUsername(username)
.orElseThrow(UserNotFoundException::new);
}
}
|
#include <stdio.h>

/* Read a single integer from stdin and echo it to stdout.
 * Bug fix: the original printed x without checking scanf's return value,
 * so malformed input led to reading an uninitialized variable (UB).
 * Returns 0 on success, 1 when no integer could be parsed. */
int main() {
    int x;
    if (scanf("%d", &x) != 1) {
        return 1;
    }
    printf("%d", x);
    return 0;
}
<reponame>googleapis/googleapis-gen<gh_stars>1-10
# Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/cloud/dialogflow/cx/v3beta1/security_settings.proto for package 'Google.Cloud.Dialogflow.CX.V3beta1'
# Original file comments:
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'google/cloud/dialogflow/cx/v3beta1/security_settings_pb'
module Google
module Cloud
module Dialogflow
module CX
module V3beta1
module SecuritySettingsService
# Service for managing security settings for Dialogflow.
class Service
include ::GRPC::GenericService
self.marshal_class_method = :encode
self.unmarshal_class_method = :decode
self.service_name = 'google.cloud.dialogflow.cx.v3beta1.SecuritySettingsService'
# Create security settings in the specified location.
rpc :CreateSecuritySettings, ::Google::Cloud::Dialogflow::CX::V3beta1::CreateSecuritySettingsRequest, ::Google::Cloud::Dialogflow::CX::V3beta1::SecuritySettings
# Retrieves the specified [SecuritySettings][google.cloud.dialogflow.cx.v3beta1.SecuritySettings].
# The returned settings may be stale by up to 1 minute.
rpc :GetSecuritySettings, ::Google::Cloud::Dialogflow::CX::V3beta1::GetSecuritySettingsRequest, ::Google::Cloud::Dialogflow::CX::V3beta1::SecuritySettings
# Updates the specified [SecuritySettings][google.cloud.dialogflow.cx.v3beta1.SecuritySettings].
rpc :UpdateSecuritySettings, ::Google::Cloud::Dialogflow::CX::V3beta1::UpdateSecuritySettingsRequest, ::Google::Cloud::Dialogflow::CX::V3beta1::SecuritySettings
# Returns the list of all security settings in the specified location.
rpc :ListSecuritySettings, ::Google::Cloud::Dialogflow::CX::V3beta1::ListSecuritySettingsRequest, ::Google::Cloud::Dialogflow::CX::V3beta1::ListSecuritySettingsResponse
# Deletes the specified [SecuritySettings][google.cloud.dialogflow.cx.v3beta1.SecuritySettings].
rpc :DeleteSecuritySettings, ::Google::Cloud::Dialogflow::CX::V3beta1::DeleteSecuritySettingsRequest, ::Google::Protobuf::Empty
end
Stub = Service.rpc_stub_class
end
end
end
end
end
end
|
<gh_stars>0
'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
exports.default = postProcessDiff;
var _lodash = require('lodash.foreach');
var _lodash2 = _interopRequireDefault(_lodash);
var _lodash3 = require('lodash.isnumber');
var _lodash4 = _interopRequireDefault(_lodash3);
var _lodash5 = require('lodash.isplainobject');
var _lodash6 = _interopRequireDefault(_lodash5);
function _interopRequireDefault (obj) {
return obj && obj.__esModule ? obj : { default: obj };
}
// Numeric log level -> output bucket name.
var LOG_LEVELS = {
3: 'errors',
2: 'warnings',
1: 'infos'
};
/**
 * Distribute raw diff entries into errors/warnings/infos buckets according
 * to the per-rule and global configuration.
 *
 * @param {Array<Diff>} diff.breaks
 * @param {Array<Diff>} diff.smooths
 * @param {Array<Diff>} diff.infos
 * @param {Array<RawDiff>} diff.unmatchDiffs
 * @param {String} versionDiff - if null -> unchanged; if undefined, no version defined
 * @param {Object} config.changes
 * @param {Object} config.rules
 * @return
 * {
 *   errors: {Array<Diff>}
 *   warnings: {Array<Diff>}
 *   infos: {Array<Diff>}
 *   unmatchDiffs: {Array<RawDiff>}
 * }
 * @note Diff: {ruleId: String, message: String}
 */
function postProcessDiff (_ref, versionDiff, config) {
var breaks = _ref.breaks;
var smooths = _ref.smooths;
var infos = _ref.infos;
var unmatchDiffs = _ref.unmatchDiffs;
var diff = {
errors: [],
warnings: [],
infos: [],
unmatchDiffs: unmatchDiffs
};
(0, _lodash2.default)(
[
{ changes: breaks, type: 'breaks' },
{ changes: smooths, type: 'smooths' },
{ changes: infos, type: 'infos' }
],
function (_ref2) {
var changes = _ref2.changes;
var type = _ref2.type;
return (0, _lodash2.default)(changes, function (change) {
var ruleConfig = config.rules && config.rules[change.ruleId];
var globalConfig = config.changes[type];
// Resolve the configured level with precedence:
// per-rule number > per-rule[versionDiff] object entry >
// global per-type number > global per-type[versionDiff] object entry >
// config.default[type].
var level = (0, _lodash4.default)(ruleConfig)
? ruleConfig
: versionDiff && (0, _lodash6.default)(ruleConfig)
? ruleConfig[versionDiff]
: (0, _lodash4.default)(globalConfig)
? globalConfig
: versionDiff && (0, _lodash6.default)(globalConfig)
? globalConfig[versionDiff]
: config.default[type];
// Hard floors/caps regardless of configuration: breaks are always
// errors; smooths can be at most warnings; infos at most infos.
if (type === 'breaks') level = 3;
if (type === 'smooths' && level == 3) level = 2;
if (type === 'infos' && (level == 2 || level == 3)) level = 1;
// Unknown/0 levels are silently dropped (change is not reported).
if (LOG_LEVELS[level]) {
diff[LOG_LEVELS[level]].push(change);
}
});
}
);
return diff;
}
module.exports = exports['default'];
|
import numpy
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout  # bug fix: Dropout is used below but was never imported
from keras.layers import LSTM
from keras.utils import np_utils

# NOTE(review): X_train, y_train and X_test are not defined in this file —
# presumably produced by an earlier preprocessing step; confirm before running.

# define the LSTM model
model = Sequential()
model.add(LSTM(256, input_shape=(X_train.shape[1], X_train.shape[2])))
model.add(Dropout(0.2))
model.add(Dense(y_train.shape[1], activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam')
# fit the model
model.fit(X_train, y_train, epochs=20, batch_size=128, verbose=2)
# make predictions
y_pred = model.predict(X_test)
#!/bin/tcsh
#PBS -A NTDD0005
#PBS -N testb
#PBS -q regular
#PBS -l walltime=12:00:00
#PBS -j oe
#PBS -M apinard@ucar.edu
#PBS -l select=1:ncpus=1
# PBS batch job: run the TS batch computation on a single core using the
# ldcpy conda environment.
module load conda
conda activate ldcpy_env
# Keep temporary files on scratch and make sure the directory exists.
setenv TMPDIR /glade/scratch/$USER/temp
mkdir -p $TMPDIR
python ./compute_batch.py -o '/glade/scratch/apinard/3D/TS_calcs.csv' -j './batch_scripts/3d_dssim_scripts/TS.json' -ts 345 -tt 360 -v -ld
#!/usr/bin/env bash
#
# Script to build and upload the SIID Tango MySQL image
#
# Abort on the first failure so a broken build is never tagged or pushed.
set -e

RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m'
VERSION="0.1.0"
IMAGE=skasiid/tango_mysql

# Banner. Bug fix: the BLUE lines previously never emitted ${NC}, leaving
# the colour active ("bleeding") into subsequent output.
echo -e "${RED}---------------------------------------------------------${NC}"
echo -e "${BLUE}Building and uploading Docker image(s):${NC}"
echo -e "${BLUE} - ${IMAGE}:${VERSION}${NC}"
echo -e "${BLUE} - ${IMAGE}:latest${NC}"
echo -e "${RED}---------------------------------------------------------${NC}"

docker build -t "${IMAGE}:${VERSION}" .
docker tag "${IMAGE}:${VERSION}" "${IMAGE}:latest"
docker push "${IMAGE}:${VERSION}"
docker push "${IMAGE}:latest"
/*****************************************************************************
* Copyright (C) NanoContainer Organization. All rights reserved. *
* ------------------------------------------------------------------------- *
* The software in this package is published under the terms of the BSD *
* style license a copy of which has been included with this distribution in *
* the LICENSE.txt file. *
* *
*****************************************************************************/
package org.nanocontainer.nanowar.webwork;
import javax.servlet.http.HttpServletRequest;
import org.jmock.Mock;
import org.jmock.MockObjectTestCase;
import org.nanocontainer.nanowar.KeyConstants;
import org.picocontainer.MutablePicoContainer;
import org.picocontainer.defaults.DefaultPicoContainer;
import webwork.action.ServletActionContext;
/**
* @author <NAME>
* @author <NAME>
*/
/**
 * Tests for PicoActionFactory: webwork actions are instantiated through the
 * request-scoped PicoContainer so their dependencies get injected.
 */
public class PicoActionFactoryTestCase extends MockObjectTestCase {
private PicoActionFactory factory;
private DefaultPicoContainer container;
// Fresh factory and container per test; the container is published as the
// request-scoped container through the action-context object reference.
public void setUp(){
factory = new PicoActionFactory();
container = new DefaultPicoContainer();
(new ActionContextScopeObjectReference(KeyConstants.REQUEST_CONTAINER)).set(container);
}
// A registered "foo" String satisfies TestAction's dependency, so the
// factory can instantiate and inject the action.
public void testActionInstantiationWithValidClassName() throws Exception {
container.registerComponentInstance("foo");
TestAction action = (TestAction) factory
.getActionImpl(TestAction.class.getName());
assertNotNull(action);
assertEquals("foo", action.getFoo());
}
// With no "foo" registered, instantiation fails and the factory returns
// null instead of throwing.
public void testActionInstantiationWhichFailsDueToFailedDependencies() throws Exception {
TestAction action = (TestAction) factory
.getActionImpl(TestAction.class.getName());
assertNull(action);
}
// A class name that cannot be resolved also yields null.
public void testActionInstantiationWithInvalidClassName() throws Exception {
container.registerComponentInstance("foo");
TestAction action = (TestAction) factory
.getActionImpl("invalidAction");
assertNull(action);
}
// An action already registered in the container is reused, not recreated.
public void testActionInstantiationWhichHasAlreadyBeenRegistered() throws Exception {
container.registerComponentInstance("foo");
container.registerComponentImplementation(TestAction.class);
TestAction action1 = (TestAction) container.getComponentInstance(TestAction.class);
TestAction action2 = (TestAction) factory
.getActionImpl(TestAction.class.getName());
assertSame(action1, action2);
}
// Two requests for the same action class return the same cached instance.
public void testActionInstantiationWhichHasAlreadyBeenRequested() throws Exception {
container.registerComponentInstance("foo");
TestAction action1 = (TestAction) factory
.getActionImpl(TestAction.class.getName());
TestAction action2 = (TestAction) factory
.getActionImpl(TestAction.class.getName());
assertSame(action1, action2);
}
// The factory falls back to the container stored on the HTTP request and
// caches a derived actions-container back onto the request.
public void testActionContainerIsFoundInRequest() throws Exception {
Mock requestMock = mock(HttpServletRequest.class);
HttpServletRequest request = (HttpServletRequest) requestMock.proxy();
requestMock.expects(once()).method("getAttribute").with(eq(KeyConstants.ACTIONS_CONTAINER)).will(
returnValue(null));
requestMock.expects(once()).method("getAttribute").with(eq(KeyConstants.REQUEST_CONTAINER)).will(
returnValue(container));
requestMock.expects(once()).method("setAttribute").with(eq(KeyConstants.ACTIONS_CONTAINER),
isA(MutablePicoContainer.class));
ServletActionContext.setRequest(request);
container.registerComponentInstance("foo");
TestAction action = (TestAction) factory
.getActionImpl(TestAction.class.getName());
assertNotNull(action);
}
}
import { createSelector } from 'reselect';
// Placeholder for the real application state shape, which is defined
// elsewhere in the application.
type AppState = {
// Define the structure of the application state
// ...
};
// Identity selector: the input selector and the result function both return
// the state unchanged, so this selector memoizes (and yields) the root
// state object itself.
const memoizedStateSelector = createSelector(
(s: AppState) => s, // Input selector function
s => s // Result function
);
export default memoizedStateSelector;
#!/bin/bash
# Build (and optionally clean and test) the linked-list programme.
# usage: ./build.sh [clean] [test]
echo "BUILD the linked list programme"
args=("$@")

# "clean" as the first argument wipes the previous build tree.
# Bug fix: ${args[0]} was unquoted, which breaks on spaced/empty arguments
# (and under `set -u`); expansions are now quoted with a safe default.
if [ -n "${args[0]:-}" ] && [ "${args[0]}" = "clean" ];
then
    echo Performing clean build:
    rm -rf build
fi

if [ -d build ]
then
    echo "directory exists"
else
    echo Build directory does not exists:
    mkdir build
fi

echo enter build directory:
# Bug fix: guard the cd — running cmake/make in the wrong directory after a
# failed cd could damage the source tree.
cd build || exit 1

echo Executing cmake command:
cmake ..
echo Execute build command now:
make
echo "Build completed"

# "test" as the second argument runs the freshly built test binary.
if [ -n "${args[1]:-}" ] && [ "${args[1]}" = "test" ];
then
    echo "Execute testing sequence"
    ./test
fi
/**
 * Find every subset of `array` whose elements sum to `target`, by
 * enumerating all 2^n subsets with a bitmask.
 *
 * Each match is still logged to the console (original behavior), and is
 * now also collected and returned. The original returned undefined, so
 * returning the list is backward-compatible for existing callers.
 *
 * @param {number[]} array - candidate numbers
 * @param {number} target - required subset sum
 * @returns {number[][]} matching subsets, in ascending bitmask order
 */
function findCombinationSum(array, target) {
  const matches = [];
  for (let mask = 0; mask < Math.pow(2, array.length); mask++) {
    // Bit i of `mask` selects array[i]; shift right as we walk the array.
    let bits = mask;
    let sum = 0;
    const subset = [];
    array.forEach(function(item) {
      if (bits & 1) {
        subset.push(item);
        sum += item;
      }
      bits = bits >> 1;
    });
    if (sum === target) {
      console.log(subset);
      matches.push(subset);
    }
  }
  return matches;
}
findCombinationSum([2, 5, 3, 1, 7], 8);
<reponame>navikt/sykepengesoknad
import chai from 'chai';
import React from 'react';
import { mount } from 'enzyme';
import chaiEnzyme from 'chai-enzyme';
import { lagDesimaltall, lagHeltall, Vis } from './utils';
chai.use(chaiEnzyme());
const expect = chai.expect;
describe('utils', () => {
// lagDesimaltall: coerce free-text input into a comma-separated decimal
// string with at most two decimals.
describe('lagDesimaltall', () => {
it('Skal fjerne bokstaver', () => {
const n = lagDesimaltall('12f');
expect(n).to.equal('12');
});
it('Skal fjerne bindestrek', () => {
const n = lagDesimaltall('12f-');
expect(n).to.equal('12');
});
it('Skal erstatte . med komma når . er midt i argumentet', () => {
const n = lagDesimaltall('12.4');
expect(n).to.equal('12,4');
});
it('Skal erstatte . med komma når . er på slutten av argumentet', () => {
const n = lagDesimaltall('12.');
expect(n).to.equal('12,');
});
it('Skal ikke fjerne komma når komma er midt i argumentet', () => {
const n = lagDesimaltall('12,4');
expect(n).to.equal('12,4');
});
it('Skal ikke fjerne komma når komma er på slutten av argumentet', () => {
const n = lagDesimaltall('12,');
expect(n).to.equal('12,');
});
it('Skal returnere tom streng', () => {
const n = lagDesimaltall('');
expect(n).to.equal('');
});
it('Skal fjerne komma hvis det står først', () => {
const n = lagDesimaltall(',');
expect(n).to.equal('');
});
it('Skal fjerne komma hvis det står først', () => {
const n = lagDesimaltall(',,');
expect(n).to.equal('');
});
it('Skal kun godta to desimaler for tall under 10', () => {
const n = lagDesimaltall('1,145');
expect(n).to.equal('1,14');
});
it('Skal kun godta to desimaler for tall over 10', () => {
const n = lagDesimaltall('11,1451');
expect(n).to.equal('11,14');
});
it('Skal kun godta to desimaler for tall over 100', () => {
const n = lagDesimaltall('456,1451');
expect(n).to.equal('456,14');
});
});
// lagHeltall: strip everything after (and including) the separator,
// producing an integer string.
describe('lagHeltall', () => {
it('Skal fjerne bokstaver', () => {
const n = lagHeltall('12f');
expect(n).to.equal('12');
});
it('Skal fjerne bindestrek', () => {
const n = lagHeltall('12f-');
expect(n).to.equal('12');
});
it('Skal fjerne .', () => {
const n = lagHeltall('12.4');
expect(n).to.equal('12');
});
it('Skal fjene . når . er på slutten av argumentet', () => {
const n = lagHeltall('12.');
expect(n).to.equal('12');
});
it('Skal returnere tom streng', () => {
const n = lagHeltall('');
expect(n).to.equal('');
});
it('Skal fjerne komma hvis det står først', () => {
const n = lagHeltall(',');
expect(n).to.equal('');
});
it('Skal fjerne komma hvis det står først', () => {
const n = lagHeltall(',,');
expect(n).to.equal('');
});
it('Skal fjerne , for tall under 10', () => {
const n = lagHeltall('1,145');
expect(n).to.equal('1');
});
it('Skal fjerne desimaler for tall over 10', () => {
const n = lagHeltall('11,1451');
expect(n).to.equal('11');
});
it('Skal fjerne desimaler for tall over 100', () => {
const n = lagHeltall('456,1451');
expect(n).to.equal('456');
});
});
// <Vis>: renders its children (or a render-prop's output) only when the
// `hvis` prop is truthy.
describe('Vis', () => {
it('Skal vise children hvis det er children og hvis = true', () => {
const toMount = (<Vis hvis>
<p>Olsen</p>
</Vis>);
const component = mount(toMount);
expect(component.html()).to.equal('<p>Olsen</p>');
});
it('Skal ikke vise children hvis det er children og hvis = false', () => {
const toMount = (<Vis hvis={false}>
<p>Olsen</p>
</Vis>);
const component = mount(toMount);
expect(component.html()).to.equal(null);
});
it('Skal vise render-funksjonens returverdi hvis det er render-funkksjon og hvis = true', () => {
const toMount = (<Vis
hvis
render={() => {
return <p>Olsen</p>;
}} />);
const component = mount(toMount);
expect(component.html()).to.equal('<p>Olsen</p>');
});
it('Skal ikke vise render-funksjonens returverdi hvis det er render-funkksjon og hvis = false', () => {
const toMount = (<Vis
hvis={false}
render={() => {
return <p>Olsen</p>;
}} />);
const component = mount(toMount);
expect(component.html()).to.equal(null);
});
});
});
|
<filename>grpc/src/main/java/io/stargate/grpc/codec/StringCodec.java<gh_stars>0
/*
* Copyright The Stargate Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.stargate.grpc.codec;
import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
import edu.umd.cs.findbugs.annotations.NonNull;
import io.stargate.db.schema.Column;
import io.stargate.db.schema.Column.ColumnType;
import io.stargate.proto.QueryOuterClass.Value;
import io.stargate.proto.QueryOuterClass.Value.InnerCase;
import java.nio.ByteBuffer;
/**
 * Bridges gRPC string {@code Value}s and CQL byte buffers by delegating to
 * a driver {@link TypeCodec} (e.g. for text/varchar/ascii columns).
 */
public class StringCodec implements ValueCodec {
// Driver codec that performs the actual string <-> bytes conversion.
private final TypeCodec<String> innerCodec;
public StringCodec(@NonNull TypeCodec<String> innerCodec) {
this.innerCodec = innerCodec;
}
/**
 * Encodes a string-valued gRPC {@code Value} to CQL bytes.
 *
 * @throws IllegalArgumentException if the value is not a string
 */
@Override
public ByteBuffer encode(@NonNull Value value, @NonNull Column.ColumnType type) {
if (value.getInnerCase() != InnerCase.STRING) {
throw new IllegalArgumentException("Expected string type");
}
return innerCodec.encode(value.getString(), PROTOCOL_VERSION);
}
/** Decodes CQL bytes into a string-valued gRPC {@code Value}. */
@Override
public Value decode(@NonNull ByteBuffer bytes, @NonNull ColumnType type) {
return Value.newBuilder().setString(innerCodec.decode(bytes, PROTOCOL_VERSION)).build();
}
}
|
#!/usr/bin/env bash
# Self-tests for the bash-test runner: each test_* function below is itself
# executed by bash-test; a test passes when its last command exits 0.
export SOURCE=".."
# Drop the two header lines bash-test prints before the per-test results.
function exclude_header {
echo "$1" | tail -n +3
}
function test_help_option_displays_help {
bash-test -h | grep -q 'Usage'
}
function test_help_option_exits_with_code_0 {
bash-test -h >/dev/null
test $? -eq 0
}
function test_version_option_displays_version {
bash-test -v | grep -qE "v[0-9]+\.[0-9]+\.[0-9]+"
}
function test_version_option_exits_with_code_0 {
bash-test -v >/dev/null
test $? -eq 0
}
function test_invalid_option_displays_help {
bash-test -z | grep -q 'Usage'
}
function test_invalid_option_exits_with_code_1 {
bash-test -z >/dev/null
test $? -eq 1
}
# Absolute path of this script's directory, for locating support fixtures.
curr_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Compare runner output (minus the header) against a golden file.
function test_run_all_tests_present_on_input_file {
expected=$(cat "$curr_dir"/support/sample-tests-1.expected_output.txt)
actual=$(bash-test "$curr_dir"/support/sample-tests-1.sh 2>/dev/null)
diff <(exclude_header "$actual") <(echo -e "$expected")
}
function test_accepts_multiple_input_files {
expected=$(cat "$curr_dir"/support/multiple-files.expected_output.txt)
actual=$(bash-test "$curr_dir"/support/sample-tests-{1,2}.sh 2>/dev/null)
diff <(exclude_header "$actual") <(echo -e "$expected")
}
# The time(1)-style summary goes to stderr; check all three fields.
function test_elapsed_time_of_tests_execution_is_displayed {
actual=$(bash-test "$curr_dir"/support/*.sh 2>&1 >/dev/null)
echo "$actual" | grep -q "real" &&\
echo "$actual" | grep -q "user" &&\
echo "$actual" | grep -q "sys"
}
function test_all_tests_passing_exits_with_code_0 {
bash-test "$curr_dir"/support/sample-tests-2.sh >/dev/null 2>&1
test $? -eq 0
}
function test_at_least_one_test_failing_exits_with_code_greater_than_0 {
bash-test "$curr_dir"/support/*.sh >/dev/null 2>&1
test $? -gt 0
}
function test_invalid_input_file_displays_error_message {
invalid_input_file="$curr_dir/non/existent/file.sh"
bash-test "$invalid_input_file" \
| grep 'ERROR' \
| grep 'Invalid input file' \
| grep -q "$invalid_input_file"
}
function test_invalid_input_file_exits_with_code_1 {
bash-test "$curr_dir"/non/existent/file.sh >/dev/null 2>&1
test $? -eq 1
}
function test_should_locate_scripts_according_to_relative_path {
bash-test "$curr_dir/support/test-script-relative-path.sh" >/dev/null 2>&1
test $? -eq 0
}
# Generator support: one test case is produced per data-provider line.
function test_generator_runs_one_test_case_per_line_on_data_provider {
expected=$(cat "$curr_dir"/support/sample-test-generator.expected_output.txt)
actual=$(bash-test "$curr_dir"/support/sample-test-generator.sh 2>/dev/null)
diff <(exclude_header "$actual") <(echo -e "$expected")
}
# reproducing reported issue #6
function test_clean_data_provider_after_running_its_correspondent_tests {
err="$(mktemp)"
bash-test "$curr_dir"/support/sample-tests-{with-data-providers,1}.sh >/dev/null 2>"$err"
grep 'bad substitution' "$err"
test $? -ne 0
}
|
<reponame>stefanvodita/lucene<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.monitor;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.NamedThreadFactory;
/**
 * Read-only {@code QueryIndex} backed by an externally managed Directory.
 * All mutating operations throw {@link UnsupportedOperationException}; the
 * searcher is refreshed periodically on a dedicated "cache-purge" thread.
 */
class ReadonlyQueryIndex extends QueryIndex {
// Single-threaded scheduler driving the periodic purge/refresh.
private final ScheduledExecutorService refreshExecutor;
public ReadonlyQueryIndex(MonitorConfiguration configuration) throws IOException {
// Read-only mode has no writer, so a Directory must be supplied.
if (configuration.getDirectoryProvider() == null) {
throw new IllegalStateException(
"You must specify a Directory when configuring a Monitor as read-only.");
}
Directory directory = configuration.getDirectoryProvider().get();
this.manager = new SearcherManager(directory, new TermsHashBuilder(termFilters));
this.decomposer = configuration.getQueryDecomposer();
this.serializer = configuration.getQuerySerializer();
this.refreshExecutor =
Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("cache-purge"));
long refreshFrequency = configuration.getPurgeFrequency();
// Periodically refresh the searcher; purge errors are reported to the
// registered listeners rather than propagated.
this.refreshExecutor.scheduleAtFixedRate(
() -> {
try {
this.purgeCache();
} catch (IOException e) {
listeners.forEach(l -> l.onPurgeError(e));
}
},
refreshFrequency,
refreshFrequency,
configuration.getPurgeFrequencyUnits());
}
/** Unsupported: the monitor is read-only. */
@Override
public void commit(List<MonitorQuery> updates) throws IOException {
throw new UnsupportedOperationException("Monitor is readOnly cannot commit");
}
/**
 * Runs the matcher over the stored queries. Returns the time (ns) spent
 * building the query, not the full search time.
 */
@Override
public long search(QueryBuilder queryBuilder, QueryCollector matcher) throws IOException {
IndexSearcher searcher = null;
try {
searcher = manager.acquire();
LazyMonitorQueryCollector collector =
new LazyMonitorQueryCollector(matcher, serializer, decomposer);
long buildTime = System.nanoTime();
// Term filters are keyed by the reader's cache key so the builder
// only considers queries relevant to this point-in-time reader.
Query query =
queryBuilder.buildQuery(
termFilters.get(searcher.getIndexReader().getReaderCacheHelper().getKey()));
buildTime = System.nanoTime() - buildTime;
searcher.search(query, collector);
return buildTime;
} finally {
// Always release the acquired searcher back to the manager.
if (searcher != null) {
manager.release(searcher);
}
}
}
/** Refreshes the searcher and notifies listeners. */
@Override
public void purgeCache() throws IOException {
manager.maybeRefresh();
listeners.forEach(MonitorUpdateListener::onPurge);
}
/** Unsupported: a read-only monitor keeps no query cache to repopulate. */
@Override
void purgeCache(CachePopulator populator) {
throw new UnsupportedOperationException("Monitor is readOnly, it has no cache");
}
@Override
public void close() throws IOException {
// Stop the scheduled refresh before closing the searcher manager.
refreshExecutor.shutdown();
IOUtils.close(manager);
}
@Override
public int numDocs() throws IOException {
IndexSearcher searcher = null;
int numDocs;
try {
searcher = manager.acquire();
numDocs = searcher.getIndexReader().numDocs();
} finally {
if (searcher != null) {
manager.release(searcher);
}
}
return numDocs;
}
/** -1 signals "no cache" in read-only mode. */
@Override
public int cacheSize() {
return -1;
}
/** Unsupported: the monitor is read-only. */
@Override
public void deleteQueries(List<String> ids) throws IOException {
throw new UnsupportedOperationException("Monitor is readOnly cannot delete queries");
}
/** Unsupported: the monitor is read-only. */
@Override
public void clear() throws IOException {
throw new UnsupportedOperationException("Monitor is readOnly cannot clear");
}
@Override
public long getLastPurged() {
return -1;
}
// ---------------------------------------------
// Helper classes...
// ---------------------------------------------
/** A Collector that decodes the stored query for each document hit reparsing them everytime. */
static final class LazyMonitorQueryCollector extends SimpleCollector {
private final QueryIndex.QueryCollector matcher;
private final QueryIndex.DataValues dataValues = new QueryIndex.DataValues();
private final MonitorQuerySerializer serializer;
private final QueryDecomposer decomposer;
LazyMonitorQueryCollector(
QueryIndex.QueryCollector matcher,
MonitorQuerySerializer serializer,
QueryDecomposer decomposer) {
this.matcher = matcher;
this.serializer = serializer;
this.decomposer = decomposer;
}
@Override
public void setScorer(Scorable scorer) {
this.dataValues.scorer = scorer;
}
@Override
public void collect(int doc) throws IOException {
dataValues.advanceTo(doc);
// Re-deserialize the stored MonitorQuery, re-decompose it, and pick
// the sub-query whose cache id matches this document.
BytesRef cache_id = dataValues.cacheId.lookupOrd(dataValues.cacheId.ordValue());
BytesRef query_id = dataValues.queryId.lookupOrd(dataValues.queryId.ordValue());
MonitorQuery mq = serializer.deserialize(dataValues.mq.binaryValue());
QueryCacheEntry query =
QueryCacheEntry.decompose(mq, decomposer).stream()
.filter(queryCacheEntry -> queryCacheEntry.cacheId.equals(cache_id.utf8ToString()))
.findFirst()
.orElseThrow(() -> new IllegalStateException("Cached queries not found"));
matcher.matchQuery(query_id.utf8ToString(), query, dataValues);
}
@Override
public void doSetNextReader(LeafReaderContext context) throws IOException {
// Re-bind the per-segment doc values used by collect().
this.dataValues.cacheId = context.reader().getSortedDocValues(QueryIndex.FIELDS.cache_id);
this.dataValues.queryId = context.reader().getSortedDocValues(QueryIndex.FIELDS.query_id);
this.dataValues.mq = context.reader().getBinaryDocValues(QueryIndex.FIELDS.mq);
this.dataValues.ctx = context;
}
@Override
public ScoreMode scoreMode() {
return matcher.scoreMode();
}
}
}
|
<filename>targets/TARGET_Maxim/TARGET_MAX32625/device/i2cs_regs.h<gh_stars>1000+
/*******************************************************************************
* Copyright (C) 2016 Maxim Integrated Products, Inc., All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL MAXIM INTEGRATED BE LIABLE FOR ANY CLAIM, DAMAGES
* OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* Except as contained in this notice, the name of Maxim Integrated
* Products, Inc. shall not be used except as stated in the Maxim Integrated
* Products, Inc. Branding Policy.
*
* The mere transfer of this software does not imply any licenses
* of trade secrets, proprietary technology, copyrights, patents,
* trademarks, maskwork rights, or any other form of intellectual
* property whatsoever. Maxim Integrated Products, Inc. retains all
* ownership rights.
******************************************************************************/
#ifndef _MXC_I2CS_REGS_H_
#define _MXC_I2CS_REGS_H_
#ifdef __cplusplus
extern "C" {
#endif
#include <stdint.h>
#include "mxc_device.h"
/*
If types are not defined elsewhere (CMSIS) define them here
*/
#ifndef __IO
#define __IO volatile
#endif
#ifndef __I
#define __I volatile const
#endif
#ifndef __O
#define __O volatile
#endif
/*
Typedefed structure(s) for module registers (per instance or section) with direct 32-bit
access to each register in module.
*/
/* Offset Register Description
============= ============================================================================ */
/* I2C Slave (I2CS) register file: clock divisor, device id, interrupt
   flag/enable words, then 32 memory-mapped data-byte registers. */
typedef struct {
    __IO uint32_t clk_div;       /* 0x0000        I2C Slave Clock Divisor Control          */
    __IO uint32_t dev_id;        /* 0x0004        I2C Slave Device ID Register             */
    __IO uint32_t intfl;         /* 0x0008        I2CS Interrupt Flags                     */
    __IO uint32_t inten;         /* 0x000C        I2CS Interrupt Enable/Disable Controls   */
    __IO uint32_t data_byte[32]; /* 0x0010-0x008C I2CS Data Byte (one register per byte)   */
} mxc_i2cs_regs_t;
/*
Register offsets for module I2CS.
*/
/* Byte offsets of each register from the I2CS peripheral base address;
   these mirror the member order of mxc_i2cs_regs_t above. */
#define MXC_R_I2CS_OFFS_CLK_DIV    ((uint32_t)0x00000000UL)
#define MXC_R_I2CS_OFFS_DEV_ID     ((uint32_t)0x00000004UL)
#define MXC_R_I2CS_OFFS_INTFL      ((uint32_t)0x00000008UL)
#define MXC_R_I2CS_OFFS_INTEN      ((uint32_t)0x0000000CUL)
#define MXC_R_I2CS_OFFS_DATA_BYTE  ((uint32_t)0x00000010UL) /* first of the 32 data bytes */
/*
Field positions and masks for module I2CS.
*/
#define MXC_F_I2CS_CLK_DIV_FS_FILTER_CLOCK_DIV_POS 0
#define MXC_F_I2CS_CLK_DIV_FS_FILTER_CLOCK_DIV ((uint32_t)(0x000000FFUL << MXC_F_I2CS_CLK_DIV_FS_FILTER_CLOCK_DIV_POS))
#define MXC_F_I2CS_DEV_ID_SLAVE_DEV_ID_POS 0
#define MXC_F_I2CS_DEV_ID_SLAVE_DEV_ID ((uint32_t)(0x000003FFUL << MXC_F_I2CS_DEV_ID_SLAVE_DEV_ID_POS))
#define MXC_F_I2CS_DEV_ID_TEN_BIT_ID_MODE_POS 12
#define MXC_F_I2CS_DEV_ID_TEN_BIT_ID_MODE ((uint32_t)(0x00000001UL << MXC_F_I2CS_DEV_ID_TEN_BIT_ID_MODE_POS))
#define MXC_F_I2CS_DEV_ID_SLAVE_RESET_POS 14
#define MXC_F_I2CS_DEV_ID_SLAVE_RESET ((uint32_t)(0x00000001UL << MXC_F_I2CS_DEV_ID_SLAVE_RESET_POS))
#define MXC_F_I2CS_INTFL_BYTE0_POS 0
#define MXC_F_I2CS_INTFL_BYTE0 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE0_POS))
#define MXC_F_I2CS_INTFL_BYTE1_POS 1
#define MXC_F_I2CS_INTFL_BYTE1 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE1_POS))
#define MXC_F_I2CS_INTFL_BYTE2_POS 2
#define MXC_F_I2CS_INTFL_BYTE2 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE2_POS))
#define MXC_F_I2CS_INTFL_BYTE3_POS 3
#define MXC_F_I2CS_INTFL_BYTE3 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE3_POS))
#define MXC_F_I2CS_INTFL_BYTE4_POS 4
#define MXC_F_I2CS_INTFL_BYTE4 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE4_POS))
#define MXC_F_I2CS_INTFL_BYTE5_POS 5
#define MXC_F_I2CS_INTFL_BYTE5 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE5_POS))
#define MXC_F_I2CS_INTFL_BYTE6_POS 6
#define MXC_F_I2CS_INTFL_BYTE6 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE6_POS))
#define MXC_F_I2CS_INTFL_BYTE7_POS 7
#define MXC_F_I2CS_INTFL_BYTE7 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE7_POS))
#define MXC_F_I2CS_INTFL_BYTE8_POS 8
#define MXC_F_I2CS_INTFL_BYTE8 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE8_POS))
#define MXC_F_I2CS_INTFL_BYTE9_POS 9
#define MXC_F_I2CS_INTFL_BYTE9 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE9_POS))
#define MXC_F_I2CS_INTFL_BYTE10_POS 10
#define MXC_F_I2CS_INTFL_BYTE10 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE10_POS))
#define MXC_F_I2CS_INTFL_BYTE11_POS 11
#define MXC_F_I2CS_INTFL_BYTE11 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE11_POS))
#define MXC_F_I2CS_INTFL_BYTE12_POS 12
#define MXC_F_I2CS_INTFL_BYTE12 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE12_POS))
#define MXC_F_I2CS_INTFL_BYTE13_POS 13
#define MXC_F_I2CS_INTFL_BYTE13 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE13_POS))
#define MXC_F_I2CS_INTFL_BYTE14_POS 14
#define MXC_F_I2CS_INTFL_BYTE14 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE14_POS))
#define MXC_F_I2CS_INTFL_BYTE15_POS 15
#define MXC_F_I2CS_INTFL_BYTE15 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE15_POS))
#define MXC_F_I2CS_INTFL_BYTE16_POS 16
#define MXC_F_I2CS_INTFL_BYTE16 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE16_POS))
#define MXC_F_I2CS_INTFL_BYTE17_POS 17
#define MXC_F_I2CS_INTFL_BYTE17 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE17_POS))
#define MXC_F_I2CS_INTFL_BYTE18_POS 18
#define MXC_F_I2CS_INTFL_BYTE18 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE18_POS))
#define MXC_F_I2CS_INTFL_BYTE19_POS 19
#define MXC_F_I2CS_INTFL_BYTE19 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE19_POS))
#define MXC_F_I2CS_INTFL_BYTE20_POS 20
#define MXC_F_I2CS_INTFL_BYTE20 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE20_POS))
#define MXC_F_I2CS_INTFL_BYTE21_POS 21
#define MXC_F_I2CS_INTFL_BYTE21 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE21_POS))
#define MXC_F_I2CS_INTFL_BYTE22_POS 22
#define MXC_F_I2CS_INTFL_BYTE22 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE22_POS))
#define MXC_F_I2CS_INTFL_BYTE23_POS 23
#define MXC_F_I2CS_INTFL_BYTE23 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE23_POS))
#define MXC_F_I2CS_INTFL_BYTE24_POS 24
#define MXC_F_I2CS_INTFL_BYTE24 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE24_POS))
#define MXC_F_I2CS_INTFL_BYTE25_POS 25
#define MXC_F_I2CS_INTFL_BYTE25 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE25_POS))
#define MXC_F_I2CS_INTFL_BYTE26_POS 26
#define MXC_F_I2CS_INTFL_BYTE26 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE26_POS))
#define MXC_F_I2CS_INTFL_BYTE27_POS 27
#define MXC_F_I2CS_INTFL_BYTE27 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE27_POS))
#define MXC_F_I2CS_INTFL_BYTE28_POS 28
#define MXC_F_I2CS_INTFL_BYTE28 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE28_POS))
#define MXC_F_I2CS_INTFL_BYTE29_POS 29
#define MXC_F_I2CS_INTFL_BYTE29 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE29_POS))
#define MXC_F_I2CS_INTFL_BYTE30_POS 30
#define MXC_F_I2CS_INTFL_BYTE30 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE30_POS))
#define MXC_F_I2CS_INTFL_BYTE31_POS 31
#define MXC_F_I2CS_INTFL_BYTE31 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTFL_BYTE31_POS))
#define MXC_F_I2CS_INTEN_BYTE0_POS 0
#define MXC_F_I2CS_INTEN_BYTE0 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE0_POS))
#define MXC_F_I2CS_INTEN_BYTE1_POS 1
#define MXC_F_I2CS_INTEN_BYTE1 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE1_POS))
#define MXC_F_I2CS_INTEN_BYTE2_POS 2
#define MXC_F_I2CS_INTEN_BYTE2 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE2_POS))
#define MXC_F_I2CS_INTEN_BYTE3_POS 3
#define MXC_F_I2CS_INTEN_BYTE3 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE3_POS))
#define MXC_F_I2CS_INTEN_BYTE4_POS 4
#define MXC_F_I2CS_INTEN_BYTE4 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE4_POS))
#define MXC_F_I2CS_INTEN_BYTE5_POS 5
#define MXC_F_I2CS_INTEN_BYTE5 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE5_POS))
#define MXC_F_I2CS_INTEN_BYTE6_POS 6
#define MXC_F_I2CS_INTEN_BYTE6 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE6_POS))
#define MXC_F_I2CS_INTEN_BYTE7_POS 7
#define MXC_F_I2CS_INTEN_BYTE7 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE7_POS))
#define MXC_F_I2CS_INTEN_BYTE8_POS 8
#define MXC_F_I2CS_INTEN_BYTE8 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE8_POS))
#define MXC_F_I2CS_INTEN_BYTE9_POS 9
#define MXC_F_I2CS_INTEN_BYTE9 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE9_POS))
#define MXC_F_I2CS_INTEN_BYTE10_POS 10
#define MXC_F_I2CS_INTEN_BYTE10 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE10_POS))
#define MXC_F_I2CS_INTEN_BYTE11_POS 11
#define MXC_F_I2CS_INTEN_BYTE11 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE11_POS))
#define MXC_F_I2CS_INTEN_BYTE12_POS 12
#define MXC_F_I2CS_INTEN_BYTE12 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE12_POS))
#define MXC_F_I2CS_INTEN_BYTE13_POS 13
#define MXC_F_I2CS_INTEN_BYTE13 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE13_POS))
#define MXC_F_I2CS_INTEN_BYTE14_POS 14
#define MXC_F_I2CS_INTEN_BYTE14 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE14_POS))
#define MXC_F_I2CS_INTEN_BYTE15_POS 15
#define MXC_F_I2CS_INTEN_BYTE15 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE15_POS))
#define MXC_F_I2CS_INTEN_BYTE16_POS 16
#define MXC_F_I2CS_INTEN_BYTE16 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE16_POS))
#define MXC_F_I2CS_INTEN_BYTE17_POS 17
#define MXC_F_I2CS_INTEN_BYTE17 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE17_POS))
#define MXC_F_I2CS_INTEN_BYTE18_POS 18
#define MXC_F_I2CS_INTEN_BYTE18 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE18_POS))
#define MXC_F_I2CS_INTEN_BYTE19_POS 19
#define MXC_F_I2CS_INTEN_BYTE19 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE19_POS))
#define MXC_F_I2CS_INTEN_BYTE20_POS 20
#define MXC_F_I2CS_INTEN_BYTE20 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE20_POS))
#define MXC_F_I2CS_INTEN_BYTE21_POS 21
#define MXC_F_I2CS_INTEN_BYTE21 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE21_POS))
#define MXC_F_I2CS_INTEN_BYTE22_POS 22
#define MXC_F_I2CS_INTEN_BYTE22 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE22_POS))
#define MXC_F_I2CS_INTEN_BYTE23_POS 23
#define MXC_F_I2CS_INTEN_BYTE23 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE23_POS))
#define MXC_F_I2CS_INTEN_BYTE24_POS 24
#define MXC_F_I2CS_INTEN_BYTE24 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE24_POS))
#define MXC_F_I2CS_INTEN_BYTE25_POS 25
#define MXC_F_I2CS_INTEN_BYTE25 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE25_POS))
#define MXC_F_I2CS_INTEN_BYTE26_POS 26
#define MXC_F_I2CS_INTEN_BYTE26 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE26_POS))
#define MXC_F_I2CS_INTEN_BYTE27_POS 27
#define MXC_F_I2CS_INTEN_BYTE27 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE27_POS))
#define MXC_F_I2CS_INTEN_BYTE28_POS 28
#define MXC_F_I2CS_INTEN_BYTE28 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE28_POS))
#define MXC_F_I2CS_INTEN_BYTE29_POS 29
#define MXC_F_I2CS_INTEN_BYTE29 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE29_POS))
#define MXC_F_I2CS_INTEN_BYTE30_POS 30
#define MXC_F_I2CS_INTEN_BYTE30 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE30_POS))
#define MXC_F_I2CS_INTEN_BYTE31_POS 31
#define MXC_F_I2CS_INTEN_BYTE31 ((uint32_t)(0x00000001UL << MXC_F_I2CS_INTEN_BYTE31_POS))
#define MXC_F_I2CS_DATA_BYTE_DATA_FIELD_POS 0
#define MXC_F_I2CS_DATA_BYTE_DATA_FIELD ((uint32_t)(0x000000FFUL << MXC_F_I2CS_DATA_BYTE_DATA_FIELD_POS))
#define MXC_F_I2CS_DATA_BYTE_READ_ONLY_FL_POS 8
#define MXC_F_I2CS_DATA_BYTE_READ_ONLY_FL ((uint32_t)(0x00000001UL << MXC_F_I2CS_DATA_BYTE_READ_ONLY_FL_POS))
#define MXC_F_I2CS_DATA_BYTE_DATA_UPDATED_FL_POS 9
#define MXC_F_I2CS_DATA_BYTE_DATA_UPDATED_FL ((uint32_t)(0x00000001UL << MXC_F_I2CS_DATA_BYTE_DATA_UPDATED_FL_POS))
#ifdef __cplusplus
}
#endif
#endif /* _MXC_I2CS_REGS_H_ */
|
<filename>learn/src/main/java/org/ruogu/learn/nio/api/ChannelExample.java
package org.ruogu.learn.nio.api;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
/**
 * ChannelExample
 *
 * Reads a text file through a {@link FileChannel} into a small
 * {@link ByteBuffer} and prints its content character by character.
 *
 * code ref:http://javapapers.com/java/java-nio-channel/
 *
 * @author xueyintao 2016年2月5日 下午4:25:07
 */
public class ChannelExample {

    /** Bytes read from the channel per iteration. */
    private static final int BUFFER_SIZE = 24;

    public static void main(String[] args) throws IOException {
        String fileName = ChannelExample.class.getResource("/").getPath() + "../../files/tmp_nio_read_file.txt";
        System.out.println(fileName);

        // Bug fix: the original leaked the RandomAccessFile (and its channel)
        // when a read threw — try-with-resources closes both on every path.
        try (RandomAccessFile file = new RandomAccessFile(fileName, "r");
             FileChannel fileChannel = file.getChannel()) {
            ByteBuffer byteBuffer = ByteBuffer.allocate(BUFFER_SIZE);
            while (fileChannel.read(byteBuffer) > 0) {
                // flip the buffer to prepare for get operation:
                // limit = position; position = 0
                byteBuffer.flip();
                // drain the buffer while position < limit
                while (byteBuffer.hasRemaining()) {
                    System.out.print((char) byteBuffer.get());
                }
                // clear the buffer ready for the next sequence of reads
                byteBuffer.clear();
            }
        }
    }
}
|
<reponame>mponsa/ethereum-testapp
const Election = artifacts.require("./Election.sol");

contract("Election", function(accounts) {
  it("initializes with two candidates", async () => {
    const election = await Election.deployed();
    const count = await election.candidatesCount();
    assert.equal(count, 2);
  });

  it("it initializes the candidates with the correct values", async () => {
    const election = await Election.deployed();

    // Each candidate tuple is [id, name, voteCount].
    const first = await election.candidates(1);
    assert.equal(first[0], 1, "contains the correct id");
    assert.equal(first[1], "Candidate 1", "contains the correct name");
    assert.equal(first[2], 0, "contains the correct votes count");

    const second = await election.candidates(2);
    assert.equal(second[0], 2, "contains the correct id");
    assert.equal(second[1], "Candidate 2", "contains the correct name");
    assert.equal(second[2], 0, "contains the correct votes count");
  });
});
|
// Product API helpers backed by jQuery.ajax; every method returns the jqXHR
// promise so callers can chain .done()/.fail().
export default {
  /** Fetch SPU records for the given id list. */
  get(num) {
    return $.ajax({
      url: '/single/spus',
      type: 'GET',
      data: { ids: num }
    });
  },

  // upstream shape: https://shopapi.smartisan.com/product/skus?ids=100057701,...&with_stock=true&with_spu=true
  /** Fetch SKU records; when hasShop is truthy, stock and SPU data are included. */
  getSkus(list, hasShop) {
    const params = {
      ids: list,
      with_stock: hasShop,
      with_spu: hasShop
    };
    return $.ajax({ url: '/single/skus', type: 'GET', data: params });
  },

  // upstream shape: https://shopapi.smartisan.com/product/promotions?with_num=true
  /** Fetch current promotions, including item counts. */
  getPromotion() {
    return $.ajax({
      url: '/single/promotions',
      type: 'GET',
      data: { with_num: true }
    });
  }
}
<gh_stars>0
module CandidateInterface
  # Form object for the NARIC "statement of comparability" step of a GCSE
  # qualification: wraps reading the candidate's answer from, and writing it
  # back to, a qualification record.
  class GcseNaricForm
    include ActiveModel::Model

    # have_naric_reference holds the 'Yes'/'No' radio answer; the other two
    # fields are only meaningful when the answer is 'Yes'.
    attr_accessor :have_naric_reference, :naric_reference, :comparable_uk_qualification

    validates :have_naric_reference, presence: true
    # Reference number and UK equivalent are required only when the candidate
    # said they have a NARIC statement.
    validates :naric_reference, :comparable_uk_qualification, presence: true, if: :chose_to_provide_naric_reference?

    # Builds a form pre-populated from a persisted qualification record.
    def self.build_from_qualification(qualification)
      new(
        have_naric_reference: qualification.have_naric_reference,
        naric_reference: qualification.naric_reference,
        comparable_uk_qualification: qualification.comparable_uk_qualification,
      )
    end

    # Persists the NARIC fields onto the qualification. Returns false when
    # the form is invalid; update! raises if the DB write fails.
    # NOTE(review): have_naric_reference itself is not written back here —
    # confirm whether it is persisted elsewhere or this is an omission.
    def save(qualification)
      return false unless valid?

      qualification.update!(
        naric_reference: naric_reference,
        comparable_uk_qualification: comparable_uk_qualification,
      )
    end

    # Copies raw request params in, blanking the dependent fields when the
    # candidate answered anything other than 'Yes'.
    def set_attributes(params)
      @have_naric_reference = params['have_naric_reference']
      @naric_reference = chose_to_provide_naric_reference? ? params['naric_reference'] : nil
      @comparable_uk_qualification = chose_to_provide_naric_reference? ? params['comparable_uk_qualification'] : nil
    end

  private

    def chose_to_provide_naric_reference?
      have_naric_reference == 'Yes'
    end
  end
end
|
/**
* HIRSprite.js
*
* Copyright © 2018 daisuke.t.
*/
var HIRSprite = {};
HIRSprite = function(){} // constructor for use with the `new` operator

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Enums / constants
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Sprite kind: selects which Draw* routine renders the element.
HIRSprite.TYPE =
{
	IMAGE : 0,
	LINE : 1,
	RECT : 2,
	ARC : 3,
};

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Members
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// NOTE(review): the statements below are bare property reads — they create
// nothing at runtime and serve only to document the fields each sprite
// element (`elm`) passed to the Draw* functions is expected to carry.
HIRSprite.mType;
HIRSprite.mX;
HIRSprite.mY;
HIRSprite.mX2;       // line end point (LINE only)
HIRSprite.mY2;       // line end point (LINE only)
HIRSprite.mWidth;
HIRSprite.mHeight;
HIRSprite.mRadius;   // ARC only
HIRSprite.mColor;
HIRSprite.mImage;    // IMAGE only
HIRSprite.mIsVisible;
HIRSprite.mPriority;
HIRSprite.mRotate;   // rotation angle, in degrees
HIRSprite.mScaleX;
HIRSprite.mScaleY;
HIRSprite.mIsAffine; // whether the affine transform (rotate/scale) is applied
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// 描画
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
/**
 * Render one sprite element onto the 2D context, dispatching on its type.
 * Invisible elements are skipped entirely.
 */
HIRSprite.Draw = function(ctx, elm)
{
	if(!elm.mIsVisible) return; // hidden: nothing to draw

	switch(elm.mType)
	{
	case HIRSprite.TYPE.IMAGE:
		HIRSprite.DrawImage(ctx, elm);
		break;
	case HIRSprite.TYPE.LINE:
		HIRSprite.DrawLine(ctx, elm);
		break;
	case HIRSprite.TYPE.RECT:
		HIRSprite.DrawRect(ctx, elm);
		break;
	case HIRSprite.TYPE.ARC:
		HIRSprite.DrawArc(ctx, elm);
		break;
	}
}
/**
 * Draw a straight line from (mX, mY) to (mX2, mY2); mWidth is the stroke
 * width and mColor the stroke colour.
 */
HIRSprite.DrawLine = function(ctx, elm)
{
	ctx.save(); // preserve context state

	// stroke style first, then the path
	ctx.strokeStyle = HIRColor.CSS(elm.mColor);
	ctx.lineWidth = elm.mWidth;

	ctx.beginPath();
	ctx.moveTo(elm.mX, elm.mY);
	ctx.lineTo(elm.mX2, elm.mY2);
	ctx.closePath();
	ctx.stroke();

	ctx.restore(); // restore context state
}
/**
 * Draw a filled rectangle at (mX, mY) of size mWidth x mHeight. When
 * mIsAffine is set, rotation (degrees) and scaling are applied around the
 * rectangle's centre.
 */
HIRSprite.DrawRect = function(ctx, elm)
{
	var cx = elm.mX + (elm.mWidth >> 1);
	var cy = elm.mY + (elm.mHeight >> 1);

	ctx.save(); // preserve context state

	if(elm.mIsAffine)
	{
		// rotate/scale about the centre point
		ctx.translate(cx, cy);
		ctx.rotate(HIRCommon.Deg2Rad(elm.mRotate));
		ctx.scale(elm.mScaleX, elm.mScaleY);
		ctx.translate(-cx, -cy);
	}

	ctx.fillStyle = HIRColor.CSS(elm.mColor);
	ctx.fillRect(elm.mX, elm.mY, elm.mWidth, elm.mHeight);

	ctx.restore(); // restore context state
}
/**
 * Draw a filled circle. The centre is at (mX + mRadius/2, mY + mRadius/2)
 * and the arc uses the full mRadius.
 * NOTE(review): the centre offset is mRadius/2 while the arc radius is
 * mRadius — confirm whether (mX, mY) is meant to be the corner of a
 * bounding box of *diameter* mRadius (this draws a larger circle).
 */
HIRSprite.DrawArc = function(ctx, elm)
{
	var half = elm.mRadius >> 1;
	var cx = elm.mX + half;
	var cy = elm.mY + half;

	ctx.save(); // preserve context state

	if(elm.mIsAffine)
	{
		// rotate/scale about the centre point
		ctx.translate(cx, cy);
		ctx.rotate(HIRCommon.Deg2Rad(elm.mRotate));
		ctx.scale(elm.mScaleX, elm.mScaleY);
		ctx.translate(-cx, -cy);
	}

	ctx.beginPath();
	ctx.arc(cx, cy, elm.mRadius, 0, HIRCommon.Deg2Rad(360), true);
	ctx.closePath();

	ctx.fillStyle = HIRColor.CSS(elm.mColor);
	ctx.fill();

	ctx.restore(); // restore context state
}
/**
 * Draw an image at (mX, mY) scaled to mWidth x mHeight. mColor.mAlpha is
 * applied as the global alpha; a missing image is silently skipped.
 */
HIRSprite.DrawImage = function(ctx, elm)
{
	if(elm.mImage == null) return; // image not loaded (yet)

	var cx = elm.mX + (elm.mWidth >> 1);
	var cy = elm.mY + (elm.mHeight >> 1);

	ctx.save(); // preserve context state

	if(elm.mIsAffine)
	{
		// rotate/scale about the centre point
		ctx.translate(cx, cy);
		ctx.rotate(HIRCommon.Deg2Rad(elm.mRotate));
		ctx.scale(elm.mScaleX, elm.mScaleY);
		ctx.translate(-cx, -cy);
	}

	ctx.globalAlpha = elm.mColor.mAlpha;
	ctx.drawImage(elm.mImage, elm.mX, elm.mY, elm.mWidth, elm.mHeight);

	ctx.restore(); // restore context state
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// プロパティ
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
/**
 * Set the element's scale factors.
 *
 * Generalized (backward-compatibly): a second factor may be supplied for
 * non-uniform scaling; when omitted the original uniform behaviour applies.
 *
 * @param elm    sprite element to modify
 * @param scale  horizontal scale factor (and vertical when scaleY is omitted)
 * @param scaleY optional vertical scale factor; defaults to `scale`
 */
HIRSprite.ScaleSet = function(elm, scale, scaleY)
{
	elm.mScaleX = scale;
	elm.mScaleY = (scaleY === undefined) ? scale : scaleY;
}
|
;(function ($) {
	"use strict";

	// Entry point: wire up every widget/behaviour once the DOM is ready.
	// Each init* function below selects its own target elements, so the
	// calls are independent of each other.
	$(document).ready(function () {
		initCheckboxSwitch();
		initConfirm();
		initDatepicker();
		initMultiselect();
		initPagination();
		initSubmit();
		initToggleFilter();
		initConfirmForm();
		initPreventLosingFormData();
		initCheckboxesWithSelectAction();
		initFieldsEmpty();
		initDisableAfterClick();
		initNumberChooser();
		initTypePagesCollection();
	});
})(jQuery);
// Symfony-style form collection for "page templates" (modèles de page):
// wires delete links onto existing rows and an "add" link that appends
// new prototype-based sub-forms.
function initTypePagesCollection()
{
	// the <ul> holding the collection of page-template sub-forms
	var collectionHolder = $('ul.type-pages');

	// add a delete link to every existing page-template <li>
	// (except the one that carries the "add" link)
	collectionHolder.children('li').not('#type-page-add-link-li').each(function() {
		addTypePageFormDeleteLink($(this));
	});

	// the "add a page template" link and its parent <li>
	var $addTypePageLink = $('#type-page-add-link');
	var $newLinkLi = $addTypePageLink.parent('li');
	// (dead code, kept for reference: the add-link used to be built and
	// appended dynamically here instead of living in the markup)

	$addTypePageLink.on('click', function(e) {
		// keep the link from putting '#' in the URL
		e.preventDefault();
		// append a fresh page-template sub-form (see addTypePageForm)
		addTypePageForm(collectionHolder, $newLinkLi);
	});

	updateTypePageCount();
}
// Append one new page-template sub-form to the collection, just before the
// "add" link row.
function addTypePageForm(collectionHolder, $newLinkLi) {
	// the empty sub-form markup lives in the holder's data-prototype attribute
	var prototype = collectionHolder.attr('data-prototype');

	// replace '__name__' in the prototype HTML with an index based on the
	// current collection length
	var newForm = prototype.replace(/__name__/g, collectionHolder.children().length);

	// wrap the new form in an <li>
	var $newFormLi = $('<li></li>').append(newForm);

	// remove from the new row's <select> every template already chosen by a
	// sibling row, so each template can only be picked once
	collectionHolder.children('li').each(function() {
		var siblingSelectedValue = $(this).find('select').find('option:selected').val();
		$newFormLi.find('select').find('option').each(function() {
			if ($(this).val() === siblingSelectedValue) {
				$(this).remove();
			}
		});
	});
	$newLinkLi.before($newFormLi);

	// give the new row its own delete link
	addTypePageFormDeleteLink($newFormLi);
	initNumberChooser();
	multiselectLauncher($newFormLi.find('.multiselect'));
}
// Append a delete link (minus-circle icon) to a page-template row that
// removes the whole row when clicked.
function addTypePageFormDeleteLink($typePageFormLi) {
	var $removeFormA = $('<div class="col-xs-1 type-page-action pull-right"><a href="#" title="Supprimer ce modèle de page" class="pull-right tip-top"><i class="fa fa-2x fa-minus-circle"></i></a></div>');
	$typePageFormLi.append($removeFormA);

	$removeFormA.on('click', function(e) {
		// keep the link from putting '#' in the URL
		e.preventDefault();
		// drop the whole sub-form row
		$typePageFormLi.remove();
	});
}
// De-duplicate template choices across collection rows: walk each row,
// remember its selection, and strip already-selected options from the
// later rows' <select> elements (rebuilding their multiselect widgets).
function updateTypePageCount() {
	var collectionHolder = $('ul.type-pages');
	var selectInitialCount = collectionHolder.children('li').first().find('select').find('option').length;
	var selectedValues = [];
	collectionHolder.children('li').each(function() {
		var siblingSelectedValue = $(this).find('select').find('option:selected').val();
		$(this).find('select').find('option').each(function() {
			if ($.inArray($(this).val(), selectedValues) >= 0) {
				// Bug fix: $parent was assigned without `var`, leaking an
				// implicit global.
				var $parent = $(this).parent('select');
				$(this).remove();
				$parent.multiselect('rebuild');
			}
		});
		selectedValues.push(siblingSelectedValue);
		// selectInitialCount tracks how many templates remain unchosen
		if (null !== siblingSelectedValue) {
			selectInitialCount = selectInitialCount - 1;
		}
	});
}
// +/- spinner buttons: increment/decrement the sibling numeric input,
// clamped at 0 on the way down.
function initNumberChooser()
{
	$('.spinner .btn.btn-up').on('click', function() {
		var $input = $(this).parent('.input-group').parent('.spinner').find('input');
		var current = parseInt($input.val(), 10);
		// Bug fix: an empty/non-numeric field made parseInt return NaN,
		// which then propagated into the input; treat it as 0.
		if (isNaN(current)) { current = 0; }
		$input.val(current + 1);
	});
	$('.spinner .btn.btn-down').on('click', function() {
		var $input = $(this).parent('.input-group').parent('.spinner').find('input');
		var current = parseInt($input.val(), 10);
		if (isNaN(current)) { current = 0; }
		var newCount = current - 1;
		// never go below zero
		$input.val((newCount >= 0) ? newCount : 0);
	});
}
// Colour the pencil icon in the preceding input-group addon: red while the
// field is empty, blue once it has content.
function initFieldsEmpty()
{
	$('span.input-group-addon').next('input, textarea').keyup(function() {
		var $icon = $(this).prev('span.input-group-addon').find('.fa-pencil');
		if ($(this).val() == '') {
			$icon.addClass('color-red').removeClass('color-blue');
		} else {
			$icon.addClass('color-blue').removeClass('color-red');
		}
	});
}
// Generic confirmation popin: any <a data-confirm="..."> opens the shared
// #popin-confirm modal instead of navigating; the modal's OK button gets
// the link's href with ?modal-confirm=1 appended.
function initConfirm()
{
	$('a[data-confirm]').click(function() {
		var href = $(this).attr('href') + '?modal-confirm=1';
		$('#popin-confirm').find('.modal-body').html($(this).attr('data-confirm'));
		$('#dataConfirmOK').attr('href', href);
		$('#popin-confirm').modal({
			show: true,
			backdrop: 'static'
		});
		return false; // cancel the default navigation
	});
}
// Any element with data-submit="formId" submits that form when clicked.
function initSubmit()
{
	$('[data-submit]').click(function() {
		var formId = $(this).data('submit');
		$('#' + formId).submit();
	});
}
// Bulk-action plumbing: a confirm-guarded action <select> plus the
// "check all"/"check page" master toggles for the row checkboxes.
function initCheckboxesWithSelectAction() {
	initSelectConfirm();
	initRangeCheckboxes();
}
// Bulk-action <select data-confirm>: ask for confirmation in the shared
// popin before running the chosen action against the checked rows.
function initSelectConfirm()
{
	$('select[data-confirm]').change(function(e) {
		if($(this).val() == -1) {
			return false; // placeholder entry selected: nothing to do
		}
		var href = $(this).attr('href') + '?modal-confirm=1';
		$('#popin-confirm').find('.modal-body').html($(this).attr('data-confirm'));
		if($(this).attr('data-url') && $(this).attr('data-elements')) {
			// refuse to act when no row checkbox is ticked
			if($(this).attr('data-elements') && $("input[type='checkbox'][name='" +$(this).attr('data-elements')+ "']:checked").length == 0) {
				$('#popin-confirm').find('.modal-body').html("Vous devez au moins sélectionner un élément.");
				$('#popin-confirm').find('.modal-footer').hide();
				$('#popin-confirm').modal({
					show: true,
					backdrop: 'static'
				});
				return false;
			}
			$('#popin-confirm').find('.modal-footer').show();
			var dataElements = $(this).attr('data-elements');
			var dataStatus = $(this).val();
			var element = $(this);
			// Bug fix: rebind instead of stacking handlers — the original
			// added a new click handler on every change event, so confirming
			// once replayed every previously prepared action.
			$('#dataConfirmOK').off('click').on('click', function(ev) {
				selectAction(element, dataElements, dataStatus);
			});
		}
		$('#popin-confirm').modal({
			show: true,
			backdrop: 'static'
		});
		return false;
	});
}
function initRangeCheckboxes()
{
$('input[type="checkbox"][id="checkboxes-toggle-checkbox-all"], input[type="checkbox"][id="checkboxes-toggle-checkbox-page"]').change(function(e) {
var checkboxesParent = $(this).attr('data-id');
//toggle the checkbox
if($(this).is(':checked')) {
//if previous checked, uncheck them all
$('#' + checkboxesParent).checkboxes('uncheck');
//toggler uncheck
$(this).prop('checked',false);
} else {
//if previous checked, check them all
$('#' + checkboxesParent).checkboxes('check');
//toggler check
$(this).prop('checked',true);
}
//toggle
$('#' + checkboxesParent).checkboxes('toggle');
});
}
// Modal forms (.modal.form-confirm): keep the submit button disabled until
// every required input is non-empty, and reset the form when the modal is
// hidden.
function initConfirmForm()
{
	var modals = $('.modal.form-confirm');
	modals.each(function() {
		var modal = $(this),
			inputs = modal.find('input[required]'),
			submit = modal.find('button[type="submit"]');

		// Bug fix: the original bound this handler to the whole `modals`
		// set on every iteration, stacking N duplicate reset handlers on
		// each modal; bind to the current modal only.
		modal.on('hide.bs.modal', function() {
			$(this).find('form')[0].reset();
		});

		if (!allElementsAreNotEmpty(inputs)) {
			submit.attr('disabled', 'disabled');
		}
		inputs.on('keyup change', function() {
			if (!allElementsAreNotEmpty(inputs)) {
				submit.attr('disabled', 'disabled');
			} else {
				submit.removeAttr('disabled');
			}
		});
	});
}
// Warn (native beforeunload prompt) when leaving a page whose
// [data-form-notification-exit] form has unsaved modifications.
function initPreventLosingFormData()
{
	var forms = $('[data-form-notification-exit="true"]');
	forms.each(function() {
		var form = $(this);
		var initialForm = form.serialize(); // snapshot of the pristine state

		$('input, textarea, select', form).blur(function() {
			var currentForm = form.serialize();
			// arm the prompt only while the form differs from its snapshot
			window.onbeforeunload = initialForm !== currentForm ? function() {
				return 'Des données ont été saisies.';
			} : null;
		});
		form.submit(function(){
			window.onbeforeunload = null; // a legitimate submit must not warn
		});
	});
}
// True when every element in the jQuery set has a non-empty value.
function allElementsAreNotEmpty(elements)
{
	var nonEmpty = 0;
	elements.each(function(event) {
		if ($(this).val() != "") {
			nonEmpty++;
		}
	});
	return nonEmpty === elements.length;
}
// When a filter panel exists, propagate its open/closed state through
// pagination links via the `filter_close` query flag.
function initPagination()
{
	$('.pagination a').on('click', function(e) {
		if ($('.list-filter')[0]) {
			if ($('.list-filter').is(':hidden')) {
				window.location = $(this).attr('href') + '&filter_close';
			} else {
				window.location = $(this).attr('href').replace('filter_close', '');
			}
			return false; // navigation handled above
		}
	});
}
// Attach the bootstrap datepicker to every .date_picker field
// (French locale, earliest selectable date three days back).
function initDatepicker()
{
	var options = {
		format: 'dd/mm/yyyy',
		language: 'fr-FR',
		startDate: '-3d'
	};
	$('.date_picker').datepicker(options);
}
// Boot every .multiselect element through the shared launcher.
function initMultiselect()
{
	$('.multiselect').each(function() {
		multiselectLauncher($(this));
	});
}
// Configure bootstrap-multiselect on `element`. Filtering can be disabled
// per element via data-multiselect-no-filtering; button texts come from
// data-multiselect-all-selected-text / data-multiselect-non-selected-text.
function multiselectLauncher(element)
{
	var multiselectFiltering = element.attr('data-multiselect-no-filtering') ? false : true;
	element.multiselect({
		maxHeight: 200,
		buttonClass: 'btn tip-top',
		enableFiltering: multiselectFiltering,
		enableCaseInsensitiveFiltering: multiselectFiltering,
		filterPlaceholder: 'Rechercher...',
		// Bug fix: these two options read $(this), but `this` is not the
		// target element inside this function, so the data attributes were
		// never picked up; read them from `element` instead.
		allSelectedText: element.attr('data-multiselect-all-selected-text') ? element.attr('data-multiselect-all-selected-text') : '',
		nonSelectedText: element.attr('data-multiselect-non-selected-text') ? element.attr('data-multiselect-non-selected-text') : '',
		onChange: function(option, checked, select) {
			// refresh the tooltip text after the selection changes
			$(this.$button).tooltip('fixTitle');
		}
	});
}
// Toggle the list-filter panel; the button flips between btn-default
// (panel visible) and btn-primary (panel hidden).
function initToggleFilter() {
	var filterButton = '.filter-button',
		filterBlock = '.list-filter';
	if ($(filterBlock).length === 0) {
		return; // no filter panel on this page
	}
	$(filterButton).on('click', function(evt) {
		var $toggler = $(this);
		var opening = !$toggler.hasClass('btn-default');
		var done = function() {
			// swap the button class once the animation finishes
			$toggler.toggleClass('btn-default', opening).toggleClass('btn-primary', !opening);
			window.scrollTo(0, 0);
		};
		if (opening) {
			$(filterBlock).slideDown(250, done);
		} else {
			$(filterBlock).slideUp(250, done);
		}
	});
}
// Turn [data-action='checkbox-switch'] checkboxes into bootstrap-switch
// toggles. Optional data attributes: data-callback (name of a global
// function invoked on change) and data-read-only.
function initCheckboxSwitch() {
	$("[data-action='checkbox-switch']").each(function() {
		var checkboxSwitch = $(this),
			callback = $(this).data('callback'),
			readOnly = undefined !== $(this).data('read-only') ? $(this).data('read-only') : false;

		checkboxSwitch.bootstrapSwitch({
			size: "mini",
			onColor: "success",
			offColor: "danger",
			onText: " ",
			offText: " ",
			readonly: readOnly,
			onSwitchChange: function(event, state) {
				if (undefined !== callback) {
					event.preventDefault();
					// dispatch to the named global callback
					window[callback](checkboxSwitch, state);
				}
			},
			onInit: function(event, state) {
				// mirror the checkbox title onto the rendered switch and
				// attach a bottom-placed tooltip
				checkboxSwitch.closest('.bootstrap-switch')
					.attr('title', checkboxSwitch.attr('title'))
					.tooltip({'placement': 'bottom'})
					.parents('label').css('padding-left', '0px')
				;
			}
		});
	});
}
// After a click, disable the clicked element and its siblings that carry
// the same data-disable-after-click flag (prevents double actions).
function initDisableAfterClick()
{
	$('[data-disable-after-click="true"]').each(function() {
		var $el = $(this);
		$el.on('click', function() {
			$el.addClass('disabled');
			$el.siblings('[data-disable-after-click="true"]').addClass('disabled');
		});
	});
}
/**
 * Forms tools
 */

// Strip HTML tags from a markup string by parsing it into a detached <div>
// and reading back its plain text (innerText fallback for old IE).
function strip(html)
{
	var tmp = document.createElement("DIV");
	tmp.innerHTML = html;
	return tmp.textContent || tmp.innerText || "";
}
// Show/hide the blocking overlay used while a confirmed action runs, and
// close the confirm popin when unblocking.
function confirmLoadToggle(block)
{
	if (block) {
		$('#confirm-overlay').fadeIn();
	} else {
		// Bug fix: a jQuery object is always truthy, so the original
		// `if ($('.modal-dialog'))` / `if ($('#popin-confirm'))` guards
		// were no-ops; test .length to check the elements actually exist.
		if ($('.modal-dialog').length) {
			$('#confirm-overlay').fadeOut();
		}
		if ($('#popin-confirm').length) {
			$('#popin-confirm').modal('hide');
		}
	}
}
// POST `callParams` to the element's data-url and surface the outcome in
// #message-wrapper. `messages` supplies the texts keyed 'activated',
// 'disabled', 'locked' and 'no-action'; `status` picks between the
// activated/disabled success message.
function ajaxActivationAction(elem, callParams, messages, status) {
	$('body').css('cursor', 'wait'); // busy cursor while the request runs
	$.ajax({
		method : 'POST',
		url : elem.attr('data-url'),
		data : callParams
	}).done(function (data) {
		$('body').css('cursor', 'auto');
		confirmLoadToggle(false); // drop the overlay / close the popin
		if (data.success) {
			if(data.callback) {
				// the server asked for a redirect
				window.location.href=data.callback;
			} else {
				var success_message = "";
				if(status) {
					success_message = messages['activated']
				} else {
					success_message = messages['disabled']
				}
				// replace any previous flash messages with the new one
				$('#message-wrapper .alert-success').remove();
				$('#message-wrapper .alert-danger').remove();
				$('#message-wrapper').prepend('<div class="alert alert-success"></div>');
				$('#message-wrapper .alert-success').html(success_message);
			}
		} else {
			if(data.locked) {
				// dismissible alert for the "resource locked" case
				$('#message-wrapper .alert-success').remove();
				$('#message-wrapper .alert-danger').remove();
				$('#message-wrapper').prepend('<div class="alert alert-danger"><button type="button" class="close" data-dismiss="alert">×</button><span class="message-box"></span></div>');
				$('#message-wrapper .alert-danger .message-box').html(messages['locked']);
			} else {
				$('#message-wrapper .alert-success').remove();
				$('#message-wrapper .alert-danger').remove();
				$('#message-wrapper').prepend('<div class="alert alert-danger"></div>');
				$('#message-wrapper .alert-danger').html(messages['no-action']);
			}
		}
	}).fail(function (jqXHR, textStatus, errorThrown) {
		// transport-level failure: generic error banner + console trace
		confirmLoadToggle(false);
		$('body').css('cursor', 'auto');
		$('#message-wrapper .alert-success').remove();
		$('#message-wrapper .alert-danger').remove();
		$('#message-wrapper').prepend('<div class="alert alert-danger">La requête a échoué.</div>');
		//$('#message-wrapper .alert-danger').html(data);
		console.log('Ajax erreur : ' + jqXHR + ' ' + textStatus + ' ' + errorThrown);
	});
}
/**
 * Bulk status change for the contracts list: collects the checked contract
 * ids (or 'all' when the header toggle is checked) and delegates to
 * ajaxActivationAction; reloads the page when nothing is selected.
 *
 * @param {jQuery} elem          - element carrying the data-url target.
 * @param {string} checkboxesName - name attribute of the row checkboxes.
 * @param {boolean} status       - requested state for the selection.
 */
function selectAction(elem, checkboxesName, status) {
    confirmLoadToggle(true);
    // NOTE(review): 'disabled' maps to a *_failed translation key, yet
    // ajaxActivationAction shows it as the success message when status is
    // false — confirm this key is intended.
    var messages = {
        'activated': Translator.trans('messages.change_status_success', {}, 'back_contracts'),
        'disabled': Translator.trans('messages.change_status_failed', {}, 'back_contracts')
    }
    if($("#checkboxes-toggle-checkbox-all").is(':checked')) {
        var callParams = {'contracts': 'all', 'status': status}
    } else {
        var checked = [];
        $("input[type='checkbox'][name='" +checkboxesName+ "']:checked").each(function() {
            checked.push(parseInt($(this).val()))
        });
        var callParams = {'contracts': checked, 'status': status}
    }
    // Works for both branches: the string 'all' has length 3, an empty
    // selection has length 0.
    if(callParams['contracts'].length > 0) {
        ajaxActivationAction(elem, callParams, messages, status);
    } else {
        window.location.reload(true);
    }
}
/**
* Actions for users list
*/
/**
 * Toggles a user's "active" flag through the generic activation helper.
 *
 * @param {jQuery} elem    - element carrying the data-url target.
 * @param {boolean} status - true to activate the user, false to disable.
 */
function changeUserActivationStatus(elem, status) {
    ajaxActivationAction(
        elem,
        {'active': status},
        {
            'activated': Translator.trans('messages.user_activated', {}, 'back_users'),
            'disabled': Translator.trans('messages.user_disabled', {}, 'back_users'),
            'no-action': Translator.trans('messages.no_action', {}, 'back'),
            'locked': Translator.trans('messages.locked', {}, 'back')
        },
        status
    );
}
/**
 * Toggles a user's "receiveMail" preference through the generic activation
 * helper.
 *
 * @param {jQuery} elem    - element carrying the data-url target.
 * @param {boolean} status - true to enable mail reception, false to disable.
 */
function changeUserReceivemailStatus(elem, status) {
    ajaxActivationAction(
        elem,
        {'receiveMail': status},
        {
            'activated': Translator.trans('messages.user_receiveMail_activated', {}, 'back_users'),
            'disabled': Translator.trans('messages.user_receiveMail_disabled', {}, 'back_users'),
            'no-action': Translator.trans('messages.no_action', {}, 'back'),
            'locked': Translator.trans('messages.locked', {}, 'back')
        },
        status
    );
}
/**
* Actions for contract-sets list
*/
/**
 * Toggles the activation status of a contract-set zone through the generic
 * activation helper. The element's data-zone-slug attribute selects the
 * human-readable zone label used in the feedback messages.
 *
 * @param {jQuery} elem    - element carrying data-url and data-zone-slug.
 * @param {boolean} status - requested activation state.
 */
function changeContractSetZoneActivationStatus(elem, status) {
    // Known slugs and their display labels; unknown slugs keep an empty label.
    var zoneLabels = {
        'draft': 'Brouillon',
        'demo': 'Démo',
        'publish': 'Publié'
    };
    var slug = elem.attr('data-zone-slug');
    var data_zone = "";
    if (slug && zoneLabels.hasOwnProperty(slug)) {
        data_zone = zoneLabels[slug];
    }
    ajaxActivationAction(
        elem,
        {'status': status},
        {
            'activated': Translator.trans('messages.contract_set_zone_activated', {'zone_name':data_zone}, 'back_contracts_sets'),
            'disabled': Translator.trans('messages.contract_set_zone_disabled', {'zone_name':data_zone}, 'back_contracts_sets'),
            'no-action': Translator.trans('messages.no_action', {}, 'back'),
            'locked': Translator.trans('messages.locked', {}, 'back')
        },
        status
    );
}
|
/**
* @module store
*/
import store from 'store/InitialStore'
/**
* Testing utilities
*/
import { expect } from 'chai'
// Smoke tests for the shared application store: it must be importable and be
// a plain object. NOTE(review): depends on the project's 'store/InitialStore'
// module and on the mocha/chai globals configured by the test runner.
describe('Store - actual store', () => {
  it('it should exist', () => {
    expect(store).to.exist
  })
  it('it should be an object', () => {
    expect(store).to.be.an('object')
  })
})
|
// Copyright (c) 2014 Readium Foundation and/or its licensees. All rights reserved.
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation and/or
// other materials provided with the distribution.
// 3. Neither the name of the organization nor the names of its contributors may be
// used to endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
// OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE
package org.readium.sdk.android.launcher;
import java.util.Arrays;
import org.readium.sdk.android.Container;
import org.readium.sdk.android.EPub3;
import org.readium.sdk.android.launcher.model.BookmarkDatabase;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
/**
 * Landing screen for an opened EPUB container: shows the book's metadata,
 * navigation lists (figures, illustrations, tables, page list, TOC) and
 * bookmarks, and opens the matching detail activity when an entry is tapped.
 */
public class BookDataActivity extends Activity {

    private Context context;
    // Container being presented; looked up from ContainerHolder in onCreate()
    // and released in onBackPressed().
    private Container container;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.book_data);
        context = this;

        Intent intent = getIntent();
        // Bug fix: intent flags are a bitmask, so comparing with == only
        // matched when FLAG_ACTIVITY_NEW_TASK was the *only* flag set.
        // Test the individual bit instead.
        if ((intent.getFlags() & Intent.FLAG_ACTIVITY_NEW_TASK) != 0) {
            Bundle extras = intent.getExtras();
            if (extras != null) {
                String value = extras.getString(Constants.BOOK_NAME);
                getActionBar().setTitle(value);
                container = ContainerHolder.getInstance().get(extras.getLong(Constants.CONTAINER_ID));
                if (container == null) {
                    // The container is no longer held; nothing to display.
                    finish();
                    return;
                }
            }
        }
        initMetadata();
        initPageList();
        initBookmark();
    }

    /** Fills the bookmark list with a single entry showing the bookmark count. */
    private void initBookmark() {
        int number = BookmarkDatabase.getInstance().getBookmarks(container.getName()).size();
        final ListView bookmark = (ListView) findViewById(R.id.bookmark);
        String bookmarks = "Bookmarks (" + number + ")";
        String[] bookmark_values = new String[] { bookmarks };
        Class<?>[] classList = new Class<?>[] { BookmarksActivity.class };
        this.setListViewContent(bookmark, bookmark_values, classList);
    }

    /** Fills the navigation list: figures, illustrations, tables, page list, TOC. */
    private void initPageList() {
        final ListView pageList = (ListView) findViewById(R.id.pageList);
        String[] pageList_values = new String[] {
                getString(R.string.list_of_figures),
                getString(R.string.list_of_illustrations),
                getString(R.string.list_of_tables),
                getString(R.string.page_list),
                getString(R.string.table_of_contents) };
        Class<?>[] classList = new Class<?>[] {
                ListOfFiguresActivity.class,
                ListOfIllustrationsActivity.class,
                ListOfTablesActivity.class,
                PageListActivity.class,
                TableOfContentsActivity.class };
        this.setListViewContent(pageList, pageList_values, classList);
    }

    /** Fills the metadata list: general metadata and spine items. */
    private void initMetadata() {
        final ListView metadata = (ListView) findViewById(R.id.metaData);
        String[] metadata_values = new String[] {
                getString(R.string.metadata),
                getString(R.string.spine_items) };
        Class<?>[] classList = new Class<?>[] {
                MetaDataActivity.class,
                SpineItemsActivity.class };
        this.setListViewContent(metadata, metadata_values, classList);
    }

    /**
     * Binds labels to a ListView and launches the activity at the clicked
     * position, forwarding the book name and container id as extras.
     * stringArray and classes must be parallel arrays of equal length.
     */
    private void setListViewContent(ListView view, String[] stringArray, final Class<?>[] classes) {
        BookListAdapter bookListAdapter = new BookListAdapter(this, Arrays.asList(stringArray));
        view.setAdapter(bookListAdapter);
        view.setOnItemClickListener(new ListView.OnItemClickListener() {

            @Override
            public void onItemClick(AdapterView<?> arg0, View arg1, int arg2,
                    long arg3) {
                Intent intent = new Intent(context, classes[arg2]);
                intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                intent.putExtra(Constants.BOOK_NAME, container.getName());
                intent.putExtra(Constants.CONTAINER_ID, container.getNativePtr());
                startActivity(intent);
            }
        });
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Refresh the bookmark count; it may have changed in BookmarksActivity.
        initBookmark();
    }

    @Override
    public void onBackPressed() {
        super.onBackPressed();
        if (container != null) {
            ContainerHolder.getInstance().remove(container.getNativePtr());
            // Close book (need to figure out if this is the best place...)
            EPub3.closeBook(container);
        }
    }
}
|
#!/bin/bash
# Integration test for DM load-task transfer behavior across worker restarts.
set -eu

# Directory containing this script, so helpers and conf files resolve relative to it.
cur=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $cur/../_utils/test_prepare
WORK_DIR=$TEST_DIR/$TEST_NAME
API_VERSION="v1alpha1"
WORKER1="worker1"
WORKER2="worker2"
WORKER3="worker3"
# Verify that when a worker restarts while it still owns the local dump files
# of an unfinished load task, the master transfers its source back to it.
function test_worker_restart() {
	echo "test worker restart"

	# worker1 offline
	kill_process dm-worker1
	check_port_offline $WORKER1_PORT 20

	# source1 bound to worker3
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker3" \
		"\"stage\": \"bound\"" 1 \
		"\"source\": \"mysql-replica-01\"" 1
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker1" \
		"\"stage\": \"offline\"" 1
	# load_task1 reports an error because the dump files live on worker1
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task1" \
		"different worker in load stage, previous worker: $WORKER1, current worker: $WORKER3" 1 \
		"Please check if the previous worker is online." 1

	# worker1 online (failpoint slows the load so the Load stage stays observable)
	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task1\")"
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT

	# transfer to worker1
	check_log_contain_with_retry 'transfer source and worker.*worker1.*worker3.*mysql-replica-01' $WORK_DIR/master/log/dm-master.log

	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker3" \
		"\"stage\": \"free\"" 1
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker1" \
		"\"stage\": \"bound\"" 1 \
		"\"source\": \"mysql-replica-01\"" 1

	# both tasks progress again (one subtask in Load, the other in Sync)
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task1" \
		"\"unit\": \"Load\"" 1 \
		"\"unit\": \"Sync\"" 1
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task2" \
		"\"unit\": \"Load\"" 1 \
		"\"unit\": \"Sync\"" 1
}
# Rarely happens in practice, since a user seldom starts another load task
# right after a previous load task failed. Exercises the master re-balancing
# two sources across three workers while unfinished load tasks pin
# worker/source pairs.
function test_transfer_two_sources() {
	echo "test_transfer_two_sources"

	# worker2 offline
	kill_process dm-worker2
	check_port_offline $WORKER2_PORT 20

	# source2 bound to worker3
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker3" \
		"\"stage\": \"bound\"" 1 \
		"\"source\": \"mysql-replica-02\"" 1
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task2" \
		"different worker in load stage, previous worker: $WORKER2, current worker: $WORKER3" 1

	# start load task for worker3
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"start-task $cur/conf/dm-task3.yaml --remove-meta" \
		"\"result\": true" 2
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task3" \
		"\"unit\": \"Load\"" 1

	# worker2 online
	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDown=sleep(15000)"
	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT

	# worker2 free since (worker3, source2) has load task(load_task3)
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker2" \
		"\"stage\": \"free\"" 1

	# worker1 offline
	kill_process dm-worker1
	check_port_offline $WORKER1_PORT 20

	# source1 bound to worker2
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker2" \
		"\"stage\": \"bound\"" 1 \
		"\"source\": \"mysql-replica-01\"" 1

	# start load_task4 on worker2
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"start-task $cur/conf/dm-task4.yaml --remove-meta" \
		"\"result\": true" 2
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task4" \
		"\"unit\": \"Load\"" 1

	# worker1 online
	export GO_FAILPOINTS=""
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT

	# worker1 free since (worker2, source1) has load task(load_task4)
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker1" \
		"\"stage\": \"free\"" 1

	# now, worker2 waiting worker3 finish load_task3, worker1 waiting worker2 finish load_task4
	# worker3 offline
	kill_process dm-worker3
	check_port_offline $WORKER3_PORT 20

	# source2 bound to worker1
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker1" \
		"\"stage\": \"bound\"" 1 \
		"\"source\": \"mysql-replica-02\"" 1

	# (worker1, source2), (worker2, source1)
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task1" \
		"different worker in load stage, previous worker: $WORKER1, current worker: $WORKER2" 1
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task2" \
		"different worker in load stage, previous worker: $WORKER2, current worker: $WORKER1" 1

	# worker2 finish load_task4
	# master transfer (worker1, source2), (worker2, source1) to (worker1, source1), (worker2, source2)
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker1" \
		"\"stage\": \"bound\"" 1 \
		"\"source\": \"mysql-replica-01\"" 1
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker2" \
		"\"stage\": \"bound\"" 1 \
		"\"source\": \"mysql-replica-02\"" 1

	# task1, 2, 4 running, task3 fail
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status" \
		"\"taskStatus\": \"Running\"" 3 \
		"taskStatus.*Error" 1

	# worker3 online
	export GO_FAILPOINTS=""
	run_dm_worker $WORK_DIR/worker3 $WORKER3_PORT $cur/conf/dm-worker3.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER3_PORT

	# source2 bound to worker3 since load_task3
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member -w -n worker2" \
		"\"stage\": \"bound\"" 1 \
		"\"source\": \"mysql-replica-02\"" 1

	# all task running
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status" \
		"\"taskStatus\": \"Running\"" 4
}
# Verify that a load_task etcd KV left behind by `stop-task` (without cleaning
# meta) does not cause the worker to be re-bound, and that `start-task` brings
# the source back while `--remove-meta` does not trigger a transfer.
function stop_task_left_load() {
	echo "start DM master, workers and sources"
	run_dm_master $WORK_DIR/master $MASTER_PORT1 $cur/conf/dm-master.toml
	check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT1

	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task1\")"
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
	cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml
	sed -i "/relay-binlog-name/i\relay-dir: $WORK_DIR/worker1/relay_log" $WORK_DIR/source1.yaml
	dmctl_operate_source create $WORK_DIR/source1.yaml $SOURCE_ID1

	dmctl_start_task_standalone "$cur/conf/dm-task-standalone.yaml" "--remove-meta"

	export GO_FAILPOINTS=""
	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT

	# kill worker1, load_task1 will be transferred to worker2, but lack local files
	kill_process dm-worker1
	check_port_offline $WORKER1_PORT 20
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task1" \
		"different worker in load stage, previous worker: worker1, current worker: worker2" 1

	# now stop this task without clean meta (left a load_task KV in etcd)
	run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"stop-task load_task1" \
		"\"result\": true" 2

	dmctl_start_task_standalone "$cur/conf/dm-task2-standalone.yaml" "--remove-meta"
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task2" \
		"\"unit\": \"Sync\"" 1

	# after worker1 goes online, although it has unfinished load_task1, but load_task1 is stopped so should not rebound
	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task1\")"
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member --name worker1" \
		"\"source\": \"\"" 1

	# start-task again, expect the source is auto transferred back
	run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"start-task $cur/conf/dm-task-standalone.yaml"
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member --name worker1" \
		"\"source\": \"mysql-replica-01\"" 1

	# repeat again and check start-task --remove-meta will not cause transfer
	run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"stop-task load_task1" \
		"\"result\": true" 2
	kill_process dm-worker1
	check_port_offline $WORKER1_PORT 20
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task2" \
		"\"unit\": \"Sync\"" 1
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
	dmctl_start_task_standalone "$cur/conf/dm-task-standalone.yaml" "--remove-meta"
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task1" \
		"\"unit\": \"Sync\"" 1
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"list-member --name worker1" \
		"\"source\": \"\"" 1

	cleanup_process $*
	cleanup_data load_task1
	cleanup_data load_task2
}
# Main entry: prepares data, runs the stop-task scenario, then boots a
# three-worker cluster with per-worker slow-load failpoints and runs the two
# transfer scenarios before verifying data consistency.
function run() {
	echo "import prepare data"
	run_sql_file $cur/data/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	check_contains 'Query OK, 2 rows affected'
	run_sql_file $cur/data/db2.prepare.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
	check_contains 'Query OK, 3 rows affected'

	stop_task_left_load

	echo "start DM master, workers and sources"
	run_dm_master $WORK_DIR/master $MASTER_PORT1 $cur/conf/dm-master.toml
	check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT1

	# worker1 loading load_task1
	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task1\")"
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
	cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml
	sed -i "/relay-binlog-name/i\relay-dir: $WORK_DIR/worker1/relay_log" $WORK_DIR/source1.yaml
	dmctl_operate_source create $WORK_DIR/source1.yaml $SOURCE_ID1

	# worker2 loading load_task2
	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task2\")"
	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT
	cp $cur/conf/source2.yaml $WORK_DIR/source2.yaml
	sed -i "/relay-binlog-name/i\relay-dir: $WORK_DIR/worker2/relay_log" $WORK_DIR/source2.yaml
	dmctl_operate_source create $WORK_DIR/source2.yaml $SOURCE_ID2

	# worker3 loading load_task3
	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task3\")"
	run_dm_worker $WORK_DIR/worker3 $WORKER3_PORT $cur/conf/dm-worker3.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER3_PORT

	echo "start DM task"
	dmctl_start_task "$cur/conf/dm-task.yaml" "--remove-meta"
	dmctl_start_task "$cur/conf/dm-task2.yaml" "--remove-meta"

	check_log_contain_with_retry 'inject failpoint LoadDataSlowDownByTask' $WORK_DIR/worker1/log/dm-worker.log
	check_log_contain_with_retry 'inject failpoint LoadDataSlowDownByTask' $WORK_DIR/worker2/log/dm-worker.log

	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task1" \
		"\"unit\": \"Load\"" 1 \
		"\"unit\": \"Sync\"" 1
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status load_task2" \
		"\"unit\": \"Load\"" 1 \
		"\"unit\": \"Sync\"" 1

	test_worker_restart

	test_transfer_two_sources

	run_sql_file $cur/data/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/db2.increment.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
	check_sync_diff $WORK_DIR $cur/conf/diff_config1.toml
	check_sync_diff $WORK_DIR $cur/conf/diff_config2.toml
	check_sync_diff $WORK_DIR $cur/conf/diff_config3.toml
	check_sync_diff $WORK_DIR $cur/conf/diff_config4.toml
}
# Clean up data from any previous run, execute the test, then clean up again.
cleanup_data load_task1
cleanup_data load_task2
cleanup_data load_task3
cleanup_data load_task4
# also cleanup dm processes in case of last run failed
cleanup_process $*

run $*

cleanup_process $*

echo "[$(date)] <<<<<< test case $TEST_NAME success! >>>>>>"
|
class EquationOfState:
    """Placeholder equation of state relating density, pressure and internal energy."""

    def pressure(self, density, internal_energy):
        """Return the pressure for the given density and specific internal energy.

        Uses the example relation p = density * internal_energy; replace with
        the actual equation of state.
        """
        return density * internal_energy

    def internal_energy(self, density, pressure):
        """Return the specific internal energy for the given density and pressure.

        Inverse of the example relation above (u = p / density); replace with
        the actual equation of state.
        """
        return pressure / density
# Demonstration: evaluate the example equation of state once and print the result.
eos = EquationOfState()

rho0 = 10  # example density
u0 = 20  # example internal energy

resulting_pressure = eos.pressure(rho0, u0)
print(resulting_pressure)  # 200 for the example inputs (10 * 20)

# Drop the instance once the demonstration is done.
del eos
<reponame>zastari/tropycal<gh_stars>0
r"""Functionality for storing and analyzing a year/season of cyclones."""
import calendar
import numpy as np
import pandas as pd
import re
import scipy.interpolate as interp
import urllib
import warnings
from datetime import datetime as dt,timedelta
from .plot import TrackPlot
from .tools import *
class Season:

    r"""
    Initializes an instance of Season, retrieved via ``TrackDataset.get_season()``.

    Parameters
    ----------
    season : dict
        Dict entry containing all storms within the requested season.
    info : dict
        Dict entry containing general information about the season.

    Returns
    -------
    Season
        Instance of a Season object.
    """

    def __setitem__(self, key, value):
        # Allow dict-style attribute assignment: season['key'] = value.
        self.__dict__[key] = value

    def __getitem__(self, key):
        # Allow dict-style attribute access: season['key'].
        return self.__dict__[key]

    def __repr__(self):
        # Label object
        summary = ["<tropycal.tracks.Season>"]

        # Format keys for summary (cumulative stats computed on the fly)
        season_summary = self.annual_summary()
        summary_keys = {'Total Storms':season_summary['season_storms'],
                        'Named Storms':season_summary['season_named'],
                        'Hurricanes':season_summary['season_hurricane'],
                        'Major Hurricanes':season_summary['season_major'],
                        'Season ACE':season_summary['season_ace']}

        # Add season summary
        summary.append("Season Summary:")
        # Pad keys to the longest key plus 3 for column alignment.
        add_space = np.max([len(key) for key in summary_keys.keys()])+3
        for key in summary_keys.keys():
            key_name = key+":"
            # ACE is a float; everything else is an integer count.
            val = '%0.1f'%(summary_keys[key]) if key == 'Season ACE' else summary_keys[key]
            summary.append(f'{" "*4}{key_name:<{add_space}}{val}')

        # Add additional information copied from `info` at construction time
        summary.append("\nMore Information:")
        add_space = np.max([len(key) for key in self.coords.keys()])+3
        for key in self.coords.keys():
            key_name = key+":"
            val = '%0.1f'%(self.coords[key]) if key == 'ace' else self.coords[key]
            summary.append(f'{" "*4}{key_name:<{add_space}}{val}')

        return "\n".join(summary)

    def __init__(self,season,info):

        # Save the dict entry of the season (storm id -> storm data)
        self.dict = season

        # Add other attributes about the storm: scalar entries of `info`
        # become both instance attributes and entries of self.coords.
        keys = info.keys()
        self.coords = {}
        for key in keys:
            if isinstance(info[key], list) == False and isinstance(info[key], dict) == False:
                self[key] = info[key]
                self.coords[key] = info[key]

    def to_dataframe(self):

        r"""
        Converts the season dict into a pandas DataFrame object.

        Returns
        -------
        `pandas.DataFrame`
            A pandas DataFrame object containing information about the season.
        """

        # Try importing pandas (kept local so the rest of the class works
        # without it; pandas is also imported at module level)
        try:
            import pandas as pd
        except ImportError as e:
            raise RuntimeError("Error: pandas is not available. Install pandas in order to use this function.") from e

        # Get season info
        season_info = self.annual_summary()
        season_info_keys = season_info['id']

        # Set up empty dict for dataframe
        ds = {'id':[],'name':[],'vmax':[],'mslp':[],'category':[],'ace':[],'start_time':[],'end_time':[]}

        # Add every key containing a list into the dict; only storms that
        # appear in the annual summary (i.e. had tropical/subtropical steps)
        # are included.
        keys = [k for k in self.dict.keys()]
        for key in keys:
            if key in season_info_keys:
                sidx = season_info_keys.index(key)
                ds['id'].append(key)
                ds['name'].append(self.dict[key]['name'])
                ds['vmax'].append(season_info['max_wspd'][sidx])
                ds['mslp'].append(season_info['min_mslp'][sidx])
                ds['category'].append(season_info['category'][sidx])
                ds['start_time'].append(self.dict[key]['date'][0])
                ds['end_time'].append(self.dict[key]['date'][-1])
                ds['ace'].append(np.round(season_info['ace'][sidx],1))

        # Convert entire dict to a DataFrame
        ds = pd.DataFrame(ds)

        # Return dataset
        return ds

    def plot(self,ax=None,return_ax=False,cartopy_proj=None,prop={},map_prop={}):

        r"""
        Creates a plot of this season.

        Parameters
        ----------
        ax : axes
            Instance of axes to plot on. If none, one will be generated. Default is none.
        cartopy_proj : ccrs
            Instance of a cartopy projection to use. If none, one will be generated. Default is none.
        prop : dict
            Property of storm track lines.
        map_prop : dict
            Property of cartopy map.
        """

        # Create instance of plot object
        self.plot_obj = TrackPlot()

        # Pacific basins are centered on the dateline to avoid wrap-around.
        # NOTE(review): self.basin is expected to come from `info` via
        # __init__ — confirm it is always present for Season objects.
        if self.basin in ['east_pacific','west_pacific','south_pacific','australia','all']:
            self.plot_obj.create_cartopy(proj='PlateCarree',central_longitude=180.0)
        else:
            self.plot_obj.create_cartopy(proj='PlateCarree',central_longitude=0.0)

        # Plot storm
        return_ax = self.plot_obj.plot_season(self,ax=ax,return_ax=return_ax,prop=prop,map_prop=map_prop)

        # Return axis
        if ax != None or return_ax == True: return return_ax

    def annual_summary(self):

        r"""
        Generates a summary for this season with various cumulative statistics.

        Returns
        -------
        dict
            Dictionary containing various statistics about this season.
        """

        # Initialize dict with info about all of year's storms
        hurdat_year = {'id':[],'operational_id':[],'name':[],'max_wspd':[],'min_mslp':[],'category':[],'ace':[]}

        # Search for corresponding entry in keys
        count_ss_pure = 0
        count_ss_partial = 0
        iterate_id = 1
        for key in self.dict.keys():

            # Retrieve info about storm
            temp_name = self.dict[key]['name']
            temp_vmax = np.array(self.dict[key]['vmax'])
            temp_mslp = np.array(self.dict[key]['mslp'])
            temp_type = np.array(self.dict[key]['type'])
            temp_time = np.array(self.dict[key]['date'])
            temp_ace = self.dict[key]['ace']

            # Get indices of all tropical/subtropical time steps
            idx = np.where((temp_type == 'SS') | (temp_type == 'SD') | (temp_type == 'TD') | (temp_type == 'TS') | (temp_type == 'HU'))

            # Get times during existence of trop/subtrop storms; storms with
            # no such time steps are skipped entirely.
            if len(idx[0]) == 0: continue
            trop_time = temp_time[idx]
            # season_start/season_end come from the first storm processed.
            if 'season_start' not in hurdat_year.keys():
                hurdat_year['season_start'] = trop_time[0]
                hurdat_year['season_end'] = trop_time[-1]

            # Get max/min values and check for nan's
            np_wnd = np.array(temp_vmax[idx])
            np_slp = np.array(temp_mslp[idx])
            if len(np_wnd[~np.isnan(np_wnd)]) == 0:
                max_wnd = np.nan
                max_cat = -1
            else:
                max_wnd = int(np.nanmax(temp_vmax[idx]))
                max_cat = convert_category(np.nanmax(temp_vmax[idx]))
            if len(np_slp[~np.isnan(np_slp)]) == 0:
                min_slp = np.nan
            else:
                min_slp = int(np.nanmin(temp_mslp[idx]))

            # Append to dict
            hurdat_year['id'].append(key)
            hurdat_year['name'].append(temp_name)
            hurdat_year['max_wspd'].append(max_wnd)
            hurdat_year['min_mslp'].append(min_slp)
            hurdat_year['category'].append(max_cat)
            hurdat_year['ace'].append(temp_ace)
            hurdat_year['operational_id'].append(self.dict[key]['operational_id'])

            # Handle operational vs. non-operational storms

            # Check for purely subtropical storms (never TD/TS/HU)
            if 'SS' in temp_type and True not in np.isin(temp_type,['TD','TS','HU']):
                count_ss_pure += 1

            # Check for partially subtropical storms
            if 'SS' in temp_type:
                count_ss_partial += 1

        # Add generic season info (named >= 34 kt, hurricane >= 65 kt,
        # major >= 100 kt)
        hurdat_year['season_storms'] = len(hurdat_year['name'])
        narray = np.array(hurdat_year['max_wspd'])
        narray = narray[~np.isnan(narray)]
        hurdat_year['season_named'] = len(narray[narray>=34])
        hurdat_year['season_hurricane'] = len(narray[narray>=65])
        hurdat_year['season_major'] = len(narray[narray>=100])
        hurdat_year['season_ace'] = np.sum(hurdat_year['ace'])
        hurdat_year['season_subtrop_pure'] = count_ss_pure
        hurdat_year['season_subtrop_partial'] = count_ss_partial

        # Return object
        return hurdat_year
|
"""
Construct an algorithm to predict the class of a given datapoint
"""
def predict_class(X):
    """
    Predicts the class of a given datapoint.

    Parameters:
        X (list): the datapoint (feature vector)

    Returns:
        int: the predicted class

    Raises:
        NotImplementedError: always, until a real classifier is plugged in.
            The original stub returned the undefined name `predicted_class`,
            which raised a confusing NameError at call time; failing fast
            with an explicit error makes the missing implementation obvious.
    """
    raise NotImplementedError(
        "predict_class: no classification algorithm has been implemented yet"
    )
# generated from colcon_core/shell/template/hook_prepend_value.sh.em
# Prepend this package's Python site-packages directory to PYTHONPATH,
# skipping duplicates. `_colcon_prepend_unique_value` and
# COLCON_CURRENT_PREFIX are provided by the sourcing colcon environment
# script; this hook is not meant to be executed standalone.
_colcon_prepend_unique_value PYTHONPATH "$COLCON_CURRENT_PREFIX/lib/python3.5/site-packages"
|
module Tomo
  class CLI
    # Parses the argument/option specification strings used by CLI commands
    # and collects the corresponding rule objects (Argument, Switch,
    # ValueSwitch) for later matching.
    class Rules
      autoload :Argument, "tomo/cli/rules/argument"
      autoload :Switch, "tomo/cli/rules/switch"
      autoload :ValueSwitch, "tomo/cli/rules/value_switch"

      # Maps a positional-argument spec pattern to the private factory method
      # that builds its rule.
      ARG_PATTERNS = {
        /\A\[[A-Z_]+\]\z/ => :optional_arg_rule,
        /\A[A-Z_]+\z/ => :required_arg_rule,
        /\A\[[A-Z_]+\.\.\.\]\z/ => :multiple_optional_args_rule
      }.freeze

      # Maps an option spec pattern to the private factory method that builds
      # its rule.
      OPTION_PATTERNS = {
        /\A--\[no-\]([\-a-z]+)\z/ => :on_off_switch_rule,
        /\A(-[a-z]), (--[\-a-z]+)\z/ => :basic_switch_rule,
        /\A(-[a-z]), (--[\-a-z]+) [A-Z=_\-]+\z/ => :value_switch_rule
      }.freeze
      private_constant :ARG_PATTERNS, :OPTION_PATTERNS

      def initialize
        @rules = []
      end

      # Registers a positional-argument rule for the given spec string
      # (e.g. "NAME", "[NAME]", "[NAMES...]"). Raises ArgumentError for an
      # unrecognized spec.
      def add_arg(spec, values_proc)
        rule = ARG_PATTERNS.find do |regexp, method|
          break send(method, spec, values_proc) if regexp.match?(spec)
        end
        raise ArgumentError, "Unrecognized arg style: #{spec}" if rule.nil?

        rules << rule
      end

      # Registers an option rule for the given spec string
      # (e.g. "--[no-]color", "-e, --environment ENV"). Raises ArgumentError
      # for an unrecognized spec.
      def add_option(key, spec, values_proc, &block)
        rule = OPTION_PATTERNS.find do |regexp, method|
          match = regexp.match(spec)
          break send(method, key, *match.captures, values_proc, block) if match
        end
        raise ArgumentError, "Unrecognized option style: #{spec}" if rule.nil?

        rules << rule
      end

      def to_a
        rules
      end

      private

      attr_reader :rules

      def optional_arg_rule(spec, values_proc)
        Rules::Argument.new(spec, values_proc: values_proc, required: false, multiple: false)
      end

      def required_arg_rule(spec, values_proc)
        Rules::Argument.new(spec, values_proc: values_proc, required: true, multiple: false)
      end

      # Fixed typo: was `mutiple_optional_args_rule`; renamed here and in
      # ARG_PATTERNS, which references it by symbol.
      def multiple_optional_args_rule(spec, values_proc)
        Rules::Argument.new(spec, multiple: true, values_proc: values_proc)
      end

      # Builds a rule for a "--[no-]name" pair; the block reports true when
      # the positive form was given.
      def on_off_switch_rule(key, name, _values_proc, callback_proc)
        Rules::Switch.new(key, "--#{name}", "--no-#{name}", callback_proc: callback_proc) do |arg|
          arg == "--#{name}"
        end
      end

      def basic_switch_rule(key, *switches, _values_proc, callback_proc)
        Rules::Switch.new(key, *switches, callback_proc: callback_proc)
      end

      def value_switch_rule(key, *switches, values_proc, callback_proc)
        Rules::ValueSwitch.new(key, *switches, values_proc: values_proc, callback_proc: callback_proc)
      end
    end
  end
end
|
<gh_stars>0
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
#include <sys/queue.h>
#include <time.h>
#include <kore/kore.h>
#include <kore/pgsql.h>
#include <kore/http.h>
#include "model/ticket.h"
#include "model/database_engine.h"
#include "shared/shared_error.h"
#include "shared/shared_time.h"
/* Inserts a new ticket; $1 = flight id, $2 = user id, $3 = cost. */
static const char ticket_insert_query[] =
"INSERT INTO \"Ticket\" (flightidentifier, useridentifier, cost) " \
"VALUES ($1, $2, $3)";

/* Fetches a single ticket by its identifier ($1). */
static const char ticket_select_by_ticket_identifier[] =
"SELECT ticketidentifier, flightidentifier, useridentifier, cost FROM \"Ticket\" " \
"WHERE ticketidentifier = $1 ORDER BY ticketidentifier ASC;";

/* Fetches all tickets for a flight ($1). */
static const char ticket_select_by_flight_identifier[] =
"SELECT ticketidentifier, flightidentifier, useridentifier, cost FROM \"Ticket\" " \
"WHERE flightidentifier = $1 ORDER BY ticketidentifier ASC;";

/* Fetches all tickets belonging to a user ($1). */
static const char ticket_select_by_user_identifier[] =
"SELECT ticketidentifier, flightidentifier, useridentifier, cost FROM \"Ticket\" " \
"WHERE useridentifier = $1 ORDER BY ticketidentifier ASC;";

/*
 * Updates a ticket row keyed by $1.
 * NOTE(review): SET ticketidentifier = $1 assigns the key to itself
 * (harmless but redundant) — confirm the key column should be in SET.
 */
static const char ticket_update_query[] =
"UPDATE \"Ticket\" SET ticketidentifier = $1, flightidentifier = $2, useridentifier = $3, " \
"cost = $4 WHERE ticketidentifier = $1";
static const char ticket_delete_query[] =
"DELETE FROM \"Session\" WHERE sessionidentifier = $1";
static const char ticket_get_all_tickets_query[] =
"SELECT ticketidentifier, flightidentifier, useridentifier, cost FROM \"Ticket\" " \
"ORDER BY ticketidentifier ASC;";
/* Allocates and initializes a Ticket on the heap.
 * On success returns the ticket and sets *error to SHARED_OK; on allocation
 * failure returns NULL and sets *error. Caller owns the result and must
 * release it with ticket_destroy(). */
Ticket *
ticket_create(uint32_t ticket_identifier, uint32_t flight_identifier, uint32_t user_identifier,
uint32_t cost, uint32_t *error)
{
Ticket *ticket = malloc(sizeof(Ticket));
if(ticket == NULL)
{
kore_log(LOG_ERR, "ticket_create: Could not allocate memory for a ticket structure.\n");
/* NOTE(review): SESSION_ERROR_CREATE looks copy-pasted from the session
 * model -- confirm whether a ticket-specific error code exists. */
*error = (SESSION_ERROR_CREATE);
return NULL;
}
ticket->ticket_identifier = ticket_identifier;
ticket->flight_identifier = flight_identifier;
ticket->user_identifier = user_identifier;
ticket->cost = cost;
*error = (SHARED_OK);
return ticket;
}
/* Frees a ticket created by ticket_create() and NULLs the caller's pointer.
 * Passing NULL is a harmless no-op. */
void
ticket_destroy(Ticket **ticket)
{
    if(ticket != NULL)
    {
        free(*ticket);
        *ticket = NULL;
    }
}
/* Builds a single Ticket from the first row of a kore_pgsql result set.
 * source_location is a struct kore_pgsql *; returns a Ticket * (as void *)
 * on success, or NULL with *error set on failure. */
void *
ticket_create_from_query(void *source_location, uint32_t *error)
{
    /* Every ticket SELECT in this file returns exactly four columns:
     * ticketidentifier, flightidentifier, useridentifier, cost.
     * Fixed: the guard previously required 10 fields, which rejected every
     * valid result row. Also switched perror() to kore_log() for consistency
     * with the rest of this file. */
    if(kore_pgsql_nfields((struct kore_pgsql *) source_location) != 4)
    {
        kore_log(LOG_ERR, "ticket_create_from_query: Invalid source location.");
        *error = (DATABASE_ENGINE_ERROR_RESULT_PARSE);
        return NULL;
    }

    int err = 0;

    /* Column 0: ticketidentifier. */
    uint32_t ticket_identifier = kore_strtonum64(
        kore_pgsql_getvalue((struct kore_pgsql *) source_location, 0, 0), 0, &err);
    if(err != (KORE_RESULT_OK))
    {
        kore_log(LOG_ERR, "ticket_create_from_query: Could not translate db_ticket_identifier " \
            "string to uint32_t.");
        *error = (DATABASE_ENGINE_ERROR_NO_RESULTS);
        return NULL;
    }

    /* Column 1: flightidentifier. */
    uint32_t flight_identifier = kore_strtonum64(
        kore_pgsql_getvalue((struct kore_pgsql *) source_location, 0, 1), 0, &err);
    if(err != (KORE_RESULT_OK))
    {
        kore_log(LOG_ERR, "ticket_create_from_query: Could not translate db_flight_identifier " \
            "string to uint32_t.");
        *error = (DATABASE_ENGINE_ERROR_NO_RESULTS);
        return NULL;
    }

    /* Column 2: useridentifier. */
    uint32_t user_identifier = kore_strtonum64(
        kore_pgsql_getvalue((struct kore_pgsql *) source_location, 0, 2), 0, &err);
    if(err != (KORE_RESULT_OK))
    {
        kore_log(LOG_ERR, "ticket_create_from_query: Could not translate db_user_identifier " \
            "string to uint32_t.");
        *error = (DATABASE_ENGINE_ERROR_NO_RESULTS);
        return NULL;
    }

    /* Column 3: cost. */
    uint32_t cost = kore_strtonum64(
        kore_pgsql_getvalue((struct kore_pgsql *) source_location, 0, 3), 0, &err);
    if(err != (KORE_RESULT_OK))
    {
        kore_log(LOG_ERR, "ticket_create_from_query: Could not translate db_cost " \
            "string to uint32_t.");
        *error = (DATABASE_ENGINE_ERROR_NO_RESULTS);
        return NULL;
    }

    uint32_t create_ticket_result = 0;
    void *temp_ticket = ticket_create(ticket_identifier, flight_identifier, user_identifier, cost,
        &create_ticket_result);
    if(temp_ticket == NULL)
    {
        kore_log(LOG_ERR, "ticket_create_from_query: Could not create a ticket structure.");
        *error = create_ticket_result;
    }
    return temp_ticket;
}
/* Builds a TAILQ collection of tickets from a kore_pgsql result set.
 * source_location is a struct kore_pgsql *; returns a struct TicketCollection *
 * (as void *) on success, or NULL with *error set on failure. The caller owns
 * the collection and must release it with ticket_collection_destroy(). */
void *
ticket_create_collection_from_query(void *source_location, uint32_t *error)
{
    uint32_t number_of_results = kore_pgsql_ntuples((struct kore_pgsql *) source_location);

    struct TicketCollection *ticket_collection = malloc(sizeof(struct TicketCollection));
    /* Fixed: the malloc result was previously used unchecked. */
    if(ticket_collection == NULL)
    {
        kore_log(LOG_ERR, "ticket_create_collection_from_query: Could not allocate memory " \
            "for a ticket collection.");
        *error = (SHARED_ERROR_ALLOC_ERROR);
        return NULL;
    }
    TAILQ_INIT(ticket_collection);

    uint32_t i;
    for(i = 0; i < number_of_results; ++i)
    {
        Ticket *temp_ticket = NULL;
        int err = 0;

        /* Column 0: ticketidentifier. */
        uint32_t ticket_identifier = kore_strtonum64(
            kore_pgsql_getvalue((struct kore_pgsql *) source_location, i, 0), 0, &err);
        if(err != (KORE_RESULT_OK))
        {
            /* Fixed: log messages previously named ticket_create_from_query. */
            kore_log(LOG_ERR, "ticket_create_collection_from_query: Could not translate " \
                "db_ticket_identifier string to uint32_t.");
            *error = (DATABASE_ENGINE_ERROR_NO_RESULTS);
            ticket_collection_destroy(&ticket_collection);
            return NULL;
        }

        /* Column 1: flightidentifier. */
        uint32_t flight_identifier = kore_strtonum64(
            kore_pgsql_getvalue((struct kore_pgsql *) source_location, i, 1), 0, &err);
        if(err != (KORE_RESULT_OK))
        {
            kore_log(LOG_ERR, "ticket_create_collection_from_query: Could not translate " \
                "db_flight_identifier string to uint32_t.");
            *error = (DATABASE_ENGINE_ERROR_NO_RESULTS);
            ticket_collection_destroy(&ticket_collection);
            return NULL;
        }

        /* Column 2: useridentifier. */
        uint32_t user_identifier = kore_strtonum64(
            kore_pgsql_getvalue((struct kore_pgsql *) source_location, i, 2), 0, &err);
        if(err != (KORE_RESULT_OK))
        {
            kore_log(LOG_ERR, "ticket_create_collection_from_query: Could not translate " \
                "db_user_identifier string to uint32_t.");
            *error = (DATABASE_ENGINE_ERROR_NO_RESULTS);
            ticket_collection_destroy(&ticket_collection);
            return NULL;
        }

        /* Column 3: cost. */
        uint32_t cost = kore_strtonum64(
            kore_pgsql_getvalue((struct kore_pgsql *) source_location, i, 3), 0, &err);
        if(err != (KORE_RESULT_OK))
        {
            kore_log(LOG_ERR, "ticket_create_collection_from_query: Could not translate " \
                "db_cost string to uint32_t.");
            *error = (DATABASE_ENGINE_ERROR_NO_RESULTS);
            ticket_collection_destroy(&ticket_collection);
            return NULL;
        }

        uint32_t create_ticket_result = 0;
        temp_ticket = ticket_create(ticket_identifier, flight_identifier, user_identifier, cost,
            &create_ticket_result);
        if(temp_ticket == NULL)
        {
            kore_log(LOG_ERR, "ticket_create_collection_from_query: Could not create a " \
                "ticket structure.");
            *error = create_ticket_result;
            ticket_collection_destroy(&ticket_collection);
            return NULL;
        }

        TicketCollectionNode *temp_ticket_node = malloc(sizeof(TicketCollectionNode));
        if(temp_ticket_node == NULL)
        {
            kore_log(LOG_ERR, "ticket_create_collection_from_query: Could not allocate " \
                "memory for a ticket collection node.");
            ticket_destroy(&temp_ticket);
            ticket_collection_destroy(&ticket_collection);
            *error = (SHARED_ERROR_ALLOC_ERROR);
            return NULL;
        }
        temp_ticket_node->ticket = temp_ticket;
        TAILQ_INSERT_TAIL(ticket_collection, temp_ticket_node, ticket_collection);
        temp_ticket_node = NULL;
    }

    *error = (SHARED_OK);
    return (void *) ticket_collection;
}
/* Releases every node (and its ticket) in a collection built by
 * ticket_create_collection_from_query(), then the collection itself, and
 * NULLs the caller's pointer. NULL input is a harmless no-op. */
uint32_t
ticket_collection_destroy(struct TicketCollection **ticket_collection)
{
    if(ticket_collection == NULL || *ticket_collection == NULL)
    {
        return (SHARED_OK);
    }

    TicketCollectionNode *node;
    while((node = TAILQ_FIRST(*ticket_collection)) != NULL)
    {
        TAILQ_REMOVE(*ticket_collection, node, ticket_collection);
        ticket_destroy(&node->ticket);
        free(node);
    }

    free(*ticket_collection);
    *ticket_collection = NULL;
    return (SHARED_OK);
}
/* Persists a new ticket row; the database assigns the ticket identifier.
 * Returns SHARED_OK on success, otherwise the engine's error code. */
uint32_t
ticket_insert(const Ticket *ticket)
{
    /* Binary query parameters are sent in network byte order. */
    uint32_t net_flight = htonl(ticket->flight_identifier);
    uint32_t net_user = htonl(ticket->user_identifier);
    uint32_t net_cost = htonl(ticket->cost);

    uint32_t write_result = database_engine_execute_write(ticket_insert_query, 3,
        &net_flight, sizeof(net_flight), 1,
        &net_user, sizeof(net_user), 1,
        &net_cost, sizeof(net_cost), 1);

    if(write_result != (SHARED_OK))
    {
        database_engine_log_error("ticket_insert", write_result);
    }
    return write_result;
}
/* Rewrites all columns of the ticket row addressed by ticket_identifier.
 * Returns SHARED_OK on success, otherwise the engine's error code. */
uint32_t
ticket_update(const Ticket *ticket)
{
    /* Binary query parameters are sent in network byte order. */
    uint32_t net_ticket = htonl(ticket->ticket_identifier);
    uint32_t net_flight = htonl(ticket->flight_identifier);
    uint32_t net_user = htonl(ticket->user_identifier);
    uint32_t net_cost = htonl(ticket->cost);

    uint32_t write_result = database_engine_execute_write(ticket_update_query, 4,
        &net_ticket, sizeof(net_ticket), 1,
        &net_flight, sizeof(net_flight), 1,
        &net_user, sizeof(net_user), 1,
        &net_cost, sizeof(net_cost), 1);

    if(write_result != (SHARED_OK))
    {
        database_engine_log_error("ticket_update", write_result);
    }
    return write_result;
}
/* Removes the row addressed by ticket->ticket_identifier using
 * ticket_delete_query. Returns SHARED_OK on success, otherwise the engine's
 * error code. */
uint32_t
ticket_delete(Ticket *ticket)
{
    /* Binary query parameters are sent in network byte order. */
    uint32_t net_ticket = htonl(ticket->ticket_identifier);

    uint32_t write_result = database_engine_execute_write(ticket_delete_query, 1,
        &net_ticket, sizeof(net_ticket), 1);

    if(write_result != (SHARED_OK))
    {
        database_engine_log_error("ticket_delete", write_result);
    }
    return write_result;
}
/* Returns all tickets owned by user_identifier, or NULL with *error set.
 * *error is DATABASE_ENGINE_ERROR_NO_RESULTS when the user simply has no
 * tickets, and SHARED_OK on success. */
struct TicketCollection *
ticket_collection_find_by_user_identifier(uint32_t user_identifier, uint32_t *error)
{
    uint32_t database_user_identifier = htonl(user_identifier);
    void *result = NULL;
    uint32_t query_result = 0;

    result = database_engine_execute_read(ticket_select_by_user_identifier,
        &ticket_create_collection_from_query, &query_result, 1,
        &database_user_identifier, sizeof(database_user_identifier), 1);

    if(result == NULL)
    {
        /* "No results" is an expected outcome; only log real failures. */
        if(query_result != (DATABASE_ENGINE_ERROR_NO_RESULTS))
        {
            database_engine_log_error("ticket_find_by_user_identifier", query_result);
        }
        /* Fixed: previously fell through and overwrote *error with SHARED_OK
         * after a genuine failure. */
        *error = query_result;
        return NULL;
    }

    *error = (SHARED_OK);
    return result;
}
/* Returns all tickets for flight_identifier, or NULL with *error set.
 * *error is DATABASE_ENGINE_ERROR_NO_RESULTS when the flight has no tickets,
 * and SHARED_OK on success. */
struct TicketCollection *
ticket_collection_find_by_flight_identifier(uint32_t flight_identifier, uint32_t *error)
{
    uint32_t database_flight_identifier = htonl(flight_identifier);
    void *result = NULL;
    uint32_t query_result = 0;

    /* Fixed: the arguments were previously shuffled (the parameter pointer
     * occupied the result-code slot); this now mirrors the working call shape
     * of ticket_collection_find_by_user_identifier(). */
    result = database_engine_execute_read(ticket_select_by_flight_identifier,
        &ticket_create_collection_from_query, &query_result, 1,
        &database_flight_identifier, sizeof(database_flight_identifier), 1);

    if(result == NULL)
    {
        /* "No results" is an expected outcome; only log real failures. */
        if(query_result != (DATABASE_ENGINE_ERROR_NO_RESULTS))
        {
            database_engine_log_error("ticket_collection_find_by_flight_identifier", query_result);
        }
        /* Fixed: previously fell through and overwrote *error with SHARED_OK
         * after a genuine failure. */
        *error = query_result;
        return NULL;
    }

    *error = (SHARED_OK);
    return result;
}
/* Returns the ticket matching ticket_identifier (as a one-element collection),
 * or NULL with *error set. *error is DATABASE_ENGINE_ERROR_NO_RESULTS when no
 * such ticket exists, and SHARED_OK on success. */
struct TicketCollection *
ticket_collection_find_by_ticket_identifier(uint32_t ticket_identifier, uint32_t *error)
{
    uint32_t database_ticket_identifier = htonl(ticket_identifier);
    void *result = NULL;
    uint32_t query_result = 0;

    /* Fixed: the arguments were previously shuffled (the parameter pointer
     * occupied the result-code slot); this now mirrors the working call shape
     * of ticket_collection_find_by_user_identifier(). */
    result = database_engine_execute_read(ticket_select_by_ticket_identifier,
        &ticket_create_collection_from_query, &query_result, 1,
        &database_ticket_identifier, sizeof(database_ticket_identifier), 1);

    if(result == NULL)
    {
        /* "No results" is an expected outcome; only log real failures. */
        if(query_result != (DATABASE_ENGINE_ERROR_NO_RESULTS))
        {
            database_engine_log_error("ticket_collection_find_by_ticket_identifier", query_result);
        }
        /* Fixed: previously *error was unconditionally overwritten with
         * SHARED_OK, even on failure. */
        *error = query_result;
        return NULL;
    }

    *error = (SHARED_OK);
    return result;
}
/* Returns every ticket in the table, or NULL with *error set.
 * *error is DATABASE_ENGINE_ERROR_NO_RESULTS (or SHARED_OK) when the table is
 * simply empty, and SHARED_OK on success. */
struct TicketCollection *
ticket_get_all_tickets(uint32_t *error)
{
    uint32_t query_result = (SHARED_OK);
    void *result = database_engine_execute_read(ticket_get_all_tickets_query,
        &ticket_create_collection_from_query, &query_result, 0);

    if(result == NULL)
    {
        /* An empty table is an expected outcome; only log real failures
         * (initialization, query errors, etc.). */
        if(query_result != (DATABASE_ENGINE_ERROR_NO_RESULTS)
            && query_result != (SHARED_OK))
        {
            database_engine_log_error("ticket_get_all_tickets", query_result);
        }
        *error = query_result;
        return NULL;
    }

    /* Fixed: *error was previously left unset on the success path. */
    *error = (SHARED_OK);
    return result;
}
#!/usr/bin/env bash
# Copyright (c) 2014 The Bitcoin Core developers
# Copyright (c) 2014-2015 The Dash developers
# Copyright (c) 2021-2022 The HostxCoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Functions used by more than one test
# Prints all arguments to stderr (used for test failure diagnostics).
function echoerr {
echo "$@" 1>&2;
}
# Usage: ExtractKey <key> "<json_object_string>"
# Warning: this will only work for the very-well-behaved
# JSON produced by hostxcoind, do NOT use it to try to
# parse arbitrary/nested/etc JSON.
function ExtractKey {
# Strip spaces/quotes/braces/newlines, split records on commas and fields on
# colons, then print the value whose key matches $1 (regex match, not exact).
echo $2 | tr -d ' "{}\n' | awk -v RS=',' -F: "\$1 ~ /$1/ { print \$2}"
}
# Usage: CreateDataDir <dir> [extra-conf-line ...]
# Creates <dir> and writes a regtest hostxcoin.conf inside it; every extra
# argument is appended to the conf file as its own line.
function CreateDataDir {
DIR=$1
mkdir -p $DIR
CONF=$DIR/hostxcoin.conf
echo "regtest=1" >> $CONF
echo "keypool=2" >> $CONF
echo "rpcuser=rt" >> $CONF
echo "rpcpassword=rt" >> $CONF
echo "rpcwait=1" >> $CONF
# Notify the test harness (SENDANDWAIT) whenever the wallet sees a transaction.
echo "walletnotify=${SENDANDWAIT} -STOP" >> $CONF
shift
# Remaining arguments become additional config lines.
while (( "$#" )); do
echo $1 >> $CONF
shift
done
}
# Usage: AssertEqual <a> <b>
# Numeric equality check via bc; on mismatch, runs CleanUp (if the caller
# defined one) and exits the test with status 1.
function AssertEqual {
if (( $( echo "$1 == $2" | bc ) == 0 ))
then
echoerr "AssertEqual: $1 != $2"
# Only invoke CleanUp when the caller has defined such a function.
declare -f CleanUp > /dev/null 2>&1
if [[ $? -eq 0 ]] ; then
CleanUp
fi
exit 1
fi
}
# CheckBalance -datadir=... amount account minconf
# Queries getbalance and fails the test (exit 1, after CleanUp if defined)
# when it differs from the expected amount.
function CheckBalance {
# NOTE(review): declare -i truncates a fractional expected amount to an
# integer before the bc comparison -- confirm callers only pass whole numbers.
declare -i EXPECT="$2"
B=$( $CLI $1 getbalance $3 $4 )
if (( $( echo "$B == $EXPECT" | bc ) == 0 ))
then
echoerr "bad balance: $B (expected $2)"
declare -f CleanUp > /dev/null 2>&1
if [[ $? -eq 0 ]] ; then
CleanUp
fi
exit 1
fi
}
# Use: Address <datadir> [account]
# Prints a fresh wallet address from the node at <datadir>.
function Address {
$CLI $1 getnewaddress $2
}
# Send from to amount
# Sends <amount> coins from node <from> to a fresh address on node <to>,
# waiting (via SENDANDWAIT) until the wallet has seen the transaction.
function Send {
from=$1
to=$2
amount=$3
address=$(Address $to)
# txid is left in a global for callers that want to inspect the transaction.
txid=$( ${SENDANDWAIT} $CLI $from sendtoaddress $address $amount )
}
# Use: Unspent <datadir> <n'th-last-unspent> <var>
# Prints field <var> (e.g. txid, amount, vout) of the n'th-last entry in
# listunspent, relying on the fixed key:value layout of the JSON output.
function Unspent {
local r=$( $CLI $1 listunspent | awk -F'[ |:,"]+' "\$2 ~ /$3/ { print \$3 }" | tail -n $2 | head -n 1)
echo $r
}
# Use: CreateTxn1 <datadir> <n'th-last-unspent> <destaddress>
# produces hex from signrawtransaction
# Builds a 1-in/1-out raw transaction spending the chosen unspent output in
# full to <destaddress>, signs it, and prints the signed hex.
function CreateTxn1 {
TXID=$(Unspent $1 $2 txid)
AMOUNT=$(Unspent $1 $2 amount)
VOUT=$(Unspent $1 $2 vout)
RAWTXN=$( $CLI $1 createrawtransaction "[{\"txid\":\"$TXID\",\"vout\":$VOUT}]" "{\"$3\":$AMOUNT}")
ExtractKey hex "$( $CLI $1 signrawtransaction $RAWTXN )"
}
# Use: SendRawTxn <datadir> <hex_txn_data>
# Broadcasts a signed raw transaction, waiting via SENDANDWAIT.
function SendRawTxn {
${SENDANDWAIT} $CLI $1 sendrawtransaction $2
}
# Use: GetBlocks <datadir>
# Prints the node's current block count.
function GetBlocks {
$CLI $1 getblockcount
}
|
const BackgroundSync = require('./BackgroundSync');

// Dependency-injection module definition: registers BackgroundSync under the
// name 'backgroundSync' and eagerly initializes it via __init__.
module.exports = {
  __init__: [ 'backgroundSync' ],
  backgroundSync: [ 'type', BackgroundSync ]
};
package br.com.controle.financeiro.controller.api;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
import java.util.Optional;
import java.util.UUID;
import br.com.controle.financeiro.ControleFinanceiroApplication;
import br.com.controle.financeiro.controller.RestResponseEntityExceptionHandler;
import br.com.controle.financeiro.controller.api.linkbuilder.BankAccountDTOResourceAssembler;
import br.com.controle.financeiro.model.repository.BankAccountRepository;
import br.com.controle.financeiro.model.repository.ClientRepository;
import br.com.controle.financeiro.model.repository.InstitutionRepository;
import br.com.controle.financeiro.service.UserService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Import;
import org.springframework.http.MediaType;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
@RunWith(SpringRunner.class)
@SpringBootTest(classes = { ControleFinanceiroApplication.class, BankAccountDTOResourceAssembler.class })
@AutoConfigureMockMvc
@ActiveProfiles(profiles = "test")
@Import({ RestResponseEntityExceptionHandler.class })
@WithMockUser(value = "someone")
/**
 * MockMvc integration tests for the bank-account REST endpoints under
 * {@code /api/bankaccount}. All repositories and the user service are mocked,
 * so only the web layer (controller + exception handler) is exercised.
 */
public class BankAccountControllerTests extends BaseModelTemplate {
private static final String BANK_ACCOUNT_URI = "/api/bankaccount";
// Request payload reused by the POST/PUT tests; owner/institution are fixed UUIDs.
private static final String ACCOUNT_JSON =
"{\"agency\": \"agency\",\"number\": \"5423\",\"dac\": \"5423\"," +
"\"owner\":\"4609f55b-9c05-4361-9e7a-be2d52cfd6af\",\"institution\": " +
"\"4609f55b-9c05-4361-9e7a-be2d52cfd6aa\"}";
@Autowired
private MockMvc mockMvc;
@MockBean
private BankAccountRepository accountRepository;
@MockBean
private ClientRepository clientRepository;
@MockBean
private InstitutionRepository institutionRepository;
@MockBean
private UserService userService;
/** Wires the common happy-path stubs used by most tests. */
@Before
public void setup() {
this.setupModel();
when(userService.getAuthenticatedUser()).thenReturn(owner);
when(institutionRepository.findById(any())).thenReturn(Optional.of(institution));
when(clientRepository.findByIdAndOwner(any(), any())).thenReturn(Optional.of(client));
when(accountRepository.save(any())).thenReturn(bankAccount);
}
/** GET collection returns 200. */
@Test
public void bankAccountGetAllTest() throws Exception {
mockMvc.perform(MockMvcRequestBuilders.get(BANK_ACCOUNT_URI).accept(MediaType.APPLICATION_JSON_UTF8))
.andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
}
/** POST with a valid payload returns 201. */
@Test
public void bankAccountPostTest() throws Exception {
mockMvc.perform(MockMvcRequestBuilders.post(BANK_ACCOUNT_URI).contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)
.accept(MediaType.APPLICATION_JSON_UTF8).content(ACCOUNT_JSON))
.andExpect(MockMvcResultMatchers.status().isCreated()).andReturn();
}
/** PUT when the account is not found (repository stub returns empty) still succeeds. */
@Test
public void bankAccountPutOldAccountTest() throws Exception {
mockMvc.perform(MockMvcRequestBuilders.put(BANK_ACCOUNT_URI + "/{id}", UUID.randomUUID())
.contentType(MediaType.APPLICATION_JSON_UTF8_VALUE).content(ACCOUNT_JSON))
.andExpect(MockMvcResultMatchers.status().is2xxSuccessful()).andReturn();
}
/** PUT when the account already exists succeeds. */
@Test
public void bankAccountPutNewAccountTest() throws Exception {
when(accountRepository.findByIdAndOwner(any(), any())).thenReturn(Optional.of(bankAccount));
mockMvc.perform(MockMvcRequestBuilders.put(BANK_ACCOUNT_URI + "/{id}", UUID.randomUUID())
.contentType(MediaType.APPLICATION_JSON_UTF8_VALUE).content(ACCOUNT_JSON))
.andExpect(MockMvcResultMatchers.status().is2xxSuccessful()).andReturn();
}
/** GET of a missing account returns 404. */
@Test
public void bankAccountGetOneNotFoundTest() throws Exception {
mockMvc.perform(MockMvcRequestBuilders.get(BANK_ACCOUNT_URI + "/{id}", UUID.randomUUID())
.contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
.andExpect(MockMvcResultMatchers.status().isNotFound()).andReturn();
}
/** GET of an existing account returns 200. */
@Test
public void bankAccountGetOneFoundTest() throws Exception {
when(accountRepository.findByIdAndOwner(any(), any())).thenReturn(Optional.of(bankAccount));
mockMvc.perform(MockMvcRequestBuilders.get(BANK_ACCOUNT_URI + "/{id}", UUID.randomUUID())
.contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
.andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
}
/** DELETE returns 204. */
@Test
public void bankAccountDeleteTest() throws Exception {
mockMvc.perform(MockMvcRequestBuilders.delete(BANK_ACCOUNT_URI + "/{id}", UUID.randomUUID()))
.andExpect(MockMvcResultMatchers.status().isNoContent()).andReturn();
}
}
|
#!/bin/bash
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Abort on the first failed command so a broken download is not silently unpacked.
set -e

data_dir=./data/

# Create the data directory if it does not already exist (mkdir -p is a no-op
# when present, replacing the previous [[ ! -e ]] guard).
mkdir -p "${data_dir}"

# Fetch and unpack the cased BERT-style checkpoint, then move it into data/.
wget https://storage.googleapis.com/xlnet/released_models/cased_L-12_H-768_A-12.zip
unzip cased_L-12_H-768_A-12.zip
mv xlnet_cased_L-12_H-768_A-12 "${data_dir}"
mv cased_L-12_H-768_A-12.zip "${data_dir}"

# Fetch and unpack the STS-B GLUE task data, then move it into data/.
wget https://dl.fbaipublicfiles.com/glue/data/STS-B.zip
unzip STS-B.zip
mv STS-B "${data_dir}"
mv STS-B.zip "${data_dir}"
|
/*******************************************************************************
* Copyright 2018 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
#ifndef CPU_JIT_UNI_PLANAR_CONVOLUTION_HPP
#define CPU_JIT_UNI_PLANAR_CONVOLUTION_HPP
#include "c_types_map.hpp"
#include "cpu_convolution_pd.hpp"
#include "cpu_engine.hpp"
#include "cpu_reducer.hpp"
#include "jit_primitive_conf.hpp"
#include "jit_uni_planar_conv_kernel_f32.hpp"
#include "mkldnn_thread.hpp"
#include "jit_uni_depthwise.hpp"
namespace mkldnn {
namespace impl {
namespace cpu {
/* JIT-compiled forward convolution primitive for planar (nchw/ncdhw) layouts,
 * parameterized by the target CPU ISA. */
template <cpu_isa_t isa>
struct _jit_uni_planar_convolution_fwd_t: public cpu_primitive_t {
// Primitive descriptor: validates the convolution desc and builds the JIT config.
struct pd_t: public cpu_convolution_fwd_pd_t {
pd_t(engine_t *engine, const convolution_desc_t *adesc,
const primitive_attr_t *attr,
const typename pd_t::base_class *hint_fwd_pd)
: cpu_convolution_fwd_pd_t(engine, adesc, attr, hint_fwd_pd)
, jcp_() {}
DECLARE_COMMON_PD_T(
JIT_IMPL_NAME_HELPER("jit_planar:", isa, ""),
_jit_uni_planar_convolution_fwd_t<isa>);
virtual status_t init() override {
using namespace prop_kind;
assert(this->engine()->kind() == engine_kind::cpu);
// Accept only f32 direct forward convolution with non-empty tensors
// and no asymmetric quantization.
bool ok = true
&& this->set_default_params() == status::success
&& utils::one_of(this->desc()->prop_kind, forward_training,
forward_inference)
&& this->desc()->alg_kind == alg_kind::convolution_direct
&& !this->has_zero_dim_memory()
&& utils::everyone_is(data_type::f32,
this->desc()->src_desc.data_type,
this->desc()->weights_desc.data_type,
this->desc()->dst_desc.data_type)
&& IMPLICATION(this->with_bias(),
data_type::f32 == this->desc()->bias_desc.data_type)
&& !this->attr()->has_asymmetric_quantization();
if (!ok) return status::unimplemented;
// Let the kernel decide whether the shapes/attributes are supported
// and fill in the JIT configuration.
status_t sts = jit_uni_planar_conv_fwd_kernel_f32<isa>::init_conf(jcp_, *this->desc(),
*this->src_pd_.desc(), *this->weights_pd_.desc(),
*this->dst_pd_.desc(), *this->attr());
return sts;
}
// JIT kernel configuration produced by init().
jit_conv_conf_t jcp_;
protected:
// Resolves `any` memory formats to the planar defaults (nchw/ncdhw, oihw/oidhw).
virtual status_t set_default_params() override {
using namespace memory_format;
if (this->src_pd_.desc()->format == any)
CHECK(this->src_pd_.set_format(this->ndims() == 4 ? nchw : ncdhw));
if (this->dst_pd_.desc()->format == any)
CHECK(this->dst_pd_.set_format(this->ndims() == 4 ? nchw : ncdhw));
if (this->weights_pd_.desc()->format == any)
CHECK(this->weights_pd_.set_format(this->ndims() == 4 ? oihw : oidhw));
if (this->bias_pd_.desc()->format == any)
CHECK(this->bias_pd_.set_format(x));
return status::success;
}
};
_jit_uni_planar_convolution_fwd_t(const pd_t *apd,
const input_vector &inputs, const output_vector &outputs)
: cpu_primitive_t(apd, inputs, outputs) {
// Kernel is owned by this primitive; released in the destructor.
kernel_ = new jit_uni_planar_conv_fwd_kernel_f32<isa>(pd()->jcp_, *pd()->attr());
}
~_jit_uni_planar_convolution_fwd_t() {
delete kernel_;
};
typedef typename prec_traits<data_type::f32>::type data_t;
// Runs the forward pass and marks the event complete.
virtual void execute(event_t *e) const {
execute_forward();
e->set_state(event_t::ready);
}
private:
void execute_forward() const;
const pd_t *pd() const { return (const pd_t *)primitive_t::pd(); }
jit_uni_planar_conv_fwd_kernel_f32<isa> *kernel_;
};
// ISA-specific aliases used by the implementation list.
using jit_avx512_common_planar_convolution_fwd_t = _jit_uni_planar_convolution_fwd_t<avx512_common>;
using jit_avx2_planar_convolution_fwd_t = _jit_uni_planar_convolution_fwd_t<avx2>;
}
}
}
#endif
|
<filename>main.go
package main
import (
"errors"
_ "image/jpeg"
"log"
"github.com/ably-labs/Ableye/config"
"github.com/hajimehoshi/ebiten/v2"
)
var (
state gameState
)
// init sets the initial game state to the title screen.
func init() {
state = titleScreen
}
// Game implements ebiten's game interface; all state lives in package globals.
type Game struct{}

// NewGame is a constructor for the game.
func NewGame() *Game {
return &Game{}
}
//handleClose is called when the user closes the game window; it closes every
//open realtime client before shutdown.
func handleClose() {
	for _, id := range []connectionID{clientA, clientB, clientC, clientD} {
		conn := connections[id]
		if conn == nil || conn.realtimeClient == nil {
			continue
		}
		conn.realtimeClient.Close()
	}
}
//Update updates the logical state.
func (g *Game) Update() error {
	if ebiten.IsWindowBeingClosed() {
		handleClose()
		// An error must be returned to trigger the window closing once closing has been handled.
		return errors.New("window has been closed")
	}

	// Dispatch the per-frame update for the active game state.
	if state == titleScreen {
		updateTitleScreen()
	} else if state == clientScreen {
		updateClientScreen()
	}
	return nil
}
//Draw renders the screen.
func (g *Game) Draw(screen *ebiten.Image) {
	// Draw debug elements if debug mode is on.
	if config.Cfg.DebugMode {
		drawDebugText(screen)
	}

	// Dispatch drawing for the active game state.
	if state == titleScreen {
		drawTitleScreen(screen)
	} else if state == clientScreen {
		drawRealtimeScreen(screen)
	}
}
//Layout returns the logical screen size, the screen is automatically scaled.
func (g *Game) Layout(outsideWidth, outsideHeight int) (int, int) {
return screenWidth, screenHeight
}
// main configures the window, initialises the screens, and runs the game loop
// until the window is closed or an error occurs.
func main() {
ebiten.SetWindowSize(screenWidth, screenHeight)
ebiten.SetWindowTitle(titleText)
// initialisation
initialiseTitleScreen()
initialiseRealtimeScreen()
// Create a new instance of game.
game := NewGame()
// Set window closing handled to true, so Update can run handleClose first.
ebiten.SetWindowClosingHandled(true)
// Run the game.
if err := ebiten.RunGame(game); err != nil {
log.Fatal(err)
}
}
|
/* Copyright 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
#include "whileStructurizatorNode.h"
using namespace generatorBase;
// Builds a while-loop node from its condition (head), loop body, and the node
// control flow continues to after the loop (exit). Ownership of the child
// nodes is not taken here; they are stored as raw pointers.
WhileStructurizatorNode::WhileStructurizatorNode(IntermediateStructurizatorNode *headNode
, IntermediateStructurizatorNode *bodyNode
, IntermediateStructurizatorNode *exitNode
, QObject *parent)
: IntermediateStructurizatorNode(parent)
, mHeadNode(headNode)
, mBodyNode(bodyNode)
, mExitNode(exitNode)
{
}
// Returns the loop condition node.
IntermediateStructurizatorNode *WhileStructurizatorNode::headNode() const
{
return mHeadNode;
}
// Returns the loop body node.
IntermediateStructurizatorNode *WhileStructurizatorNode::bodyNode() const
{
return mBodyNode;
}
// Returns the node control flow reaches after the loop finishes.
IntermediateStructurizatorNode *WhileStructurizatorNode::exitNode() const
{
return mExitNode;
}
// Reports whether the loop's head or body contains a break. The answer is
// computed once and cached; subsequent calls return the cached flag.
bool WhileStructurizatorNode::analyzeBreak()
{
	if (!mBreakWasAnalyzed) {
		mHasBreakInside = mHeadNode->analyzeBreak() || mBodyNode->analyzeBreak();
		mBreakWasAnalyzed = true;
	}
	return mHasBreakInside;
}
// Identifies this node as a while-loop in the structurizer's node taxonomy.
IntermediateStructurizatorNode::Type WhileStructurizatorNode::type() const
{
return Type::whileloop;
}
// The loop is entered through its condition, so the head's first id is ours.
qReal::Id WhileStructurizatorNode::firstId() const
{
return mHeadNode->firstId();
}
|
(function () {
    "use strict";

    // Base URL paths used to resolve template files.
    var paths = {};
    paths.root = '/';
    paths.templates = paths.root + 'static/templates/';

    // Main navigation entries; `type` selects the controller (see menuTypeToCtrlName).
    var menu = [
        {label: 'Family Calendar', url: '/fc', template: 'calendar.html', type: 'calendar'},
        {label: 'Personal Calendar', url: '/pc', template: 'calendarMonthView.html', type: 'calendar'},
        {label: 'ToDo list', url: '/tl', template: 'construction.html'},
        {label: 'Shopping list', url: '/sl', template: 'construction.html'},
        {label: 'Meal plan', url: '/mp', template: 'construction.html'},
        {label: 'Edit Profile', url: '/ep', template: 'construction.html'},
        {label: 'Edit Family', url: '/ef', template: 'construction.html'},
        {label: 'Chat', url: '/ct', template: 'construction.html'},
        {label: 'Help', url: '/hp', template: 'construction.html'}
    ];

    // Overflow ("more") menu entries with their material icon ligatures.
    var more = [
        {label: 'Logout', ligature: 'exit_to_app'},
        //{label: 'About', ligature: ''},
        //{label: 'Export'},
        {label: 'Print', ligature: 'print'}
    ];

    angular.module('facadu')
        .constant('PATHS', paths)
        .constant('MENU', menu)
        .constant('MORE', more)
        .config(configFn); // Fixed: explicit semicolon instead of relying on ASI

    configFn.$inject = ['$mdThemingProvider', '$mdIconProvider', '$routeProvider', '$locationProvider', 'PATHS', 'MENU'];

    // Configures icons, HTML5 routing, and one route per menu entry.
    function configFn($mdThemingProvider, $mdIconProvider, $routeProvider, $locationProvider, PATHS, MENU) {
        //$mdThemingProvider.theme('default')
        //    .primaryPalette('deep-purple')
        //    .accentPalette('amber');
        $mdIconProvider
            .iconSet('social', 'img/icons/sets/social-icons.svg', 24)
            .defaultIconSet('img/icons/sets/core-icons.svg', 24);

        $locationProvider.html5Mode(true).hashPrefix('!');

        $routeProvider
            .when('/', {
                templateUrl: PATHS.templates + 'construction.html'
            })
            .when('/create', {
                templateUrl: PATHS.templates + 'edit.html',
                controller: 'editCtrl',
                controllerAs: 'vm'
            })
            .when('/edit/:id', {
                templateUrl: PATHS.templates + 'edit.html',
                controller: 'editCtrl',
                controllerAs: 'vm'
            })
            .when('/edit/:id/:data*', {
                templateUrl: PATHS.templates + 'edit.html',
                controller: 'editCtrl',
                controllerAs: 'vm'
            })
            .otherwise('/');

        configMenuRoutes();

        /////////////////////

        // Registers a route for every MENU entry, deriving the controller name
        // from the entry's `type` (entries without a type get no controller).
        function configMenuRoutes() {
            angular.forEach(MENU, function (item) {
                var routeObj = {
                    templateUrl: PATHS.templates + item.template,
                    controller: menuTypeToCtrlName(item.type),
                    controllerAs: 'vm'
                };
                $routeProvider.when(item.url, routeObj);
            });

            // e.g. 'calendar' -> 'CalendarCtrl'; null when type is absent.
            function menuTypeToCtrlName(type) {
                if (!type) return null;
                return type.charAt(0).toUpperCase() + type.slice(1) + 'Ctrl';
            }
        }
    }
})();
|
<reponame>ArcheSpace/Arche.js
export * from "./physics/index";
export type { IClone } from "./IClone";
|
<gh_stars>0
import PropTypes from "prop-types";
import CesiumComponent from "./CesiumComponent";
import { sceneType } from "./types";
// Declarative wrapper exposing Cesium's scene.screenSpaceCameraController as a
// React component: listed props are mirrored onto the underlying Cesium object.
export default class ScreenSpaceCameraController extends CesiumComponent {
// Accepted props; all are forwarded to the Cesium controller (see cesiumProps).
static propTypes = {
...CesiumComponent.propTypes,
bounceAnimationTime: PropTypes.number,
enableCollisionDetection: PropTypes.bool,
enableInputs: PropTypes.bool,
enableLook: PropTypes.bool,
enableRotate: PropTypes.bool,
enableTilt: PropTypes.bool,
enableTranslate: PropTypes.bool,
enableZoom: PropTypes.bool,
inertiaSpin: PropTypes.number,
inertiaTranslate: PropTypes.number,
inertiaZoom: PropTypes.number,
lookEventTypes: PropTypes.any,
maximumMovementRatio: PropTypes.number,
maximumZoomDistance: PropTypes.number,
minimumCollisionTerrainHeight: PropTypes.number,
minimumPickingTerrainHeight: PropTypes.number,
minimumTrackBallHeight: PropTypes.number,
minimumZoomDistance: PropTypes.number,
rotateEventTypes: PropTypes.any,
tiltEventTypes: PropTypes.any,
translateEventTypes: PropTypes.any,
zoomEventTypes: PropTypes.any,
};
// The Cesium scene is provided by an ancestor component via legacy context.
static contextTypes = {
scene: sceneType,
};
// Prop names copied verbatim onto the Cesium controller instance.
static cesiumProps = [
"bounceAnimationTime",
"enableCollisionDetection",
"enableInputs",
"enableLook",
"enableRotate",
"enableTilt",
"enableTranslate",
"enableZoom",
"inertiaSpin",
"inertiaTranslate",
"inertiaZoom",
"lookEventTypes",
"maximumMovementRatio",
"maximumZoomDistance",
"minimumCollisionTerrainHeight",
"minimumPickingTerrainHeight",
"minimumTrackBallHeight",
"minimumZoomDistance",
"rotateEventTypes",
"tiltEventTypes",
"translateEventTypes",
"zoomEventTypes",
];
// Apply props after the element exists, and bind once the component mounts.
static setCesiumOptionsAfterCreate = true;
static initCesiumComponentWhenComponentDidMount = true;
// Reuses the scene's existing controller rather than constructing a new one.
createCesiumElement() {
return this.context.scene.screenSpaceCameraController;
}
}
|
#include "JsonWriter.hpp"
#include "JsonReader.hpp"
#include "JsonValue.hpp"
#include "File.hpp"
#include "Log.hpp"
using namespace obotcha;
// Reads input.json, removes "key_to_remove", and writes the result back.
// Returns 0 on success, -1 on failure.
// Fixed: the function is declared int but previously returned nothing on any
// path, which is undefined behavior when the caller uses the result.
int testremove() {
    try {
        // Read and parse the JSON document from disk.
        JsonReader reader("input.json");
        JsonValue json = reader.parse();

        // Remove the specified key-value pair.
        json.remove("key_to_remove");

        // Write the modified JSON back to the file.
        JsonWriter writer("input.json");
        writer.write(json);
        return 0;
    } catch (const std::exception& e) {
        Log::error("An error occurred: " + std::string(e.what()));
        return -1;
    }
}
package cz.net21.ttulka.json.mock.generator.source;
import java.util.Random;
/**
 * Source implementation that yields a pseudo-randomly chosen channel code
 * (MRP, TON, or NAP) with equal probability.
 *
 * @author s.keshmiri
 */
public class Channel implements Source<String> {

    // Channel code sampled when this instance was constructed.
    private String channel;

    public Channel() {
        this.channel = getNext();
    }

    /**
     * Returns a freshly sampled channel code.
     */
    @Override
    public String getNext() {
        return getRandomChannel();
    }

    /**
     * Picks one of the three known channel codes uniformly at random.
     */
    private String getRandomChannel() {
        final String[] options = {"MRP", "TON", "NAP"};
        return options[new Random().nextInt(options.length)];
    }
}
|
<gh_stars>0
import React from "react";
import * as ReactRedux from "react-redux";
import { fetchQuestion } from "../../reducers/roundReducer";
class QuestionUI extends React.Component {
render() {
return (
<div className="clear">
<h1>{this.props.question}</h1>
<h2>Category: {this.props.category}</h2>
</div>
);
}
}
// Select the question text and its category for the current round.
function mapStateToProps(state) {
  const { question, category } = state.round;
  return { question, category };
}
// Bind the fetchQuestion thunk so the view can request a question by id.
function mapDispatchToProps(dispatch) {
  const doFetchQuestion = (questionid) => dispatch(fetchQuestion(questionid));
  return { doFetchQuestion };
}
// Container component: injects question/category from the round slice and a
// doFetchQuestion dispatcher into QuestionUI.
export const Question = ReactRedux.connect(
  mapStateToProps,
  mapDispatchToProps
)(QuestionUI);
|
<filename>apkanalyser/src/jerl/bcm/util/InjectionBuilder.java
/*
* Copyright (C) 2012 Sony Mobile Communications AB
*
* This file is part of ApkAnalyser.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jerl.bcm.util;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.StringTokenizer;
import jerl.bcm.inj.InjectionClass;
import jerl.bcm.inj.InjectionMethod;
import jerl.blockformat.BFParseException;
import jerl.blockformat.BFReader;
import jerl.blockformat.BFVisitor;
/**
 * Builds injection instances from a block-format (BF) configuration stream.
 *
 * <p>The stream is parsed immediately in the constructor; this class acts as
 * the {@link BFVisitor}, collecting per-class injections into
 * {@link ClassInjContainer}s keyed by target class name.
 */
public class InjectionBuilder implements BFVisitor {

    // Parser state for the block currently being visited.
    private String curClassName = null;    // target class the injections apply to
    private String curInjClassName = null; // injection implementation class name
    private String curArgTypes = null;     // comma-separated constructor arg types
    private String[] curArgValues = null;  // constructor arg values, by index

    private final BFReader pr;

    // key = class name, value = container of injections for that class
    private final Hashtable<String, ClassInjContainer> injectionTable = new Hashtable<String, ClassInjContainer>();

    /**
     * Parses the stream immediately; the visitor callbacks below populate
     * {@link #injectionTable}.
     *
     * @throws IOException      if the stream cannot be read
     * @throws BFParseException if the block format is malformed
     */
    public InjectionBuilder(InputStream is) throws IOException, BFParseException {
        pr = new BFReader(is);
        pr.accept(this);
    }

    /**
     * Returns enumeration of all class names that have method injections.
     * @return
     */
    public Enumeration<String> getClassNamesForInjections() {
        return injectionTable.keys();
    }

    /** Returns the injection container for the given class, or null if none. */
    public ClassInjContainer getClassInjContainer(String className) {
        return injectionTable.get(className);
    }

    @Override
    public void visitBeginBlock(String blockName, String[] args) {
        if (blockName.equals("class")) {
            // start of class block; args[0] is the target class name
            String className = args[0];
            curClassName = className;
        } else if (blockName.equals("injection")) {
            // args[0] is the number of constructor arguments to collect
            int n = Integer.parseInt(args[0]);
            curInjClassName = null;
            curArgTypes = null;
            curArgValues = new String[n];
        }
    }

    @Override
    public void visitEndBlock(String blockName) {
        if (blockName.equals("class")) {
            // end of class block
            curClassName = null;
        } else if (blockName.equals("injection")) {
            // create an injection instance and add to injectionTable
            Object obj = null;
            try {
                obj = createInjection(curInjClassName, curArgTypes, curArgValues);
            } catch (Exception e) {
                // Use Arrays.toString: the original concatenated the array
                // directly, printing "[Ljava.lang.String;@..." instead of the
                // actual argument values.
                System.err.println("Class: " + curInjClassName + ", argtypes=" + curArgTypes + ", argValues="
                        + java.util.Arrays.toString(curArgValues));
                e.printStackTrace();
            }
            if (obj != null) {
                if (obj instanceof InjectionMethod) {
                    addMethodInjection(curClassName, (InjectionMethod) obj);
                } else if (obj instanceof InjectionClass) {
                    addClassInjection(curClassName, (InjectionClass) obj);
                } else {
                    System.err.println("WARNING: Unknown class type");
                }
            }
        }
    }

    @Override
    public void visitProperty(String key, String value) {
        if (key.equals("ClassName")) {
            curInjClassName = value;
        } else if (key.equals("ArgTypes")) {
            curArgTypes = value;
        } else if (key.startsWith("ArgValue")) {
            // keys look like "ArgValue[3]"; the bracketed index selects the slot
            int i = parseIndex(key);
            curArgValues[i] = value;
        } else {
            System.out.println("Unknown property: key='" + key + "', value='" + value + "'");
        }
    }

    @Override
    public String toString() {
        return injectionTable.toString();
    }

    /** Adds a method injection, creating the per-class container on demand. */
    private void addMethodInjection(String className, InjectionMethod inj) {
        ClassInjContainer c = injectionTable.get(className);
        if (c == null) {
            c = new ClassInjContainer(className);
            injectionTable.put(className, c);
        }
        c.addMethodInjection(inj);
    }

    /** Adds a class injection, creating the per-class container on demand. */
    private void addClassInjection(String className, InjectionClass inj) {
        ClassInjContainer c = injectionTable.get(className);
        if (c == null) {
            c = new ClassInjContainer(className);
            injectionTable.put(className, c);
        }
        c.addClassInjection(inj);
    }

    /** Extracts the integer between '[' and ']' in e.g. "ArgValue[2]". */
    private static int parseIndex(String str) {
        int beginIndex = str.indexOf('[');
        int endIndex = str.indexOf(']', beginIndex);
        return Integer.parseInt(str.substring(beginIndex + 1, endIndex).trim());
    }

    /**
     * Converts the value strings into typed constructor parameters according
     * to the comma-separated type list. Supported types: the String, int and
     * boolean type names declared on InjectionUtil.
     */
    private static Object[] createParameters(String types, String[] values) {
        StringTokenizer st = new StringTokenizer(types, ",");
        String[] aTypes = new String[st.countTokens()];
        for (int i = 0; st.hasMoreTokens(); i++) {
            aTypes[i] = st.nextToken().trim();
        }
        Object[] ret = new Object[aTypes.length];
        // loop for all types, create object from value string
        for (int i = 0; i < aTypes.length; i++) {
            if (aTypes[i].equals(InjectionUtil.STRING_TYPE)) {
                ret[i] = values[i];
            } else if (aTypes[i].equals(InjectionUtil.INT_TYPE)) {
                ret[i] = Integer.valueOf(values[i]);
            } else if (aTypes[i].equals(InjectionUtil.BOOLEAN_TYPE)) {
                ret[i] = Boolean.valueOf(values[i]);
            } else {
                System.err.println("ERROR: unknown type, '" + aTypes[i] + "'");
            }
        }
        return ret;
    }

    /**
     * Instantiates the injection class by locating a public constructor whose
     * signature string contains "(types)".
     *
     * NOTE(review): matching constructors via a substring of
     * Constructor.toString() is fragile — the type list must use exactly the
     * fully-qualified names Java prints. Consider Class.getConstructor with
     * resolved parameter classes; kept as-is to preserve behavior.
     */
    private static Object createInjection(String className, String types, String[] values) throws Exception {
        Class<?> cls = Class.forName(className);
        Constructor<?>[] a = cls.getConstructors();
        int i = 0;
        for (i = 0; i < a.length; i++) {
            String tmp = a[i].toString();
            if (tmp.indexOf("(" + types + ")") != -1) {
                break;
            }
        }
        if (i == a.length) {
            // unable to find constructor
            System.err.println("ERROR: unable to find constructor");
            return null;
        }
        try {
            Object[] initargs = createParameters(types, values);
            Object obj = a[i].newInstance(initargs);
            return obj;
        } catch (Exception e) {
            //e.printStackTrace();
            throw new Exception("ClassName='" + className + "', types='" + types + "'");
        }
    }
}
|
<reponame>pvaneck/litelinks-core
/*
* Copyright 2021 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.ibm.watson.litelinks;
/**
 * String constants naming the supported litelinks service-registry types.
 */
public class ServiceRegistryTypes {

    // Constants-only holder; not instantiable.
    private ServiceRegistryTypes() {}

    public static final String ZOOKEEPER = "zookeeper";
    public static final String ETCD = "etcd";

    // only valid client-side
    public static final String STATIC = "static";

    // only valid server-side
    public static final String NONE = "none";
}
|
import React from "react"
import { Link } from "react-router-dom"
import { Text } from "@chakra-ui/react"
const Navigation = (props) => {
const { children, isLast, to = "/", ...rest } = props
var pathArray = window.location.href.split('/');
var newPath = "/" + pathArray[6];
return (
<Text
mb={{ base: isLast ? 0 : 8, sm: 0 }}
mr={{ base: 0, sm: isLast ? 0 : 8 }}
display="block"
as={newPath === to ? 'u' : ''}
{...rest}
>
{console.log("newPath", newPath)}
{console.log(window.location.href)}
{console.log("window.location.pathname", window.location.pathname)}
<Link to={to}>{children}</Link>
</Text>
)
}
export default Navigation |
#include <stdio.h>
/*
 * Reads a 5x5 integer matrix from stdin and prints its main diagonal
 * (matrix[i][i]) separated by spaces.
 *
 * Returns 0 on success, 1 on malformed input.
 */
int main(void) {
    enum { N = 5 };
    int matrix[N][N];

    /* Read the matrix row by row; bail out on malformed input so we never
     * print uninitialized values (the original ignored scanf's result). */
    for (int i = 0; i < N; i++) {
        for (int j = 0; j < N; j++) {
            if (scanf("%d", &matrix[i][j]) != 1) {
                fprintf(stderr, "invalid input\n");
                return 1;
            }
        }
    }

    /* The diagonal needs only a single pass: O(N) instead of the original
     * O(N^2) full scan with an i == j test. */
    for (int i = 0; i < N; i++) {
        printf("%d ", matrix[i][i]);
    }

    return 0;
}
package io.teknek.topshop;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashSet;
import java.util.Set;
import java.util.StringTokenizer;
/** This is the bare bones thing that works logically but basically
* uses a stop words list to avoid super common words.
* could use https://github.com/edwardcapriolo/lang-tools
* to get only interesting parts of speech */
/** This is the bare bones thing that works logically but basically
 * uses a stop words list to avoid super common words.
 * could use https://github.com/edwardcapriolo/lang-tools
 * to get only interesting parts of speech */
public class SimpleTermExtractor implements TermExtractor {

  /** Characters that split titles/descriptions into tokens. */
  private static final String DELIMITERS = " .!,?|";

  private final Set<String> stopWords = new HashSet<>();

  /**
   * Loads the stop-word list from stop_words.txt on the classpath.
   *
   * @throws RuntimeException if the resource is missing or cannot be read
   */
  public SimpleTermExtractor() {
    InputStream in = this.getClass().getClassLoader().getResourceAsStream("stop_words.txt");
    if (in == null) {
      // Fail fast with a clear message instead of an opaque NPE inside the
      // reader (the original dereferenced the stream unchecked).
      throw new RuntimeException("stop_words.txt not found on classpath");
    }
    try (BufferedReader br = new BufferedReader(new InputStreamReader(in))) {
      String word;
      while ((word = br.readLine()) != null) {
        stopWords.add(word);
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Scores every token in each listing's title and description.
   */
  @Override
  public TermResults extractScores(Shop shop) {
    TermResults termResults = new TermResults();
    for (Listing item : shop.getItems()) {
      // Title and description were tokenized by two identical inline loops;
      // factored into one helper.
      scoreText(item.getTitle(), termResults);
      scoreText(item.getDescription(), termResults);
    }
    return termResults;
  }

  /** Tokenizes text and scores each token. */
  private void scoreText(String text, TermResults termResults) {
    StringTokenizer st = new StringTokenizer(text, DELIMITERS);
    while (st.hasMoreTokens()) {
      scoreTerm(st.nextToken(), termResults);
    }
  }

  /** Normalizes a token and records it unless it is a stop word. */
  private void scoreTerm(String term, TermResults termResults) {
    term = term.toLowerCase();
    // Strip only a trailing possessive ("'s" or "’s"); the original used
    // replace(), which removed every occurrence anywhere in the token.
    if (term.endsWith("'s") || term.endsWith("\u2019s")) {
      term = term.substring(0, term.length() - 2);
    }
    if (term.contains("\n")) {
      term = term.replace("\n", "");
    }
    if (stopWords.contains(term)) {
      return;
    }
    termResults.addTerm(term);
  }
}
|
#!/bin/bash
# pmc
# 2013-03-28
# section 1 Thursday
# variables global (and local)

# Demonstrates bash scoping: `local` confines a name to the function body,
# while a plain assignment inside a function creates/overwrites a global.
function func1()
{
    # Before `local` runs, $name still resolves to the caller's global value.
    echo "$FUNCNAME (before) \$name $name"
    local name="tricia"   # function-scoped; does not touch the global `name`
    bananas="Maja"        # no `local`, so this creates a global variable
    echo $FUNCNAME \$name $name
    echo $FUNCNAME \$bananas $bananas
}

# At this point neither variable has been set, so both expand empty.
echo $0 \$name $name
echo $0 \$bananas $bananas
name="Fozzie"
func1
# After the call: $name is unchanged ("Fozzie"); $bananas was set by func1.
echo $0 \$name $name
echo $0 \$bananas $bananas
|
def find_sum(arr):
    """Return the sum of the numbers in ``arr`` (0 when ``arr`` is empty)."""
    return sum(arr)
<reponame>zouvier/BlockChain-Voting<gh_stars>1-10
export default tokens;
|
# Specs for Rollbar's on-demand `basic_socket` plugin, which (on
# active_support < 4.1 with core monkey patching enabled) temporarily swaps
# BasicSocket#as_json for a serialization-safe implementation.
require 'spec_helper'
require 'rollbar'
require 'socket'

Rollbar.plugins.load!

# Shared examples for configurations under which the plugin must refuse to load.
shared_examples 'unloadable' do
  it "doesn't load" do
    subject.load!
    expect(subject.loaded).to eq(false)
  end
end

describe 'basic_socket plugin' do
  subject { Rollbar.plugins.get('basic_socket') }

  # The global load! above may have loaded the plugin; start from the
  # unloaded state and return to it after each example.
  before(:all) do
    Rollbar.plugins.get('basic_socket').unload!
  end

  after(:each) do
    subject.unload!
  end

  it 'is an on demand plugin' do
    expect(subject.on_demand).to eq(true)
  end

  it "doesn't load by default" do
    expect(subject.loaded).to eq(false)
  end

  describe '#load_scoped!' do
    context 'with core monkey patching enabled' do
      before { subject.configuration.disable_core_monkey_patch = false }

      # The BasicSocket#as_json patch only applies on active_support < 4.1.
      if Gem::Version.new(ActiveSupport::VERSION::STRING) < Gem::Version.new('4.1.0')
        context 'using active_support < 4.1' do
          it 'changes implementation of ::BasicSocket#as_json temporarily' do
            original_implementation = BasicSocket
                                      .public_instance_method(:as_json)
                                      .source_location

            subject.load_scoped! do
              expect(subject.loaded).to eq(true)

              socket = TCPSocket.new 'example.com', 80
              expect(socket.as_json).to include(:value)
              expect(socket.as_json[:value]).to match(/TCPSocket/)
            end

            # Once the scoped block exits, the original method must be restored.
            expect(subject.loaded).to eq(false)
            expect(BasicSocket.public_instance_method(:as_json).source_location)
              .to(eq(original_implementation))
          end
        end
      else
        context 'using active_support >= 4.1' do
          context 'when called as transparent' do
            it 'executes provided block even when dependencies are unmet' do
              result = false

              subject.load_scoped!(true) do
                result = true
                expect(subject.loaded).to eq(false) # Plugin should not load
              end

              expect(result).to eq(true)
              expect(subject.loaded).to eq(false)
            end
          end
        end
      end
    end
  end

  describe '#load!' do
    if Gem::Version.new(ActiveSupport::VERSION::STRING) < Gem::Version.new('4.1.0')
      context 'using active_support < 4.1' do
        context 'with core monkey patching enabled' do
          before { subject.configuration.disable_core_monkey_patch = false }

          it 'loads' do
            subject.load!
            expect(subject.loaded).to eq(true)
            subject.unload!
            expect(subject.loaded).to eq(false)
          end

          it 'changes implementation of ::BasicSocket#as_json' do
            subject.load!

            socket = TCPSocket.new 'example.com', 80
            expect(socket.as_json).to include(:value)
            expect(socket.as_json[:value]).to match(/TCPSocket/)

            subject.unload!
            expect(subject.loaded).to eq(false)
          end
        end

        context 'with core monkey patching disabled' do
          before { subject.configuration.disable_core_monkey_patch = true }

          it_should_behave_like 'unloadable'
        end
      end
    else
      context 'using active_support >= 4.1' do
        it_should_behave_like 'unloadable'
      end
    end
  end
end
|
<filename>lib/kickin_ristretto/version.rb
# Gem version constant (referenced by the gemspec).
module KickinRistretto
  VERSION = "0.1.2"
end
|
<gh_stars>100-1000
/***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#include "SizingZone.hpp"
#include "SizingZone_Impl.hpp"
#include "ThermalZone.hpp"
#include "ThermalZone_Impl.hpp"
#include "Model.hpp"
#include "Model_Impl.hpp"
#include <utilities/idd/IddFactory.hxx>
#include <utilities/idd/OS_Sizing_Zone_FieldEnums.hxx>
#include <utilities/idd/IddEnums.hxx>
#include "../utilities/units/Unit.hpp"
#include "../utilities/core/Assert.hpp"
#include "../utilities/sql/SqlFile.hpp"
namespace openstudio {
namespace model {
namespace detail {
// Construct from an IdfObject; asserts the IDD type is Sizing:Zone.
  SizingZone_Impl::SizingZone_Impl(const IdfObject& idfObject, Model_Impl* model, bool keepHandle)
    : ModelObject_Impl(idfObject, model, keepHandle) {
    OS_ASSERT(idfObject.iddObject().type() == SizingZone::iddObjectType());
  }

  // Construct from another workspace object (e.g. when cloning across models).
  SizingZone_Impl::SizingZone_Impl(const openstudio::detail::WorkspaceObject_Impl& other, Model_Impl* model, bool keepHandle)
    : ModelObject_Impl(other, model, keepHandle) {
    OS_ASSERT(other.iddObject().type() == SizingZone::iddObjectType());
  }

  SizingZone_Impl::SizingZone_Impl(const SizingZone_Impl& other, Model_Impl* model, bool keepHandle) : ModelObject_Impl(other, model, keepHandle) {}

  // Sizing:Zone reports no output variables of its own.
  const std::vector<std::string>& SizingZone_Impl::outputVariableNames() const {
    static const std::vector<std::string> result;
    return result;
  }

  IddObjectType SizingZone_Impl::iddObjectType() const {
    return SizingZone::iddObjectType();
  }

  // Returns the ThermalZone this sizing object applies to; throws (via
  // LOG_AND_THROW) when the required Zone/ZoneList reference is missing.
  ThermalZone SizingZone_Impl::thermalZone() const {
    boost::optional<ThermalZone> value = optionalThermalZone();
    if (!value) {
      LOG_AND_THROW(briefDescription() << " does not have an Zone or Zone List attached.");
    }
    return value.get();
  }
// ---- Supply-air temperature / humidity / sizing-factor getters -----------
  // Each getter reads its IDD field with returnDefault = true and, for the
  // required fields, OS_ASSERTs the optional before dereferencing it.
  std::string SizingZone_Impl::zoneCoolingDesignSupplyAirTemperatureInputMethod() const {
    boost::optional<std::string> value = getString(OS_Sizing_ZoneFields::ZoneCoolingDesignSupplyAirTemperatureInputMethod, true);
    OS_ASSERT(value);
    return value.get();
  }

  double SizingZone_Impl::zoneCoolingDesignSupplyAirTemperature() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::ZoneCoolingDesignSupplyAirTemperature, true);
    OS_ASSERT(value);
    return value.get();
  }

  double SizingZone_Impl::zoneCoolingDesignSupplyAirTemperatureDifference() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::ZoneCoolingDesignSupplyAirTemperatureDifference, true);
    OS_ASSERT(value);
    return value.get();
  }

  std::string SizingZone_Impl::zoneHeatingDesignSupplyAirTemperatureInputMethod() const {
    boost::optional<std::string> value = getString(OS_Sizing_ZoneFields::ZoneHeatingDesignSupplyAirTemperatureInputMethod, true);
    OS_ASSERT(value);
    return value.get();
  }

  double SizingZone_Impl::zoneHeatingDesignSupplyAirTemperature() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::ZoneHeatingDesignSupplyAirTemperature, true);
    OS_ASSERT(value);
    return value.get();
  }

  double SizingZone_Impl::zoneHeatingDesignSupplyAirTemperatureDifference() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::ZoneHeatingDesignSupplyAirTemperatureDifference, true);
    OS_ASSERT(value);
    return value.get();
  }

  double SizingZone_Impl::zoneCoolingDesignSupplyAirHumidityRatio() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::ZoneCoolingDesignSupplyAirHumidityRatio, true);
    OS_ASSERT(value);
    return value.get();
  }

  double SizingZone_Impl::zoneHeatingDesignSupplyAirHumidityRatio() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::ZoneHeatingDesignSupplyAirHumidityRatio, true);
    OS_ASSERT(value);
    return value.get();
  }

  // Sizing factors are genuinely optional: no assert, empty optional allowed.
  boost::optional<double> SizingZone_Impl::zoneHeatingSizingFactor() const {
    return getDouble(OS_Sizing_ZoneFields::ZoneHeatingSizingFactor, true);
  }

  boost::optional<double> SizingZone_Impl::zoneCoolingSizingFactor() const {
    return getDouble(OS_Sizing_ZoneFields::ZoneCoolingSizingFactor, true);
  }
// ---- Design air flow / air distribution / DOAS getters -------------------
  // Pattern: each value getter is paired with an is...Defaulted() predicate
  // that reports whether the underlying IDD field is blank (isEmpty).
  std::string SizingZone_Impl::coolingDesignAirFlowMethod() const {
    boost::optional<std::string> value = getString(OS_Sizing_ZoneFields::CoolingDesignAirFlowMethod, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isCoolingDesignAirFlowMethodDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::CoolingDesignAirFlowMethod);
  }

  double SizingZone_Impl::coolingDesignAirFlowRate() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::CoolingDesignAirFlowRate, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isCoolingDesignAirFlowRateDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::CoolingDesignAirFlowRate);
  }

  double SizingZone_Impl::coolingMinimumAirFlowperZoneFloorArea() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::CoolingMinimumAirFlowperZoneFloorArea, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isCoolingMinimumAirFlowperZoneFloorAreaDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::CoolingMinimumAirFlowperZoneFloorArea);
  }

  double SizingZone_Impl::coolingMinimumAirFlow() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::CoolingMinimumAirFlow, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isCoolingMinimumAirFlowDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::CoolingMinimumAirFlow);
  }

  double SizingZone_Impl::coolingMinimumAirFlowFraction() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::CoolingMinimumAirFlowFraction, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isCoolingMinimumAirFlowFractionDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::CoolingMinimumAirFlowFraction);
  }

  std::string SizingZone_Impl::heatingDesignAirFlowMethod() const {
    boost::optional<std::string> value = getString(OS_Sizing_ZoneFields::HeatingDesignAirFlowMethod, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isHeatingDesignAirFlowMethodDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::HeatingDesignAirFlowMethod);
  }

  double SizingZone_Impl::heatingDesignAirFlowRate() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::HeatingDesignAirFlowRate, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isHeatingDesignAirFlowRateDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::HeatingDesignAirFlowRate);
  }

  double SizingZone_Impl::heatingMaximumAirFlowperZoneFloorArea() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::HeatingMaximumAirFlowperZoneFloorArea, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isHeatingMaximumAirFlowperZoneFloorAreaDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::HeatingMaximumAirFlowperZoneFloorArea);
  }

  double SizingZone_Impl::heatingMaximumAirFlow() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::HeatingMaximumAirFlow, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isHeatingMaximumAirFlowDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::HeatingMaximumAirFlow);
  }

  double SizingZone_Impl::heatingMaximumAirFlowFraction() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::HeatingMaximumAirFlowFraction, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isHeatingMaximumAirFlowFractionDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::HeatingMaximumAirFlowFraction);
  }

  double SizingZone_Impl::designZoneAirDistributionEffectivenessinCoolingMode() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::DesignZoneAirDistributionEffectivenessinCoolingMode, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isDesignZoneAirDistributionEffectivenessinCoolingModeDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::DesignZoneAirDistributionEffectivenessinCoolingMode);
  }

  double SizingZone_Impl::designZoneAirDistributionEffectivenessinHeatingMode() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::DesignZoneAirDistributionEffectivenessinHeatingMode, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isDesignZoneAirDistributionEffectivenessinHeatingModeDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::DesignZoneAirDistributionEffectivenessinHeatingMode);
  }

  double SizingZone_Impl::designZoneSecondaryRecirculationFraction() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::DesignZoneSecondaryRecirculationFraction, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isDesignZoneSecondaryRecirculationFractionDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::DesignZoneSecondaryRecirculationFraction);
  }

  double SizingZone_Impl::designMinimumZoneVentilationEfficiency() const {
    boost::optional<double> value = getDouble(OS_Sizing_ZoneFields::DesignMinimumZoneVentilationEfficiency, true);
    OS_ASSERT(value);
    return value.get();
  }

  bool SizingZone_Impl::isDesignMinimumZoneVentilationEfficiencyDefaulted() const {
    return isEmpty(OS_Sizing_ZoneFields::DesignMinimumZoneVentilationEfficiency);
  }

  // Stored as "Yes"/"No" in the IDF; exposed as a bool.
  bool SizingZone_Impl::accountforDedicatedOutdoorAirSystem() const {
    boost::optional<std::string> value = getString(OS_Sizing_ZoneFields::AccountforDedicatedOutdoorAirSystem, true);
    OS_ASSERT(value);
    return openstudio::istringEqual(value.get(), "Yes");
  }

  std::string SizingZone_Impl::dedicatedOutdoorAirSystemControlStrategy() const {
    boost::optional<std::string> value = getString(OS_Sizing_ZoneFields::DedicatedOutdoorAirSystemControlStrategy, true);
    OS_ASSERT(value);
    return value.get();
  }

  boost::optional<double> SizingZone_Impl::dedicatedOutdoorAirLowSetpointTemperatureforDesign() const {
    return getDouble(OS_Sizing_ZoneFields::DedicatedOutdoorAirLowSetpointTemperatureforDesign, true);
  }

  // True when the field holds the literal "autosize" (case-insensitive).
  bool SizingZone_Impl::isDedicatedOutdoorAirLowSetpointTemperatureforDesignAutosized() const {
    bool result = false;
    boost::optional<std::string> value = getString(OS_Sizing_ZoneFields::DedicatedOutdoorAirLowSetpointTemperatureforDesign, true);
    if (value) {
      result = openstudio::istringEqual(value.get(), "autosize");
    }
    return result;
  }

  boost::optional<double> SizingZone_Impl::dedicatedOutdoorAirHighSetpointTemperatureforDesign() const {
    return getDouble(OS_Sizing_ZoneFields::DedicatedOutdoorAirHighSetpointTemperatureforDesign, true);
  }

  bool SizingZone_Impl::isDedicatedOutdoorAirHighSetpointTemperatureforDesignAutosized() const {
    bool result = false;
    boost::optional<std::string> value = getString(OS_Sizing_ZoneFields::DedicatedOutdoorAirHighSetpointTemperatureforDesign, true);
    if (value) {
      result = openstudio::istringEqual(value.get(), "autosize");
    }
    return result;
  }
// ---- Setters and resets --------------------------------------------------
  // Setters return whether the underlying field write succeeded; reset
  // methods blank the field (restoring IDD-default behavior) and assert.
  bool SizingZone_Impl::setThermalZone(const ThermalZone& thermalZone) {
    bool result = setPointer(OS_Sizing_ZoneFields::ZoneorZoneListName, thermalZone.handle());
    return result;
  }

  bool SizingZone_Impl::setZoneCoolingDesignSupplyAirTemperature(double zoneCoolingDesignSupplyAirTemperature) {
    bool result = setDouble(OS_Sizing_ZoneFields::ZoneCoolingDesignSupplyAirTemperature, zoneCoolingDesignSupplyAirTemperature);
    OS_ASSERT(result);
    return result;
  }

  bool SizingZone_Impl::setZoneCoolingDesignSupplyAirTemperatureDifference(double value) {
    bool result = setDouble(OS_Sizing_ZoneFields::ZoneCoolingDesignSupplyAirTemperatureDifference, value);
    OS_ASSERT(result);
    return result;
  }

  bool SizingZone_Impl::setZoneHeatingDesignSupplyAirTemperature(double zoneHeatingDesignSupplyAirTemperature) {
    bool result = setDouble(OS_Sizing_ZoneFields::ZoneHeatingDesignSupplyAirTemperature, zoneHeatingDesignSupplyAirTemperature);
    OS_ASSERT(result);
    return result;
  }

  bool SizingZone_Impl::setZoneHeatingDesignSupplyAirTemperatureDifference(double value) {
    bool result = setDouble(OS_Sizing_ZoneFields::ZoneHeatingDesignSupplyAirTemperatureDifference, value);
    OS_ASSERT(result);
    return result;
  }

  bool SizingZone_Impl::setZoneCoolingDesignSupplyAirHumidityRatio(double zoneCoolingDesignSupplyAirHumidityRatio) {
    bool result = setDouble(OS_Sizing_ZoneFields::ZoneCoolingDesignSupplyAirHumidityRatio, zoneCoolingDesignSupplyAirHumidityRatio);
    return result;
  }

  bool SizingZone_Impl::setZoneHeatingDesignSupplyAirHumidityRatio(double zoneHeatingDesignSupplyAirHumidityRatio) {
    bool result = setDouble(OS_Sizing_ZoneFields::ZoneHeatingDesignSupplyAirHumidityRatio, zoneHeatingDesignSupplyAirHumidityRatio);
    return result;
  }

  // Passing an empty optional resets the field instead of writing a value.
  bool SizingZone_Impl::setZoneHeatingSizingFactor(boost::optional<double> zoneHeatingSizingFactor) {
    bool result(false);
    if (zoneHeatingSizingFactor) {
      result = setDouble(OS_Sizing_ZoneFields::ZoneHeatingSizingFactor, zoneHeatingSizingFactor.get());
    } else {
      resetZoneHeatingSizingFactor();
      result = true;
    }
    return result;
  }

  void SizingZone_Impl::resetZoneHeatingSizingFactor() {
    bool result = setString(OS_Sizing_ZoneFields::ZoneHeatingSizingFactor, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setZoneCoolingSizingFactor(boost::optional<double> zoneCoolingSizingFactor) {
    bool result(false);
    if (zoneCoolingSizingFactor) {
      result = setDouble(OS_Sizing_ZoneFields::ZoneCoolingSizingFactor, zoneCoolingSizingFactor.get());
    } else {
      resetZoneCoolingSizingFactor();
      result = true;
    }
    return result;
  }

  void SizingZone_Impl::resetZoneCoolingSizingFactor() {
    bool result = setString(OS_Sizing_ZoneFields::ZoneCoolingSizingFactor, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setCoolingDesignAirFlowMethod(const std::string& coolingDesignAirFlowMethod) {
    bool result = setString(OS_Sizing_ZoneFields::CoolingDesignAirFlowMethod, coolingDesignAirFlowMethod);
    return result;
  }

  void SizingZone_Impl::resetCoolingDesignAirFlowMethod() {
    bool result = setString(OS_Sizing_ZoneFields::CoolingDesignAirFlowMethod, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setCoolingDesignAirFlowRate(double coolingDesignAirFlowRate) {
    bool result = setDouble(OS_Sizing_ZoneFields::CoolingDesignAirFlowRate, coolingDesignAirFlowRate);
    return result;
  }

  void SizingZone_Impl::resetCoolingDesignAirFlowRate() {
    bool result = setString(OS_Sizing_ZoneFields::CoolingDesignAirFlowRate, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setCoolingMinimumAirFlowperZoneFloorArea(double coolingMinimumAirFlowperZoneFloorArea) {
    bool result = setDouble(OS_Sizing_ZoneFields::CoolingMinimumAirFlowperZoneFloorArea, coolingMinimumAirFlowperZoneFloorArea);
    return result;
  }

  void SizingZone_Impl::resetCoolingMinimumAirFlowperZoneFloorArea() {
    bool result = setString(OS_Sizing_ZoneFields::CoolingMinimumAirFlowperZoneFloorArea, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setCoolingMinimumAirFlow(double coolingMinimumAirFlow) {
    bool result = setDouble(OS_Sizing_ZoneFields::CoolingMinimumAirFlow, coolingMinimumAirFlow);
    return result;
  }

  void SizingZone_Impl::resetCoolingMinimumAirFlow() {
    bool result = setString(OS_Sizing_ZoneFields::CoolingMinimumAirFlow, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setCoolingMinimumAirFlowFraction(double coolingMinimumAirFlowFraction) {
    bool result = setDouble(OS_Sizing_ZoneFields::CoolingMinimumAirFlowFraction, coolingMinimumAirFlowFraction);
    return result;
  }

  void SizingZone_Impl::resetCoolingMinimumAirFlowFraction() {
    bool result = setString(OS_Sizing_ZoneFields::CoolingMinimumAirFlowFraction, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setHeatingDesignAirFlowMethod(const std::string& heatingDesignAirFlowMethod) {
    bool result = setString(OS_Sizing_ZoneFields::HeatingDesignAirFlowMethod, heatingDesignAirFlowMethod);
    return result;
  }

  void SizingZone_Impl::resetHeatingDesignAirFlowMethod() {
    bool result = setString(OS_Sizing_ZoneFields::HeatingDesignAirFlowMethod, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setHeatingDesignAirFlowRate(double heatingDesignAirFlowRate) {
    bool result = setDouble(OS_Sizing_ZoneFields::HeatingDesignAirFlowRate, heatingDesignAirFlowRate);
    return result;
  }

  void SizingZone_Impl::resetHeatingDesignAirFlowRate() {
    bool result = setString(OS_Sizing_ZoneFields::HeatingDesignAirFlowRate, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setHeatingMaximumAirFlowperZoneFloorArea(double heatingMaximumAirFlowperZoneFloorArea) {
    bool result = setDouble(OS_Sizing_ZoneFields::HeatingMaximumAirFlowperZoneFloorArea, heatingMaximumAirFlowperZoneFloorArea);
    return result;
  }

  void SizingZone_Impl::resetHeatingMaximumAirFlowperZoneFloorArea() {
    bool result = setString(OS_Sizing_ZoneFields::HeatingMaximumAirFlowperZoneFloorArea, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setHeatingMaximumAirFlow(double heatingMaximumAirFlow) {
    bool result = setDouble(OS_Sizing_ZoneFields::HeatingMaximumAirFlow, heatingMaximumAirFlow);
    return result;
  }

  void SizingZone_Impl::resetHeatingMaximumAirFlow() {
    bool result = setString(OS_Sizing_ZoneFields::HeatingMaximumAirFlow, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setHeatingMaximumAirFlowFraction(double heatingMaximumAirFlowFraction) {
    bool result = setDouble(OS_Sizing_ZoneFields::HeatingMaximumAirFlowFraction, heatingMaximumAirFlowFraction);
    return result;
  }

  void SizingZone_Impl::resetHeatingMaximumAirFlowFraction() {
    bool result = setString(OS_Sizing_ZoneFields::HeatingMaximumAirFlowFraction, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setDesignZoneAirDistributionEffectivenessinCoolingMode(double designZoneAirDistributionEffectivenessinCoolingMode) {
    bool result =
      setDouble(OS_Sizing_ZoneFields::DesignZoneAirDistributionEffectivenessinCoolingMode, designZoneAirDistributionEffectivenessinCoolingMode);
    return result;
  }

  void SizingZone_Impl::resetDesignZoneAirDistributionEffectivenessinCoolingMode() {
    bool result = setString(OS_Sizing_ZoneFields::DesignZoneAirDistributionEffectivenessinCoolingMode, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setDesignZoneAirDistributionEffectivenessinHeatingMode(double designZoneAirDistributionEffectivenessinHeatingMode) {
    bool result =
      setDouble(OS_Sizing_ZoneFields::DesignZoneAirDistributionEffectivenessinHeatingMode, designZoneAirDistributionEffectivenessinHeatingMode);
    return result;
  }

  void SizingZone_Impl::resetDesignZoneAirDistributionEffectivenessinHeatingMode() {
    bool result = setString(OS_Sizing_ZoneFields::DesignZoneAirDistributionEffectivenessinHeatingMode, "");
    OS_ASSERT(result);
  }

  bool SizingZone_Impl::setDesignZoneSecondaryRecirculationFraction(double designZoneSecondaryRecirculationFraction) {
    bool result = setDouble(OS_Sizing_ZoneFields::DesignZoneSecondaryRecirculationFraction, designZoneSecondaryRecirculationFraction);
    return result;
  }

  void SizingZone_Impl::resetDesignZoneSecondaryRecirculationFraction() {
    bool result = setString(OS_Sizing_ZoneFields::DesignZoneSecondaryRecirculationFraction, "");
    OS_ASSERT(result);
  }
bool SizingZone_Impl::setDesignMinimumZoneVentilationEfficiency(double designMinimumZoneVentilationEfficiency) {
bool result = setDouble(OS_Sizing_ZoneFields::DesignMinimumZoneVentilationEfficiency, designMinimumZoneVentilationEfficiency);
return result;
}
void SizingZone_Impl::resetDesignMinimumZoneVentilationEfficiency() {
bool result = setString(OS_Sizing_ZoneFields::DesignMinimumZoneVentilationEfficiency, "");
OS_ASSERT(result);
}
  // Raw (possibly unset) reference to the parent ThermalZone field.
  boost::optional<ThermalZone> SizingZone_Impl::optionalThermalZone() const {
    return getObject<ModelObject>().getModelObjectTarget<ThermalZone>(OS_Sizing_ZoneFields::ZoneorZoneListName);
  }

  // Delegate the choice-field key lists to the static helpers on the public class.
  std::vector<std::string> SizingZone_Impl::coolingDesignAirFlowMethodValues() const {
    return SizingZone::coolingDesignAirFlowMethodValues();
  }

  std::vector<std::string> SizingZone_Impl::heatingDesignAirFlowMethodValues() const {
    return SizingZone::heatingDesignAirFlowMethodValues();
  }

  // Type-erased accessor: returns the parent zone wrapped as a generic ModelObject.
  boost::optional<ModelObject> SizingZone_Impl::thermalZoneAsModelObject() const {
    OptionalModelObject result = thermalZone();
    return result;
  }
bool SizingZone_Impl::setThermalZoneAsModelObject(const boost::optional<ModelObject>& modelObject) {
if (modelObject) {
OptionalThermalZone intermediate = modelObject->optionalCast<ThermalZone>();
if (intermediate) {
ThermalZone thermalZone(*intermediate);
return setThermalZone(thermalZone);
}
}
return false;
}
  // Choice-field setters for the supply-air-temperature input method.
  // setString's result is forwarded — presumably it rejects values that are
  // not among the IDD choice keys; verify against setString's contract.
  bool SizingZone_Impl::setZoneCoolingDesignSupplyAirTemperatureInputMethod(const std::string& value) {
    return setString(OS_Sizing_ZoneFields::ZoneCoolingDesignSupplyAirTemperatureInputMethod, value);
  }

  bool SizingZone_Impl::setZoneHeatingDesignSupplyAirTemperatureInputMethod(const std::string& value) {
    return setString(OS_Sizing_ZoneFields::ZoneHeatingDesignSupplyAirTemperatureInputMethod, value);
  }
bool SizingZone_Impl::setAccountforDedicatedOutdoorAirSystem(bool accountforDedicatedOutdoorAirSystem) {
return setBooleanFieldValue(OS_Sizing_ZoneFields::AccountforDedicatedOutdoorAirSystem, accountforDedicatedOutdoorAirSystem);
;
}
  // Choice-field setter for the DOAS control strategy (e.g. "NeutralSupplyAir",
  // seeded by the constructor).  NOTE(review): the string is taken by value to
  // match the declaration in the header; a const reference would avoid a copy
  // but would change the declared signature.
  bool SizingZone_Impl::setDedicatedOutdoorAirSystemControlStrategy(std::string dedicatedOutdoorAirSystemControlStrategy) {
    bool result = setString(OS_Sizing_ZoneFields::DedicatedOutdoorAirSystemControlStrategy, dedicatedOutdoorAirSystemControlStrategy);
    return result;
  }
  // Sets the DOAS design low setpoint temperature.
  // NOTE(review): passing an empty optional leaves the field untouched but
  // trips OS_ASSERT(result) with result == false — callers that want to clear
  // the field are apparently expected to use the autosize...() method instead.
  bool SizingZone_Impl::setDedicatedOutdoorAirLowSetpointTemperatureforDesign(
    boost::optional<double> dedicatedOutdoorAirLowSetpointTemperatureforDesign) {
    bool result(false);
    if (dedicatedOutdoorAirLowSetpointTemperatureforDesign) {
      result = setDouble(OS_Sizing_ZoneFields::DedicatedOutdoorAirLowSetpointTemperatureforDesign,
                         dedicatedOutdoorAirLowSetpointTemperatureforDesign.get());
    }
    OS_ASSERT(result);
    return result;
  }

  // Marks the low setpoint field "autosize" so EnergyPlus computes it.
  void SizingZone_Impl::autosizeDedicatedOutdoorAirLowSetpointTemperatureforDesign() {
    bool result = setString(OS_Sizing_ZoneFields::DedicatedOutdoorAirLowSetpointTemperatureforDesign, "autosize");
    OS_ASSERT(result);
  }

  // Sets the DOAS design high setpoint temperature.
  // Same caveat as the Low setter: an empty optional triggers the assert.
  bool SizingZone_Impl::setDedicatedOutdoorAirHighSetpointTemperatureforDesign(
    boost::optional<double> dedicatedOutdoorAirHighSetpointTemperatureforDesign) {
    bool result(false);
    if (dedicatedOutdoorAirHighSetpointTemperatureforDesign) {
      result = setDouble(OS_Sizing_ZoneFields::DedicatedOutdoorAirHighSetpointTemperatureforDesign,
                         dedicatedOutdoorAirHighSetpointTemperatureforDesign.get());
    }
    OS_ASSERT(result);
    return result;
  }

  // Marks the high setpoint field "autosize" so EnergyPlus computes it.
  void SizingZone_Impl::autosizeDedicatedOutdoorAirHighSetpointTemperatureforDesign() {
    bool result = setString(OS_Sizing_ZoneFields::DedicatedOutdoorAirHighSetpointTemperatureforDesign, "autosize");
    OS_ASSERT(result);
  }
boost::optional<double> SizingZone_Impl::autosizedDedicatedOutdoorAirLowSetpointTemperatureforDesign() const {
boost::optional<double> result;
std::string setpointType = "Low";
std::string setpointParam = "DOAS Design " + setpointType + " Setpoint Temperature {C}";
// Get the parent ThermalZone
ThermalZone parZone = thermalZone();
// Get the name of the thermal zone
if (!parZone.name()) {
LOG(Warn, "This object's parent ThermalZone does not have a name, cannot retrieve the autosized Dedicated Outdoor Air" + setpointType
+ " Setpoint Temperature.");
return result;
}
// Get the object name and transform to the way it is recorded
// in the sql file
std::string sqlName = parZone.name().get();
boost::to_upper(sqlName);
// Check that the model has a sql file
if (!model().sqlFile()) {
LOG(Warn, "This model has no sql file, cannot retrieve the autosized Dedicated Outdoor Air" + setpointType + " Setpoint Temperature.");
return result;
}
// Query the Intialization Summary -> Zone Sizing DOAS Inputs Information table to get
// the row names that contains information for this component.
std::string rowsQuery = R"(
SELECT RowName FROM TabularDataWithStrings
WHERE ReportName = 'Initialization Summary'
AND ReportForString = 'Entire Facility'
AND TableName = 'Zone Sizing DOAS Inputs'
AND Value = ?;)";
boost::optional<std::vector<std::string>> rowNames = model().sqlFile().get().execAndReturnVectorOfString(rowsQuery,
// Bind args
sqlName);
// Warn if the query failed
if (!rowNames) {
LOG(Debug, "Could not find a component called '" + sqlName + "' in any rows of the Initialization Summary Zone Sizing DOAS Inputs table.");
return result;
}
// Query each row of the Intialization Summary -> Zone Sizing DOAS Inputs table
// that contains this component to get the desired value.
for (std::string rowName : rowNames.get()) {
std::string valQuery = R"(
SELECT Value FROM TabularDataWithStrings
WHERE ReportName = 'Initialization Summary'
AND ReportForString = 'Entire Facility'
AND TableName = 'Zone Sizing DOAS Inputs'
AND RowName = ?
AND ColumnName = ?;)";
boost::optional<double> val = model().sqlFile().get().execAndReturnFirstDouble(valQuery, rowName, setpointParam);
// Check if the query succeeded
if (val) {
result = val.get();
break;
}
}
if (!result) {
LOG(Debug,
"The autosized value query for Dedicated Outdoor Air " + setpointType + " Setpoint Temperature for " + sqlName + " returned no value.");
}
return result;
}
boost::optional<double> SizingZone_Impl::autosizedDedicatedOutdoorAirHighSetpointTemperatureforDesign() const {
boost::optional<double> result;
std::string setpointType = "High";
std::string setpointParam = "DOAS Design " + setpointType + " Setpoint Temperature {C}";
// Get the parent ThermalZone
ThermalZone parZone = thermalZone();
// Get the name of the thermal zone
if (!parZone.name()) {
LOG(Warn, "This object's parent ThermalZone does not have a name, cannot retrieve the autosized Dedicated Outdoor Air" + setpointType
+ " Setpoint Temperature.");
return result;
}
// Get the object name and transform to the way it is recorded
// in the sql file
std::string sqlName = parZone.name().get();
boost::to_upper(sqlName);
// Check that the model has a sql file
if (!model().sqlFile()) {
LOG(Warn, "This model has no sql file, cannot retrieve the autosized Dedicated Outdoor Air" + setpointType + " Setpoint Temperature.");
return result;
}
// Query the Intialization Summary -> Zone Sizing DOAS Inputs Information table to get
// the row names that contains information for this component.
std::string rowsQuery = R"(
SELECT RowName FROM TabularDataWithStrings
WHERE ReportName = 'Initialization Summary'
AND ReportForString = 'Entire Facility'
AND TableName = 'Zone Sizing DOAS Inputs'
AND Value = ?;)";
boost::optional<std::vector<std::string>> rowNames = model().sqlFile().get().execAndReturnVectorOfString(rowsQuery,
// Bind args
sqlName);
// Warn if the query failed
if (!rowNames) {
LOG(Debug, "Could not find a component called '" + sqlName + "' in any rows of the Initialization Summary Zone Sizing DOAS Inputs table.");
return result;
}
// Query each row of the Intialization Summary -> Zone Sizing DOAS Inputs table
// that contains this component to get the desired value.
for (std::string rowName : rowNames.get()) {
std::string valQuery = R"(
SELECT Value FROM TabularDataWithStrings
WHERE ReportName = 'Initialization Summary'
AND ReportForString = 'Entire Facility'
AND TableName = 'Zone Sizing DOAS Inputs'
AND RowName = ?
AND ColumnName = ?;)";
boost::optional<double> val = model().sqlFile().get().execAndReturnFirstDouble(valQuery, rowName, setpointParam);
// Check if the query succeeded
if (val) {
result = val.get();
break;
}
}
if (!result) {
LOG(Debug,
"The autosized value query for Dedicated Outdoor Air " + setpointType + " Setpoint Temperature for " + sqlName + " returned no value.");
}
return result;
}
  // Request autosizing for every autosizable field on this object
  // (currently only the two DOAS design setpoint temperatures).
  void SizingZone_Impl::autosize() {
    autosizeDedicatedOutdoorAirLowSetpointTemperatureforDesign();
    autosizeDedicatedOutdoorAirHighSetpointTemperatureforDesign();
  }

  // Replace "autosize" markers with the hard values reported in the sql file;
  // a field is left autosized when no sized value could be retrieved.
  void SizingZone_Impl::applySizingValues() {
    boost::optional<double> val;
    val = autosizedDedicatedOutdoorAirLowSetpointTemperatureforDesign();
    if (val) {
      setDedicatedOutdoorAirLowSetpointTemperatureforDesign(val.get());
    }

    val = autosizedDedicatedOutdoorAirHighSetpointTemperatureforDesign();
    if (val) {
      setDedicatedOutdoorAirHighSetpointTemperatureforDesign(val.get());
    }
  }
  // EMS actuators exposed for the Sizing:Zone object — presumably these must
  // match the actuator names EnergyPlus registers; verify against the
  // EnergyPlus EMS Application Guide before editing.
  std::vector<EMSActuatorNames> SizingZone_Impl::emsActuatorNames() const {
    std::vector<EMSActuatorNames> actuators{{"Sizing:Zone", "Zone Design Heating Air Mass Flow Rate"},
                                            {"Sizing:Zone", "Zone Design Cooling Air Mass Flow Rate"},
                                            {"Sizing:Zone", "Zone Design Heating Load"},
                                            {"Sizing:Zone", "Zone Design Cooling Load"},
                                            {"Sizing:Zone", "Zone Design Heating Vol Flow"},
                                            {"Sizing:Zone", "Zone Design Cooling Vol Flow"}};
    return actuators;
  }

  // EMS internal variables readable for this object (same caveat as above).
  std::vector<std::string> SizingZone_Impl::emsInternalVariableNames() const {
    std::vector<std::string> types{"Final Zone Design Heating Air Mass Flow Rate",
                                   "Intermediate Zone Design Heating Air Mass Flow Rate",
                                   "Final Zone Design Cooling Air Mass Flow Rate",
                                   "Intermediate Zone Design Cooling Air Mass Flow Rate",
                                   "Final Zone Design Heating Load",
                                   "Intermediate Zone Design Heating Load",
                                   "Final Zone Design Cooling Load",
                                   "Intermediate Zone Design Cooling Load",
                                   "Final Zone Design Heating Air Density",
                                   "Intermediate Zone Design Heating Air Density",
                                   "Final Zone Design Cooling Air Density",
                                   "Intermediate Zone Design Cooling Air Density",
                                   "Final Zone Design Heating Volume Flow",
                                   "Intermediate Zone Design Heating Volume Flow",
                                   "Final Zone Design Cooling Volume Flow",
                                   "Intermediate Zone Design Cooling Volume Flow",
                                   "Zone Outdoor Air Design Volume Flow Rate"};
    return types;
  }
} // namespace detail
  // Construct a Sizing:Zone object attached to the given ThermalZone and seed
  // every major field with a default.  NOTE(review): the numeric defaults
  // (14 C cooling / 40 C heating supply temperature, 11.11 C temperature
  // differences, 0.0085 / 0.0080 kg-water/kg-air humidity ratios) look like
  // the customary EnergyPlus sizing defaults — confirm against the IDD before
  // changing.  Setter return values are intentionally ignored here: these are
  // known-valid values.
  SizingZone::SizingZone(const Model& model, const ThermalZone& thermalZone) : ModelObject(SizingZone::iddObjectType(), model) {
    OS_ASSERT(getImpl<detail::SizingZone_Impl>());

    setThermalZone(thermalZone);
    setZoneCoolingDesignSupplyAirTemperatureInputMethod("SupplyAirTemperature");
    setZoneCoolingDesignSupplyAirTemperatureDifference(11.11);
    setZoneCoolingDesignSupplyAirTemperature(14.0);
    setZoneHeatingDesignSupplyAirTemperatureInputMethod("SupplyAirTemperature");
    setZoneHeatingDesignSupplyAirTemperatureDifference(11.11);
    setZoneHeatingDesignSupplyAirTemperature(40.0);
    setZoneCoolingDesignSupplyAirHumidityRatio(0.0085);
    setZoneHeatingDesignSupplyAirHumidityRatio(0.0080);
    setCoolingDesignAirFlowMethod("DesignDay");
    setHeatingDesignAirFlowMethod("DesignDay");
    setAccountforDedicatedOutdoorAirSystem(false);
    setDedicatedOutdoorAirSystemControlStrategy("NeutralSupplyAir");
    // DOAS setpoints default to autosized rather than hard values.
    autosizeDedicatedOutdoorAirLowSetpointTemperatureforDesign();
    autosizeDedicatedOutdoorAirHighSetpointTemperatureforDesign();
  }
  // IDD type tag for OS:Sizing:Zone objects.
  IddObjectType SizingZone::iddObjectType() {
    return IddObjectType(IddObjectType::OS_Sizing_Zone);
  }

  // The valid choice keys for each choice field, pulled from the IDD so the
  // lists stay in sync with the schema rather than being hard-coded here.
  std::vector<std::string> SizingZone::coolingDesignAirFlowMethodValues() {
    return getIddKeyNames(IddFactory::instance().getObject(iddObjectType()).get(), OS_Sizing_ZoneFields::CoolingDesignAirFlowMethod);
  }

  std::vector<std::string> SizingZone::heatingDesignAirFlowMethodValues() {
    return getIddKeyNames(IddFactory::instance().getObject(iddObjectType()).get(), OS_Sizing_ZoneFields::HeatingDesignAirFlowMethod);
  }

  std::vector<std::string> SizingZone::zoneCoolingDesignSupplyAirTemperatureInputMethodValues() {
    return getIddKeyNames(IddFactory::instance().getObject(iddObjectType()).get(),
                          OS_Sizing_ZoneFields::ZoneCoolingDesignSupplyAirTemperatureInputMethod);
  }

  std::vector<std::string> SizingZone::zoneHeatingDesignSupplyAirTemperatureInputMethodValues() {
    return getIddKeyNames(IddFactory::instance().getObject(iddObjectType()).get(),
                          OS_Sizing_ZoneFields::ZoneHeatingDesignSupplyAirTemperatureInputMethod);
  }
ThermalZone SizingZone::thermalZone() const {
return getImpl<detail::SizingZone_Impl>()->thermalZone();
}
std::string SizingZone::zoneCoolingDesignSupplyAirTemperatureInputMethod() const {
return getImpl<detail::SizingZone_Impl>()->zoneCoolingDesignSupplyAirTemperatureInputMethod();
}
double SizingZone::zoneCoolingDesignSupplyAirTemperature() const {
return getImpl<detail::SizingZone_Impl>()->zoneCoolingDesignSupplyAirTemperature();
}
double SizingZone::zoneCoolingDesignSupplyAirTemperatureDifference() const {
return getImpl<detail::SizingZone_Impl>()->zoneCoolingDesignSupplyAirTemperatureDifference();
}
std::string SizingZone::zoneHeatingDesignSupplyAirTemperatureInputMethod() const {
return getImpl<detail::SizingZone_Impl>()->zoneHeatingDesignSupplyAirTemperatureInputMethod();
}
double SizingZone::zoneHeatingDesignSupplyAirTemperature() const {
return getImpl<detail::SizingZone_Impl>()->zoneHeatingDesignSupplyAirTemperature();
}
double SizingZone::zoneHeatingDesignSupplyAirTemperatureDifference() const {
return getImpl<detail::SizingZone_Impl>()->zoneHeatingDesignSupplyAirTemperatureDifference();
}
double SizingZone::zoneCoolingDesignSupplyAirHumidityRatio() const {
return getImpl<detail::SizingZone_Impl>()->zoneCoolingDesignSupplyAirHumidityRatio();
}
double SizingZone::zoneHeatingDesignSupplyAirHumidityRatio() const {
return getImpl<detail::SizingZone_Impl>()->zoneHeatingDesignSupplyAirHumidityRatio();
}
boost::optional<double> SizingZone::zoneHeatingSizingFactor() const {
return getImpl<detail::SizingZone_Impl>()->zoneHeatingSizingFactor();
}
boost::optional<double> SizingZone::zoneCoolingSizingFactor() const {
return getImpl<detail::SizingZone_Impl>()->zoneCoolingSizingFactor();
}
std::string SizingZone::coolingDesignAirFlowMethod() const {
return getImpl<detail::SizingZone_Impl>()->coolingDesignAirFlowMethod();
}
bool SizingZone::isCoolingDesignAirFlowMethodDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isCoolingDesignAirFlowMethodDefaulted();
}
double SizingZone::coolingDesignAirFlowRate() const {
return getImpl<detail::SizingZone_Impl>()->coolingDesignAirFlowRate();
}
bool SizingZone::isCoolingDesignAirFlowRateDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isCoolingDesignAirFlowRateDefaulted();
}
double SizingZone::coolingMinimumAirFlowperZoneFloorArea() const {
return getImpl<detail::SizingZone_Impl>()->coolingMinimumAirFlowperZoneFloorArea();
}
bool SizingZone::isCoolingMinimumAirFlowperZoneFloorAreaDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isCoolingMinimumAirFlowperZoneFloorAreaDefaulted();
}
double SizingZone::coolingMinimumAirFlow() const {
return getImpl<detail::SizingZone_Impl>()->coolingMinimumAirFlow();
}
bool SizingZone::isCoolingMinimumAirFlowDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isCoolingMinimumAirFlowDefaulted();
}
double SizingZone::coolingMinimumAirFlowFraction() const {
return getImpl<detail::SizingZone_Impl>()->coolingMinimumAirFlowFraction();
}
bool SizingZone::isCoolingMinimumAirFlowFractionDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isCoolingMinimumAirFlowFractionDefaulted();
}
std::string SizingZone::heatingDesignAirFlowMethod() const {
return getImpl<detail::SizingZone_Impl>()->heatingDesignAirFlowMethod();
}
bool SizingZone::isHeatingDesignAirFlowMethodDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isHeatingDesignAirFlowMethodDefaulted();
}
double SizingZone::heatingDesignAirFlowRate() const {
return getImpl<detail::SizingZone_Impl>()->heatingDesignAirFlowRate();
}
bool SizingZone::isHeatingDesignAirFlowRateDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isHeatingDesignAirFlowRateDefaulted();
}
double SizingZone::heatingMaximumAirFlowperZoneFloorArea() const {
return getImpl<detail::SizingZone_Impl>()->heatingMaximumAirFlowperZoneFloorArea();
}
bool SizingZone::isHeatingMaximumAirFlowperZoneFloorAreaDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isHeatingMaximumAirFlowperZoneFloorAreaDefaulted();
}
double SizingZone::heatingMaximumAirFlow() const {
return getImpl<detail::SizingZone_Impl>()->heatingMaximumAirFlow();
}
bool SizingZone::isHeatingMaximumAirFlowDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isHeatingMaximumAirFlowDefaulted();
}
double SizingZone::heatingMaximumAirFlowFraction() const {
return getImpl<detail::SizingZone_Impl>()->heatingMaximumAirFlowFraction();
}
bool SizingZone::isHeatingMaximumAirFlowFractionDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isHeatingMaximumAirFlowFractionDefaulted();
}
double SizingZone::designZoneAirDistributionEffectivenessinCoolingMode() const {
return getImpl<detail::SizingZone_Impl>()->designZoneAirDistributionEffectivenessinCoolingMode();
}
bool SizingZone::isDesignZoneAirDistributionEffectivenessinCoolingModeDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isDesignZoneAirDistributionEffectivenessinCoolingModeDefaulted();
}
double SizingZone::designZoneAirDistributionEffectivenessinHeatingMode() const {
return getImpl<detail::SizingZone_Impl>()->designZoneAirDistributionEffectivenessinHeatingMode();
}
bool SizingZone::isDesignZoneAirDistributionEffectivenessinHeatingModeDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isDesignZoneAirDistributionEffectivenessinHeatingModeDefaulted();
}
double SizingZone::designZoneSecondaryRecirculationFraction() const {
return getImpl<detail::SizingZone_Impl>()->designZoneSecondaryRecirculationFraction();
}
bool SizingZone::isDesignZoneSecondaryRecirculationFractionDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isDesignZoneSecondaryRecirculationFractionDefaulted();
}
double SizingZone::designMinimumZoneVentilationEfficiency() const {
return getImpl<detail::SizingZone_Impl>()->designMinimumZoneVentilationEfficiency();
}
bool SizingZone::isDesignMinimumZoneVentilationEfficiencyDefaulted() const {
return getImpl<detail::SizingZone_Impl>()->isDesignMinimumZoneVentilationEfficiencyDefaulted();
}
bool SizingZone::accountforDedicatedOutdoorAirSystem() const {
return getImpl<detail::SizingZone_Impl>()->accountforDedicatedOutdoorAirSystem();
}
std::string SizingZone::dedicatedOutdoorAirSystemControlStrategy() const {
return getImpl<detail::SizingZone_Impl>()->dedicatedOutdoorAirSystemControlStrategy();
}
boost::optional<double> SizingZone::dedicatedOutdoorAirLowSetpointTemperatureforDesign() const {
return getImpl<detail::SizingZone_Impl>()->dedicatedOutdoorAirLowSetpointTemperatureforDesign();
}
bool SizingZone::isDedicatedOutdoorAirLowSetpointTemperatureforDesignAutosized() const {
return getImpl<detail::SizingZone_Impl>()->isDedicatedOutdoorAirLowSetpointTemperatureforDesignAutosized();
}
boost::optional<double> SizingZone::dedicatedOutdoorAirHighSetpointTemperatureforDesign() const {
return getImpl<detail::SizingZone_Impl>()->dedicatedOutdoorAirHighSetpointTemperatureforDesign();
}
bool SizingZone::isDedicatedOutdoorAirHighSetpointTemperatureforDesignAutosized() const {
return getImpl<detail::SizingZone_Impl>()->isDedicatedOutdoorAirHighSetpointTemperatureforDesignAutosized();
}
bool SizingZone::setThermalZone(const ThermalZone& thermalZone) {
return getImpl<detail::SizingZone_Impl>()->setThermalZone(thermalZone);
}
bool SizingZone::setZoneCoolingDesignSupplyAirTemperature(double zoneCoolingDesignSupplyAirTemperature) {
return getImpl<detail::SizingZone_Impl>()->setZoneCoolingDesignSupplyAirTemperature(zoneCoolingDesignSupplyAirTemperature);
}
bool SizingZone::setZoneCoolingDesignSupplyAirTemperatureDifference(double value) {
return getImpl<detail::SizingZone_Impl>()->setZoneCoolingDesignSupplyAirTemperatureDifference(value);
}
bool SizingZone::setZoneHeatingDesignSupplyAirTemperature(double zoneHeatingDesignSupplyAirTemperature) {
return getImpl<detail::SizingZone_Impl>()->setZoneHeatingDesignSupplyAirTemperature(zoneHeatingDesignSupplyAirTemperature);
}
bool SizingZone::setZoneHeatingDesignSupplyAirTemperatureDifference(double value) {
return getImpl<detail::SizingZone_Impl>()->setZoneHeatingDesignSupplyAirTemperatureDifference(value);
}
bool SizingZone::setZoneCoolingDesignSupplyAirHumidityRatio(double zoneCoolingDesignSupplyAirHumidityRatio) {
return getImpl<detail::SizingZone_Impl>()->setZoneCoolingDesignSupplyAirHumidityRatio(zoneCoolingDesignSupplyAirHumidityRatio);
}
bool SizingZone::setZoneHeatingDesignSupplyAirHumidityRatio(double zoneHeatingDesignSupplyAirHumidityRatio) {
return getImpl<detail::SizingZone_Impl>()->setZoneHeatingDesignSupplyAirHumidityRatio(zoneHeatingDesignSupplyAirHumidityRatio);
}
bool SizingZone::setZoneHeatingSizingFactor(double zoneHeatingSizingFactor) {
return getImpl<detail::SizingZone_Impl>()->setZoneHeatingSizingFactor(zoneHeatingSizingFactor);
}
void SizingZone::resetZoneHeatingSizingFactor() {
getImpl<detail::SizingZone_Impl>()->resetZoneHeatingSizingFactor();
}
bool SizingZone::setZoneCoolingSizingFactor(double zoneCoolingSizingFactor) {
return getImpl<detail::SizingZone_Impl>()->setZoneCoolingSizingFactor(zoneCoolingSizingFactor);
}
void SizingZone::resetZoneCoolingSizingFactor() {
getImpl<detail::SizingZone_Impl>()->resetZoneCoolingSizingFactor();
}
bool SizingZone::setCoolingDesignAirFlowMethod(const std::string& coolingDesignAirFlowMethod) {
return getImpl<detail::SizingZone_Impl>()->setCoolingDesignAirFlowMethod(coolingDesignAirFlowMethod);
}
void SizingZone::resetCoolingDesignAirFlowMethod() {
getImpl<detail::SizingZone_Impl>()->resetCoolingDesignAirFlowMethod();
}
bool SizingZone::setCoolingDesignAirFlowRate(double coolingDesignAirFlowRate) {
return getImpl<detail::SizingZone_Impl>()->setCoolingDesignAirFlowRate(coolingDesignAirFlowRate);
}
void SizingZone::resetCoolingDesignAirFlowRate() {
getImpl<detail::SizingZone_Impl>()->resetCoolingDesignAirFlowRate();
}
bool SizingZone::setCoolingMinimumAirFlowperZoneFloorArea(double coolingMinimumAirFlowperZoneFloorArea) {
return getImpl<detail::SizingZone_Impl>()->setCoolingMinimumAirFlowperZoneFloorArea(coolingMinimumAirFlowperZoneFloorArea);
}
void SizingZone::resetCoolingMinimumAirFlowperZoneFloorArea() {
getImpl<detail::SizingZone_Impl>()->resetCoolingMinimumAirFlowperZoneFloorArea();
}
bool SizingZone::setCoolingMinimumAirFlow(double coolingMinimumAirFlow) {
return getImpl<detail::SizingZone_Impl>()->setCoolingMinimumAirFlow(coolingMinimumAirFlow);
}
void SizingZone::resetCoolingMinimumAirFlow() {
getImpl<detail::SizingZone_Impl>()->resetCoolingMinimumAirFlow();
}
bool SizingZone::setCoolingMinimumAirFlowFraction(double coolingMinimumAirFlowFraction) {
return getImpl<detail::SizingZone_Impl>()->setCoolingMinimumAirFlowFraction(coolingMinimumAirFlowFraction);
}
void SizingZone::resetCoolingMinimumAirFlowFraction() {
getImpl<detail::SizingZone_Impl>()->resetCoolingMinimumAirFlowFraction();
}
bool SizingZone::setHeatingDesignAirFlowMethod(const std::string& heatingDesignAirFlowMethod) {
return getImpl<detail::SizingZone_Impl>()->setHeatingDesignAirFlowMethod(heatingDesignAirFlowMethod);
}
void SizingZone::resetHeatingDesignAirFlowMethod() {
getImpl<detail::SizingZone_Impl>()->resetHeatingDesignAirFlowMethod();
}
bool SizingZone::setHeatingDesignAirFlowRate(double heatingDesignAirFlowRate) {
return getImpl<detail::SizingZone_Impl>()->setHeatingDesignAirFlowRate(heatingDesignAirFlowRate);
}
void SizingZone::resetHeatingDesignAirFlowRate() {
getImpl<detail::SizingZone_Impl>()->resetHeatingDesignAirFlowRate();
}
bool SizingZone::setHeatingMaximumAirFlowperZoneFloorArea(double heatingMaximumAirFlowperZoneFloorArea) {
return getImpl<detail::SizingZone_Impl>()->setHeatingMaximumAirFlowperZoneFloorArea(heatingMaximumAirFlowperZoneFloorArea);
}
void SizingZone::resetHeatingMaximumAirFlowperZoneFloorArea() {
getImpl<detail::SizingZone_Impl>()->resetHeatingMaximumAirFlowperZoneFloorArea();
}
bool SizingZone::setHeatingMaximumAirFlow(double heatingMaximumAirFlow) {
return getImpl<detail::SizingZone_Impl>()->setHeatingMaximumAirFlow(heatingMaximumAirFlow);
}
void SizingZone::resetHeatingMaximumAirFlow() {
getImpl<detail::SizingZone_Impl>()->resetHeatingMaximumAirFlow();
}
bool SizingZone::setHeatingMaximumAirFlowFraction(double heatingMaximumAirFlowFraction) {
return getImpl<detail::SizingZone_Impl>()->setHeatingMaximumAirFlowFraction(heatingMaximumAirFlowFraction);
}
void SizingZone::resetHeatingMaximumAirFlowFraction() {
getImpl<detail::SizingZone_Impl>()->resetHeatingMaximumAirFlowFraction();
}
bool SizingZone::setDesignZoneAirDistributionEffectivenessinCoolingMode(double designZoneAirDistributionEffectivenessinCoolingMode) {
return getImpl<detail::SizingZone_Impl>()->setDesignZoneAirDistributionEffectivenessinCoolingMode(
designZoneAirDistributionEffectivenessinCoolingMode);
}
void SizingZone::resetDesignZoneAirDistributionEffectivenessinCoolingMode() {
getImpl<detail::SizingZone_Impl>()->resetDesignZoneAirDistributionEffectivenessinCoolingMode();
}
bool SizingZone::setDesignZoneAirDistributionEffectivenessinHeatingMode(double designZoneAirDistributionEffectivenessinHeatingMode) {
return getImpl<detail::SizingZone_Impl>()->setDesignZoneAirDistributionEffectivenessinHeatingMode(
designZoneAirDistributionEffectivenessinHeatingMode);
}
void SizingZone::resetDesignZoneAirDistributionEffectivenessinHeatingMode() {
getImpl<detail::SizingZone_Impl>()->resetDesignZoneAirDistributionEffectivenessinHeatingMode();
}
bool SizingZone::setDesignZoneSecondaryRecirculationFraction(double designZoneSecondaryRecirculationFraction) {
return getImpl<detail::SizingZone_Impl>()->setDesignZoneSecondaryRecirculationFraction(designZoneSecondaryRecirculationFraction);
}
void SizingZone::resetDesignZoneSecondaryRecirculationFraction() {
getImpl<detail::SizingZone_Impl>()->resetDesignZoneSecondaryRecirculationFraction();
}
bool SizingZone::setDesignMinimumZoneVentilationEfficiency(double designMinimumZoneVentilationEfficiency) {
return getImpl<detail::SizingZone_Impl>()->setDesignMinimumZoneVentilationEfficiency(designMinimumZoneVentilationEfficiency);
}
void SizingZone::resetDesignMinimumZoneVentilationEfficiency() {
getImpl<detail::SizingZone_Impl>()->resetDesignMinimumZoneVentilationEfficiency();
}
bool SizingZone::setZoneCoolingDesignSupplyAirTemperatureInputMethod(const std::string& value) {
return getImpl<detail::SizingZone_Impl>()->setZoneCoolingDesignSupplyAirTemperatureInputMethod(value);
}
bool SizingZone::setZoneHeatingDesignSupplyAirTemperatureInputMethod(const std::string& value) {
return getImpl<detail::SizingZone_Impl>()->setZoneHeatingDesignSupplyAirTemperatureInputMethod(value);
}
bool SizingZone::setAccountforDedicatedOutdoorAirSystem(bool accountforDedicatedOutdoorAirSystem) {
return getImpl<detail::SizingZone_Impl>()->setAccountforDedicatedOutdoorAirSystem(accountforDedicatedOutdoorAirSystem);
}
bool SizingZone::setDedicatedOutdoorAirSystemControlStrategy(std::string dedicatedOutdoorAirSystemControlStrategy) {
return getImpl<detail::SizingZone_Impl>()->setDedicatedOutdoorAirSystemControlStrategy(dedicatedOutdoorAirSystemControlStrategy);
}
bool SizingZone::setDedicatedOutdoorAirLowSetpointTemperatureforDesign(double dedicatedOutdoorAirLowSetpointTemperatureforDesign) {
return getImpl<detail::SizingZone_Impl>()->setDedicatedOutdoorAirLowSetpointTemperatureforDesign(
dedicatedOutdoorAirLowSetpointTemperatureforDesign);
}
void SizingZone::autosizeDedicatedOutdoorAirLowSetpointTemperatureforDesign() {
getImpl<detail::SizingZone_Impl>()->autosizeDedicatedOutdoorAirLowSetpointTemperatureforDesign();
}
bool SizingZone::setDedicatedOutdoorAirHighSetpointTemperatureforDesign(double dedicatedOutdoorAirHighSetpointTemperatureforDesign) {
return getImpl<detail::SizingZone_Impl>()->setDedicatedOutdoorAirHighSetpointTemperatureforDesign(
dedicatedOutdoorAirHighSetpointTemperatureforDesign);
}
void SizingZone::autosizeDedicatedOutdoorAirHighSetpointTemperatureforDesign() {
getImpl<detail::SizingZone_Impl>()->autosizeDedicatedOutdoorAirHighSetpointTemperatureforDesign();
}
/// @cond
SizingZone::SizingZone(std::shared_ptr<detail::SizingZone_Impl> impl) : ModelObject(std::move(impl)) {}
/// @endcond
boost::optional<double> SizingZone::autosizedDedicatedOutdoorAirLowSetpointTemperatureforDesign() const {
return getImpl<detail::SizingZone_Impl>()->autosizedDedicatedOutdoorAirLowSetpointTemperatureforDesign();
}
boost::optional<double> SizingZone::autosizedDedicatedOutdoorAirHighSetpointTemperatureforDesign() const {
return getImpl<detail::SizingZone_Impl>()->autosizedDedicatedOutdoorAirHighSetpointTemperatureforDesign();
}
void SizingZone::autosize() {
return getImpl<detail::SizingZone_Impl>()->autosize();
}
void SizingZone::applySizingValues() {
return getImpl<detail::SizingZone_Impl>()->applySizingValues();
}
} // namespace model
} // namespace openstudio
|
<filename>client/components/Post.js
import React, { Component } from 'react';
class Post extends Component {
render() {
return (
<div className="post">
Post
</div>
)
}
};
export default Post;
|
<reponame>pandrey2003/flask-with-db
# The backend for the contact form on contact.html
# For interactive message
from flask import flash
# For secure password typing
from getpass import getpass
# For sending emails
import smtplib, ssl
# By default, we want to decrypt password for <EMAIL>
from app.mod_db.signingup import decrypt_password, encrypted_pwd
def contact_mail(name, email, contents):
    """Send a contact-form submission to the site owner's mailbox via Gmail SMTP.

    Args:
        name: visitor's name as typed into the form.
        email: visitor's reply address (echoed back in the flash message).
        contents: free-text body of the message.

    Side effects: sends an email over implicit SSL and flashes a confirmation
    message, so this must run inside a Flask request context.

    Fixes vs. the previous version:
    - the send logic lived in a ``finally:`` block, so it also ran when
      credential setup died with an unexpected exception, masking the real
      error with a NameError; it now runs only after credentials are resolved;
    - the triple-quoted message embedded the function's indentation inside the
      Subject header and body, producing a malformed RFC 5322 message; the
      message is now built with an explicit header line and blank separator.
    """
    port = 465  # implicit-SSL SMTP port
    smtp_server = "smtp.gmail.com"

    try:
        # key.key is kept out of the repository for security; decrypt the
        # stored password for the project's default Gmail account.
        sender_password = decrypt_password(encrypted_pwd)
    except FileNotFoundError:
        # No key file: fall back to interactive credentials so other
        # developers can still run the app.
        sender_email = input("Enter your gmail: ")
        # Gmail must be configured to allow access for lesser-known apps.
        print("Grant access to apps in your account!")
        # getpass hides the password while it is typed.
        sender_password = getpass()
    else:
        # Key file found: use the default Gmail account.
        sender_email = "<EMAIL>"

    # The form mails the site owner, i.e. the sending account itself.
    receiver_email = sender_email

    # Minimal RFC 5322 message: Subject header first, then a blank line,
    # then the body.
    message = (
        f"Subject: {name} from {email}\n"
        "\n"
        f"Name: {name}\n"
        f"Email: {email}\n"
        f"Contents: {contents}\n"
    )

    context = ssl.create_default_context()
    with smtplib.SMTP_SSL(smtp_server, port, context=context) as server:
        server.login(sender_email, sender_password)
        server.sendmail(sender_email, receiver_email, message)

    # Shown in bold in the template via the "contact" flash category.
    flash(f"If necessary, we may contact you on {email}", "contact")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.