text stringlengths 1 1.05M |
|---|
#!/bin/bash
# This script relies on bash-only features (the ERR trap, `set -o pipefail`,
# and arrays such as RSYNC_PROTECT_TMP_FILES below), so it must run under bash
# rather than a strictly POSIX /bin/sh (e.g. dash on Linux CI hosts).
set -e
set -u
set -o pipefail

# Print a "<script>:<line>: error" diagnostic when any command fails
# (wired up via the ERR trap below).
on_error() {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z "${FRAMEWORKS_FOLDER_PATH+x}" ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
#
# install_framework <path>
#   $1 - framework path; resolved as-is under BUILT_PRODUCTS_DIR, then by
#        basename under BUILT_PRODUCTS_DIR, then as a literal path.
# Syncs the framework into the app's Frameworks folder, strips architectures
# not being built, re-signs the result, and (only for Xcode < 7) also embeds
# the Swift runtime dylibs the binary links against.
install_framework()
{
  # Resolve a readable source location for the framework.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  # NOTE(review): if none of the branches above matched, `source` is unset and
  # the use below fails under `set -u` — presumably unreachable in practice;
  # confirm.
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    # Follow the symlink so rsync copies the real framework directory.
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  # Locate the framework's executable; fall back to a bare binary and resolve
  # a symlinked binary to its target so `file`/`lipo` inspect the real file.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # Extract the @rpath Swift dylib names from the binary's load commands.
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
#
# install_dsym <path-to-.framework.dSYM>
# Syncs the dSYM into DERIVED_FILES_DIR, strips architectures not being built
# from its DWARF binary, then publishes the result to DWARF_DSYM_FOLDER_PATH.
# Silently does nothing when the source is unreadable.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    # Path of the DWARF companion binary inside the copied dSYM bundle.
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 means the binary was
    # processed and shares at least one arch with ARCHS.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
#
# install_bcsymbolmap <path-to-.bcsymbolmap>
#   $1 - bcsymbolmap file; synced into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
  local bcsymbolmap_path="$1"
  local destination="${BUILT_PRODUCTS_DIR}"
  # Escape the inner quotes (as the sibling install_* functions do) so the
  # logged command line matches the command actually executed below; the
  # original echo closed and re-opened its quotes around every filter.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
#
# code_sign_if_enabled <path>
#   $1 - bundle or binary to sign.
# No-op unless an identity is set and signing is both required and allowed.
code_sign_if_enabled() {
  # [[ ... && ... ]] replaces the deprecated/ambiguous `[ ... -a ... ]`, and
  # the `:-` default on CODE_SIGNING_ALLOWED (missing before) keeps the test
  # safe under `set -u` when the variable is not exported by the build.
  if [[ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" && "${CODE_SIGNING_REQUIRED:-}" != "NO" && "${CODE_SIGNING_ALLOWED:-}" != "NO" ]]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Run the signing job in the background; the caller `wait`s at the end.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
#
# strip_invalid_archs <binary>
# Removes (in place, via lipo) every architecture of <binary> that is not in
# the current build's ARCHS. Reports through the global STRIP_BINARY_RETVAL:
# 1 = binary processed (even if nothing needed stripping), 0 = binary shares
# no architecture with ARCHS (warning emitted, binary untouched).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  # (take the list after the last ':' of `lipo -info` and trim whitespace).
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    # Substring match against ARCHS; NOTE(review): this would also match a
    # prefix arch name (e.g. "arm64" vs "arm64e") — confirm acceptable here.
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed each Pod framework for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/FKNetworking/FKNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Nimble/Nimble.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Quick/Quick.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SwiftProtobuf/SwiftProtobuf.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/FKNetworking/FKNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Nimble/Nimble.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Quick/Quick.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SwiftProtobuf/SwiftProtobuf.framework"
fi
# When parallel signing is enabled, codesign jobs were backgrounded by
# code_sign_if_enabled; wait for all of them before the phase finishes.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<filename>src/main/java/com/alipay/api/domain/KoubeiMarketingCampaignCrowdModifyModel.java<gh_stars>0
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
 * Request model for the Koubei merchant marketing-campaign crowd-group
 * modification API (koubei.marketing.campaign.crowd.modify).
 *
 * @author <NAME>
 * @since 1.0, 2018-12-18 10:23:56
 */
public class KoubeiMarketingCampaignCrowdModifyModel extends AlipayObject {

    private static final long serialVersionUID = 3224269958781868141L;

    /**
     * Audience-selection conditions.
     * op: the operator; currently supported values are EQ (equals), GT (greater
     * than), GTEQ (greater or equal), LT (less than), LTEQ (less or equal),
     * NEQ (not equal), LIKE (fuzzy match), IN (within an enumerated set),
     * NOTIN (outside an enumerated set), BETWEEN (range comparison),
     * LEFTDAYS (within N days), RIGHTDAYS (beyond N days), LOCATE (geo-location
     * comparison), LBS (map location data);
     * tagCode: the tag code; for the detailed tag codes see the attachment
     * <a href="https://gw.alipayobjects.com/os/bmw-prod/4b028911-cb1a-40e9-8abe-49cb04c9a379.zip">tag list</a>.
     * value: the tag value.
     */
    @ApiField("conditions")
    private String conditions;

    /**
     * Unique identifier of the crowd group.
     */
    @ApiField("crowd_group_id")
    private String crowdGroupId;

    /**
     * Operator id; must appear together with operator_type. Defaults to the
     * merchant when omitted.
     */
    @ApiField("operator_id")
    private String operatorId;

    /**
     * Operator type; allowed values: MER (external merchant), MER_OPERATOR
     * (external merchant operator), PROVIDER (external service provider),
     * PROVIDER_STAFF (external service-provider staff). Usually omitted;
     * defaults to MER.
     */
    @ApiField("operator_type")
    private String operatorType;

    /**
     * External business serial number.
     */
    @ApiField("out_biz_no")
    private String outBizNo;

    public String getConditions() {
        return this.conditions;
    }
    public void setConditions(String conditions) {
        this.conditions = conditions;
    }

    public String getCrowdGroupId() {
        return this.crowdGroupId;
    }
    public void setCrowdGroupId(String crowdGroupId) {
        this.crowdGroupId = crowdGroupId;
    }

    public String getOperatorId() {
        return this.operatorId;
    }
    public void setOperatorId(String operatorId) {
        this.operatorId = operatorId;
    }

    public String getOperatorType() {
        return this.operatorType;
    }
    public void setOperatorType(String operatorType) {
        this.operatorType = operatorType;
    }

    public String getOutBizNo() {
        return this.outBizNo;
    }
    public void setOutBizNo(String outBizNo) {
        this.outBizNo = outBizNo;
    }

}
|
/**
 * Sorts an array of comparable values in ascending order, in place, using
 * bubble sort. Adds the classic "swapped" early exit: if a full pass performs
 * no swaps the array is already sorted, so already-sorted input finishes in
 * a single O(n) pass instead of always doing O(n^2) comparisons.
 *
 * @param {Array} arr - the array to sort (mutated in place)
 * @returns {Array} the same array, sorted ascending
 */
function bubbleSort(arr) {
  const length = arr.length;
  for (let i = 0; i < length - 1; i++) {
    let swapped = false;
    // After pass i, the last i elements are already in their final positions.
    for (let j = 0; j < length - i - 1; j++) {
      if (arr[j] > arr[j + 1]) {
        // Swap adjacent out-of-order elements.
        const temp = arr[j];
        arr[j] = arr[j + 1];
        arr[j + 1] = temp;
        swapped = true;
      }
    }
    if (!swapped) break; // no swaps this pass => already sorted
  }
  return arr;
}
// Example usage: sorts the literal in place and returns the same array.
bubbleSort([5, 4, 3, 7, 8, 2, 1])
// returns [1, 2, 3, 4, 5, 7, 8]
#!/bin/bash
# Builds the chatbot test image and runs it against the shared source volume.
# -e/-u: abort on the first failure (previously a failed `docker build` still
# let `docker run` execute against a stale image) and on unset variables.
set -euo pipefail

test_directory=chatbot
image_name="open-intent-${test_directory}-test"

echo "Build docker image ${image_name}"
docker build -t "${image_name}" "${test_directory}"

echo "Run docker image ${image_name}"
# Run the test container; --rm cleans it up, and the test sources are mounted
# from the open-intent-source-volume data container.
docker run --rm -i --volumes-from open-intent-source-volume "${image_name}"
|
import { Color } from "@/components/Business/Grade/Grade";
import { useGet } from "@/hooks/useGet";
import { badSmellWordingKeys, DashboardGroup, } from "@/pages/system/systemEvolving/MeasureIndicators/Components/ChartCard";
import { IndicatorLevel } from "@/pages/system/systemEvolving/MeasureIndicators/Components/Group";
import { storage } from "@/store/storage/sessionStorage";
import axios from '../axios';
// One method's line-count record as returned by the code-line API.
export interface MethodLines {
  moduleName: string;
  packageName: string;
  typeName: string;
  methodName: string;
  systemId: number;
  lines: number; // number of code lines in the method
}

// One page of MethodLines results.
interface MethodLinesPagedDataResponse {
  count: number; // total number of matching records across all pages
  currentPageNumber: number;
  data: MethodLines[];
}
// Fetches one page of methods whose line count is above the configured
// threshold for the currently selected system.
export function getOverviewUsingMethods(currentPageNumber: number, numberPerPage: number) {
  const systemId = storage.getSystemId();
  const url = `/api/systems/${systemId}/codeline/methods/above-threshold`;
  const params = { currentPageNumber, numberPerPage };
  return axios<MethodLinesPagedDataResponse>({ url, method: "GET", params });
}
// One bad-smell indicator with its grade and time series for charting.
export interface GroupDataItem {
  type: badSmellWordingKeys;
  level: IndicatorLevel;
  // Time-series points rendered in the chart card.
  graphData: Array<{
    date: string;
    value: number;
  }>;
}

export interface MeasureIndicatorsData {
  // 度量指标 (measure indicators): which dashboard group these items belong to.
  dashboardGroup: keyof typeof DashboardGroup;
  groupData: GroupDataItem[];
}
// Hook: loads the measure-indicator dashboard groups for the current system.
export const useDashboard = () => {
  const endpoint = `/api/systems/${storage.getSystemId()}/dashboard`;
  return useGet<MeasureIndicatorsData[]>(endpoint);
};
// Line/file counts for one language in the system.
interface LineCount {
  lineCount: number;
  fileCount: number;
  // Primitive `string` replaces the `String` wrapper-object type, which is
  // almost never the intended type in TypeScript interfaces.
  language: string;
}

// System-level overview counters shown on the overview page.
interface UseOverviewCount {
  repoCount: number;
  moduleCount: number;
  lineCounts: LineCount[];
  contributorCount: number;
  qualityLevel: keyof typeof Color;
}
// Hook: loads repo/module/line/contributor counts plus the quality level
// for the current system.
export const useOverviewCount = () => {
  const endpoint = `/api/systems/${storage.getSystemId()}/overview/system`;
  return useGet<UseOverviewCount>(endpoint);
};
// Numeric ranking for quality grades; a lower score means a better grade.
export const LEVEL_SCORE = {
  A: 1,
  B: 2,
  C: 3,
  D: 4,
} as const;
// One bad-smell summary row from the system overview endpoint.
export interface Overview {
  level: keyof typeof LEVEL_SCORE; // quality grade for this bad smell
  badSmell: string;
  category: string;
  count: number; // number of occurrences
}

// Response envelope for the overview endpoint.
interface UseOverview {
  data: Overview[];
}
// Hook: loads the bad-smell overview rows for the current system, unwrapping
// the response envelope and defaulting to an empty list while loading.
export function useOverview() {
  const endpoint = `/api/systems/${storage.getSystemId()}/overview`;
  const { data, run } = useGet<UseOverview>(endpoint);
  const rows = data?.data || [];
  return { data: rows, run };
}
|
#!/bin/sh
# Tear down the demo network: delete the veth endpoint and the ns1 namespace.
# Requires root. NOTE(review): no error handling — `ip link del veth0` fails
# if the link is already gone; presumably best-effort cleanup, confirm.
ip link del veth0
ip netns del ns1
|
#!/bin/bash
#PBS -q normal
#PBS -l nodes=1:ppn=16:native
#PBS -l walltime=1:00:00
#PBS -N sedov3a0016
#PBS -o out.stdout
#PBS -e out.stderr
#PBS -M jobordner@ucsd.edu
#PBS -m abe
#PBS -V
# Start of user commands - comments start with a hash sign (#)
# Sedov blast-wave run "3a" on 16 cores (1 node x 16 ppn).
# P: zero-padded process/core count consumed by the include script.
P=0016
# T: run/test identifier.
T=3a
# H: host tag telling the include script which machine config to use.
H="sdsc-gordon"
# Delegate the actual setup and launch to the shared Sedov include script.
# NOTE(review): assumes $HOME/Cello/cello-src is present on the compute node.
source $HOME/Cello/cello-src/input/Sedov/include.sh
|
def insertion_sort(arr):
    """Sort ``arr`` in place with insertion sort and return it.

    Stable; O(n^2) comparisons worst case, O(n) on nearly-sorted input.
    """
    for idx in range(1, len(arr)):
        current = arr[idx]
        # Shift every larger element in arr[:idx] one slot right, then drop
        # `current` into the gap that opens up.
        pos = idx
        while pos > 0 and arr[pos - 1] > current:
            arr[pos] = arr[pos - 1]
            pos -= 1
        arr[pos] = current
    return arr
import numpy as np
import tensorflow as tf
def calculate_definite_integrals(a, b, ms, low, up):
    """Closed-form definite integrals of cosine-basis products.

    Builds the pairwise integrals of cos(w_i x) * cos(w_j x) over [low, up]
    for the angular frequencies w_k = 2*pi*m_k / (b - a), plus the integral
    of cos^2(w x).

    Args:
        a, b: endpoints defining the base period (b - a) of the cosine basis.
        ms: mode numbers; assumed to be a 1-D numpy array — TODO confirm.
        low, up: integration limits for the cross terms.

    Returns:
        Tuple (integral_coscos, integral_cos_squared): the cross-term matrix
        as a numpy array, and the cos^2 integrals as a tensor.

    NOTE(review): the coscos denominator (m^2 - w^2) is zero on the diagonal
    (equal frequencies), so diagonal entries come out NaN/inf — presumably
    callers overwrite them with integral_cos_squared; confirm at call sites.
    NOTE(review): also divides by zero if any mode is 0 (w == 0). And the
    cos^2 formula integrates over [a, b] while coscos uses [low, up] —
    confirm this asymmetry is intended.
    """
    omegas_cos = 2.0 * np.pi * ms / (b - a)
    # Column vs row vectors so the arithmetic below broadcasts to a matrix.
    w = omegas_cos.reshape(-1, 1)
    m = omegas_cos.reshape(1, -1)
    # Antiderivative of cos(m x) * cos(w x), evaluated at both limits.
    coscos = (
        -m * tf.sin(low * m) * tf.cos(low * w)
        + w * tf.cos(low * m) * tf.sin(low * w)
        + m * tf.sin(up * m) * tf.cos(up * w)
        - w * tf.cos(up * m) * tf.sin(up * w)
    ) / (tf.square(m) - tf.square(w))
    integral_coscos = coscos.numpy()
    # Integral of cos^2(w x) over [a, b].
    integral_cos_squared = (2 * w * (b - a) - tf.sin(2 * a * w) + tf.sin(2 * b * w)) / (4 * w)
    return integral_coscos, integral_cos_squared
import {inject, injectable, LazyServiceIdentifer} from 'inversify'
import {LiquidityRemoveTransformation} from '../models/transformation/liquidityRemoveTransformation'
import {LiquidityAddTransformation} from '../models/transformation/liquidityAddTransformation'
import {SwapBuyTransformation} from '../models/transformation/swapBuyTransformation'
import {MatchRecord} from '../models/matches/matchRecord'
import {SwapSellTransformation} from '../models/transformation/swapSellTransformation'
// @ts-ignore
import sqrt from 'bigint-isqrt'
import {workerLogger} from '../../utils/workerLogger'
import MonitorService from './monitorService'
import {modules} from '../../container'
@injectable()
export default class MatcherService {
  #monitorService: MonitorService

  // Log an info-level message and mirror it into the monitor for the request
  // identified by `outpoint`.
  // @ts-ignore
  #info = (outpoint: string, msg: string) => {
    workerLogger.info(`MatcherService: ${msg}`)
    this.#monitorService.update(outpoint, msg)
  }

  // Log an error-level message and mirror it into the monitor.
  // @ts-ignore
  #error = (outpoint: string, msg: string) => {
    workerLogger.error(`MatcherService: ${msg}`)
    this.#monitorService.update(outpoint, msg)
  }

  constructor(
    @inject(new LazyServiceIdentifer(() => modules[MonitorService.name])) monitorService: MonitorService,
  ) {
    this.#monitorService = monitorService
  }

  /*
  Info -> Info
  Pool -> Pool
  [removeReq] -> [removeRes(Sudt-cell)]
  [addReq] -> [addRes(Lpt-cell + Sudt-change-cell)]
   */
  // Processes every transformation in the record, in the order sells, buys,
  // adds, removes, mutating the shared info/pool state as it goes. If every
  // transformation ends up skipped the whole record is skipped; otherwise the
  // matcher's change cell pays the block miner fee.
  match = (matcheRecord: MatchRecord): void => {
    for (let sellXform of matcheRecord.sellXforms) {
      this.processSwapSellTransformation(matcheRecord, sellXform)
    }
    for (let buyXform of matcheRecord.buyXforms) {
      this.processSwapBuyTransformation(matcheRecord, buyXform)
    }
    for (let addXform of matcheRecord.addXforms) {
      this.processLiquidityAddTransformation(matcheRecord, addXform)
    }
    for (let removelXform of matcheRecord.removeXforms) {
      this.processLiquidityRemoveTransformation(matcheRecord, removelXform)
    }
    if (
      matcheRecord.sellXforms.every(xform => xform.skip) &&
      matcheRecord.buyXforms.every(xform => xform.skip) &&
      matcheRecord.removeXforms.every(xform => xform.skip) &&
      matcheRecord.addXforms.every(xform => xform.skip)
    ) {
      matcheRecord.skip = true
    } else {
      matcheRecord.matcherChange.reduceBlockMinerFee()
    }
  }

  /*
  info -> info
  pool -> pool
  matcherChange_ckb -> matcherChange_ckb
  swap_buy -> sudt
  buy = buy sudt
  ckb -> sudt
   */
  // Constant-product swap of CKB for SUDT with a 0.3% fee. On any guard
  // failure the transformation is marked `skip` and the pool is untouched.
  private processSwapBuyTransformation = (matchRecord: MatchRecord, swapBuyXform: SwapBuyTransformation): void => {
    // spend ckb to get sudt
    // ckb -> sudt
    // since you can not spend all ckb because we need some to hold the output sudt cell
    if (swapBuyXform.request.capacity - swapBuyXform.request.tips <= swapBuyXform.minCapacity()) {
      this.#info(swapBuyXform.request.getOutPoint(),
        'process swap buy, txHash: ' + swapBuyXform.request.outPoint.tx_hash +
        ` swapBuyXform.request.capacity ${swapBuyXform.request.capacity} - swapBuyXform.request.tips ${swapBuyXform.request.tips} <= swapBuyXform.minCapacity() ${swapBuyXform.minCapacity()}`,
      )
      swapBuyXform.skip = true
      return
    }
    // the formula is as same as before :0
    // (ckb_reserve + spent_ckb * 99.7% ) (sudt_reserve - sudt_got) <= sudt_reserve * ckb_reserve
    // ckb_reserve * - sudt_got + spent_ckb * 99.7% * sudt_reserve + spent_ckb * 99.7% * - sudt_got <=0
    // spent_ckb * 99.7% * sudt_reserve <= ckb_reserve * sudt_got + spent_ckb * 99.7% * sudt_got
    // spent_ckb * 99.7% * sudt_reserve <= (ckb_reserve + spent_ckb * 99.7% )* sudt_got
    // spent_ckb * 997 * sudt_reserve <= (ckb_reserve * 1000 + spent_ckb * 997 )* sudt_got
    // sudt_got >= spent_ckb * 997 * sudt_reserve / (ckb_reserve * 1000 + spent_ckb * 997 )
    let ckbIn = swapBuyXform.request.capacity - swapBuyXform.request.tips - swapBuyXform.minCapacity()
    // The `+ 1n` rounds the bigint division up — presumably so the invariant
    // above holds with integer arithmetic; TODO confirm rounding direction.
    let sudtGot =
      (ckbIn * 997n * matchRecord.info.sudtReserve) / (matchRecord.info.ckbReserve * 1000n + ckbIn * 997n) + 1n
    if (sudtGot < swapBuyXform.request.amountOutMin) {
      this.#info(swapBuyXform.request.getOutPoint(),
        'process swap buy, txHash: ' + swapBuyXform.request.outPoint.tx_hash +
        ` sudtGot ${sudtGot}< swapBuyXform.request.amountOutMin ${swapBuyXform.request.amountOutMin}`)
      swapBuyXform.skip = true
      return
    }
    if (matchRecord.info.sudtReserve < sudtGot) {
      this.#info(swapBuyXform.request.getOutPoint(),
        'process swap buy, txHash: ' + swapBuyXform.request.outPoint.tx_hash +
        ` matchRecord.info.sudtReserve ${matchRecord.info.sudtReserve} < sudtGot ${sudtGot}`)
      swapBuyXform.skip = true
      return
    }
    // flush the result into Xform
    swapBuyXform.sudtAmount = sudtGot
    // update context of matchRecord
    // matcher get tips
    matchRecord.matcherChange.capacity += swapBuyXform.request.tips
    // then all ckb, include of 0.3% as liquidity provider interest, goes into pool
    matchRecord.info.ckbReserve += ckbIn
    matchRecord.info.sudtReserve -= sudtGot
    matchRecord.pool.capacity += ckbIn
    matchRecord.pool.sudtAmount -= sudtGot
  }

  /*
  info -> info
  pool -> pool
  matcherChange_ckb -> matcherChange_ckb
  swap_sell -> ckb
  sell = sell sudt
  sudt -> ckb
   */
  // Constant-product swap of SUDT for CKB with a 0.3% fee.
  private processSwapSellTransformation = (matchRecord: MatchRecord, swapSellXform: SwapSellTransformation): void => {
    // spend sudt to get ckb
    // sudt -> ckb
    // (sudt_reserve + spent_sudt * 99.7%)(ckb_reserve - ckb_got) <= sudt_reserve * ckb_reserve
    // sudt_reserve * - ckb_got + spent_sudt * 99.7% * ckb_reserve + spent_sudt * 99.7% * - ckb_got <=0
    // spent_sudt * 99.7% * ckb_reserve <= sudt_reserve * ckb_got + spent_sudt * 99.7% * ckb_got
    // spent_sudt * 99.7% * ckb_reserve <= (sudt_reserve + spent_sudt * 99.7% )* ckb_got
    // spent_sudt * 997 * ckb_reserve <= (sudt_reserve * 1000 + spent_sudt * 997 )* ckb_got
    // ckb_got >= spent_sudt * 997 * ckb_reserve / (sudt_reserve * 1000 + spent_sudt * 997 )
    let ckbOut: bigint =
      (swapSellXform.request.sudtAmount * 997n * matchRecord.info.ckbReserve) /
      (matchRecord.info.sudtReserve * 1000n + swapSellXform.request.sudtAmount * 997n) +
      1n
    if (ckbOut < swapSellXform.request.amountOutMin) {
      this.#info(swapSellXform.request.getOutPoint(),
        'process swap sell, txHash: ' + swapSellXform.request.outPoint.tx_hash +
        ` ckbOut ${ckbOut} < swapSellXform.request.amountOutMin ${swapSellXform.request.amountOutMin}`)
      swapSellXform.skip = true
      return
    }
    if (
      ckbOut + swapSellXform.request.capacity <
      swapSellXform.minCapacity() + swapSellXform.request.tips
    ) {
      this.#info(swapSellXform.request.getOutPoint(),
        'process swap sell, txHash: ' + swapSellXform.request.outPoint.tx_hash +
        ` ckbOut ${ckbOut} + swapSellXform.request.capacity ${swapSellXform.request.capacity}< swapSellXform.minCapacity() ${swapSellXform.minCapacity()} + swapSellXform.request.tips ${swapSellXform.request.tips}`,
      )
      swapSellXform.skip = true
      return
    }
    swapSellXform.capacity = ckbOut + swapSellXform.request.capacity - swapSellXform.request.tips
    // NOTE(review): unlike the guards above, the next two checks only log —
    // they neither set `skip` nor return, so processing continues. Confirm
    // this is intentional and not a missing `skip = true; return`.
    if (swapSellXform.capacity < swapSellXform.minCapacity()) {
      this.#info(swapSellXform.request.getOutPoint(),
        'process swap sell, txHash: ' + swapSellXform.request.outPoint.tx_hash +
        ` swapSellXform.capacity ${swapSellXform.capacity}< swapSellXform.minCapacity() ${swapSellXform.minCapacity()}`,
      )
    }
    if (matchRecord.info.ckbReserve < ckbOut) {
      this.#info(swapSellXform.request.getOutPoint(),
        'process swap sell, txHash: ' + swapSellXform.request.outPoint.tx_hash +
        ` matchRecord.info.ckbReserve ${matchRecord.info.ckbReserve} < ckbOut ${ckbOut}`,
      )
    }
    matchRecord.matcherChange.capacity += swapSellXform.request.tips
    // then all remaining sudtChangeAmount, include of 0.3% as liquidity provider interest, goes into pool
    matchRecord.info.sudtReserve += swapSellXform.request.sudtAmount
    matchRecord.info.ckbReserve -= ckbOut
    matchRecord.pool.sudtAmount += swapSellXform.request.sudtAmount
    matchRecord.pool.capacity -= ckbOut
  }

  /*
  total lpt        total sudtChangeAmount     total ckb
  --------------- = ----------------------- = ----------------
  withdrawn lpt    withdrawn sudtChangeAmount? withdrawn ckb?

  LiquidityRemoveReq -> Sudt + Ckb
   */
  // Burns LP tokens and withdraws the proportional share of both reserves.
  private processLiquidityRemoveTransformation = (
    matchRecord: MatchRecord,
    liquidityRemoveXform: LiquidityRemoveTransformation,
  ): void => {
    let withdrawnSudt =
      (liquidityRemoveXform.request.lptAmount * matchRecord.info.sudtReserve) / matchRecord.info.totalLiquidity + 1n
    let withdrawnCkb =
      (liquidityRemoveXform.request.lptAmount * matchRecord.info.ckbReserve) / matchRecord.info.totalLiquidity + 1n
    if (withdrawnSudt < liquidityRemoveXform.request.sudtMin || withdrawnCkb < liquidityRemoveXform.request.ckbMin) {
      this.#info(liquidityRemoveXform.request.getOutPoint(),
        'process liquidity remove, txHash: ' + liquidityRemoveXform.request.outPoint.tx_hash +
        `withdrawnSudt ${withdrawnSudt}< liquidityRemoveXform.request.sudtMin ${liquidityRemoveXform.request.sudtMin} || withdrawnCkb ${withdrawnCkb}< liquidityRemoveXform.request.ckbMin ${liquidityRemoveXform.request.ckbMin}`,
      )
      liquidityRemoveXform.skip = true
      return
    }
    if (
      liquidityRemoveXform.request.capacityAmount + withdrawnCkb <
      liquidityRemoveXform.minCapacity() + liquidityRemoveXform.request.tips
    ) {
      this.#info(liquidityRemoveXform.request.getOutPoint(),
        'process liquidity remove, txHash: ' + liquidityRemoveXform.request.outPoint.tx_hash +
        ` liquidityRemoveXform.request.capacityAmount ${liquidityRemoveXform.request.capacityAmount} + withdrawnCkb ${withdrawnCkb} < liquidityRemoveXform.minCapacity() ${liquidityRemoveXform.minCapacity()} + liquidityRemoveXform.request.tips ${liquidityRemoveXform.request.tips}`,
      )
      liquidityRemoveXform.skip = true
      return
    }
    if (matchRecord.info.ckbReserve < withdrawnCkb || matchRecord.pool.sudtAmount < withdrawnSudt) {
      this.#info(liquidityRemoveXform.request.getOutPoint(),
        'process liquidity remove, txHash: ' + liquidityRemoveXform.request.outPoint.tx_hash +
        `matchRecord.info.ckbReserve ${matchRecord.info.ckbReserve} < withdrawnCkb ${withdrawnCkb} || matchRecord.pool.sudtAmount ${matchRecord.pool.sudtAmount} < withdrawnSudt ${withdrawnSudt}`,
      )
      liquidityRemoveXform.skip = true
      return
    }
    matchRecord.matcherChange.capacity += liquidityRemoveXform.request.tips
    liquidityRemoveXform.sudtAmount = withdrawnSudt
    liquidityRemoveXform.capacityAmount =
      liquidityRemoveXform.request.capacityAmount + withdrawnCkb - liquidityRemoveXform.request.tips
    // update info
    matchRecord.info.ckbReserve -= withdrawnCkb
    matchRecord.info.sudtReserve -= withdrawnSudt
    matchRecord.info.totalLiquidity -= liquidityRemoveXform.request.lptAmount
    // update pool
    matchRecord.pool.sudtAmount -= withdrawnSudt
    matchRecord.pool.capacity -= withdrawnCkb
  }

  /*
  total ckb       add ckb
  ------------- = ----------
  total sudt      add sudt

  total lpt       total ckb
  ------------ = -----------
  return lpt?     add ckb

  LiquidityAddReq -> Lpt(size is fixed if script is determined) + change(size is fixed if script is determined)
   */
  // Adds liquidity at the current reserve ratio. Two branches: either all
  // available CKB is consumed (SUDT change returned) or all SUDT is consumed
  // (CKB change returned), depending on which side is the limiting one.
  private processLiquidityAddTransformation = (
    matchRecord: MatchRecord,
    liquidityAddXform: LiquidityAddTransformation,
  ): void => {
    // first we try to use all available and have to has sudt cell to keep the change
    let ckbAvailable =
      liquidityAddXform.request.capacityAmount -
      liquidityAddXform.request.tips -
      liquidityAddXform.minCapacity('sudt')
    if (ckbAvailable <= 0n) {
      this.#info(liquidityAddXform.request.getOutPoint(),
        'process liquidity add, txHash: ' + liquidityAddXform.request.outPoint.tx_hash +
        ` ckbAvailable ${ckbAvailable} <= 0n}`)
      liquidityAddXform.skip = true
      return
    }
    // SUDT needed to pair all available CKB at the current reserve ratio.
    let sudtNeeded = (ckbAvailable * matchRecord.info.sudtReserve) / matchRecord.info.ckbReserve + 1n
    if (sudtNeeded < liquidityAddXform.request.sudtAmount) {
      // exhaust all available ckb and sudt remains
      if (sudtNeeded < liquidityAddXform.request.sudtMin || ckbAvailable < liquidityAddXform.request.ckbMin) {
        this.#info(liquidityAddXform.request.getOutPoint(),
          'process liquidity add, exhaust all ckb, txHash: ' + liquidityAddXform.request.outPoint.tx_hash +
          ` sudtNeeded ${sudtNeeded} < liquidityAddXform.request.sudtMin ${liquidityAddXform.request.sudtMin}|| ckbAvailable ${ckbAvailable}< liquidityAddXform.request.ckbMin ${liquidityAddXform.request.ckbMin}`,
        )
        liquidityAddXform.skip = true
        return
      }
      // OK, we use all ckb we can use and sudt remains
      let lptGot = (matchRecord.info.totalLiquidity * ckbAvailable) / matchRecord.info.ckbReserve + 1n
      matchRecord.matcherChange.capacity += liquidityAddXform.request.tips
      // of cause, because we drain all available ckbs and only leave these for hold cells
      liquidityAddXform.capacityChangeAmount = liquidityAddXform.minCapacity('sudt')
      liquidityAddXform.sudtChangeAmount = liquidityAddXform.request.sudtAmount - sudtNeeded
      liquidityAddXform.lptAmount = lptGot
      // update info
      matchRecord.info.ckbReserve += ckbAvailable
      matchRecord.info.sudtReserve += sudtNeeded
      matchRecord.info.totalLiquidity += lptGot
      matchRecord.pool.capacity += ckbAvailable
      matchRecord.pool.sudtAmount += sudtNeeded
    } else {
      // sudt is not enough, we drain all sudts and ckb remain
      let ckbNeeded =
        (liquidityAddXform.request.sudtAmount * matchRecord.info.ckbReserve) / matchRecord.info.sudtReserve + 1n
      if (
        liquidityAddXform.request.sudtAmount < liquidityAddXform.request.sudtMin ||
        ckbNeeded < liquidityAddXform.request.ckbMin
      ) {
        this.#info(liquidityAddXform.request.getOutPoint(),
          'process liquidity add, exhaust all sudt, txHash: ' + liquidityAddXform.request.outPoint.tx_hash +
          ` liquidityAddXform.request.sudtAmount ${liquidityAddXform.request.sudtAmount} < liquidityAddXform.request.sudtMin ${liquidityAddXform.request.sudtMin}|| ckbNeeded ${ckbNeeded}< liquidityAddXform.request.ckbMin ${liquidityAddXform.request.ckbMin}`,
        )
        liquidityAddXform.skip = true
        return
      }
      const ckbLeft = liquidityAddXform.request.capacityAmount - liquidityAddXform.request.tips - ckbNeeded
      // remaining ckbs should be enough to compose lpt cell and ckb cell
      if (ckbLeft < liquidityAddXform.minCapacity('ckb')) {
        // this shouldn't happens
        this.#info(liquidityAddXform.request.getOutPoint(),
          'process liquidity add, exhaust all sudt, txHash: ' + liquidityAddXform.request.outPoint.tx_hash +
          ` ckbLeft ${ckbLeft}< Ckb.CKB_FIXED_MIN_CAPACITY + Lpt.LPT_FIXED_CAPACITY`)
        liquidityAddXform.skip = true
        return
      }
      let lptGot = (matchRecord.info.totalLiquidity * liquidityAddXform.request.sudtAmount) / matchRecord.info.sudtReserve + 1n
      matchRecord.matcherChange.capacity += liquidityAddXform.request.tips
      // of cause, because we drain all available ckbs and only leave these for hold cells
      liquidityAddXform.capacityChangeAmount = ckbLeft
      liquidityAddXform.sudtChangeAmount = 0n
      liquidityAddXform.lptAmount = lptGot
      // update info
      matchRecord.info.ckbReserve += ckbNeeded
      matchRecord.info.sudtReserve += liquidityAddXform.request.sudtAmount
      matchRecord.info.totalLiquidity += lptGot
      matchRecord.pool.capacity += ckbNeeded
      matchRecord.pool.sudtAmount += liquidityAddXform.request.sudtAmount
    }
  }

  // req -> lpt
  // Bootstraps the pool from the record's init transformation: mints LP tokens
  // equal to sqrt(ckb * sudt) deposited (the standard initial-liquidity rule).
  // NOTE(review): unlike the add path, there is no guard for ckbAvailable <= 0
  // or for min amounts here — confirm requests are pre-validated upstream.
  initLiquidity = (matchRecord: MatchRecord): void => {
    let liquidityInitXform = matchRecord.initXforms!
    let ckbAvailable =
      liquidityInitXform.request.capacityAmount -
      liquidityInitXform.request.tips -
      liquidityInitXform.minCapacity()
    let lptMinted = sqrt(ckbAvailable * liquidityInitXform.request.sudtAmount)
    liquidityInitXform.lptAmount = lptMinted
    matchRecord.matcherChange.capacity += liquidityInitXform.request.tips
    // update info
    matchRecord.info.ckbReserve += ckbAvailable
    matchRecord.info.sudtReserve += liquidityInitXform.request.sudtAmount
    matchRecord.info.totalLiquidity += lptMinted
    matchRecord.pool.capacity += ckbAvailable
    matchRecord.pool.sudtAmount += liquidityInitXform.request.sudtAmount
    matchRecord.matcherChange.reduceBlockMinerFee()
  }
}
|
import logging
# Log every positional and keyword argument at DEBUG level.
def log_arguments(*args, **kwargs):
    """Emit one DEBUG record per argument.

    Positional arguments are logged as ``arg=<value>``; keyword arguments as
    ``<key>=<value>``. Records go to this module's logger, so output depends
    on the caller's logging configuration.
    """
    lg = logging.getLogger(__name__)
    for arg in args:
        # Lazy %-style formatting defers string building until (and unless)
        # the record is actually emitted — the logging-module idiom.
        lg.debug('arg=%s', arg)
    for key, value in kwargs.items():
        lg.debug('%s=%s', key, value)
# Example usage: configure the root logger so DEBUG records are printed,
# then log one positional and two keyword arguments.
logging.basicConfig(level=logging.DEBUG)
log_arguments('x_value', y='y_value', z='z_value')
# Launch TensorBoard serving the event logs written under /tmp/log
# (UI reachable at http://localhost:6006 by default).
tensorboard --logdir=/tmp/log
|
void generateForests(TreeNode** forestRoot) {
// Create and populate the first tree
TreeNode* tree1 = (TreeNode*)malloc(sizeof(TreeNode));
tree1->val = 1;
tree1->left = NULL;
tree1->right = NULL;
*forestRoot = tree1; // Set the root of the forest to the first tree
// Create and populate the second tree
TreeNode* tree2 = (TreeNode*)malloc(sizeof(TreeNode));
tree2->val = 2;
tree2->left = NULL;
tree2->right = NULL;
// Link the second tree to the first tree
(*forestRoot)->left = tree2;
// Create and populate the third tree
TreeNode* tree3 = (TreeNode*)malloc(sizeof(TreeNode));
tree3->val = 3;
tree3->left = NULL;
tree3->right = NULL;
// Link the third tree to the first tree
(*forestRoot)->right = tree3;
} |
import requests
from bs4 import BeautifulSoup
def get_top_10_search_results(keyword):
    """Print title and URL of up to 10 Google results for ``keyword``.

    Performs a live HTTP request; output depends entirely on the HTML Google
    serves. NOTE(review): the ``LC20lb`` class name is Google-internal and
    changes over time — confirm it is still current. Google may also serve a
    simplified page to the default requests user agent.
    """
    google_url = f"https://www.google.com/search?q={keyword}"
    r = requests.get(google_url)
    soup = BeautifulSoup(r.text, 'html.parser')
    headings = soup.find_all('h3', attrs={'class': 'LC20lb'})
    for heading in headings[:10]:
        title = heading.text
        # Bug fix: the result <h3> sits INSIDE its <a>, so the link is an
        # ancestor, not a child — the original `heading.find('a')` returned
        # None and crashed on subscripting.
        anchor = heading.find_parent('a')
        if anchor is None:
            continue
        print(title, anchor['href'])
# Example usage (performs a live HTTP request to Google).
get_top_10_search_results('artificial intelligence')
<reponame>BasinMC/Basin<filename>faucet/src/main/java/org/basinmc/faucet/command/CommandSpec.java
/*
* Copyright 2017 Hex <<EMAIL>>
* and other copyright owners as documented in the project's IP log.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.basinmc.faucet.command;
import java.util.Set;
import edu.umd.cs.findbugs.annotations.NonNull;
public interface CommandSpec {

    /**
     * Get the name of this command.
     *
     * @return the primary name used to invoke the command.
     */
    @NonNull
    String getName();

    /**
     * Get a set of aliases that can be used to refer to this command, or an empty set.
     *
     * @return the alias set; never null.
     */
    @NonNull
    Set<String> getAliases();

    /**
     * Get the bundle that provides this command. Will either be a extension or the server.
     * NOTE(review): presumably a project-specific {@code Module} type rather than
     * {@code java.lang.Module} — confirm the intended import.
     *
     * @return the owning module.
     */
    @NonNull
    Module getOwner();
}
|
import { formatDatePair } from './index'
import moment from 'moment'
describe('formatDatePair', () => {
  it('displays differences of dates', () => {
    // Fixed instant, then progressively larger offsets derived from it:
    // same day at a later hour, the next weekday, and the next month.
    const base = moment(1551908483315).month(1).day(1).hour(18)
    const laterSameDay = moment(base).hour(21)
    const nextDay = moment(laterSameDay).day(2)
    const nextMonth = moment(nextDay).month(2)

    expect(formatDatePair(base)).toMatchSnapshot()
    expect(formatDatePair(base, laterSameDay)).toMatchSnapshot()
    expect(formatDatePair(base, nextDay)).toMatchSnapshot()
    expect(formatDatePair(base, nextMonth)).toMatchSnapshot()
  })
})
|
silnite)
    # credit: Søren Theilgaard (@theilgaard)
    name="silnite"
    type="pkgInZip"
    # Quote "$name" to avoid word-splitting/globbing (ShellCheck SC2086).
    # NOTE(review): parsing HTML with grep/sed is fragile — verify the
    # download page markup still matches this pattern.
    downloadURL=$(curl -fs https://eclecticlight.co/downloads/ | grep -i "$name" | grep zip | sed -E 's/.*href=\"(https.*)\">.*/\1/g')
    # Version is scraped from the link text, e.g. "silnite 1.2".
    appNewVersion=$(curl -fs https://eclecticlight.co/downloads/ | grep zip | grep -o -E "silnite [0-9.]*" | awk '{print $2}')
    expectedTeamID="QWY4LRW926"
    blockingProcesses=( NONE )
    ;;
|
<filename>core/src/uk/org/ulcompsoc/ld32/systems/EnemySpawningSystem.java
package uk.org.ulcompsoc.ld32.systems;
import java.util.Random;
import uk.org.ulcompsoc.ld32.CircleMap;
import uk.org.ulcompsoc.ld32.CircleMap.RingSegment;
import uk.org.ulcompsoc.ld32.LD32;
import uk.org.ulcompsoc.ld32.components.CanItDrop;
import uk.org.ulcompsoc.ld32.components.Damage;
import uk.org.ulcompsoc.ld32.components.DeathAnimation;
import uk.org.ulcompsoc.ld32.components.Killable;
import uk.org.ulcompsoc.ld32.components.PathFollower;
import uk.org.ulcompsoc.ld32.components.Position;
import uk.org.ulcompsoc.ld32.components.Renderable;
import uk.org.ulcompsoc.ld32.components.SphericalBound;
import uk.org.ulcompsoc.ld32.components.Tower;
import uk.org.ulcompsoc.ld32.components.enemies.Antineutron;
import uk.org.ulcompsoc.ld32.components.enemies.Antiproton;
import uk.org.ulcompsoc.ld32.components.enemies.Enemy;
import uk.org.ulcompsoc.ld32.components.enemies.Positron;
import uk.org.ulcompsoc.ld32.util.TextureName;
import com.badlogic.ashley.core.Engine;
import com.badlogic.ashley.core.Entity;
import com.badlogic.ashley.core.Family;
import com.badlogic.ashley.systems.IntervalSystem;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.g2d.Animation;
import com.badlogic.gdx.graphics.g2d.Animation.PlayMode;
/**
 * Spawns enemies onto the first ring segment of the map. The spawn interval
 * shortens as elapsed time grows and as the player places more towers.
 */
public class EnemySpawningSystem extends IntervalSystem {
    public static final float MAX_SPAWN_TIME = 10.0f;
    public static final float MIN_SPAWN_TIME = 0.5f;

    private final CircleMap map;
    private Engine engine = null;
    private final Animation greyEnemy;
    private final Random random = new Random();

    private final float interval;
    private float timeElapsed = 0.0f;
    private float totalTimeElapsed = 0.0f;

    // TODO: change this after spawn rate is calculated correctly.
    // Starts at 0.0f, so the very first updateInterval() tick spawns an enemy.
    private float spawnTime;

    public EnemySpawningSystem(int priority, float interval, final CircleMap map) {
        super(interval, priority);
        this.interval = interval;
        this.map = map;
        this.greyEnemy = new Animation(0.15f, LD32.textureManager.animationRegionMap.get(TextureName.ENEMY_ANIM));
        this.greyEnemy.setPlayMode(PlayMode.LOOP_PINGPONG);
    }

    @Override
    public void addedToEngine(Engine engine) {
        super.addedToEngine(engine);
        this.engine = engine;
    }

    @Override
    public void removedFromEngine(Engine engine) {
        super.removedFromEngine(engine);
        this.engine = null;
    }

    @Override
    protected void updateInterval() {
        timeElapsed += interval;
        totalTimeElapsed += interval;
        if (timeElapsed >= spawnTime) {
            timeElapsed -= spawnTime;
            // Spawn time is the inverse of the spawn rate (spawns per second).
            spawnTime = 1.0f / calculateSpawnRate(totalTimeElapsed);
            // Gdx.app.log("SPAWN_TIME", "Enemy spawn time is now: " +
            // spawnTime);
            engine.addEntity(generateEnemy());
        }
    }

    /** Builds a new enemy entity of a random type on the first ring segment. */
    private Entity generateEnemy() {
        // BUG FIX: the "> 150" branch was unreachable because it was tested
        // after the broader "> 60" condition; check the stricter bound first.
        if (totalTimeElapsed > 150) {
            Enemy.setMultiplier(1 + (totalTimeElapsed / 100));
        } else if (totalTimeElapsed > 60) {
            Enemy.setMultiplier(1 + (totalTimeElapsed / 200));
        }
        // System.out.println("EnemyMultiplier: " + Enemy.getMultiplier());
        final Entity entity = new Entity();
        final RingSegment firstSegment = map.getFirstSegment();
        final EnemyType type = EnemyType.getRandomType(random);
        entity.add(Position.fromPolar(firstSegment.middleR, firstSegment.middlePhi));
        Renderable r = new Renderable(greyEnemy).setScale(0.25f).setColor(type.renderColor);
        entity.add(r);
        // Each enemy type shares the same component set; only the stats differ.
        if (type == EnemyType.YELLOW) {
            Positron pos = new Positron();
            entity.add(pos);
            entity.add(new Killable(pos.health));
            entity.add(new PathFollower(firstSegment, 1 / pos.speed).continueToNull().killWhenDone());
            entity.add(new Damage(pos.damage));
        } else if (type == EnemyType.BLUE) {
            Antiproton antiP = new Antiproton();
            entity.add(antiP);
            entity.add(new Killable(antiP.health));
            entity.add(new PathFollower(firstSegment, 1 / antiP.speed).continueToNull().killWhenDone());
            entity.add(new Damage(antiP.damage));
        } else if (type == EnemyType.GREEN) {
            Antineutron antiN = new Antineutron();
            entity.add(antiN);
            entity.add(new Killable(antiN.health));
            entity.add(new PathFollower(firstSegment, 1 / antiN.speed).continueToNull().killWhenDone());
            entity.add(new Damage(antiN.damage));
        }
        entity.add(new CanItDrop());
        entity.add(new DeathAnimation(greyEnemy));
        entity.add(new SphericalBound(r.getWidth() / 2));
        return entity;
    }

    /**
     * Spawns per second, scaled by elapsed time (sqrt growth) and tower count
     * (+10% per tower), capped at MAX_SPAWN_TIME.
     */
    @SuppressWarnings("unchecked")
    private float calculateSpawnRate(float elapsedTime) {
        float factor = 0.1f;
        if (elapsedTime > 90) {
            factor = 0.5f;
        }
        int numTowers = engine.getEntitiesFor(Family.all(Tower.class).get()).size();
        // System.out.println("Num towers: " + numTowers);
        final float scale = (float) (Math.sqrt(elapsedTime) * factor);
        final float percentIncrease = 1 + (numTowers * 0.1f);
        // System.out.println("Scale: " + scale + " percentIncrease: " +
        // percentIncrease);
        // float spawnRate = Math.max(MIN_SPAWN_TIME, (scale *
        // percentIncrease));
        float spawnRate = scale * percentIncrease;
        // System.out.println("Spawn rate1: " + spawnRate);
        spawnRate = Math.min(spawnRate, MAX_SPAWN_TIME);
        // System.out.println("SpawnRate2: " + spawnRate);
        // System.out.println("SPAWN_RATE: " + spawnRate);
        return spawnRate;
    }

    /** Enemy variants; each is tied to a render tint. */
    public static enum EnemyType {
        // RED(Color.RED.cpy()), //
        GREEN(Color.GREEN.cpy()), //
        BLUE(Color.BLUE.cpy()), //
        YELLOW(Color.YELLOW.cpy()); //
        // PURPLE(Color.PURPLE.cpy());

        public final Color renderColor;

        private EnemyType(final Color color) {
            this.renderColor = color;
        }

        /** Uniformly random type from the declared values. */
        public static EnemyType getRandomType(final Random random) {
            return EnemyType.values()[random.nextInt(EnemyType.values().length)];
        }
    }
}
|
# Shows the signed-in user's personal share code.
class ShareController < ApplicationController
  # Guests have no share code; require authentication first.
  before_filter :authenticate_user!

  # Renders the share page for the current user.
  def index
    # user's share code is generated in the create action of registrations_controller.rb for regular signups
    # or in the users_controller.rb for oauth users so it's saved to the user before the welcome email goes
    @user = current_user
    @share_code = current_user.share_code
  end
end
|
<filename>src/handlers/__tests__/propTypeHandler-test.js
/*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
/*global jest, describe, it, expect, beforeEach*/
jest.disableAutomock();
jest.mock('../../Documentation');
describe('propTypeHandler', () => {
var statement, expression;
var getPropTypeMock;
var documentation;
var propTypeHandler;
// Re-require everything per test so the getPropType mock and the
// Documentation instance are fresh for each case; jest.setMock must run
// before the handler module is required for the mock to take effect.
beforeEach(() => {
({statement, expression} = require('../../../tests/utils'));
getPropTypeMock = jest.genMockFunction().mockImplementation(() => ({}));
jest.setMock('../../utils/getPropType', getPropTypeMock);
jest.mock('../../utils/getPropType');
documentation = new (require('../../Documentation'));
propTypeHandler = require('../propTypeHandler').default;
});
// Appends the PropTypes bindings the handler is expected to resolve
// (React.PropTypes reached via several access paths) to a source snippet.
function template(src) {
return `
${src}
var React = require('React');
var PropTypes = React.PropTypes;
var {PropTypes: OtherPropTypes} = React;
`;
}
// Shared suite, parameterized by how the propTypes source is embedded in a
// component (getSrc) and how that snippet is parsed into a definition
// path (parse). Reused below for createClass, class, and stateless forms.
function test(getSrc, parse) {
it('passes the correct argument to getPropType', () => {
var propTypesSrc = `
{
foo: PropTypes.bool,
abc: PropTypes.xyz,
}
`;
var definition = parse(getSrc(propTypesSrc));
var propTypesAST = expression(propTypesSrc);
// Compare AST nodes, not object identity: the handler re-parses the source.
var fooPath = propTypesAST.get('properties', 0, 'value');
var xyzPath = propTypesAST.get('properties', 1, 'value');
propTypeHandler(documentation, definition);
expect(getPropTypeMock.mock.calls[0][0].node)
.toEqualASTNode(fooPath.node);
expect(getPropTypeMock.mock.calls[1][0].node)
.toEqualASTNode(xyzPath.node);
});
it('finds definitions via React.PropTypes', () => {
var definition = parse(getSrc(`
{
foo: PropTypes.bool,
bar: require("react").PropTypes.bool,
baz: OtherPropTypes.bool,
}
`));
propTypeHandler(documentation, definition);
// getPropType is mocked to return {}, hence the empty type objects.
expect(documentation.descriptors).toEqual({
foo: {
type: {},
required: false,
},
bar: {
type: {},
required: false,
},
baz: {
type: {},
required: false,
},
});
});
it('finds definitions via the ReactPropTypes module', () => {
var definition = parse(getSrc(`
{
foo: require("ReactPropTypes").bool,
}
`));
propTypeHandler(documentation, definition);
expect(documentation.descriptors).toEqual({
foo: {
type: {},
required: false,
},
});
});
it('detects whether a prop is required', () => {
var definition = parse(getSrc(`
{
simple_prop: PropTypes.array.isRequired,
complex_prop:
PropTypes.oneOfType([PropTypes.number, PropTypes.bool]).isRequired,
}
`));
propTypeHandler(documentation, definition);
expect(documentation.descriptors).toEqual({
simple_prop: { // eslint-disable-line camelcase
type: {},
required: true,
},
complex_prop: { // eslint-disable-line camelcase
type: {},
required: true,
},
});
});
it('only considers definitions from React or ReactPropTypes', () => {
var definition = parse(getSrc(`
{
custom_propA: PropTypes.bool,
custom_propB: Prop.bool.isRequired,
}
`));
propTypeHandler(documentation, definition);
// Non-React sources fall back to a "custom" type with the raw source text.
expect(documentation.descriptors).toEqual({
custom_propA: { // eslint-disable-line camelcase
type: {},
required: false,
},
custom_propB: { // eslint-disable-line camelcase
type: {
name: 'custom',
raw: 'Prop.bool.isRequired',
},
required: false,
},
});
});
it('resolves variables', () => {
var definitionSrc = getSrc('props');
var definition = parse(`
${definitionSrc}
var props = {bar: PropTypes.bool};
`);
propTypeHandler(documentation, definition);
expect(documentation.descriptors).toEqual({
bar: {
type: {},
required: false,
},
});
});
}
// propTypes attached as a property of a createClass spec object.
describe('React.createClass', () => {
test(
propTypesSrc => template(`({propTypes: ${propTypesSrc}})`),
src => statement(src).get('expression')
);
});
// propTypes as a static class property.
describe('class definition', () => {
test(
propTypesSrc => template(`
class Component {
static propTypes = ${propTypesSrc};
}
`),
src => statement(src)
);
});
// propTypes assigned onto a function component.
describe('stateless component', () => {
test(
propTypesSrc => template(`
var Component = (props) => <div />;
Component.propTypes = ${propTypesSrc};
`),
src => statement(src)
);
});
// The handler must be a no-op (not a crash) when a definition has no
// resolvable propTypes at all.
it('does not error if propTypes cannot be found', () => {
var definition = expression('{fooBar: 42}');
expect(() => propTypeHandler(documentation, definition))
.not.toThrow();
definition = statement('class Foo {}');
expect(() => propTypeHandler(documentation, definition))
.not.toThrow();
definition = statement('function Foo() {}');
expect(() => propTypeHandler(documentation, definition))
.not.toThrow();
definition = expression('() => {}');
expect(() => propTypeHandler(documentation, definition))
.not.toThrow();
});
// This case is handled by propTypeCompositionHandler
it('does not error if propTypes is a member expression', () => {
var definition = expression('{propTypes: Foo.propTypes}');
expect(() => propTypeHandler(documentation, definition))
.not.toThrow();
});
});
|
declare namespace GlobalMixins
{
    interface LoaderResource
    {
        /**
         * AnimatedGIF instance attached to a loaded resource.
         * NOTE(review): the previous comment said "Texture reference", but
         * the declared type is AnimatedGIF — comment corrected to match.
         */
        animation?: import('./').AnimatedGIF;
    }
}
|
<filename>database.js
/**
* This is a really lightweight app; the data isn't changing nor complex
* so let's cheat and use a JSON file instead of a "real" database
*
* We'll pretend to be a database though, for the sake of it
*/
var path = require('path')
module.exports = {
data: {},
getRecord: function (pk) {
return this.data[pk]
},
getKeys: function () {
return Object.keys(this.data)
},
loadData: function (file) {
if (!file) {
file = path.join(__dirname, 'data/applicants.json')
}
// you can import JSON via require()
// I guess I could use fs.readFile() instead
this.data = require(file)
},
init: function (file) {
this.loadData(file)
return this
}
}
|
package org.museautomation.ui.step.inline;
import javafx.scene.*;
import javafx.scene.layout.*;
import org.museautomation.ui.step.*;
import org.museautomation.core.step.*;
import org.museautomation.ui.extend.edit.*;
import org.museautomation.ui.extend.edit.step.*;
/**
* Responsible for choosing and switching between the quick and full editors within the containing node.
*
* @author <NAME> (see LICENSE.txt for license details)
*/
public class InlineStepEditorContainerImplementation implements InlineStepEditorContainer, Validatable {
    /**
     * Creates the container and shows either the quick or the full editor.
     * If the quick editor cannot display the step, falls back to the full editor.
     */
    public InlineStepEditorContainerImplementation(StepEditContext context, StepConfiguration step, EditInProgress edit, boolean start_in_full_mode) {
        _context = context;
        _step = step;
        _edit = edit;
        _grid = new StackPane();

        _quick_editor = InlineStepEditors.get(context.getProject()).findEditor(context, step);
        _quick_editor.edit(context, step, edit, this);

        if (start_in_full_mode)
            moreEditOptionsRequested();
        else
            try {
                lessEditOptionsRequested();
            } catch (Exception e) {
                // Quick editor could not render this step; fall back to the full editor.
                moreEditOptionsRequested();
            }
    }

    // NOTE(review): always focuses the quick editor, even when the full
    // editor is showing — confirm this is intentional.
    public void requestFocus() {
        _quick_editor.requestFocus();
    }

    public Node getNode() {
        return _grid;
    }

    /** Switches the container to the full (stack-based) step editor, creating it lazily. */
    public void moreEditOptionsRequested() {
        _grid.getChildren().clear();
        if (_full_editor == null) {
            _full_editor = new StepEditorStack(_context, _edit);
            _full_editor.setStep(_step);
            _full_node = _full_editor.getNode();
        }
        _grid.getChildren().add(_full_node);
        _full_editor.requestFocus();
        _validatable_editor = _full_editor;
    }

    /** Switches the container back to the compact quick editor. */
    // BUG FIX: @SuppressWarnings takes an array of keys; the previous single
    // string "unused,WeakerAccess" matched no known suppression key.
    @SuppressWarnings({"unused", "WeakerAccess"}) // public API
    public void lessEditOptionsRequested() {
        _grid.getChildren().clear();
        if (_quick_editor_node == null) {
            _quick_editor_node = _quick_editor.getNode();
            GridPane.setFillWidth(_quick_editor_node, true);
        }
        _grid.getChildren().add(_quick_editor_node);
        _quick_editor.requestFocus();
        _validatable_editor = _quick_editor;
    }

    /** Delegates validity to whichever editor is currently showing. */
    @Override
    public boolean isValid() {
        return _validatable_editor.isValid();
    }

    /** Releases the full editor's resources; the quick editor has no destroy hook here. */
    public void destroy() {
        if (_full_editor != null)
            _full_editor.destroy();
    }

    private final StepEditContext _context;
    private final StepConfiguration _step;
    private final EditInProgress _edit;
    private final InlineStepEditor _quick_editor; // assigned once in the constructor
    private Node _quick_editor_node;
    private StepEditorStack _full_editor; // created lazily on first request
    private Node _full_node;
    private final StackPane _grid;
    private Validatable _validatable_editor;
}
|
<reponame>18chetanpatel/vue2-filter-list
/**
 * Converts the given value to an upper-cased string.
 * All falsy inputs except the number 0 yield an empty string.
 */
function uppercase (input) {
  if (!input && input !== 0) {
    return ''
  }
  return input.toString().toUpperCase()
}
export default uppercase
|
<reponame>d4hines/Formality
#!/usr/bin/env node
// CLI entry point: passes the first command-line argument to the library's
// _evm2fm_ routine (EVM → Formality translation, per the script name).
require("./lib.js")._evm2fm_(process.argv[2]);
|
<reponame>weltam/idylfin
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.model.finitedifference;
/**
 * Marker interface for PDE data bundles.
 *
 * NOTE(review): declares no methods and is deprecated — presumably
 * superseded by another data-bundle type; confirm before implementing.
 */
@Deprecated
public interface ZZPDEDataBundle {
}
|
//Written by geoffreycs
// Locates the MP4 video URL on an NEJM article page, trying a list of known
// DOM locations in priority order. Returns the URL, or one of the sentinels
// "invalid" (not an NEJM page), "unsupported" (not a /doi/ page), or
// "null" (no MP4 found anywhere).
function urlGetter() {
    if (location.href.indexOf("www.nejm.org") == -1) { //Sanity check 1: Is this even a NEJM page?
        return "invalid";
    }
    if (location.href.indexOf("/doi/") == -1) { //Sanity check 2: Is this a supported "doi" page?
        return "unsupported";
    }
    // Known places a video link may live, in the order they were originally
    // probed. Most carry the URL in a "data-mobileurl" attribute; entries
    // flagged useText read the element's innerText instead.
    var candidates = [
        { selector: "#article_body > aside > p > a" },
        { selector: "#article_introduction > div > div > aside > p > a" },
        { selector: "#article__s001 > div > div > aside > p > a" },
        { selector: "#full > section:nth-child(2) > aside > p > a" },
        { selector: "#article_figures > ol > li.m-media-item.m-audio-video > a" },
        { selector: "#article_introduction > div > div > span > p > a" },
        { selector: "#article_figures > ol > span > p > a" },
        { selector: "#article__sc1 > div > div > aside > p > a.m-media-item__link.js__audioWindow" },
        { selector: "body > table > tbody > tr:nth-child(2024) > td.line-content > span:nth-child(19) > span:nth-child(6)", useText: true },
        { selector: "#article_introduction > div > div > aside:nth-child(2) > p > a" },
        { selector: "body > table > tbody > tr:nth-child(2035) > td.line-content > span:nth-child(10) > span:nth-child(6)", useText: true }
    ];
    for (var i = 0; i < candidates.length; i++) {
        var candidate = candidates[i];
        try {
            var el = candidates[i] && document.querySelector(candidate.selector);
            var url = candidate.useText ? el.innerText : el.getAttribute("data-mobileurl");
            if (url.indexOf(".mp4") !== -1) { //Make sure we found an MP4 video, not an MP3 track
                return url;
            }
        } catch (e) {
            console.log("Not found under " + candidate.selector);
        }
    }
    // BUG FIX: previously a non-.mp4 value from a late selector could leak
    // out as the return value; now every miss falls through to "null".
    console.log("Out of ideas."); //We've run through every single test pattern and no URL was found.
    return "null";
}
/* //Old implementation
chrome.runtime.onMessage.addListener( //Set up handler for the message from extension toolbar
function(message, sender, sendResponse) {
//console.log("Message received from popup"); //For testing only
sendResponse({
response: urlGetter() //Send back our results
});
}
);
*/
// Port-based messaging (replaces the retired sendResponse implementation
// commented out above): any message on the port triggers a fresh scan.
chrome.runtime.onConnect.addListener((port) => { //Set up handler for the message from extension toolbar
    port.onMessage.addListener((msg) => {
        port.postMessage(urlGetter()); //Send back our results
    });
});
<reponame>ashrftvm/gatsby-starter
import React from "react"
import Layout from "../components/layout"
// Static "Contact Us" page, rendered inside the shared site Layout.
const ContactPage = () => {
return (
<Layout>
<h1>Contact Us</h1>
<p>
Lorem ipsum dolor, sit amet consectetur adipisicing elit. Blanditiis
quia ullam possimus enim odio laboriosam tempore sint aspernatur atque
maxime!
</p>
<p>
Contact me at{" "}
<a
href="https://twitter.com/ashrftvm"
target="_blank"
rel="noopener noreferrer"
>
@ashrftvm
</a>
</p>
</Layout>
)
}
export default ContactPage
|
# GPU ids as a single comma-separated string, passed straight to --gpus.
# BUG FIX: this was previously `declare -a gpu=0,1,2,3`, which silently made
# a one-element *array* rather than the intended scalar.
gpu="0,1,2,3"
# Seeds to sweep; add entries to run multiple repetitions.
declare -a manual_seed=(3)

for ((j = 0; j < ${#manual_seed[@]}; ++j)); do
  python main.py \
    --dset=imagenet \
    --dset_path=/home/Datasets/ILSVRC \
    --arch=resnet50 \
    --config_path=./configs/neurips/iht_imagenet_resnet50_insta_cosinelr_ep100_ftdense.yaml \
    --workers=20 \
    --epochs=100 \
    --fp16 \
    --reset_momentum_after_recycling \
    --checkpoint_freq 10 \
    --batch_size=256 \
    --gpus="${gpu}" \
    --manual_seed="${manual_seed[j]}" \
    --experiment_root_path "./experiments_iht" \
    --exp_name=iht_imagenet_resnet50_oneshot_cosinelr_fp16_s80_ep100_ftdense \
    --from_checkpoint_path "CHECKPOINT HERE" \
    --wandb_project "imagenet_resnet50"
done
|
<reponame>vchoudhari45/codingcargo
package com.vc.hard
object L1106 {
  /**
   * Evaluates a boolean expression built from 't', 'f', '!', '&', '|'
   * and parentheses (LeetCode 1106) with a single character stack.
   */
  def parseBoolExpr(expression: String): Boolean = {
    import scala.collection.mutable

    // Applies an operator to the operand values popped from a group.
    def evaluate(op: Char, operands: mutable.ListBuffer[Boolean]): Boolean =
      op match {
        case '!' => !operands.head
        case '|' => operands.contains(true)
        case '&' => !operands.contains(false)
      }

    val stack = new mutable.Stack[Char]()
    for (ch <- expression) {
      ch match {
        case ')' =>
          // Collect the group's operand values until the operator surfaces.
          val operands = new mutable.ListBuffer[Boolean]
          while (stack.head == 't' || stack.head == 'f') {
            operands += (stack.pop() == 't')
          }
          stack.push(if (evaluate(stack.pop(), operands)) 't' else 'f')
        case ',' | '(' => // separators carry no information
        case other => stack.push(other)
      }
    }
    stack.pop() == 't'
  }
}
import { RequestHandler } from 'express';
import yaml from 'yamljs';
/**
 * Serves the OpenAPI spec as JSON.
 *
 * PERF FIX: the YAML file is now parsed once when the middleware is created
 * instead of on every request — the spec cannot change at runtime.
 * NOTE(review): the path is resolved relative to the process cwd — confirm
 * the server is always started from the project root.
 */
export const openAPIJSONMiddleware = (): RequestHandler => {
  const spec = yaml.load('./src/routes/OPENAPI.yaml');
  return (req, res, next): void => {
    res.setHeader('Content-Type', 'application/json');
    res.send(spec);
    // NOTE(review): calling next() after send() preserves the original
    // behaviour, but any later middleware that writes to res will error.
    next();
  };
};
|
<gh_stars>0
# Presents the "overdue delivery" auction status in the admin UI.
class AdminAuctionStatusPresenter::OverdueDelivery < AdminAuctionStatusPresenter::Base
  # Localised headline for the status banner.
  def header
    I18n.t('statuses.admin_auction_status_presenter.overdue_delivery.header')
  end

  # Localised body copy; interpolates winner_url (presumably provided by
  # the Base presenter — not visible here, confirm).
  def body
    I18n.t(
      'statuses.admin_auction_status_presenter.overdue_delivery.body',
      winner_url: winner_url
    )
  end

  # Partial rendered for admin actions available in this status.
  def action_partial
    'admin/auctions/overdue_delivery'
  end
end
|
#!/usr/bin/env bash
set -e -u -o pipefail # Fail on error

# Resolve the directory containing this script, then the client root.
dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
client_dir="$dir/../.."
cd "$client_dir/go"

arg=${1:-}
if [[ "$arg" != "ios" && "$arg" != "android" ]]; then
  echo "Nothing to build, you need to specify 'ios' or 'android'"
  exit 1
fi

# For CI, this is run like
#
#   env KEYBASE_BUILD=ci DEST_DIR=/tmp ... /path/to/gobuild.sh android|ios
#
# so make sure doing so doesn't assume anything about where this file is.

# If KEYBASE_BUILD is set and non-empty (e.g., for CI), use it.
if [[ -n ${KEYBASE_BUILD+x} && "$KEYBASE_BUILD" ]]; then
  keybase_build="$KEYBASE_BUILD"
else
  ## TODO(mm) consolidate this with packaging/prerelease/
  current_date=$(date -u +%Y%m%d%H%M%S) # UTC
  commit_short=$(git log -1 --pretty=format:%h)
  keybase_build="$current_date+$commit_short"
fi

check_ci=${CHECK_CI:-}

echo "Using GOPATH: $GOPATH"

# gomobile looks for gobind in $PATH, so put $GOPATH/bin in $PATH. We
# also want executables from our own GOPATH to override anything
# already in $PATH (like the old GOPATH), so put $GOPATH/bin first.
PATH="$GOPATH/bin:$PATH"

# need to allowlist some flags we use
export CGO_CFLAGS_ALLOW="-fmodules|-fblocks"

if [ "$check_ci" = "1" ]; then
  (cd "$client_dir/go/buildtools"; go install "github.com/keybase/release")
  release wait-ci --repo="client" --commit="$(git rev-parse HEAD)" --context="continuous-integration/jenkins/branch" --context="ci/circleci"
fi

package="github.com/keybase/client/go/bind"
tags=${TAGS:-"prerelease production"}
ldflags="-X github.com/keybase/client/go/libkb.PrereleaseBuild=$keybase_build -s -w"

# (Re)installs gomobile/gobind from our GOPATH and initialises gomobile.
build_gomobile() {
  echo "Build gomobile..."
  (go install golang.org/x/mobile/cmd/{gomobile,gobind} && gomobile init)
}

if [ "$arg" = "ios" ]; then
  ios_dir=${DEST_DIR:-"$client_dir/shared/ios"}
  ios_dest="$ios_dir/keybase.xcframework"
  echo "Building for iOS ($ios_dest)..."
  # First attempt may fail because gomobile itself is missing/stale; detect
  # that from the captured output, rebuild gomobile, and retry once.
  set +e
  OUTPUT="$(gomobile bind -target=ios -tags="ios $tags" -ldflags "$ldflags" -o "$ios_dest" "$package" 2>&1)"
  set -e
  if [[ $OUTPUT == *gomobile* ]]; then
    build_gomobile
    gomobile bind -target=ios -tags="ios $tags" -ldflags "$ldflags" -o "$ios_dest" "$package"
  else
    # Quote to preserve the multi-line build output verbatim (was unquoted).
    echo "$OUTPUT"
  fi
elif [ "$arg" = "android" ]; then
  android_dir=${DEST_DIR:-"$client_dir/shared/android/keybaselib"}
  android_dest="$android_dir/keybaselib.aar"
  echo "Building for Android ($android_dest)..."
  set +e
  OUTPUT="$(gomobile bind -target=android -tags="android $tags" -ldflags "$ldflags" -o "$android_dest" "$package" 2>&1)"
  set -e
  if [[ $OUTPUT == *gomobile* ]]; then
    build_gomobile
    gomobile bind -target=android -tags="android $tags" -ldflags "$ldflags" -o "$android_dest" "$package"
  else
    echo "$OUTPUT"
  fi
else
  # Shouldn't get here.
  echo "Nothing to build, you need to specify 'ios' or 'android'"
  exit 1
fi

# tidy indirect reference to gomobile
go mod tidy
|
# this standard preamble doesn't work in nixos.
# #!/bin/bash

# clear the terminal window for a fresh compile.
clear

: '
# allow dirty repos, but not in production mode.
if [ "$1" = "--optimize" ]; then
DIRTY=""
else
DIRTY="--dirtyok"
fi
# get git dependencies. Or if we have them already, do nothing.
mkdir gitdeps -p
dl=("git-at-revision cbbb2ee3ab7cdf41b95b9e5d41bb305021fb071d git@github.com:bburdette/dial-a-log.git gitdeps/dial-a-log $DIRTY")
ec=("git-at-revision a7ebd901230b7d9c6e3e921406e47caf86c17342 git@github.com:bburdette/elm-common.git gitdeps/elm-common $DIRTY")
# everything ok?
if !($dl && $ec)
then
echo "git dependency problems! exiting build."
exit 1
fi
'

# this seems to help clear the screen before the build.
# 'clear' alone sometimes doesn't happen until the build is almost done.
echo "starting elm make... wait for 'build complete'!"

# do the build! ${1:+"$1"} passes the optional flag (e.g. --optimize) through
# quoted, but expands to nothing when no argument is given — a bare quoted
# "$1" would hand elm a bogus empty argument.
time elm make src/Main.elm --output ../static/main.js ${1:+"$1"} 2> >(tee build-elm-out.txt)

# elm's cache occasionally corrupts ("not enough bytes"); nuke it and retry once.
if grep -q "elm: not enough bytes" build-elm-out.txt; then
  echo "deleting elm-stuff and retrying compile!"
  rm -rf elm-stuff
  time elm make src/Main.elm --output ../static/main.js ${1:+"$1"} 2> >(tee build-elm-out.txt)
fi

# optionally update the rust string_default.rs
# ./make-string-defaults.sh

# make ../example/index.html
./make-example-html.sh

# print this because elm doesn't print a message when the link
# step is finally done.
echo build complete!
|
require 'rom/viewpoint/support/client_id'
require 'rom/viewpoint/support/null_client'
module ROM
  module Viewpoint
    # Registry of EWS clients keyed by ClientID; always contains a NullClient
    # under :null as a safe default.
    class ClientPool < Registry
      def initialize(elements={})
        super({null: NullClient.new(self)}.merge elements)
      end

      # Registers a client under the given id.
      def []=(id, client)
        elements[id] = client
      end

      # Fetches a client; raises ClientMissingError for unknown ids.
      def [](id)
        elements.fetch(id) { raise(ClientMissingError, id) }
      end

      def null_client
        self[:null]
      end

      # alias_method :client?, :key?

      # Returns an existing client when given a ClientID, otherwise builds
      # (and registers) one from the endpoint/credentials.
      # BUG FIX: the ClientID branch referenced an undefined local `id`
      # (NameError); use the actual parameter. Also restored the password
      # default, which had been mangled to a redaction placeholder.
      def client(id_or_endpoint, username=nil, password=nil, options={})
        if id_or_endpoint.is_a?(ClientID)
          self[id_or_endpoint]
        else
          realize_client(id_or_endpoint, username, password, options)
        end
      end

      private

      # Creates, registers, and returns a concrete EWS client.
      def realize_client(endpoint, user, pass, opts={})
        id = ClientID.new(endpoint: endpoint, username: user, options: opts)
        self[id] = ::Viewpoint::EWSClient.new(endpoint, user, pass, opts)
      end
    end
  end
end
#pragma once
#include <cereal/types/memory.hpp>
#include <cereal/types/vector.hpp>
#include <cereal/types/unordered_map.hpp>
#include <ostream>
#include <iostream>
#include "util.hpp"
#include "iwkats_core_global.h"
using std::unique_ptr;
using std::shared_ptr;
using std::string;
/**
 * Exception due to violation of logic rules of Component.
 * The error is fatal and should be immediately corrected.
 * Derives from std::logic_error so callers can catch either type.
 */
class IWKATS_CORE_EXPORT TopologyException : public std::logic_error {
public:
    TopologyException(const string &msg) : logic_error(msg) { }
};
/**
 * Thrown by Component instances interacting with files, e.g., Reader, Writer
 * as well as configuration parsing classes.
 * The exception could lead to dysfunction of some classes.
 * Derives from std::ios_base::failure so stream-aware callers can catch it.
 */
class IWKATS_CORE_EXPORT DataIOException : public std::ios_base::failure {
public:
    DataIOException(const string &msg) : std::ios_base::failure(msg) { }
};
/** Base class for topology component.
 * A topology component is distinguished by its id, which is set
 * at instantiation. The base class also provides logging utilities.
 */
class IWKATS_CORE_EXPORT Component {
protected:
    static Logger logger; //!< logger shared by all Component instances
    const string id;      //!< component identifier, fixed at construction

public:
    Component(const string &componentId) : id(componentId) { }

    string getComponentId() const { return id; }

    /** Logging method.
     * Always use this method to print/log information! Change output to redirect message
     * @param level Available options: Logger::INFO, Logger::DBG, Logger::ERR
     * NOTE(review): the forwarding call below is commented out, so this
     * overload is currently a no-op — confirm whether that is intentional.
     */
    void log(const string &msg, Logger::LogLevel level = Logger::INFO, std::ostream &output = std::cout) {
        //logger.log(id, msg, level, output);
    }

    /** Logging method.
     * The method provides inline conveniently outputs success or failure message depending on a flag.
     * Log level will be set to Logger::ERR when there is an error, or to Logger::INFO otherwise.
     * @param error Indicates whether the operation was successful (0 for no error)
     */
    void log(const string &successMsg, const string &failureMsg, int error, std::ostream &output = std::cout) {
        if (error == 0)
            logger.log(id, successMsg, Logger::INFO, output);
        else
            logger.log(id, failureMsg, Logger::ERR, output);
    }
};
/** A data stream processing Component in topology.
 * Processor receives data stream via Processor::process.
 * Subclasses inheriting Processor must implement Processor::process.
 */
template<typename T>
class IWKATS_CORE_EXPORT Processor : public Component {
public:
    Processor(const string &componentId) : Component(componentId) { };
    virtual ~Processor() { };

    /** Logic to process incoming data should be implemented here.
     * Copy of the shared pointer is encouraged when storing data (no additional space will be consumed).
     */
    virtual void process(const shared_ptr<T> &) = 0;
};
/** A data stream generating Component in topology.
* DataHub uses a broadcast model which send data to all interested Processor instances via DataHub::dispatch.
*/
template<typename T>
class IWKATS_CORE_EXPORT DataHub : public Component {
protected:
    const static string DEFAULT_STREAM_ID; // defined below, valued "_DEFAULT_"
    EasyMap<string, std::vector<shared_ptr<Processor<T>>>> subscribers; //!< streamID => interested Processor instances
public:
    /**
     * Constructs the hub; the default stream id is always declared upon
     * instantiation so dispatch()/attach() without an explicit stream work.
     */
    DataHub(string datahubId) : Component(datahubId) {
        declareStream(DEFAULT_STREAM_ID);
    }
    virtual ~DataHub() { }
    /** Declares a channel of stream this DataHub will be producing.
     * For example, a CTPMarket might provide market data for several market instruments.
     * Stream names must be declared before downstream Processor instances can
     * be attached to them. Re-declaring an existing name is a no-op, so the
     * already-registered subscriber list is never clobbered.
     */
    void declareStream(const string &streamId) {
        if (subscribers.contains(streamId)) {
            return; // already declared; keep the existing subscriber list
        }
        subscribers[streamId] = std::vector<shared_ptr<Processor<T>>>();
    }
    /**
     * Call to start the driving DataHub (i.e., the very first DataHub in topology).
     */
    virtual void run() { }
    /** Registers a Processor as a receiver of a specific stream of this DataHub.
     * Throws TopologyException when the target streamId has not been declared.
     * Without an explicit streamId the Processor subscribes to DEFAULT_STREAM_ID.
     */
    virtual void attach(const shared_ptr<Processor<T>> &processor, const string &streamId = DEFAULT_STREAM_ID) {
        try {
            auto &receivers = subscribers.get(streamId);
            receivers.push_back(processor);
        } catch (KeyErrorException &err) {
            throw TopologyException("Attaching error: Datahub[" + streamId + "] has not declared stream " + streamId);
        }
    }
    /**
     * Forwards data to every Processor subscribed to the given stream.
     * Called by child DataHub implementations; without an explicit streamId
     * the data goes to all subscribers of the DEFAULT stream.
     */
    virtual void dispatch(const shared_ptr<T> &data, const string &streamId = DEFAULT_STREAM_ID) {
        try {
            for (auto &receiver : subscribers.get(streamId)) {
                receiver->process(data);
            }
        } catch (KeyErrorException &err) {
            throw TopologyException("Dispatching error: Datahub[" + streamId + "] has not declared stream " + streamId);
        }
    }
};
// Out-of-class definition of the default stream id declared inside DataHub<T>.
template<typename T>
const string DataHub<T>::DEFAULT_STREAM_ID = "_DEFAULT_";
/*
 * Currently unused.
 * A pair of connected hubs that exchange typed messages in both directions:
 * a PeerHub<S, R> sends S payloads to, and receives R payloads from, its peer.
 */
template<typename S, typename R>
class IWKATS_CORE_EXPORT PeerHub : Component {
protected:
    shared_ptr<PeerHub<R, S>> peer; //!< the opposite endpoint of this hub
public:
    PeerHub(const string &id) : Component(id) { }
    virtual ~PeerHub() { }
    /**
     * Wires this hub and `other` together so each one's `peer` points at the
     * other. The back-pointer is stored as a NON-OWNING shared_ptr (no-op
     * deleter): the previous `other->peer = this;` did not compile, because a
     * raw pointer cannot be assigned to a shared_ptr, and taking real shared
     * ownership of `this` here would risk a double delete once the original
     * owner releases it.
     */
    virtual void connect(shared_ptr<PeerHub<R, S>> other) {
        peer = other;
        other->peer = shared_ptr<PeerHub<S, R>>(this, [](PeerHub<S, R> *) { });
    }
    /** Forwards data to the connected peer's receive(). */
    virtual void send(shared_ptr<S> data_ptr) {
        peer->receive(data_ptr);
    }
    /** Implemented by concrete hubs to consume data arriving from the peer. */
    virtual void receive(shared_ptr<R> data_ptr) = 0;
};
|
<filename>src/main/java/org/rs2server/rs2/domain/service/impl/skill/FarmingServiceImpl.java
package org.rs2server.rs2.domain.service.impl.skill;
import com.google.common.eventbus.Subscribe;
import com.google.inject.Inject;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;
import org.rs2server.rs2.content.api.GameInterfaceButtonEvent;
import org.rs2server.rs2.content.api.GameNpcActionEvent;
import org.rs2server.rs2.content.api.GameObjectActionEvent;
import org.rs2server.rs2.content.api.GamePlayerLoginEvent;
import org.rs2server.rs2.content.api.GamePlayerRegionEvent;
import org.rs2server.rs2.domain.model.player.PlayerSkillFarmingEntity;
import org.rs2server.rs2.domain.service.api.HookService;
import org.rs2server.rs2.domain.service.api.PermissionService;
import org.rs2server.rs2.domain.service.api.skill.farming.FarmingPatch;
import org.rs2server.rs2.domain.service.api.skill.farming.FarmingPatchState;
import org.rs2server.rs2.domain.service.api.skill.farming.FarmingPatchTreatment;
import org.rs2server.rs2.domain.service.api.skill.farming.FarmingPatchType;
import org.rs2server.rs2.domain.service.api.skill.farming.FarmingPlantable;
import org.rs2server.rs2.domain.service.api.skill.farming.FarmingService;
import org.rs2server.rs2.domain.service.api.skill.farming.FarmingTool;
import org.rs2server.rs2.domain.service.api.skill.farming.action.FarmingClearingAction;
import org.rs2server.rs2.domain.service.api.skill.farming.action.FarmingCureAction;
import org.rs2server.rs2.domain.service.api.skill.farming.action.FarmingHarvestingAction;
import org.rs2server.rs2.domain.service.api.skill.farming.action.FarmingPlantingAction;
import org.rs2server.rs2.domain.service.api.skill.farming.action.FarmingRakeAction;
import org.rs2server.rs2.domain.service.api.skill.farming.action.FarmingTreatmentAction;
import org.rs2server.rs2.domain.service.api.skill.farming.action.FarmingWateringAction;
import org.rs2server.rs2.model.bit.BitConfig;
import org.rs2server.rs2.model.bit.BitConfigBuilder;
import org.rs2server.rs2.model.GameObject;
import org.rs2server.rs2.model.Item;
import org.rs2server.rs2.model.Location;
import org.rs2server.rs2.model.Skill;
import org.rs2server.rs2.model.boundary.Area;
import org.rs2server.rs2.model.player.Player;
import org.rs2server.rs2.util.Misc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import java.util.Map;
import static com.google.common.collect.Maps.newHashMap;
/**
* @author tommo
*/
public class FarmingServiceImpl implements FarmingService {

	private static final Logger logger = LoggerFactory.getLogger(FarmingServiceImpl.class);

	// NOTE(review): injected but never referenced in this class — confirm before removing.
	private final PermissionService permissionService;

	// Widget ids of the leprechaun tool-store interface and its inventory-side overlay.
	public static final int INTERFACE_TOOL_STORE_ID = 125;
	public static final int INTERFACE_TOOL_STORE_INVENTORY_ID = 126;

	// Items referenced by the farming interactions below.
	private static final Item ITEM_SEED_DIBBER = new Item(5343, 1);
	private static final Item ITEM_WATERING_CAN = new Item(5340, 1);
	private static final Item ITEM_SUPERCOMPOST = new Item(6034, 1); // NOTE(review): unused in the visible code
	private static final int NPC_TOOL_LEPRECHAUN_ID = 0; // presumably the tool leprechaun's npc id — TODO confirm
	private static final int NPC_FARMER = 2664; // NOTE(review): unused in the visible code
	//2812, 3464
	// catherby: 529
	// herb spot: sppp ppp << 24
	// allotment north: sspp pppp
	// allotment south: sspp pppp << 8

	@Inject
	FarmingServiceImpl(final HookService hookService, PermissionService permissionService) {
		// Register this instance on the event bus so the @Subscribe methods fire.
		hookService.register(this);
		this.permissionService = permissionService;
	}

	/**
	 * On login, pushes the player's farming patch configs to the client.
	 */
	@Subscribe
	public void onPlayerLogin(final GamePlayerLoginEvent event) {
		final Player player = event.getPlayer();
		final PlayerSkillFarmingEntity farming = player.getDatabaseEntity().getFarmingSkill();
		farming.getPatches().entrySet().stream()
				.filter(entry -> entry.getValue() != null)
				.forEach(entry -> {
					final FarmingPatchState patch = entry.getValue();
					//updatePatch(patch);
					// NOTE(review): sendPatches already sends every patch, yet it is
					// called once per patch entry here — a single call would suffice.
					sendPatches(player);
				});
	}

	/**
	 * Dispatches clicks / item-uses on farming patch objects to the matching
	 * farming action (rake, cure, clear, harvest, treat, water, plant).
	 */
	@Subscribe
	public void onGameObjectActionEvent(final GameObjectActionEvent event) {
		final Player player = event.getPlayer();
		final GameObject object = event.getGameObject();
		final PlayerSkillFarmingEntity farming = player.getDatabaseEntity().getFarmingSkill();
		final FarmingPatch farmingPatch = FarmingPatch.forObjectIdAndLocation(object.getId(), object.getLocation());
		if (farmingPatch == null) {
			// The clicked object is not a farming patch at a known location.
			return;
		}
		// Lazily create per-player state for a patch the first time it is used.
		if (!farming.getPatches().containsKey(farmingPatch)) {
			farming.getPatches().put(farmingPatch, new FarmingPatchState(farmingPatch));
		}
		final FarmingPatchState patchState = farming.getPatches().get(farmingPatch);
		if (event.getActionType() == GameObjectActionEvent.ActionType.OPTION_1) {
			// First click option: pick the most relevant action for the patch's state.
			if (isPatchFullyGrown(patchState)) {
				harvest(player, patchState, object);
			} else if (patchState.isDiseased()) {
				if (player.getInventory().contains(FarmingCureAction.ITEM_PLANT_CURE.getId())) {
					cure(player, patchState, object);
				} else {
					player.getActionSender().sendMessage("You need plant cure to cure this patch.");
				}
			} else if (patchState.isDead()) {
				clear(player, patchState, object);
			} else if (patchState.getWeedLevel() < 3) {
				// Weed level 3 appears to mean "fully weeded" — TODO confirm.
				rake(player, patchState, object);
			}
		} else if (event.getActionType() == GameObjectActionEvent.ActionType.OPTION_2) {
			inspect(player, patchState);
		} else if (event.getActionType() == GameObjectActionEvent.ActionType.ITEM_ON_OBJECT) {
			assert event.getItem() != null;
			// Curing
			if (event.getItem().getId() == FarmingCureAction.ITEM_PLANT_CURE.getId() && !isPatchFullyGrown(patchState)
					&& patchState.isDiseased() && player.getInventory().contains(FarmingCureAction.ITEM_PLANT_CURE.getId())) {
				cure(player, patchState, object);
			}
			// Raking
			if (event.getItem().getId() == FarmingTool.RAKE.getItemId() && patchState.getWeedLevel() < 3
					&& player.getInventory().contains(FarmingTool.RAKE.getItemId())) {
				rake(player, patchState, object);
			}
			// Treating
			if (patchState.getWeedLevel() == 3 && patchState.getTreatment() == FarmingPatchTreatment.NOT_TREATED) {
				final FarmingPatchTreatment treatment = FarmingPatchTreatment.forItemId(event.getItem().getId());
				if (treatment != null) {
					treat(player, patchState, treatment, object);
				}
			}
			// Watering
			if (patchState.getPlanted() != null && !patchState.isWatered() && event.getItem().getId() == ITEM_WATERING_CAN.getId() && patchState.getWeedLevel() == 3) {
				if (patchState.getPatch().getType().isWaterable() && !isPatchFullyGrown(patchState)
						&& !patchState.isDiseased() && !patchState.isDead()) {
					water(player, patchState, object);
				}
			}
			// Planting
			if (patchState.getWeedLevel() == 3 && patchState.getPlanted() == null) {
				final FarmingPlantable plantable = FarmingPlantable.forSeedItemId(event.getItem().getId());
				// Check if the plantable can be planted in this patch type
				if (plantable != null && farmingPatch.getType() == plantable.getType()) {
					if (player.getSkills().getLevel(Skill.FARMING.getId()) < plantable.getRequiredLevel()) {
						player.getActionSender().sendMessage("You need a Farming level of " + plantable.getRequiredLevel() + " to plant that.");
					} else if (player.getInventory().contains(ITEM_SEED_DIBBER.getId())) {
						plant(player, patchState, plantable, object);
					} else {
						player.getActionSender().sendMessage("You need a Seed dibber to plant seeds.");
					}
				}
			}
		}
	}

	/**
	 * Sends the player a textual description of the patch's current state
	 * (treatment, growth, disease, weed level).
	 */
	private void inspect(@Nonnull Player player, @Nonnull FarmingPatchState patch) {
		final StringBuilder builder = new StringBuilder();
		builder.append("This is a ").append(patch.getPatch().getType().toString()).append(".");
		//updatePatch(patch);
		if (patch.getTreatment() == FarmingPatchTreatment.NOT_TREATED) {
			builder.append(" The soil has not been treated.");
		} else {
			builder.append(" The soil has been treated with ").append(patch.getTreatment().name().toLowerCase()).append(".");
		}
		if (patch.getPlanted() != null) {
			if (isPatchFullyGrown(patch)) {
				builder.append(" The patch is fully grown.");
			} else if (patch.isDiseased()) {
				builder.append(" The patch is diseased and needs attending to before it dies.");
			} else if (patch.isDead()) {
				builder.append(" The patch has become infected by disease and has died.");
			} else {
				builder.append(" The patch has something growing in it.");
			}
		} else {
			if (patch.getWeedLevel() < 3) {
				builder.append(" The patch needs weeding.");
			} else {
				builder.append(" The patch is empty and weeded.");
			}
		}
		player.getActionSender().sendMessage(builder.toString());
	}

	// Each of the helpers below faces the player toward the patch object and
	// queues the corresponding farming action; the action implements the
	// animation, timing, and state changes.

	private void cure(@Nonnull Player player, @Nonnull FarmingPatchState patch, @Nonnull GameObject object) {
		player.faceObject(object);
		player.getActionQueue().addAction(new FarmingCureAction(player, patch));
	}

	private void clear(@Nonnull Player player, @Nonnull FarmingPatchState patch, @Nonnull GameObject object) {
		player.faceObject(object);
		player.getActionQueue().addAction(new FarmingClearingAction(player, patch));
	}

	private void treat(@Nonnull Player player, @Nonnull FarmingPatchState patch, @Nonnull FarmingPatchTreatment treatment, @Nonnull GameObject object) {
		player.faceObject(object);
		player.getActionQueue().addAction(new FarmingTreatmentAction(player, patch, treatment));
	}

	private void water(@Nonnull Player player, @Nonnull FarmingPatchState patch, @Nonnull GameObject object) {
		player.faceObject(object);
		player.getActionQueue().addAction(new FarmingWateringAction(player, patch));
	}

	private void harvest(@Nonnull Player player, @Nonnull FarmingPatchState patch, @Nonnull GameObject object) {
		player.faceObject(object);
		player.getActionQueue().addAction(new FarmingHarvestingAction(player, patch));
	}

	private void plant(@Nonnull Player player, @Nonnull FarmingPatchState patch, @Nonnull FarmingPlantable plantable, @Nonnull GameObject object) {
		player.faceObject(object);
		player.getActionQueue().addAction(new FarmingPlantingAction(player, patch, plantable));
	}

	private void rake(@Nonnull Player player, @Nonnull FarmingPatchState patch, @Nonnull GameObject object) {
		player.faceObject(object);
		player.getActionQueue().addAction(new FarmingRakeAction(player, patch));
	}

	/**
	 * Randomly decides if a patch should become diseased based on the state of it.
	 *
	 * @param patch The patch.
	 * @return true for diseased, false if not.
	 */
	private boolean randomlyDisease(@Nonnull FarmingPatchState patch) {
		if (!patch.getPatch().getType().isVulnerableToDisease()) {
			return false;
		}
		// Treatment and watering widen the random range, lowering disease odds.
		int modifier = 0;
		modifier += (patch.getTreatment().getYieldIncrease() * 3);
		modifier += (patch.isWatered() ? 3 : 0);
		return Misc.random(10 + modifier) == 1;
	}

	/**
	 * Re-sends patch configs when the player crosses a region boundary, since
	 * the patch config is region specific (see sendPatches).
	 */
	@Subscribe
	public void onRegionChange(final GamePlayerRegionEvent event) {
		final Player player = event.getPlayer();
		if (player.isActive()) {
			sendPatches(event.getPlayer());
		}
	}

	/**
	 * Handles interactions with the tool leprechaun: opening the tool store and
	 * exchanging harvested produce for its noted equivalent.
	 */
	@Subscribe
	public void onNpcAction(final GameNpcActionEvent event) {
		final Player player = event.getPlayer();
		if (event.getActionType() == GameNpcActionEvent.ActionType.OPTION_1 || event.getActionType() == GameNpcActionEvent.ActionType.OPTION_2) {
			if (event.getNpc().getId() == NPC_TOOL_LEPRECHAUN_ID) {
				openToolInterface(player);
			}
		} else if (event.getActionType() == GameNpcActionEvent.ActionType.ITEM_ON_NPC) {
			final Item item = event.getItem();
			assert item != null;
			if (event.getNpc().getId() != NPC_TOOL_LEPRECHAUN_ID) {
				return;
			}
			final FarmingPlantable plantable = FarmingPlantable.forRewardItemId(item.getId());
			if (plantable != null) {
				final int amount = player.getInventory().getCount(plantable.getReward());
				final Item unnoted = new Item(plantable.getReward(), amount);
				// assumes the noted variant's item id is always reward id + 1 — TODO confirm
				final Item noted = new Item(plantable.getReward() + 1, amount);
				player.getInventory().remove(unnoted);
				player.getInventory().addItemIgnoreStackPolicy(noted);
				player.getActionSender().sendMessage("The Leprechaun exchanges your items into notes.");
			}
		}
	}

	/**
	 * Handles the two sides of the tool-store interface: clicking an inventory
	 * item deposits it (up to the tool's max amount), clicking a store slot
	 * withdraws one of that tool.
	 */
	@Subscribe
	public void onInterfaceButtonClick(final GameInterfaceButtonEvent event) {
		final Player player = event.getPlayer();
		if (event.getInterfaceId() == INTERFACE_TOOL_STORE_INVENTORY_ID) {
			// Deposit path: player clicked a tool in their inventory overlay.
			final FarmingTool tool = FarmingTool.forInventoryActionButtonId(event.getButton());
			if (tool == null || !player.getInventory().contains(tool.getItemId())) {
				return;
			}
			final PlayerSkillFarmingEntity farming = player.getDatabaseEntity().getFarmingSkill();
			if (farming.getToolStore().containsKey(tool)) {
				final int stored = farming.getToolStore().get(tool);
				if (stored >= tool.getMaxAmount()) {
					player.getActionSender().sendMessage("You cannot store more than " + tool.getMaxAmount() + " "
							+ new Item(tool.getItemId()).getDefinition2().getName() + " in here.");
				} else {
					farming.getToolStore().put(tool, farming.getToolStore().get(tool) + 1);
					player.getInventory().remove(new Item(tool.getItemId(), 1));
				}
			} else {
				farming.getToolStore().put(tool, 1);
				player.getInventory().remove(new Item(tool.getItemId(), 1));
			}
		} else if (event.getInterfaceId() == INTERFACE_TOOL_STORE_ID) {
			// Withdraw path: player clicked a stored tool in the store window.
			final FarmingTool tool = FarmingTool.forStoreActionButtonId(event.getButton());
			if (tool == null) {
				return;
			}
			final PlayerSkillFarmingEntity farming = player.getDatabaseEntity().getFarmingSkill();
			if (farming.getToolStore().containsKey(tool)) {
				final int amount = farming.getToolStore().get(tool);
				if (amount > 0) {
					final Item item = new Item(tool.getItemId(), 1);
					if (!player.getInventory().hasRoomFor(item)) {
						player.getActionSender().sendMessage("Your inventory is full.");
						return;
					} else {
						player.getInventory().add(item);
						if (amount - 1 == 0) {
							// Remove exhausted entries so containsKey reflects availability.
							farming.getToolStore().remove(tool);
						} else {
							farming.getToolStore().put(tool, amount - 1);
						}
					}
				}
			}
		}
		// Refresh the interface after either deposit or withdrawal.
		sendToolInterface(player);
	}

	/**
	 * Packs the stored-tool state into config 615 and sends it to the client.
	 * Each tool occupies its own bit(s); counted tools (bucket/compost/
	 * supercompost) shift their amount into dedicated bit ranges.
	 */
	private void sendToolInterface(@Nonnull Player player) {
		final BitConfigBuilder config = BitConfigBuilder.of(615);
		player.getDatabaseEntity().getFarmingSkill().getToolStore().entrySet().forEach(t -> {
			final FarmingTool tool = t.getKey();
			// NOTE(review): amount is ignored for the single-slot tools below —
			// only presence is encoded; confirm that is intended.
			final int amount = t.getValue();
			if (tool == FarmingTool.RAKE) {
				config.or(0x1);
			} else if (tool == FarmingTool.SEED_DIBBER) {
				config.or(0x2);
			} else if (tool == FarmingTool.SPADE) {
				config.or(0x4);
			} else if (tool == FarmingTool.SECATEURS) {
				config.or(0x8);
			} else if (tool == FarmingTool.TROWEL) {
				// NOTE(review): TROWEL uses 0x100 and WATERING_CAN 0x30 rather than
				// the 0x10/0x20 the doubling sequence suggests — confirm against
				// the client's config layout.
				config.or(0x100);
			} else if (tool == FarmingTool.WATERING_CAN) {
				config.or(0x30);
			} else if (tool == FarmingTool.BUCKET) {
				config.or(amount << 9);
			} else if (tool == FarmingTool.COMPOST) {
				config.or(amount << 14);
			} else if (tool == FarmingTool.SUPERCOMPOST) {
				config.or(amount << 22);
			}
		});
		player.getActionSender().sendConfig(config.build());
	}

	@Override
	public void openToolInterface(@Nonnull Player player) {
		player.getActionSender().sendInterface(INTERFACE_TOOL_STORE_ID, false);
		player.getActionSender().sendInterfaceInventory(INTERFACE_TOOL_STORE_INVENTORY_ID);
		sendToolInterface(player);
	}

	@Override
	public void clearPatch(@Nonnull Player player, @Nonnull FarmingPatchState patch) {
		// Reset every per-patch flag back to the empty, untreated state.
		patch.setGrowth(0);
		patch.setPlanted(null);
		patch.setTreatment(FarmingPatchTreatment.NOT_TREATED);
		patch.setWatered(false);
		patch.setDiseased(false);
		patch.setDead(false);
		patch.setImmune(false);
		updateAndSendPatches(player, patch);
	}

	@Override
	public void updateAndSendPatches(@Nonnull Player player, @Nonnull FarmingPatchState patch) {
		updatePatch(patch);
		sendPatches(player);
	}

	/**
	 * Advances a single patch by one growth tick if its growth interval has
	 * elapsed: a growing crop may become diseased, a diseased crop dies, and
	 * otherwise growth increments (setting yield when fully grown). Empty
	 * patches slowly regrow weeds instead.
	 */
	@Override
	public void updatePatch(@Nonnull FarmingPatchState patch) {
		final DateTime now = DateTime.now(DateTimeZone.UTC);
		// Check if whatever is planted can grow
		if (patch.getPlanted() != null) {
			if (!isPatchFullyGrown(patch)) {
				// Check if the current stage is ready to grow
				if (now.isAfter(patch.getLastGrowthTime().plus(patch.getPlanted().getGrowthTime()))) {
					patch.setLastGrowthTime(now);
					// Watering only protects a single growth stage.
					patch.setWatered(false);
					// Choose whether to disease, kill, or grow the crop
					if (!patch.isDead() && !patch.isDiseased() && randomlyDisease(patch) && patch.getPatch().getType() != FarmingPatchType.HERB_PATCH) {
						patch.setDiseased(true);
						//logger.info("Crop " + patch.getPlanted().name() + " diseased for " + player.getName());
					} else if (patch.isDiseased()) {
						// A disease left uncured for a full stage kills the crop.
						patch.setDiseased(false);
						patch.setDead(true);
						//logger.info("Crop " + patch.getPlanted().name() + " died for " + player.getName());
					} else if (!patch.isDead()) {
						//logger.info("Growing...");
						patch.setGrowth(patch.getGrowth() + 1);
						if (isPatchFullyGrown(patch)) {
							// The plantable has finished growing.
							//logger.info("Crop " + patch.getPlanted().name() + " fully grown for " + player.getName());
							patch.setYield(Misc.random(patch.getPlanted().getMinYield(), patch.getPlanted().getMaxYield()) + patch.getTreatment().getYieldIncrease());
						}
					}
				}
			} else {
				// The plantable is fully grown..
			}
		} else {
			// Check if weed should grow back
			if (patch.getWeedLevel() > 0 && (patch.getLastGrowthTime() == null || now.isAfter(patch.getLastGrowthTime().plus(Duration.standardMinutes(1))))) {
				patch.setLastGrowthTime(now);
				// Weed level counts DOWN toward weedier: 3 = fully weeded — TODO confirm.
				patch.setWeedLevel(patch.getWeedLevel() - 1);
			}
		}
	}

	/**
	 * Packs the states of all patches near the player into per-config-id bit
	 * configs and sends them. Patches sharing a config id are merged first.
	 */
	@Override
	public void sendPatches(@Nonnull Player player) {
		final Map<Integer, BitConfigBuilder> configMap = newHashMap();
		// We cannot send each patch config 1 by 1 since they are packed, and therefore since
		// different patches may have different config ids, we eagerly construct them.
		player.getDatabaseEntity().getFarmingSkill().getPatches().entrySet().stream()
				.filter(p -> {
					/*
					 * Oddly enough, RS uses the same config ID for all farming patches, and the config is region specific.
					 * This then inherently addresses the issue where all patches would be mirrored across Gielinor.
					/* */
					final Area patchArea = p.getKey().getAreas().get(0);
					final Location patchLocation = Location.create(patchArea.getBottomLeftX(), patchArea.getBottomLeftY());
					//return World.getWorld().getRegionManager().getRegionByLocation(patchLocation).getPlayers().contains(player);
					// TODO the above fix does not work since region changes are not detected, this is a quickfix.
					return player.getLocation().distance(patchLocation) <= 56;
				})
				.forEach(p -> {
					final FarmingPatch key = p.getKey();
					final FarmingPatchType type = key.getType();
					final FarmingPatchState patch = p.getValue();
					final BitConfigBuilder config = configMap.getOrDefault(key.getConfigId(), new BitConfigBuilder(key.getConfigId()));
					//logger.info("Sending patch " + key.name());
					if (patch.getPlanted() != null) {
						config.set(patch.getGrowth(), key.getConfigBitOffset());
					} else {
						config.set(patch.getWeedLevel(), key.getConfigBitOffset());
					}
					if (patch.isWatered()) {
						if (type == FarmingPatchType.ALLOTMENT || type == FarmingPatchType.FLOWER_PATCH) {
							config.set(1 << type.getStateBitOffset(), key.getConfigBitOffset());
						}
					} else if (patch.isDiseased()) {
						if (type == FarmingPatchType.ALLOTMENT || type == FarmingPatchType.FLOWER_PATCH) {
							config.set(2 << type.getStateBitOffset(), key.getConfigBitOffset());
						} else if (type == FarmingPatchType.HERB_PATCH) {
							// TODO fix this, doesn't work
							config.set(1 << type.getStateBitOffset(), key.getConfigBitOffset());
						}
					} else if (patch.isDead()) {
						if (type == FarmingPatchType.ALLOTMENT || type == FarmingPatchType.FLOWER_PATCH) {
							config.set(3 << type.getStateBitOffset(), key.getConfigBitOffset());
						} else if (type == FarmingPatchType.HERB_PATCH) {
							// TODO fix this, doesn't work
							config.set(0xAB, key.getConfigBitOffset());
						}
					}
					configMap.put(key.getConfigId(), config);
				});
		configMap.entrySet().stream().forEach(e -> {
			//logger.info("Config " + e.getKey() + ": " + Integer.toBinaryString(e.getValue().build().getValue()));
			final BitConfig config = e.getValue().build();
			player.getActionSender().sendConfig(config.getId(), config.getValue());
		});
	}

	// A patch is fully grown once its growth counter reaches the plantable's max.
	private boolean isPatchFullyGrown(final FarmingPatchState patch) {
		return patch.getPlanted() != null && patch.getGrowth() >= patch.getPlanted().getMaxGrowth();
	}
}
|
from threading import Timer
def set_timeout(seconds, callback):
    """Schedule *callback* to run once after *seconds* seconds.

    Returns the started :class:`threading.Timer` so the caller can
    ``cancel()`` or ``join()`` it.
    """
    delayed = Timer(seconds, callback)
    delayed.start()
    return delayed
#!/bin/sh
# Launches the openHAB runtime (Equinox) with remote-debug options enabled.
cd "$(dirname "$0")" || exit 1
# set path to eclipse folder. If local folder, use '.'; otherwise, use /path/to/eclipse/
eclipsehome="server"
# set ports for HTTP(S) server
HTTP_PORT=8080
HTTPS_PORT=8443
# get path to equinox jar inside $eclipsehome folder (newest version wins)
cp=$(find "$eclipsehome" -name "org.eclipse.equinox.launcher_*.jar" | sort | tail -1)
# debug options
debug_opts="-Xdebug -Xnoagent -Djava.compiler=NONE -Xrunjdwp:transport=dt_socket,address=8001,server=y,suspend=n"
echo "Launching the openHAB runtime in debug mode..."
# $debug_opts is intentionally unquoted so it word-splits into separate JVM flags.
java $debug_opts -Dosgi.clean=true -Declipse.ignoreApp=true -Dosgi.noShutdown=true -Djetty.port=$HTTP_PORT -Djetty.port.ssl=$HTTPS_PORT -Djetty.home=. -Dlogback.configurationFile=configurations/logback_debug.xml -Dfelix.fileinstall.dir=addons -Djava.library.path=lib -Dorg.quartz.properties=./etc/quartz.properties -Djava.security.auth.login.config=./etc/login.conf -Dequinox.ds.block_timeout=240000 -Dequinox.scr.waitTimeOnBlock=60000 -Dfelix.fileinstall.active.level=4 -Djava.awt.headless=true -jar "$cp" "$@" -console
|
package ru.job4j.io.configreading;
import org.junit.Test;
import ru.job4j.io.configreading.Config;
import java.io.File;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
/**
* ConfigTest class.
* @author <NAME> (<EMAIL>)
* @version $Id$
* @since 0.1
*/
public class ConfigTest {
private String s = File.separator;
@Test
public void whenPairWithoutComment() {
String path = "." + s + "data" + s + "io" + s + "configLoad" + s + "pair_without_comment.properties";
Config config = new Config(path);
config.load();
assertThat(
config.value("name"),
is("<NAME>")
);
}
@Test
public void whenPairWithComment() {
String path = "." + s + "data" + s + "io" + s + "configLoad" + s + "pair_with_comment.properties";
Config config = new Config(path);
config.load();
assertThat(
config.value("name"),
is("<NAME>")
);
}
@Test
public void whenUsualConfig() {
String path = "." + s + "data" + s + "io" + s + "configLoad" + s + "usualConfig.properties";
Config config = new Config(path);
config.load();
assertThat(config.value("name"), is("<NAME>"));
assertThat(config.value("phone"), is("111"));
assertThat(config.value("e-mail"), is("<EMAIL>"));
assertThat(config.value("status"), is(""));
}
} |
# -*- coding: utf-8 -*-
# @Author: Administrator
# @Date: 2019-05-23 18:19:15
# @Last Modified by: Administrator
# @Last Modified time: 2019-05-23 18:23:05
# Public API of this module: only the scheduled download task is exported.
__all__ = [
    "task_download_favorite_matches",
]
import os
from ...client.botzone import BotzoneClient
from ...client.bean import FavoriteMatchBean
from ...utils import json_load, json_dump
from ...log import ConsoleLogger
from ..const import CONFIG_JSON_FILE, FAVORITE_MATCHES_DATA_DIR
from ..utils import log_schedule_task
# Module-level singletons: one logger and one shared Botzone HTTP client.
_logger = ConsoleLogger("scheduler.favorite_match")
_client = BotzoneClient()


@log_schedule_task(_logger, "fetch rank matches")
def task_download_favorite_matches():
    """Download the user's favorited matches and persist each one as a JSON file.

    Matches whose file already exists under FAVORITE_MATCHES_DATA_DIR are
    skipped, so repeated runs only fetch new favorites.
    """
    r = _client.get_favorites()
    respJson = r.json()
    # NOTE(review): assumes the response JSON contains a "favorited_matches"
    # list — confirm against the Botzone API.
    matches = [ FavoriteMatchBean(match) for match in respJson["favorited_matches"] ]
    counter = 0
    for match in matches:
        file = os.path.join(FAVORITE_MATCHES_DATA_DIR, "%s.json" % match.id)
        if os.path.exists(file):
            # Already downloaded by a previous run.
            continue
        json_dump(match.dict, file, ensure_ascii=False, indent=4)
        counter += 1
    _logger.info("downloaded %d new favorite matches" % counter)
|
#!/bin/sh
# Builds the CentOS dev base image. Tag/repo/name are overridable via env:
#   TAG (default latest), CENTOS_DEV_TAG (overrides TAG),
#   REPO (default icehess), CENTOS_DEV_IMG_NAME (default centos-base).
TAG=${TAG:-latest}
TAG=${CENTOS_DEV_TAG:-$TAG}
REPO=${REPO:-icehess}
NAME=${CENTOS_DEV_IMG_NAME:-centos-base}
# Quote the tag expression so unexpected whitespace in env vars cannot
# split the docker argument.
docker build --rm . -t "$REPO/$NAME:$TAG"
|
#!/bin/bash
# Container pre-start hook: installs SSH credentials and git config for root,
# then applies optional Redis password configuration.
source /opt/tools/bash-common.sh .
log="/tmp/pre-start.log"
echo "" > "$log"
lg "Running Pre-Start"
if [ -d "$ENV_SSH_CREDS" ]; then
    lg "Installing ssh for container user from ${ENV_SSH_CREDS}" >> "$log"
    if [ ! -e /root/.ssh ]; then
        mkdir /root/.ssh
        chmod 700 /root/.ssh
    fi
    # Quote the directory but leave the glob outside the quotes so it expands.
    cp -r "$ENV_SSH_CREDS"/* /root/.ssh/
    chmod 600 /root/.ssh/*
    chmod 700 /root/.ssh/*.pem
    chmod 644 /root/.ssh/*.pub
    chmod 700 /root/.ssh/config
fi
if [ -e "$ENV_GIT_CONFIG" ]; then
    lg "Installing git config for container user from ${ENV_GIT_CONFIG}" >> "$log"
    cp "$ENV_GIT_CONFIG" /root/.gitconfig
    chmod 664 /root/.gitconfig
fi
# Custom commands:
if [[ "${ENV_REDIS_REQUIRE_PASSWORD}" != "" ]]; then
    lg "Setting redis password" >> "$log"
    # NOTE: the password is substituted into a sed expression; it must not
    # contain '/' or sed metacharacters.
    sed -i "s/#SET_ENV_REDIS_REQUIRE_PASSWORD/requirepass ${ENV_REDIS_REQUIRE_PASSWORD}/g" /opt/redis/node/*.conf
    lg "Set redis password" >> "$log"
else
    lg "No redis password set" >> "$log"
fi
lg "Done Pre-Start"
exit 0
|
#!/bin/sh
# Fuzzes dualiza with random AIGER inputs; on a mismatch, saves the failing
# input and delta-debugs it down to a reduced test case.
cd "$(dirname "$0")" || exit 1
# Mode is derived from the name this script was invoked as.
case "$0" in
*count-aig.sh) mode=count;;
*) mode=sat;;
esac
cmp=./cmp-dualiza-${mode}-aig.sh
if [ ! -f aiger/aigfuzz ]
then
  make -C aiger || exit 1
fi
i=0
suffix=aag
tmp=/tmp/dualiza-aig-fuzz${mode}-$$
child=0
# Fix 1: the original '[ $child ] || kill' never killed anything, because the
# string "0" is non-empty; test explicitly against the 0 sentinel instead.
# Fix 2: single quotes defer expansion to trap time, so the CURRENT child pid
# is killed rather than the value at trap-installation time.
# Fix 3: signal 9 (SIGKILL) cannot be trapped and is dropped.
trap '[ "$child" = 0 ] || kill -9 "$child"; rm -f "$tmp"*; exit 1' 2 15
while true
do
  aig=$tmp.$suffix
  ./fuzzaig.sh "$aig"
  seed=$(./aiger/aiginfo "$aig" | awk '/^seed/{print $2}')
  i=$((i + 1))
  # printf is portable; 'echo -n "\r"' behaves differently across shells.
  printf '\r%s %s ' "$i" "$seed"
  $cmp "$aig" > /dev/null &
  child=$!
  wait "$child" && continue
  # The comparison failed: archive the input and delta-debug it.
  echo
  bug=bug-${mode}-$seed.$suffix
  red=red-${mode}-$seed.$suffix
  cp "$aig" "$bug"
  echo "$bug $(head -1 "$bug" 2>/dev/null)"
  ./aiger/aigdd "$bug" "$red" "$cmp" >/dev/null 2>/dev/null
  echo "$red $(head -1 "$red" 2>/dev/null)"
  # Alternate between ASCII (aag) and binary (aig) formats each round.
  if [ "$suffix" = aag ]; then suffix=aig; else suffix=aag; fi
done
|
#!/bin/bash
# Downloads ungoogled-chromium packages, signs them, updates the local
# pacman repo database, and syncs everything to the server.
set -e
declare -a URLS=()
declare -a PKGS=()
declare -a SIGS=()
declare -A VARIANTS=(
	[ungoogled-chromium]=83.0.4103.61-1
	[ungoogled-chromium-ozone]=83.0.4103.61-1
	[ungoogled-chromium-git]=83.0.4103.61.1.r0.gc732887-1
)
echo "Downloading and signing packages"
for KEY in "${!VARIANTS[@]}"
do
	PKG="${KEY}-${VARIANTS[$KEY]}-x86_64.pkg.tar.xz"
	URL="https://download.opensuse.org/repositories/home:/justkidding:/arch/standard/x86_64/${PKG}"
	SIG="$PKG.sig"
	URLS+=("$URL")
	PKGS+=("$PKG")
	SIGS+=("$SIG")
	# Skip the download when the package already exists locally.
	if [ ! -f "$PKG" ]; then
		wget -q "$URL"
	fi
	gpg -o "$SIG" --detach-sign "$PKG"
done
echo "Adding binaries to repo file"
repo-add -q -s -R jk-aur.db.tar.gz "${PKGS[@]}"
echo "Sending files to server"
rsync -q -e ssh -Phazz jk-aur* "${SIGS[@]}" lyra:/home/jk/Repo/
echo "Cleaning up local files"
rm -f -- "${SIGS[@]}"
echo "Downloading package on server..."
# The URLs are joined into a single remote command line ("$*" semantics);
# none of them contain whitespace.
ssh lyra "zsh -s ${URLS[*]}" <<"ENDSSH"
cd /home/jk/Repo
wget -q $@
cd /srv/http/repo/arch/x86_64/
rm -f *
mv /home/jk/Repo/* .
ENDSSH
echo "Done."
|
package my.company.web;
import my.company.web.pages.MainPage;
import my.company.web.pages.SearchPage;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
import static org.hamcrest.collection.IsCollectionWithSize.hasSize;
import static org.junit.Assert.assertThat;
import static ru.yandex.qatools.htmlelements.matchers.WebElementMatchers.exists;
/**
* @author <NAME> eroshenkoam
* 5/6/13, 2:51 PM
*/
public class SearchingByRequestTest {

    /** Default number of results Yandex shows per page; now a compile-time
     *  constant instead of being assigned in a constructor. */
    private static final int DEFAULT_RESULTS_COUNT = 10;

    private WebDriverProperties webDriverProperties = new WebDriverProperties();

    /** Remote Firefox driver pointed at the configured Selenium server. */
    public WebDriver driver = new RemoteWebDriver(webDriverProperties.getServer(), DesiredCapabilities.firefox());

    @Before
    public void loadStartPage() {
        driver.get("http://www.yandex.ru");
    }

    @Test
    public void afterSearchingUserShouldSeSearchResults() {
        MainPage mainPage = new MainPage(driver);
        SearchPage page = mainPage.searchFor("Yandex");
        assertThat(page.getSearchResults(), exists());
        assertThat(page.getSearchResults().getSearchItems(), hasSize(DEFAULT_RESULTS_COUNT));
    }

    @After
    public void killWebDriver() {
        // Always release the remote browser session, even after a failure.
        driver.quit();
    }
}
|
#!/usr/bin/env bash
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################

# Wrapper around the bazel-built map_xysl tool with the global flagfile.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$DIR/apollo_base.sh"
# "$@" forwards each caller argument as its own word (the original $@ was
# unquoted and would have re-split arguments containing spaces).
"${DIR}/../bazel-bin/modules/map/tools/map_xysl" --flagfile="${DIR}/../modules/common/data/global_flagfile.txt" "$@"
|
<gh_stars>0
import { Component, OnInit } from '@angular/core';
import {CatagoryServicesService} from '../services/catagory-services.service';
import {MatDialog} from '@angular/material';
import {CatagoryFormComponent} from '../catagory-form/catagory-form.component';
import {getConfirmDialogRef} from '../utils/dialog-util';
@Component({
  selector: 'app-catagory-list',
  templateUrl: './catagory-list.component.html',
  styleUrls: ['./catagory-list.component.scss']
})
export class CatagoryListComponent implements OnInit {

  constructor(private catagoryService: CatagoryServicesService, private dialog: MatDialog) { }

  catagoryList: Catagory[] = [];
  displayedColumns: string[] = ['id', 'name', 'type', 'Action'];

  ngOnInit() {
    this.getCatagories();
  }

  /** Opens the catagory form dialog with no data, i.e. in "create" mode. */
  addUser() {
    this.openCatagoryFormDialog();
  }

  /** Reloads the table contents from the backend. */
  getCatagories() {
    this.catagoryService.getCatagoryList().subscribe((catagories) => {
      this.catagoryList = catagories;
    });
  }

  /** Asks for confirmation, then deletes the catagory and refreshes the list. */
  deleteCatagories(element: Catagory) {
    const confirmDialogRef = this.getConfirmDialogRef('delete this catagory');
    confirmDialogRef.afterClosed().subscribe((success) => {
      if (success) {
        this.catagoryService.deleteCatagory(element._id).subscribe((data) => {
          this.getCatagories();
        });
      }
    });
  }

  /** Opens the catagory form dialog pre-populated with `element` ("edit" mode). */
  editCatagories(element: Catagory) {
    this.openCatagoryFormDialog(element);
  }

  /**
   * Shared dialog logic previously duplicated between addUser() and
   * editCatagories(): opens the form dialog (optionally with an existing
   * catagory as data) and refreshes the list only when the dialog returns a
   * catagory — a cancelled dialog yields undefined and is ignored.
   */
  private openCatagoryFormDialog(element?: Catagory) {
    const config: { panelClass: string, width: string, maxHeight: string, data?: Catagory } = {
      panelClass: 'zero-padding-dialog-panel',
      width: '500px',
      maxHeight: '90vh',
    };
    if (element !== undefined) {
      // Only set `data` in edit mode, preserving the create-mode default.
      config.data = element;
    }
    const dialogRef = this.dialog.open<CatagoryFormComponent, Catagory, Catagory>(CatagoryFormComponent, config);
    dialogRef.beforeClosed().subscribe((catagory) => {
      // Catagory has to be checked because this also fires on cancel.
      if (catagory) {
        this.getCatagories();
      }
    });
  }

  private getConfirmDialogRef(confirmDialogMessage: string) {
    return getConfirmDialogRef(this.dialog, `Are you sure you want to ${confirmDialogMessage}?`);
  }
}
/** Shape of a catagory record as returned by the backend. */
export interface Catagory {
  _id: string;       // backend document id (Mongo-style) — used for delete calls
  name: string;
  type: string;
  products?: any;    // optional related products; shape not visible here
}
|
#!/bin/bash
# Database Copy Script
#
# Provides two servers's database copy
#
# @author Nick Tsai <myintaer@gmail.com>
# @version 1.0.0
# @link https://github.com/yidas/shell
#

#
# Configuration
#

# Source Database Config
declare -A src
src[dbName]="source_dbname"
src[dbUser]=""
src[dbPasswd]=""
src[charset]="utf8"
src[dbHost]="source.db.com"

# Destination Database Config
declare -A dst
dst[dbName]=""
dst[dbUser]=""
dst[dbPasswd]=""
dst[charset]="utf8"
dst[dbHost]="localhost"

# Directory for temporary saving files
tmpPath="/tmp"

# Date format for filename
dateFormat='%Y%m%d'

# Database Advanced Config
# Table Array to Use --no-data (Ex. Log table)
# #example nodataTables=("log" "sys_log")
nodataTables=()

#
# /Configuration
#

now=$(date +"$dateFormat")

cd "$tmpPath" || exit 1

# Auto DB name: when no destination name is configured, derive one from the
# source name plus today's date, appending a counter until it is unused.
if [ "${dst[dbName]}" == '' ]
then
	dst[dbName]="${src[dbName]}_${now}"
	count=0
	while : ; do
		result=$(echo "show databases;" | mysql -h"${dst[dbHost]}" -u "${dst[dbUser]}" -p"${dst[dbPasswd]}" | grep "${dst[dbName]}" | wc -l)
		if [ "$result" != "0" ]
		then
			count=$((count + 1))
			dst[dbName]="${src[dbName]}_${now}_${count}"
		else
			break
		fi
	done
fi

# Auto Distant Setting: any destination field left empty falls back to the
# corresponding source value.
for i in "${!dst[@]}"
do
	if [ "${dst[$i]}" == '' ]
	then
		dst[$i]="${src[$i]}"
	fi
done

# Build --ignore-table options as an array (one element per option) instead of
# a whitespace-joined string, so table names cannot be re-split incorrectly.
ignoreTableOpts=()
if [[ ${nodataTables[@]} ]]
then
	for i in "${nodataTables[@]}"
	do
		ignoreTableOpts+=("--ignore-table=${src[dbName]}.${i}")
	done
fi

# Copy Process
sqlFile="${src[dbName]}.sql"

# Create database query
query='create database `'${dst[dbName]}'` DEFAULT CHARACTER SET '${dst[charset]}'; use `'${dst[dbName]}'`;'
mysql -h"${dst[dbHost]}" -u "${dst[dbUser]}" -p"${dst[dbPasswd]}" -e "${query}"

# Dump structure (all tables, including the no-data ones)
mysqldump -h"${src[dbHost]}" -u "${src[dbUser]}" -p"${src[dbPasswd]}" --set-gtid-purged=off --default-character-set="${src[charset]}" --no-data "${src[dbName]}" > "${sqlFile}"
mysql -h"${dst[dbHost]}" -u "${dst[dbUser]}" -p"${dst[dbPasswd]}" "${dst[dbName]}" < "${sqlFile}"

# Dump data (skipping rows of the configured no-data tables)
mysqldump -h"${src[dbHost]}" -u "${src[dbUser]}" -p"${src[dbPasswd]}" --set-gtid-purged=off --default-character-set="${src[charset]}" "${src[dbName]}" "${ignoreTableOpts[@]}" > "${sqlFile}"
mysql -h"${dst[dbHost]}" -u "${dst[dbUser]}" -p"${dst[dbPasswd]}" "${dst[dbName]}" < "${sqlFile}"

# Remove files after copy
rm -f "${sqlFile}"

# Bug fix: the script previously ended with `exit 1`, reporting failure to
# callers even when the copy succeeded.
exit 0
|
class ImgerWidget:
    """Widget that carries a list of imger JavaScript settings."""

    def __init__(self, attrs=None, **kwargs):
        # Pull 'imger_settings' out of attrs when present; default to an
        # empty list otherwise.
        settings = []
        if attrs and 'imger_settings' in attrs:
            settings = attrs['imger_settings']
        self.imger_settings = settings
        super(ImgerWidget, self).__init__(**kwargs)

    def update_settings(self, new_settings):
        """Replace the stored settings list with *new_settings*."""
        self.imger_settings = new_settings
def calculate_total_js_size(widget):
    """Sum the sizes (in bytes) of the widget's JavaScript files.

    Relies on an external ``get_file_size(path)`` helper assumed to return
    the file size in bytes -- TODO confirm it is in scope at call time.
    """
    return sum(get_file_size(js_file) for js_file in widget.imger_settings)
def calculate_rectangle_area(length, width):
    """Return the area of a rectangle with the given side lengths."""
    return width * length


rectangle_area = calculate_rectangle_area(3, 4)
print(rectangle_area)
<filename>blinker.py
from RPi import GPIO
import time

# LED configuration (BCM numbering)
led1 = 20
led2 = 26
# Number of seconds between blinks
t = 1

# Setup
GPIO.setmode(GPIO.BCM)
GPIO.setup(led1, GPIO.OUT)
GPIO.setup(led2, GPIO.OUT)

# Blinker loop: alternate the two LEDs until interrupted with Ctrl-C, then
# release the GPIO pins. The original looped forever with no cleanup, leaving
# the pins configured (and RPi.GPIO warning on the next run).
try:
    while True:
        GPIO.output(led1, GPIO.HIGH)
        GPIO.output(led2, GPIO.LOW)
        time.sleep(t)
        GPIO.output(led1, GPIO.LOW)
        GPIO.output(led2, GPIO.HIGH)
        time.sleep(t)
except KeyboardInterrupt:
    pass  # normal way to stop the blinker
finally:
    GPIO.cleanup()
package main
import (
"bytes"
"fmt"
)
// join2 concatenates strs with sep between consecutive elements using plain
// string concatenation (see join3 for the bytes.Buffer variant).
func join2(sep string, strs ...string) string {
	result := ""
	for i, s := range strs {
		if i > 0 {
			result += sep
		}
		result += s
	}
	return result
}
// join3 concatenates strs with sep between consecutive elements, accumulating
// into a bytes.Buffer to avoid reallocating the string on every append.
func join3(sep string, strs ...string) string {
	var buf bytes.Buffer
	for i, s := range strs {
		if i > 0 {
			buf.WriteString(sep)
		}
		buf.WriteString(s)
	}
	return buf.String()
}
// main demonstrates join2 on a sample argument list.
func main() {
	fmt.Println(join2(" ", "b", "c", "d", "e", "f"))
}
|
<gh_stars>0
// Prompt the user for a new password (SweetAlert input dialog) for the user
// with the given id, and POST it to info/atualizarSenha; shows a success or
// error dialog depending on the server response.
// Relies on globals: swal (SweetAlert), jQuery ($) and path (API base URL) --
// all assumed to be loaded by the page; confirm against the including template.
function alterarSenha(id){
    swal({
        title: "Deseja alterar a senha?",
        text: "informe a nova senha:",
        icon: "warning",
        content: "input", // renders a text input; its value resolves the promise
        buttons: ["Não", 'Alterar'],
        dangerMode: true,
    })
    .then((v) => {
        // v is the entered password; falsy when the dialog was cancelled or
        // the input left empty.
        if (v) {
            $.post(path + 'info/atualizarSenha', {
                senha: v,
                id: id,
                _token: $('#token').val() // CSRF token read from the page -- presumably required by the backend; verify
            })
            .done((data) => {
                console.log(data)
                swal("Sucesso", 'Sua senha foi alterada com sucesso', 'success');
            })
            .fail((err) => {
                console.log(err)
                swal("Erro", 'Erro ao atualizar a senha', 'warning');
            })
        } else {
            // Cancelled or empty input: nothing to do.
        }
    });
}
/*
* Copyright 2015-2021 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nexttypes.nodes;
import java.io.InputStream;
import java.math.BigDecimal;
import java.sql.Savepoint;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;
import javax.mail.internet.InternetAddress;
import com.nexttypes.datatypes.ActionResult;
import com.nexttypes.datatypes.AlterFieldResult;
import com.nexttypes.datatypes.AlterIndexResult;
import com.nexttypes.datatypes.AlterResult;
import com.nexttypes.datatypes.Auth;
import com.nexttypes.datatypes.Color;
import com.nexttypes.datatypes.Document;
import com.nexttypes.datatypes.FieldInfo;
import com.nexttypes.datatypes.FieldRange;
import com.nexttypes.datatypes.Filter;
import com.nexttypes.datatypes.HTMLFragment;
import com.nexttypes.datatypes.Image;
import com.nexttypes.datatypes.ImportObjectsResult;
import com.nexttypes.datatypes.ImportTypesResult;
import com.nexttypes.datatypes.Matrix;
import com.nexttypes.datatypes.NXObject;
import com.nexttypes.datatypes.Names;
import com.nexttypes.datatypes.ObjectField;
import com.nexttypes.datatypes.ObjectInfo;
import com.nexttypes.datatypes.Objects;
import com.nexttypes.datatypes.Reference;
import com.nexttypes.datatypes.Tuple;
import com.nexttypes.datatypes.Tuples;
import com.nexttypes.datatypes.Type;
import com.nexttypes.datatypes.TypeField;
import com.nexttypes.datatypes.TypeIndex;
import com.nexttypes.datatypes.TypeInfo;
import com.nexttypes.datatypes.TypeReference;
import com.nexttypes.datatypes.URL;
import com.nexttypes.datatypes.UpdateIdResult;
import com.nexttypes.datatypes.XML;
import com.nexttypes.datatypes.XML.Element;
import com.nexttypes.enums.ImportAction;
import com.nexttypes.enums.NodeMode;
import com.nexttypes.enums.Order;
import com.nexttypes.interfaces.ObjectsStream;
import com.nexttypes.interfaces.TypesStream;
import com.nexttypes.protocol.http.HTTPRequest;
import com.nexttypes.settings.Settings;
import com.nexttypes.settings.LanguageSettings;
import com.nexttypes.settings.TypeSettings;
import com.nexttypes.system.KeyWords;
import com.nexttypes.system.Context;
import com.nexttypes.system.Controller;
import com.nexttypes.system.Loader;
/**
 * Node that routes every per-type operation through the Controller configured
 * for that type (via {@link #getController(String)}) and forwards everything
 * else verbatim to the next node in the chain ({@code nextNode}).
 *
 * Almost all methods below are one-line delegations; only the constructors,
 * {@code getController} and {@code setTypeActions} contain logic of their own.
 */
public class ControllersNode extends Node {

    protected Auth auth;                 // credentials of the requesting user
    protected Settings settings;         // controllers settings (e.g. next node class name)
    protected TypeSettings typeSettings; // per-type settings (controller class names)
    protected Node nextNode;             // next node in the processing chain

    // Convenience constructor: derives all parameters from the HTTP request
    // and always uses the connection pool.
    public ControllersNode(HTTPRequest request, NodeMode mode) {
        this(request.getAuth(), mode, request.getLang(), request.getRemoteAddress(),
                request.getContext(), true);
    }

    /**
     * Builds the node: reads the controllers settings, loads the next node of
     * the chain by class name, and caches the type settings for auth.
     */
    public ControllersNode(Auth auth, NodeMode mode, String lang, String remoteAddress,
            Context context, boolean useConnectionPool) {
        this.auth = auth;
        settings = context.getSettings(Settings.CONTROLLERS_SETTINGS);
        nextNode = Loader.loadNode(settings.getString(KeyWords.NEXT_NODE), auth, mode, lang, remoteAddress,
                context, useConnectionPool);
        typeSettings = context.getTypeSettings(auth);
    }

    /**
     * Returns the controller configured for {@code type}, or a plain
     * {@link Controller} when none is configured. The controller accesses the
     * chain through a {@link ProxyNode} wrapping this node and the next one.
     */
    protected Controller getController(String type) {
        String className = typeSettings.gts(type, KeyWords.CONTROLLER);
        Controller controller = null;
        ProxyNode proxyNode = new ProxyNode(type, nextNode, this);
        if (className != null) {
            controller = Loader.loadController(className, type, auth, proxyNode);
        } else {
            controller = new Controller(type, auth, proxyNode);
        }
        return controller;
    }

    // Attaches each type's controller-defined actions to the Type objects.
    protected void setTypeActions(LinkedHashMap<String, Type> types) {
        for (Map.Entry<String, Type> entry : types.entrySet()) {
            entry.getValue().setActions(getTypeActions(entry.getKey()));
        }
    }

    // ---- Controller actions ----

    @Override
    public LinkedHashMap<String, TypeField> getActionFields(String type, String action) {
        return getController(type).getActionFields(action);
    }

    @Override
    public FieldRange getActionFieldRange(String type, String action, String field) {
        return getController(type).getActionFieldRange(action, field);
    }

    @Override
    public TypeField getActionField(String type, String action, String field) {
        return getController(type).getActionField(action, field);
    }

    @Override
    public String getActionFieldType(String type, String action, String field) {
        return getController(type).getActionFieldType(action, field);
    }

    @Override
    public LinkedHashMap<String, LinkedHashMap<String, TypeField>> getTypeActions(String type) {
        return getController(type).getTypeActions();
    }

    @Override
    public ActionResult executeAction(String type, String id, String action, Object... parameters) {
        return getController(type).executeAction(id, action, parameters);
    }

    @Override
    public ActionResult executeAction(String type, String[] objects, String action, Object... parameters) {
        return getController(type).executeAction(objects, action, parameters);
    }

    // ---- Types, export and import ----

    @Override
    public Type getType(String type) {
        return getController(type).getType();
    }

    @Override
    public LinkedHashMap<String, Type> getTypes(String[] types) {
        // Delegates the lookup, then enriches each type with its actions.
        LinkedHashMap<String, Type> typeObjects = nextNode.getTypes(types);
        setTypeActions(typeObjects);
        return typeObjects;
    }

    @Override
    public TypesStream exportTypes(String[] types, boolean includeObjects) {
        TypesStream export = nextNode.exportTypes(types, includeObjects);
        setTypeActions(export.getTypes());
        return export;
    }

    @Override
    public TypesStream exportTypes(String[] types, Filter filter, boolean includeObjects) {
        TypesStream export = nextNode.exportTypes(types, filter, includeObjects);
        setTypeActions(export.getTypes());
        return export;
    }

    @Override
    public TypesStream exportTypes(String[] types, Filter[] filters, boolean includeObjects) {
        TypesStream export = nextNode.exportTypes(types, filters, includeObjects);
        setTypeActions(export.getTypes());
        return export;
    }

    @Override
    public TypesStream backup(boolean full) {
        TypesStream backup = nextNode.backup(full);
        setTypeActions(backup.getTypes());
        return backup;
    }

    @Override
    public String getVersion() {
        return nextNode.getVersion();
    }

    @Override
    public ObjectsStream exportObjects(String type, String[] objects, LinkedHashMap<String, Order> order) {
        return getController(type).exportObjects(objects, order);
    }

    @Override
    public ImportTypesResult importTypes(InputStream types, ImportAction existingTypesAction,
            ImportAction existingObjectsAction) {
        return nextNode.importTypes(types, existingTypesAction, existingObjectsAction);
    }

    @Override
    public ImportTypesResult importTypes(TypesStream types, ImportAction existingTypesAction,
            ImportAction existingObjectsAction) {
        return nextNode.importTypes(types, existingTypesAction, existingObjectsAction);
    }

    @Override
    public ImportObjectsResult importObjects(InputStream objects, ImportAction existingObjectsAction) {
        return nextNode.importObjects(objects, existingObjectsAction);
    }

    @Override
    public ImportObjectsResult importObjects(ObjectsStream objects, ImportAction existingObjectsAction) {
        return nextNode.importObjects(objects, existingObjectsAction);
    }

    @Override
    public void scanVirus(String type, String[] objects) {
        getController(type).scanVirus(type, objects);
    }

    // ---- Schema operations ----

    @Override
    public ZonedDateTime create(Type type) {
        return getController(type.getName()).create(type);
    }

    @Override
    public ZonedDateTime addField(String type, String field, TypeField typeField) {
        return getController(type).addField(field, typeField);
    }

    @Override
    public ZonedDateTime addIndex(String type, String index, TypeIndex typeIndex) {
        return getController(type).addIndex(index, typeIndex);
    }

    @Override
    public AlterResult alter(Type type) {
        return getController(type.getName()).alter(type);
    }

    @Override
    public AlterResult alter(Type type, ZonedDateTime adate) {
        return getController(type.getName()).alter(type, adate);
    }

    @Override
    public ZonedDateTime rename(String type, String newName) {
        return getController(type).rename(newName);
    }

    @Override
    public AlterFieldResult alterField(String type, String field, TypeField typeField) {
        return getController(type).alterField(field, typeField);
    }

    @Override
    public AlterIndexResult alterIndex(String type, String index, TypeIndex typeIndex) {
        return getController(type).alterIndex(index, typeIndex);
    }

    @Override
    public ZonedDateTime renameField(String type, String field, String newName) {
        return getController(type).renameField(field, newName);
    }

    @Override
    public ZonedDateTime renameIndex(String type, String index, String newName) {
        return getController(type).renameIndex(index, newName);
    }

    // ---- Object CRUD ----

    @Override
    public ZonedDateTime insert(NXObject object) {
        return getController(object.getType()).insert(object);
    }

    @Override
    public ZonedDateTime update(NXObject object) {
        return getController(object.getType()).update(object);
    }

    @Override
    public ZonedDateTime update(NXObject object, ZonedDateTime udate) {
        return getController(object.getType()).update(object, udate);
    }

    @Override
    public UpdateIdResult updateId(String type, String id, String newId) {
        return getController(type).updateId(id, newId);
    }

    @Override
    public ZonedDateTime updateField(String type, String id, String field, Object value) {
        return getController(type).updateField(id, field, value);
    }

    @Override
    public ZonedDateTime updatePassword(String type, String id, String field, String currentPassword,
            String newPassword, String newPasswordRepeat) {
        return getController(type).updatePassword(id, field, currentPassword, newPassword, newPasswordRepeat);
    }

    @Override
    public boolean checkPassword(String type, String id, String field, String password) {
        return getController(type).checkPassword(id, field, password);
    }

    @Override
    public ZonedDateTime update(String type, String id, byte[] data) {
        return getController(type).update(id, data);
    }

    @Override
    public NXObject get(String type, String id, String[] fields, String lang, boolean fulltext, boolean binary,
            boolean documentPreview, boolean password, boolean objectName, boolean referencesName) {
        return getController(type).get(id, fields, lang, fulltext, binary, documentPreview,
                password, objectName, referencesName);
    }

    // ---- Selects and streams ----

    @Override
    public Objects select(String type, String[] fields, String lang, Filter filter, String search,
            LinkedHashMap<String, Order> order, Long offset, Long limit) {
        return getController(type).select(fields, lang, filter, search, order, offset, limit);
    }

    @Override
    public Objects select(String type, String[] fields, String lang, Filter filter, String search,
            LinkedHashMap<String, Order> order, boolean fulltext, boolean binary, boolean documentPreview,
            boolean password, boolean objectsName, boolean referencesName, Long offset, Long limit) {
        return getController(type).select(fields, lang, filter, search, order, fulltext, binary, documentPreview,
                password, objectsName, referencesName, offset, limit);
    }

    @Override
    public Objects select(String type, String[] fields, String lang, Filter[] filters, String search,
            LinkedHashMap<String, Order> order, Long offset, Long limit) {
        return getController(type).select(fields, lang, filters, search, order, offset, limit);
    }

    @Override
    public Objects select(String type, String[] fields, String lang, Filter[] filters, String search,
            LinkedHashMap<String, Order> order, boolean fulltext, boolean binary, boolean documentPreview,
            boolean password, boolean objectsName, boolean referencesName, Long offset, Long limit) {
        return getController(type).select(fields, lang, filters, search, order, fulltext, binary, documentPreview,
                password, objectsName, referencesName, offset, limit);
    }

    @Override
    public Tuples select(String type, StringBuilder sql, ArrayList<Object> parameters, String filters,
            String search, String[] searchFields, String order, Long offset, Long limit) {
        return getController(type).select(sql, parameters, filters, search, searchFields,
                order, offset, limit);
    }

    @Override
    public Tuple[] select(String type, StringBuilder sql, ArrayList<Object> parameters, String filters, String order) {
        return getController(type).select(sql, parameters, filters, order);
    }

    @Override
    public ObjectsStream selectStream(String type, String[] fields, String lang, Filter filter, String search,
            LinkedHashMap<String, Order> order, Long offset, Long limit) {
        return getController(type).selectStream(fields, lang, filter, search, order, offset, limit);
    }

    @Override
    public ObjectsStream selectStream(String type, String[] fields, String lang, Filter filter, String search,
            LinkedHashMap<String, Order> order, boolean fulltext, boolean binary, boolean documentPreview,
            boolean password, boolean objectsName, boolean referencesName, Long offset, Long limit) {
        return getController(type).selectStream(fields, lang, filter, search, order, fulltext, binary,
                documentPreview, password, objectsName, referencesName, offset, limit);
    }

    @Override
    public ObjectsStream selectStream(String type, String[] fields, String lang, Filter[] filters, String search,
            LinkedHashMap<String, Order> order, Long offset, Long limit) {
        return getController(type).selectStream(fields, lang, filters, search, order, offset, limit);
    }

    @Override
    public ObjectsStream selectStream(String type, String[] fields, String lang, Filter[] filters, String search,
            LinkedHashMap<String, Order> order, boolean fulltext, boolean binary, boolean documentPreview,
            boolean password, boolean objectsName, boolean referencesName, Long offset, Long limit) {
        return getController(type).selectStream(fields, lang, filters, search, order, fulltext, binary,
                documentPreview, password, objectsName, referencesName, offset, limit);
    }

    // ---- Type and object metadata ----

    @Override
    public String[] getTypesName() {
        return nextNode.getTypesName();
    }

    @Override
    public TypeInfo[] getTypesInfo() {
        return nextNode.getTypesInfo();
    }

    @Override
    public TreeMap<String, TypeInfo> getTypesInfoOrderByName() {
        return nextNode.getTypesInfoOrderByName();
    }

    @Override
    public String getName(String type, String id, String lang) {
        return getController(type).getName(id, lang);
    }

    @Override
    public Names getNames(String type, String lang) {
        return getController(type).getNames(lang);
    }

    @Override
    public Names getNames(String type, String lang, String search,
            Long offset, Long limit ) {
        return getController(type).getNames(lang, search, offset, limit);
    }

    @Override
    public Names getNames(String type, String sql, Object[] parameters,
            String lang, String search, Long offset, Long limit) {
        return getController(type).getNames(sql, parameters, lang, search, offset, limit);
    }

    @Override
    public Names getNames(String type, StringBuilder sql,
            ArrayList<Object> parameters, String lang, String search, Long offset, Long limit) {
        return getController(type).getNames(sql, parameters, lang, search, offset, limit);
    }

    @Override
    public Names getNames(String referencedType, String referencingType,
            String referencingAction, String referencingField, String lang) {
        return getController(referencedType).getNames(referencingType, referencingAction,
                referencingField, lang);
    }

    @Override
    public Names getNames(String referencedType, String referencingType,
            String referencingAction, String referencingField, String lang, String search, Long offset,
            Long limit) {
        return getController(referencedType).getNames(referencingType, referencingAction,
                referencingField, lang, search, offset, limit);
    }

    @Override
    public LinkedHashMap<String, ObjectInfo[]> getObjectsInfo(String[] types) {
        return nextNode.getObjectsInfo(types);
    }

    @Override
    public TypeField getTypeField(String type, String field) {
        return getController(type).getTypeField(field);
    }

    @Override
    public LinkedHashMap<String, TypeField> getTypeFields(String type, String... fields) {
        return getController(type).getTypeFields(fields);
    }

    @Override
    public LinkedHashMap<String, TypeField> getTypeFields(String type) {
        return getController(type).getTypeFields();
    }

    @Override
    public TypeIndex getTypeIndex(String type, String index) {
        return getController(type).getTypeIndex(index);
    }

    @Override
    public LinkedHashMap<String, TypeIndex> getTypeIndexes(String type, String... indexes) {
        return getController(type).getTypeIndexes(indexes);
    }

    @Override
    public LinkedHashMap<String, TypeIndex> getTypeIndexes(String type) {
        return getController(type).getTypeIndexes();
    }

    @Override
    public String getFieldType(String type, String field) {
        return getController(type).getFieldType(field);
    }

    @Override
    public Tuple getFieldsSize(String type, String id) {
        return getController(type).getFieldsSize(id);
    }

    @Override
    public void drop(String... types) {
        nextNode.drop(types);
    }

    @Override
    public ZonedDateTime dropField(String type, String field) {
        return getController(type).dropField(field);
    }

    @Override
    public ZonedDateTime dropIndex(String type, String index) {
        return getController(type).dropIndex(index);
    }

    @Override
    public void delete(String type, String... objects) {
        getController(type).delete(objects);
    }

    @Override
    public Long count(String type) {
        return getController(type).count();
    }

    @Override
    public boolean hasObjects(String type) {
        return getController(type).hasObjects();
    }

    @Override
    public boolean hasNullValues(String type, String field) {
        return getController(type).hasNullValues(field);
    }

    @Override
    public Long count(String sql, Object... parameters) {
        return nextNode.count(sql, parameters);
    }

    @Override
    public Long count(StringBuilder sql, Object... parameters) {
        return nextNode.count(sql, parameters);
    }

    @Override
    public Long count(StringBuilder sql, ArrayList<Object> parameters) {
        return nextNode.count(sql, parameters);
    }

    // ---- Field accessors ----

    @Override
    public Object getField(String type, String id, String field) {
        return getController(type).getField(id, field);
    }

    @Override
    public String getStringField(String type, String id, String field) {
        return getController(type).getStringField(id, field);
    }

    @Override
    public byte[] getBinaryField(String type, String id, String field) {
        return getController(type).getBinaryField(id, field);
    }

    @Override
    public Image getImageField(String type, String id, String field) {
        return getController(type).getImageField(id, field);
    }

    @Override
    public byte[] getImageContent(String type, String id, String field) {
        return getController(type).getImageContent(id, field);
    }

    @Override
    public byte[] getImageThumbnail(String type, String id, String field) {
        return getController(type).getImageThumbnail(id, field);
    }

    @Override
    public String getImageContentType(String type, String id, String field) {
        return getController(type).getImageContentType(id, field);
    }

    @Override
    public String getDocumentContentType(String type, String id, String field) {
        return getController(type).getDocumentContentType(id, field);
    }

    @Override
    public XML getXMLField(String type, String id, String field) {
        return getController(type).getXMLField(id, field);
    }

    @Override
    public Element getHTMLElement(String type, String id, String field, String element) {
        return getController(type).getHTMLElement(id, field, element);
    }

    @Override
    public Element getXMLElement(String type, String id, String field, String element) {
        return getController(type).getXMLElement(id, field, element);
    }

    @Override
    public HTMLFragment getHTMLField(String type, String id, String field) {
        return getController(type).getHTMLField(id, field);
    }

    @Override
    public Document getDocumentField(String type, String id, String field) {
        return getController(type).getDocumentField(id, field);
    }

    @Override
    public ObjectField getObjectField(String type, String id, String field) {
        return getController(type).getObjectField(id, field);
    }

    @Override
    public String getPasswordField(String type, String id, String field) {
        return getController(type).getPasswordField(id, field);
    }

    @Override
    public String getFieldContentType(String type, String field) {
        // NOTE(review): forwards 'type' as the controller's first argument,
        // unlike the sibling overloads -- confirm the intended overload.
        return getController(type).getFieldContentType(type, field);
    }

    @Override
    public String getFieldContentType(String type, String id, String field) {
        return getController(type).getFieldContentType(id, field);
    }

    @Override
    public Object getFieldDefault(String type, String field) {
        return getController(type).getFieldDefault(field);
    }

    @Override
    public String getCompositeFieldContentType(String type, String id, String field) {
        return getController(type).getCompositeFieldContentType(id, field);
    }

    @Override
    public LinkedHashMap<String, String> getFieldsContentType(String type) {
        return getController(type).getFieldsContentType();
    }

    @Override
    public LinkedHashMap<String, FieldInfo> getFieldsInfo(String type, String id) {
        return getController(type).getFieldsInfo(id);
    }

    @Override
    public ZonedDateTime getADate(String type) {
        return getController(type).getADate();
    }

    @Override
    public ZonedDateTime getUDate(String type, String id) {
        return getController(type).getUDate(id);
    }

    @Override
    public String getETag(String type, String id) {
        return getController(type).getETag(id);
    }

    @Override
    public FieldRange getFieldRange(String type, String field) {
        return getController(type).getFieldRange(field);
    }

    // ---- Raw SQL execution and transaction control ----

    @Override
    public int execute(String sql, Object... parameters) {
        return nextNode.execute(sql, parameters);
    }

    @Override
    public int execute(StringBuilder sql, Object... parameters) {
        return nextNode.execute(sql, parameters);
    }

    @Override
    public int execute(StringBuilder sql, ArrayList<Object> parameters) {
        return nextNode.execute(sql, parameters);
    }

    @Override
    public int execute(String sql, Integer expectedRows, Object... parameters) {
        return nextNode.execute(sql, expectedRows, parameters);
    }

    @Override
    public int execute(StringBuilder sql, Integer expectedRows, Object... parameters) {
        return nextNode.execute(sql, expectedRows, parameters);
    }

    @Override
    public int execute(StringBuilder sql, Integer expectedRows, ArrayList<Object> parameters) {
        return nextNode.execute(sql, expectedRows, parameters);
    }

    @Override
    public int execute(String sql, boolean useSavepoint, Integer expectedRows, Object... parameters) {
        return nextNode.execute(sql, useSavepoint, expectedRows, parameters);
    }

    @Override
    public int execute(StringBuilder sql, boolean useSavepoint, Integer expectedRows,
            Object... parameters) {
        return nextNode.execute(sql, useSavepoint, expectedRows, parameters);
    }

    @Override
    public int execute(StringBuilder sql, boolean useSavepoint, Integer expectedRows,
            ArrayList<Object> parameters) {
        return nextNode.execute(sql, useSavepoint, expectedRows, parameters);
    }

    @Override
    public Savepoint setSavepoint() {
        return nextNode.setSavepoint();
    }

    @Override
    public void rollback() {
        nextNode.rollback();
    }

    @Override
    public void rollback(Savepoint savepoint) {
        nextNode.rollback(savepoint);
    }

    @Override
    public void commit() {
        nextNode.commit();
    }

    @Override
    public void close() {
        nextNode.close();
    }

    // ---- References ----

    @Override
    public Reference[] getReferences() {
        return nextNode.getReferences();
    }

    @Override
    public TreeMap<String, TreeMap<String, TreeMap<String, Reference>>> getReferencesOrderByNames() {
        return nextNode.getReferencesOrderByNames();
    }

    @Override
    public TypeReference[] getUpReferences(String type) {
        return getController(type).getUpReferences();
    }

    @Override
    public Reference[] getUpReferences(String[] types) {
        return nextNode.getUpReferences(types);
    }

    @Override
    public TypeReference[] getDownReferences(String type) {
        return getController(type).getDownReferences();
    }

    // ---- Typed scalar queries ----

    @Override
    public Short getInt16(String sql, Object... parameters) {
        return nextNode.getInt16(sql, parameters);
    }

    @Override
    public Integer getInt32(String sql, Object... parameters) {
        return nextNode.getInt32(sql, parameters);
    }

    @Override
    public Long getInt64(String sql, Object... parameters) {
        return nextNode.getInt64(sql, parameters);
    }

    @Override
    public Float getFloat32(String sql, Object... parameters) {
        return nextNode.getFloat32(sql, parameters);
    }

    @Override
    public Double getFloat64(String sql, Object... parameters) {
        return nextNode.getFloat64(sql, parameters);
    }

    @Override
    public BigDecimal getNumeric(String sql, Object... parameters) {
        return nextNode.getNumeric(sql, parameters);
    }

    @Override
    public String getString(String sql, Object... parameters) {
        return nextNode.getString(sql, parameters);
    }

    @Override
    public String getText(String sql, Object... parameters) {
        return nextNode.getText(sql, parameters);
    }

    @Override
    public LocalDate getDate(String sql, Object... parameters) {
        return nextNode.getDate(sql, parameters);
    }

    @Override
    public LocalTime getTime(String sql, Object... parameters) {
        return nextNode.getTime(sql, parameters);
    }

    @Override
    public LocalDateTime getDateTime(String sql, Object... parameters) {
        return nextNode.getDateTime(sql, parameters);
    }

    @Override
    public byte[] getBinary(String sql, Object... parameters) {
        return nextNode.getBinary(sql, parameters);
    }

    @Override
    public HTMLFragment getHTML(String sql, String allowedTags, Object... parameters) {
        return nextNode.getHTML(sql, allowedTags, parameters);
    }

    @Override
    public URL getURL(String sql, Object... parameters) {
        return nextNode.getURL(sql, parameters);
    }

    @Override
    public InternetAddress getEmail(String sql, Object... parameters) {
        return nextNode.getEmail(sql, parameters);
    }

    @Override
    public String getTel(String sql, Object... parameters) {
        return nextNode.getTel(sql, parameters);
    }

    @Override
    public ZoneId getTimeZone(String sql, Object... parameters) {
        return nextNode.getTimeZone(sql, parameters);
    }

    @Override
    public Color getColor(String sql, Object... parameters) {
        return nextNode.getColor(sql, parameters);
    }

    @Override
    public Image getImage(String sql, Object... parameters) {
        return nextNode.getImage(sql, parameters);
    }

    @Override
    public Document getDocument(String sql, Object... parameters) {
        return nextNode.getDocument(sql, parameters);
    }

    @Override
    public ZonedDateTime getUTCDateTime(String sql, Object... parameters) {
        return nextNode.getUTCDateTime(sql, parameters);
    }

    // ---- Typed array queries ----

    @Override
    public Short[] getInt16Array(String sql, Object... parameters) {
        return nextNode.getInt16Array(sql, parameters);
    }

    @Override
    public Integer[] getInt32Array(String sql, Object... parameters) {
        return nextNode.getInt32Array(sql, parameters);
    }

    @Override
    public Long[] getInt64Array(String sql, Object... parameters) {
        return nextNode.getInt64Array(sql, parameters);
    }

    @Override
    public Float[] getFloat32Array(String sql, Object... parameters) {
        return nextNode.getFloat32Array(sql, parameters);
    }

    @Override
    public Double[] getFloat64Array(String sql, Object... parameters) {
        return nextNode.getFloat64Array(sql, parameters);
    }

    @Override
    public BigDecimal[] getNumericArray(String sql, Object... parameters) {
        return nextNode.getNumericArray(sql, parameters);
    }

    @Override
    public Boolean[] getBooleanArray(String sql, Object... parameters) {
        return nextNode.getBooleanArray(sql, parameters);
    }

    @Override
    public String[] getStringArray(String sql, Object... parameters) {
        return nextNode.getStringArray(sql, parameters);
    }

    @Override
    public String[] getTextArray(String sql, Object... parameters) {
        return nextNode.getTextArray(sql, parameters);
    }

    @Override
    public LocalDate[] getDateArray(String sql, Object... parameters) {
        return nextNode.getDateArray(sql, parameters);
    }

    @Override
    public LocalTime[] getTimeArray(String sql, Object... parameters) {
        return nextNode.getTimeArray(sql, parameters);
    }

    @Override
    public LocalDateTime[] getDateTimeArray(String sql, Object... parameters) {
        return nextNode.getDateTimeArray(sql, parameters);
    }

    @Override
    public ZonedDateTime[] getUTCDateTimeArray(String sql, Object... parameters) {
        return nextNode.getUTCDateTimeArray(sql, parameters);
    }

    @Override
    public byte[][] getBinaryArray(String sql, Object... parameters) {
        return nextNode.getBinaryArray(sql, parameters);
    }

    @Override
    public HTMLFragment[] getHTMLArray(String sql, String allowedTags, Object... parameters) {
        return nextNode.getHTMLArray(sql, allowedTags, parameters);
    }

    @Override
    public URL[] getURLArray(String sql, Object... parameters) {
        return nextNode.getURLArray(sql, parameters);
    }

    @Override
    public InternetAddress[] getEmailArray(String sql, Object... parameters) {
        return nextNode.getEmailArray(sql, parameters);
    }

    @Override
    public String[] getTelArray(String sql, Object... parameters) {
        return nextNode.getTelArray(sql, parameters);
    }

    @Override
    public ZoneId[] getTimeZoneArray(String sql, Object... parameters) {
        return nextNode.getTimeZoneArray(sql, parameters);
    }

    @Override
    public Color[] getColorArray(String sql, Object... parameters) {
        return nextNode.getColorArray(sql, parameters);
    }

    @Override
    public Image[] getImageArray(String sql, Object... parameters) {
        return nextNode.getImageArray(sql, parameters);
    }

    @Override
    public Document[] getDocumentArray(String sql, Object... parameters) {
        return nextNode.getDocumentArray(sql, parameters);
    }

    @Override
    public <T> T[] getArray(String sql, Class<T> type, Object... parameters) {
        return nextNode.getArray(sql, type, parameters);
    }

    @Override
    public Object getObject(String sql, Object... parameters) {
        return nextNode.getObject(sql, parameters);
    }

    @Override
    public Matrix getMatrix(String sql, String[] axes, Object... parameters) {
        return nextNode.getMatrix(sql, axes, parameters);
    }

    @Override
    public Tuple getTuple(String sql, Object... parameters) {
        return nextNode.getTuple(sql, parameters);
    }

    @Override
    public Tuple getTuple(StringBuilder sql, Object... parameters) {
        return nextNode.getTuple(sql, parameters);
    }

    @Override
    public Tuple getTuple(StringBuilder sql, ArrayList<Object> parameters) {
        return nextNode.getTuple(sql, parameters);
    }

    @Override
    public Tuple[] query(StringBuilder sql, ArrayList<Object> parameters) {
        return nextNode.query(sql, parameters);
    }

    @Override
    public Tuple[] query(StringBuilder sql, Object... parameters) {
        return nextNode.query(sql, parameters);
    }

    @Override
    public Tuple[] query(String sql, Object... parameters) {
        return nextNode.query(sql, parameters);
    }

    @Override
    public <T> T[] query(String sql, Class<T> type, Object... parameters) {
        return nextNode.query(sql, type, parameters);
    }

    // ---- Miscellaneous accessors ----

    @Override
    public Auth getAuth() {
        return nextNode.getAuth();
    }

    @Override
    public Boolean existsType(String type) {
        return getController(type).existsType();
    }

    @Override
    public Boolean existsObject(String type, String id) {
        return getController(type).existsObject(id);
    }

    @Override
    public String[] getBinaryFieldsName(String type) {
        // NOTE(review): 'type' is forwarded as the controller argument as
        // written in the original -- confirm the controller expects it.
        return getController(type).getBinaryFieldsName(type);
    }

    @Override
    public Boolean getBoolean(String sql, Object... parameters) {
        return nextNode.getBoolean(sql, parameters);
    }

    @Override
    public void setDeferredConstraints(boolean status) {
        nextNode.setDeferredConstraints(status);
    }

    @Override
    public Context getContext() {
        return nextNode.getContext();
    }

    @Override
    public LanguageSettings getLanguageSettings() {
        return nextNode.getLanguageSettings();
    }

    @Override
    public TypeSettings getTypeSettings() {
        return nextNode.getTypeSettings();
    }

    @Override
    public String[] getGroups(String user) {
        return nextNode.getGroups(user);
    }

    @Override
    public Node getNextNode() {
        return nextNode;
    }
}
<filename>src/main/java/org/ga4gh/discovery/search/beaconnetwork/BeaconNetworkSearchAdapter.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.ga4gh.discovery.search.beaconnetwork;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;
import io.debezium.antlr.CaseChangingCharStream;
import io.debezium.ddl.parser.mysql.generated.MySqlLexer;
import io.debezium.ddl.parser.mysql.generated.MySqlParser;
import io.debezium.ddl.parser.mysql.generated.MySqlParserBaseListener;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.apache.http.client.utils.URIBuilder;
import org.ga4gh.discovery.search.model.SchemaProperty;
import org.ga4gh.discovery.search.model.SearchAdapter;
import org.ga4gh.table.SchemaId;
import org.ga4gh.table.model.*;
import org.json.JSONException;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
/**
*
* @author mfiume
*/
@Service
public class BeaconNetworkSearchAdapter implements SearchAdapter {
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(BeaconQueryParser.class);
String BEACON_NETWORK_API = "https://beacon-network.org/api";
ListTablesResponse tableInfo;
private void printElement(JsonElement e) {
Gson gson = new GsonBuilder().setPrettyPrinting().create();
logger.info(gson.toJson(e));
}
public static enum PROPERTIES {
VARIANT_BEACONID {
@Override
public String toString() {
return "beaconId";
}
},
VARIANT_CHROMOSOME {
@Override
public String toString() {
return "chromosome";
}
},
VARIANT_POSITION {
@Override
public String toString() {
return "position";
}
},
VARIANT_REFERENCE_ALLELE {
@Override
public String toString() {
return "referenceAllele";
}
},
VARIANT_ALLELE {
@Override
public String toString() {
return "allele";
}
},
VARIANT_REFERENCE {
@Override
public String toString() {
return "reference";
}
},
VARIANT_EXISTS {
@Override
public String toString() {
return "exists";
}
};
}
public BeaconNetworkSearchAdapter() {
List<TableInfo> tables = new ArrayList<>();
tables.add(getOrganizationTable());
tables.add(getBeaconTable());
tables.add(getVariantTable());
tableInfo = new ListTablesResponse(tables);
}
private TableInfo getOrganizationTable() {
TableInfo ti = new TableInfo();
ti.setName("organizations");
ti.setDescription("Organizations hosting Beacons registered to the network");
ObjectMapper mapper = new ObjectMapper();
JsonNode node = mapper.valueToTree(getOrganizationProperties());
SchemaId sid = SchemaId.createIgnoringFragment(ti.getName());
Schema s = new Schema(sid, node);
ti.setSchema(s);
return ti;
}
private TableInfo getBeaconTable() {
TableInfo ti = new TableInfo();
ti.setName("variants");
ti.setDescription("Variants, their existence, and metadata about them");
ObjectMapper mapper = new ObjectMapper();
JsonNode node = mapper.valueToTree(getVariantProperties());
SchemaId sid = SchemaId.createIgnoringFragment(ti.getName());
Schema s = new Schema(sid, node);
ti.setSchema(s);
return ti;
}
private TableInfo getVariantTable() {
TableInfo ti = new TableInfo();
ti.setName("beacons");
ti.setDescription("Beacons registered to the network");
ObjectMapper mapper = new ObjectMapper();
JsonNode node = mapper.valueToTree(getBeaconProperties());
SchemaId sid = SchemaId.createIgnoringFragment(ti.getName());
Schema s = new Schema(sid, node);
ti.setSchema(s);
return ti;
}
private Map<String, SchemaProperty> getOrganizationProperties() {
Map<String, SchemaProperty> properties = new HashMap<>();
int index = 0;
properties.put("id", new SchemaProperty("int", index++));
properties.put("name", new SchemaProperty("string", index++));
properties.put("description", new SchemaProperty("string", index++));
properties.put("createdDate", new SchemaProperty("date", index++));
properties.put("url", new SchemaProperty("string", index++));
properties.put("address", new SchemaProperty("string", index++));
properties.put("logo", new SchemaProperty("string", index++));
return properties;
}
private Map<String, SchemaProperty> getBeaconProperties() {
Map<String, SchemaProperty> properties = new HashMap<>();
int index = 0;
properties.put("id", new SchemaProperty("int", index++));
properties.put("name", new SchemaProperty("string", index++));
properties.put("url", new SchemaProperty("string", index++));
properties.put("organization", new SchemaProperty("string", index++));
properties.put("description", new SchemaProperty("string", index++));
properties.put("homepage", new SchemaProperty("string", index++));
properties.put("email", new SchemaProperty("string", index++));
properties.put("aggregator", new SchemaProperty("boolean", index++));
properties.put("enabled", new SchemaProperty("boolean", index++));
properties.put("visible", new SchemaProperty("boolean", index++));
properties.put("createdDate", new SchemaProperty("date", index++));
properties.put("supportedReferences", new SchemaProperty("array", index++));
properties.put("aggregatedBeacons", new SchemaProperty("array", index++));
return properties;
}
private Map<String, SchemaProperty> getVariantProperties() {
Map<String, SchemaProperty> properties = new HashMap<>();
int index = 0;
properties.put(PROPERTIES.VARIANT_BEACONID.toString(), new SchemaProperty("string", index++));
properties.put(PROPERTIES.VARIANT_CHROMOSOME.toString(), new SchemaProperty("string", index++));
properties.put(PROPERTIES.VARIANT_POSITION.toString(), new SchemaProperty("long", index++));
properties.put(PROPERTIES.VARIANT_REFERENCE_ALLELE.toString(), new SchemaProperty("string", index++));
properties.put(PROPERTIES.VARIANT_ALLELE.toString(), new SchemaProperty("string", index++));
properties.put(PROPERTIES.VARIANT_REFERENCE.toString(), new SchemaProperty("string", index++));
properties.put(PROPERTIES.VARIANT_EXISTS.toString(), new SchemaProperty("boolean", index++));
return properties;
}
@Override
public ListTablesResponse getTables() {
return tableInfo;
}
@Override
public TableInfo getTableInfo(String name) {
for (TableInfo table : tableInfo.getTables()) {
if (table.getName().equals(name)) {
return table;
}
}
throw new IllegalArgumentException("Unknown table " + name);
}
@Override
public TableData getTableData(String name) {
return new TableData(null, null, null);
}
@Override
public TableData doSearch(String sql) {
logger.info("Parsing query " + sql);
CharStream stream = CharStreams.fromString(sql);
CaseChangingCharStream upper = new CaseChangingCharStream(stream, true);
MySqlLexer mySqlLexer = new MySqlLexer(upper);
CommonTokenStream tokens = new CommonTokenStream(mySqlLexer);
MySqlParser mySqlParser = new MySqlParser(tokens);
ParseTree tree = mySqlParser.dmlStatement();
BeaconQueryParser bqp = new BeaconQueryParser();
MySqlParserBaseListener listener = bqp;
ParseTreeWalker.DEFAULT.walk(listener, tree);
System.out.println(bqp.getBeaconQuery());
if (bqp.isValid()) {
try {
BeaconQuery beaconQuery = bqp.getBeaconQuery();
System.out.println(beaconQuery);
URIBuilder builder = new URIBuilder(BEACON_NETWORK_API + "/responses");
builder.setParameter("allele", beaconQuery.getAllele())
.setParameter("beacon", beaconQuery.getBeaconId())
.setParameter("chrom", beaconQuery.getChromosome())
.setParameter("pos", beaconQuery.getPosition().toString())
.setParameter("ref", beaconQuery.getReference());
URI uri = builder.build();
System.out.println(uri.toString());
JsonElement json = readJsonFromUrl(uri.toString());
printElement(json);
JsonElement root = json.getAsJsonArray().get(0);
Map<String, SchemaProperty> properties = new HashMap<>();
int index = 0;
properties.put("beacon", new SchemaProperty("json", index++));
properties.put("query", new SchemaProperty("json", index++));
properties.put("response", new SchemaProperty("json", index++));
properties.put("authHint", new SchemaProperty("json", index++));
properties.put("fullBeaconResponse", new SchemaProperty("json", index++));
ObjectMapper mapper = new ObjectMapper();
JsonNode node = mapper.valueToTree(properties);
SchemaId sid = SchemaId.createIgnoringFragment("beacon-network-result");
Schema s = new Schema(sid, node);
List<Map<String, Object>> objects = new ArrayList<>();
Map<String, Object> map = new HashMap<>();
map.put("beacon", fetchChildAsString(root,"beacon"));
map.put("query", fetchChildAsString(root,"query"));
map.put("response", fetchChildAsString(root,"response"));
map.put("authHint", fetchChildAsString(root,"authHint"));
map.put("fullBeaconResponse", fetchChildAsString(root,"fullBeaconResponse"));
objects.add(map);
TableData td = new TableData(s, objects, null);
return td;
} catch (URISyntaxException | IOException | JSONException ex) {
ex.printStackTrace();
throw new RuntimeException("Problem contacting the Beacon Network");
}
}
throw new RuntimeException("Problem parsing Beacon query");
}
private String fetchChildAsString(JsonElement root, String childName) {
JsonElement child = root.getAsJsonObject().get(childName);
if (child == null) {
return null;
} else {
return child.toString();
}
}
private static String readAll(Reader rd) throws IOException {
StringBuilder sb = new StringBuilder();
int cp;
while ((cp = rd.read()) != -1) {
sb.append((char) cp);
}
return sb.toString();
}
public static JsonElement readJsonFromUrl(String urlStr) throws IOException, JSONException {
URL url = new URL(urlStr);
logger.info("Requesting " + urlStr);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
// Enable output for the connection.
conn.setDoOutput(true);
conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
conn.setRequestProperty("Accept", "application/json");
InputStream is = conn.getInputStream();
try {
BufferedReader rd = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8")));
//String jsonText = "{ \"results\" : [ " + readAll(rd) + " ] }";
//logger.info("Received " + jsonText);
JsonElement je = new JsonParser().parse(readAll(rd));
return je;
} finally {
is.close();
}
}
}
|
#!/bin/bash
# Pre-commit style check: verifies that a source file's doxygen @date tag
# matches the date of its last commit, and that its @version line was touched
# by the commit under review. Exits 1 (with file:line messages on stderr)
# when either check fails.
#
# Usage: check_file_date_version.sh <file>

# FIX: abort with a usage message instead of running grep on an empty file
# argument (an unquoted empty ${FILE} made grep read stdin and hang).
FILE=${1:?usage: $0 <file>}
RESULT=0

# Extract "DD. Month YYYY" from an @date tag, if any.
FILE_DATE=$(grep -E '@date\s+([0-9]{2}\. \w+ [0-9]{4})' -- "$FILE" | sed -E 's/^.*@date\s+([0-9]{2}\. \w+ [0-9]{4}).*/\1/')
if [[ -n "$FILE_DATE" ]]; then
  # Date of the most recent commit touching this file, in the same format.
  AUTHOR_DATE=$(git log -1 --pretty="format:%ad" --date="format:%d. %B %Y" -- "$FILE")
  if [[ "$AUTHOR_DATE" != "$FILE_DATE" ]]; then
    FILE_DATE_LINE=$(grep -En -- "@date.*${FILE_DATE}" "$FILE" | cut -f1 -d:)
    echo "${FILE}:${FILE_DATE_LINE}:Please update file date to '$AUTHOR_DATE'." >&2
    RESULT=1
  fi
fi

# Extract "x.y" or "x.y.z" (optional leading V) from an @version tag, if any.
FILE_VERSION=$(grep -E '@version\s+V?([0-9]+\.[0-9]+(\.[0-9]+)?)' -- "$FILE" | sed -E 's/^.*@version\s+V?([0-9]+\.[0-9]+(\.[0-9]+)?).*/\1/')
if [[ -n "$FILE_VERSION" ]]; then
  FILE_VERSION_LINE=$(grep -En -- "@version.*${FILE_VERSION}" "$FILE" | cut -f1 -d:)
  AUTHOR_REV=$(git log -1 --pretty="format:%H")
  PARENT_REV=$(git log -1 --pretty="format:%P")
  # Commit that last modified the @version line, restricted to the range of
  # the commit under review.
  VERSION_BLAME=$(git blame "${PARENT_REV}..${AUTHOR_REV}" -l -L "${FILE_VERSION_LINE},${FILE_VERSION_LINE}" -- "$FILE" | sed -E 's/^([[:alnum:]]+).*/\1/')
  # FIX: the original compared the version string against the blame commit
  # hash ("1.2.3" vs a 40-char SHA), which can never match, so every file
  # with an @version tag was flagged. The intended check is whether the
  # version line was last changed by the current commit.
  if [[ "$AUTHOR_REV" != "$VERSION_BLAME" ]]; then
    echo "${FILE}:${FILE_VERSION_LINE}:Please increment file version." >&2
    RESULT=1
  fi
fi
exit $RESULT
|
#!/bin/sh
# Invoke this with no arguments to refresh all tarballs, or with a project name
# to refresh only that project.
#
# Example:
#     ./refresh_bb_tarballs.sh gmp

# Get this list via:
#     using BinaryBuilder
#     print("TRIPLETS=\"$(join(triplet.(BinaryBuilder.supported_platforms()), " "))\"")
TRIPLETS="i686-linux-gnu x86_64-linux-gnu aarch64-linux-gnu arm-linux-gnueabihf powerpc64le-linux-gnu i686-linux-musl x86_64-linux-musl aarch64-linux-musl arm-linux-musleabihf x86_64-apple-darwin14 x86_64-unknown-freebsd11.1 i686-w64-mingw32 x86_64-w64-mingw32"

# These are the projects currently using BinaryBuilder; both GCC-expanded and non-GCC-expanded:
BB_PROJECTS="gmp mbedtls libssh2 mpfr curl libgit2 pcre libuv unwind osxunwind dsfmt objconv"
BB_GCC_EXPANDED_PROJECTS="llvm openblas suitesparse openlibm"

# If we've been given a project name, filter down to that one.
# FIX: the original `[ -n ${1} ]` is the one-argument test — with no arguments
# it still evaluated true (the literal "-n" is a non-empty string), the case
# patterns collapsed to `**`, and both project lists were emptied, turning the
# documented "refresh everything" invocation into a silent no-op.
if [ -n "${1:-}" ]; then
    case "${BB_PROJECTS}" in
        *${1}*) BB_PROJECTS="${1}" ;;
        *) BB_PROJECTS="" ;;
    esac
    case "${BB_GCC_EXPANDED_PROJECTS}" in
        *${1}*) BB_GCC_EXPANDED_PROJECTS="${1}" ;;
        *) BB_GCC_EXPANDED_PROJECTS="" ;;
    esac
fi

# Get "contrib/" directory path
CONTRIB_DIR=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)

# For each triplet and each project, download the BB tarball and save its hash:
for triplet in ${TRIPLETS}; do
    for proj in ${BB_PROJECTS}; do
        # Upper-case the project name for the Make variable names.
        # (Character classes are quoted so the shell cannot glob them.)
        PROJ="$(echo "${proj}" | tr '[:lower:]' '[:upper:]')"
        make -C "${CONTRIB_DIR}/../deps" "USE_BINARYBUILDER_${PROJ}=1" "${PROJ}_BB_TRIPLET=${triplet}" "distclean-${proj}"
        make -C "${CONTRIB_DIR}/../deps" "USE_BINARYBUILDER_${PROJ}=1" "${PROJ}_BB_TRIPLET=${triplet}" "install-${proj}"
    done
    # GCC-expanded projects ship one tarball per supported libstdc++ ABI.
    for proj in ${BB_GCC_EXPANDED_PROJECTS}; do
        PROJ="$(echo "${proj}" | tr '[:lower:]' '[:upper:]')"
        for gcc in gcc4 gcc7 gcc8; do
            make -C "${CONTRIB_DIR}/../deps" "USE_BINARYBUILDER_${PROJ}=1" "${PROJ}_BB_TRIPLET=${triplet}-${gcc}" "BB_TRIPLET_CXXABI=${triplet}" "distclean-${proj}"
            make -C "${CONTRIB_DIR}/../deps" "USE_BINARYBUILDER_${PROJ}=1" "${PROJ}_BB_TRIPLET=${triplet}-${gcc}" "BB_TRIPLET_CXXABI=${triplet}" "install-${proj}"
        done
    done
done
|
package com.slickqa.junit;
import com.slickqa.client.errors.SlickError;
import com.slickqa.client.model.LogEntry;
import com.slickqa.client.model.Result;
import com.slickqa.junit.annotations.SlickLogger;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
/**
* Implementation of SlickLogger interface.
*/
public class SlickResultLogger implements SlickLogger {
// Flush when this many entries are buffered...
public static int BUFFER_SIZE = 10;
// ...or when the oldest buffered entry is at least this many seconds old.
public static int MAX_SECONDS_SINCE_FIRST_ENTRY = 5;
// Logger name stamped on every entry posted to slick.
public static final String LOGGER_NAME = "testcase";
// Entries below this level are dropped before they reach the buffer.
protected LogLevel minimumLevel;
// JUnit rule that owns the slick client and the current test result.
protected SlickResultRule slick;
// Pending entries not yet posted to slick.
protected ArrayList<LogEntry> buffer;
public SlickResultLogger(SlickResultRule slick) {
this.slick = slick;
this.minimumLevel = SlickLogger.DEFAULT_MINIMUM_LOG_LEVEL;
buffer = new ArrayList<>(BUFFER_SIZE);
}
// Flushes when the buffer is full, or when the oldest entry has been
// waiting longer than MAX_SECONDS_SINCE_FIRST_ENTRY seconds.
protected void uploadLogsIfNecessary() {
if (slick.isUsingSlick()) {
if (buffer.size() >= BUFFER_SIZE || (buffer.size() > 0 && (((new Date()).getTime() - buffer.get(0).getEntryTime().getTime()) / 1000) >= MAX_SECONDS_SINCE_FIRST_ENTRY)) {
flushLogs();
}
}
}
// Posts all buffered entries against the current result.
// NOTE(review): the buffer is replaced in the finally block, so entries are
// discarded even when the upload fails — confirm that lossy behavior is
// intended.
@Override
public void flushLogs() {
if (slick.isUsingSlick() && buffer.size() > 0) {
Result current = slick.getCurrentResult();
if (current != null) {
try {
slick.getSlickClient().result(current.getId()).addLogs(buffer);
} catch (SlickError e) {
e.printStackTrace();
System.err.println("!! ERROR: Unable to post logs to slick !!");
} finally {
buffer = new ArrayList<>(BUFFER_SIZE);
}
}
}
}
@Override
public LogLevel getMinimumLogLevel() {
return minimumLevel;
}
@Override
public void setMinimumLogLevel(LogLevel newMiniumum) {
this.minimumLevel = newMiniumum;
}
// Buffers an entry (only when slick is in use) and triggers a flush if due.
@Override
public void addLogEntry(LogEntry entry) {
if (slick.isUsingSlick()) {
buffer.add(entry);
uploadLogsIfNecessary();
}
// TODO: print message if configured
}
@Override
public boolean isLevelEnabled(LogLevel level) {
return level.getLevel() >= getMinimumLogLevel().getLevel();
}
// Builds a timestamped entry carrying this logger's name.
private LogEntry getLogEntryFor(LogLevel level, String msg) {
LogEntry entry = new LogEntry();
entry.setEntryTime(new Date());
entry.setLevel(level.toString());
entry.setLoggerName(LOGGER_NAME);
entry.setMessage(msg);
return entry;
}
// NOTE(review): formatting uses java.text.MessageFormat ({0}, {1}, ...),
// not the slf4j "{}" placeholder style — confirm callers expect this.
private String getFormattedMessage(String message, Object[] arguments) {
return MessageFormat.format(message, arguments);
}
@Override
public void log(LogLevel level, String msg) {
if (isLevelEnabled(level)) {
addLogEntry(getLogEntryFor(level, msg));
}
}
@Override
public void log(LogLevel level, String format, Object arg) {
if (isLevelEnabled(level)) {
addLogEntry(getLogEntryFor(level, getFormattedMessage(format, new Object[]{arg})));
}
}
@Override
public void log(LogLevel level, String format, Object arg1, Object arg2) {
if (isLevelEnabled(level)) {
addLogEntry(getLogEntryFor(level, getFormattedMessage(format, new Object[]{arg1, arg2})));
}
}
@Override
public void log(LogLevel level, String format, Object... args) {
if (isLevelEnabled(level)) {
addLogEntry(getLogEntryFor(level, getFormattedMessage(format, args)));
}
}
// Throwable variant: records the exception class, message and stack trace
// alongside the entry.
@Override
public void log(LogLevel level, String msg, Throwable t) {
if (isLevelEnabled(level)) {
LogEntry entry = getLogEntryFor(level, msg);
entry.setExceptionClassName(t.getClass().getName());
entry.setExceptionMessage(t.getMessage());
StackTraceElement[] elements = t.getStackTrace();
ArrayList<String> stackTrace = new ArrayList<>(elements.length);
for (StackTraceElement element : elements) {
stackTrace.add(element.toString());
}
entry.setExceptionStackTrace(stackTrace);
addLogEntry(entry);
}
}
// --- Per-level convenience methods: each forwards to log(...) with the
// --- corresponding LogLevel constant.
@Override
public boolean isTraceEnabled() {
return isLevelEnabled(LogLevel.TRACE);
}
@Override
public void trace(String msg) {
log(LogLevel.TRACE, msg);
}
@Override
public void trace(String format, Object arg) {
log(LogLevel.TRACE, format, arg);
}
@Override
public void trace(String format, Object arg1, Object arg2) {
log(LogLevel.TRACE, format, arg1, arg2);
}
@Override
public void trace(String format, Object... args) {
log(LogLevel.TRACE, format, args);
}
@Override
public void trace(String msg, Throwable t) {
log(LogLevel.TRACE, msg, t);
}
@Override
public boolean isDebugEnabled() {
return isLevelEnabled(LogLevel.DEBUG);
}
@Override
public void debug(String msg) {
log(LogLevel.DEBUG, msg);
}
@Override
public void debug(String format, Object arg) {
log(LogLevel.DEBUG, format, arg);
}
@Override
public void debug(String format, Object arg1, Object arg2) {
log(LogLevel.DEBUG, format, arg1, arg2);
}
@Override
public void debug(String format, Object... args) {
log(LogLevel.DEBUG, format, args);
}
@Override
public void debug(String msg, Throwable t) {
log(LogLevel.DEBUG, msg, t);
}
@Override
public boolean isInfoEnabled() {
return isLevelEnabled(LogLevel.INFO);
}
@Override
public void info(String msg) {
log(LogLevel.INFO, msg);
}
@Override
public void info(String format, Object arg) {
log(LogLevel.INFO, format, arg);
}
@Override
public void info(String format, Object arg1, Object arg2) {
log(LogLevel.INFO, format, arg1, arg2);
}
@Override
public void info(String format, Object... args) {
log(LogLevel.INFO, format, args);
}
@Override
public void info(String msg, Throwable t) {
log(LogLevel.INFO, msg, t);
}
@Override
public boolean isWarnEnabled() {
return isLevelEnabled(LogLevel.WARN);
}
@Override
public void warn(String msg) {
log(LogLevel.WARN, msg);
}
@Override
public void warn(String format, Object arg) {
log(LogLevel.WARN, format, arg);
}
@Override
public void warn(String format, Object arg1, Object arg2) {
log(LogLevel.WARN, format, arg1, arg2);
}
@Override
public void warn(String format, Object... args) {
log(LogLevel.WARN, format, args);
}
@Override
public void warn(String msg, Throwable t) {
log(LogLevel.WARN, msg, t);
}
@Override
public boolean isErrorEnabled() {
return isLevelEnabled(LogLevel.ERROR);
}
@Override
public void error(String msg) {
log(LogLevel.ERROR, msg);
}
@Override
public void error(String format, Object arg) {
log(LogLevel.ERROR, format, arg);
}
@Override
public void error(String format, Object arg1, Object arg2) {
log(LogLevel.ERROR, format, arg1, arg2);
}
@Override
public void error(String format, Object... args) {
log(LogLevel.ERROR, format, args);
}
@Override
public void error(String msg, Throwable t) {
log(LogLevel.ERROR, msg, t);
}
}
|
#!/usr/bin/env bash
# Install command-line tools using Homebrew.

# FIX: fail fast with a clear message if Homebrew itself is missing, instead
# of letting every following command error with "brew: command not found".
command -v brew >/dev/null 2>&1 || { echo "error: Homebrew is not installed — see https://brew.sh" >&2; exit 1; }

# Make sure we're using the latest Homebrew.
brew update

# Upgrade any already-installed formulae.
brew upgrade

# Save Homebrew's installed location.
BREW_PREFIX=$(brew --prefix)

# Install GNU core utilities (those that come with macOS are outdated).
# Don't forget to add `$(brew --prefix coreutils)/libexec/gnubin` to `$PATH`.
brew install coreutils
# FIX: -f so re-running the script refreshes the symlink instead of failing
# because it already exists.
ln -sf "${BREW_PREFIX}/bin/gsha256sum" "${BREW_PREFIX}/bin/sha256sum"

# Install some other useful utilities like `sponge`.
brew install moreutils
# Install GNU `find`, `locate`, `updatedb`, and `xargs`, `g`-prefixed.
brew install findutils
# Install GNU `sed`, overwriting the built-in `sed`.
# NOTE(review): per-formula install options (--with-*) were removed in
# Homebrew 2.x and will error on a modern brew — confirm the target Homebrew
# version or drop these options (applies to every --with-* flag below).
brew install gnu-sed --with-default-names
# Install a modern version of Bash.
brew install bash
brew install bash-completion2

## Switch to using brew-installed bash as default shell
#if ! fgrep -q "${BREW_PREFIX}/bin/bash" /etc/shells; then
#  echo "${BREW_PREFIX}/bin/bash" | sudo tee -a /etc/shells;
#  chsh -s "${BREW_PREFIX}/bin/bash";
#fi;

# Install `wget` with IRI support.
brew install wget --with-iri

# Install GnuPG to enable PGP-signing commits.
brew install gnupg

# Install more recent versions of some macOS tools.
brew install vim --with-override-system-vi
brew install grep
brew install openssh
brew install screen
brew install php
brew install gmp

# Install font tools.
brew tap bramstein/webfonttools
brew install sfnt2woff
brew install sfnt2woff-zopfli
brew install woff2

# Install some CTF tools; see https://github.com/ctfs/write-ups.
brew install aircrack-ng
brew install bfg
brew install binutils
brew install binwalk
brew install cifer
brew install dex2jar
brew install dns2tcp
brew install fcrackzip
brew install foremost
brew install hashpump
brew install hydra
brew install john
brew install knock
brew install netpbm
brew install nmap
brew install pngcheck
brew install socat
brew install sqlmap
brew install tcpflow
brew install tcpreplay
brew install tcptrace
brew install ucspi-tcp # `tcpserver` etc.
brew install xpdf
brew install xz

# Install other useful binaries.
brew install ack
#brew install exiv2
brew install git
brew install git-lfs
brew install gs
brew install imagemagick --with-webp
brew install lua
brew install lynx
brew install p7zip
brew install pigz
brew install pv
brew install rename
brew install rlwrap
brew install ssh-copy-id
brew install tree
brew install vbindiff
brew install zopfli

# dashbot related programs
brew install node
brew install nvm
brew install docker
brew install java
brew install flyway
brew install yarn

# Remove outdated versions from the cellar.
brew cleanup
|
#!/bin/bash
set -e

# Functions

# Runs a command line (a string, executed via sh -c), retrying up to
# $max_retries times with a 5-second pause between attempts. On success the
# command's output is echoed; after exhausting retries the script exits 1.
function execute_and_wait {
    command=$1
    max_retries=5
    retries=0
    # FIX: under the file-level `set -e`, a failing `$(echo ... | sh)`
    # assignment aborted the whole script before the retry loop could run.
    # Capturing the status with `|| cmd_code=$?` keeps the failure local.
    cmd_code=0
    cmd_output=$(sh -c "$command") || cmd_code=$?
    while [ "$cmd_code" != "0" ] && [ "$retries" -lt "$max_retries" ]; do
        echo "Executing $command. Wait and retry ($retries/$max_retries)"
        sleep 5
        retries=$((retries+1))
        cmd_code=0
        cmd_output=$(sh -c "$command") || cmd_code=$?
    done
    # FIX: the original tested `retries = max_retries`, which reported failure
    # even when the command finally succeeded on the last retry. The exit
    # status of the last attempt is the correct signal.
    if [ "$cmd_code" != "0" ]; then
        echo "Exit with error while executing $command. Reached max retries (=$max_retries)"
        exit 1
    fi
    echo "$cmd_output"
}
# Start a second, isolated "bootstrap" Docker daemon on its own unix socket,
# with networking (iptables/ip-masq/bridge) disabled and a separate graph
# directory, logging to /var/log/docker-bootstrap.log. Runs in the background
# inside the sudo shell.
sudo sh -c 'docker daemon -H unix:///var/run/docker-bootstrap.sock -p /var/run/docker-bootstrap.pid --iptables=false --ip-masq=false --bridge=none --graph=/var/lib/docker-bootstrap 2> /var/log/docker-bootstrap.log 1> /dev/null &'
# Block (with retries) until the bootstrap daemon process is visible to pgrep.
execute_and_wait "sudo pgrep -f 'docker daemon -H unix:///var/run/docker-bootstrap.sock'"
<reponame>bontempos/Game
package bontempos.Game.Act;
import processing.core.PApplet;
import java.util.ArrayList;
import java.util.Collections;
/**
* Action Constants Class to support Countdown and Action classes
*/
public class AConstants {
// Lazily-initialized singleton instance.
// NOTE(review): initialization is not synchronized — presumably fine for
// Processing's single-threaded draw loop, but confirm if accessed from
// other threads.
private static AConstants instance;
// Active one-shot/recurring actions.
ArrayList<Action> events;
// Active countdown timers.
ArrayList<Countdown> timers;
// Named-action checkers consulted by check().
ArrayList<Checker> checkers;
// Ordered action sequences.
ArrayList<ActionList> actionLists;
// Host sketch, injected via setPApplet().
static PApplet papplet;
//private static Game game;
private AConstants() {
events = new ArrayList<Action>();
timers = new ArrayList<Countdown>();
checkers = new ArrayList<Checker>();
actionLists = new ArrayList<ActionList>();
}
//////////////////////////////////////////////////// GETTERS
// Returns the singleton, creating it on first use.
public static AConstants get() {
if (instance == null) {
instance = new AConstants();
}
return instance;
}
public ArrayList<Countdown> getTimers(){
return timers;
}
public ArrayList<Action> getActions(){
return events;
}
public ArrayList<Checker> getCheckers(){
return checkers;
}
public ArrayList<ActionList> getActionLists(){
return actionLists;
}
//////////////////////////////////////////////////// SETTERS
public static void setPApplet( PApplet p){
papplet = p;
}
// Each add* method registers an item and returns the new list size.
public int addTimer(Countdown t){
timers.add(t);
//System.out.println("timer added");
return timers.size();
}
public int addAction(Action e){
events.add(e);
//System.out.println("action added");
return events.size();
}
public int addChecker(Checker c){
checkers.add(c);
//System.out.println("checker added");
return checkers.size();
}
public int addActionList(ActionList l){
actionLists.add(l);
//System.out.println("action list added");
return actionLists.size();
}
//////////////////////////////////////////////////// UPDATE
// Ticks every timer, action and action list once per frame. Lists are
// iterated backwards so items can be removed mid-iteration safely.
public void updateLists(){
if(!timers.isEmpty()){
//there is a problem in array name update with timer and checker.
for(int i = timers.size()-1; i >= 0; i--){
Countdown t = timers.get(i);
//System.out.println("update list timer:" + t.toInvoke.method);
t.update();
}
}
if(!events.isEmpty()){
for(int i = events.size()-1; i >= 0; i--){
Action e = events.get(i);
if(e.isActive()) {
e.eval();
}else{
// Inactive actions flagged autoRemove are garbage-collected here.
if(e.autoRemove) events.remove(e);
}
}
}
if(!actionLists.isEmpty()){
for(int i = actionLists.size()-1; i>=0; i--){
ActionList L = actionLists.get(i);
L.update();
}
}
}
//////////////////////////////////////////////////// METHODS
/*
* checks if an action (name) exists in the list of actions and returns true if yes, plus executes the action
*/
// NOTE(review): checkers are re-sorted on every call — acceptable for small
// lists, but worth hoisting if check() is called per-frame with many
// checkers. Non-permanent checkers fire at most once (isChecked() guard).
public boolean check(String actionName){
Collections.sort(checkers);
//System.out.print("checking if action: "+ actionName + " exists in checking list");
for(Checker c : checkers){
if(c.actionName.equals(actionName)){
if(c.isActive() && ( c.permanent || !c.isChecked() )){
c.checked = true;
c.dispatchAction();
//System.out.println("...it exists!");
return true;
}
}
}
//System.out.println("...does not exist!");
return false;
}
} |
<gh_stars>0
angular.module('betterTimetable')
.factory('CourseTemplateSrv', function(UISrv, $compile, DataTimeSrv) {
var _maxSubstring = UISrv.getMaxSubString();
var _dayProps = DataTimeSrv.getDayProps();
var _gridProps = UISrv.getGridProps();
var _setTimetableGrid = function(groupedCourses){
var timetable = $("#timetable");
timetable.empty();
var howManyNotEmpty = _howManyNotEmpty(groupedCourses);
var unusableSpace = _gridProps.columnNumber % howManyNotEmpty ;
var dayWidth = (_gridProps.columnNumber - unusableSpace) / howManyNotEmpty;
var offset = Math.floor(unusableSpace / _gridProps.offset);
for(var i = 0; i < _dayProps.daysWithinWeek; i++){
var isEmpty = _isEmpty(groupedCourses[i]);
if(isEmpty) {
continue;
}
//CONSTRUCT COLUMN
var template = "<div id='{@id}' class='col s12 m12 l{@w} {@offset}'></div>";
template = template.replace("{@id}", i);
template = template.replace("{@w}", dayWidth);
if(i === 0 && offset !== 0){
template = template.replace("{@offset}", "offset-l" + offset);
} else {
template = template.replace("{@offset}", "");
}
var column =$(template);
//ADD HEADER
var header = _getHeader(i);
column.append("<div class='row margin-top-10 center' ><b>" + header + "</b></div>");
//ADD EMPTY CELL
for(var j = 0; j < _dayProps.quartersWithinDay; j++){
column.append("<div class='row break reset-margin timetable'></br></div>");
}
timetable.append(column);
}
}
var _selectProperRow = function(singleCourse, dayNumber, lastWithinDay, scope, theFirst, theLast){
var dayColumn = $("#timetable").find("div#" + dayNumber);
var dayRows = dayColumn.find("div.row.timetable");
if(singleCourse !== undefined && singleCourse !== null && !singleCourse.hidden){
var duration = singleCourse.duration.seconds;
var courseBeginning = DataTimeSrv.getCourseDataTime(singleCourse);
var dayBeginning = DataTimeSrv.getDayBeginning(courseBeginning);
var diff = courseBeginning - dayBeginning; //in mili
var howManyRowsToSelect = duration / _dayProps.secondsInQuarter;
var offset = diff === 0 ? 0 :(diff / 1000 )/ _dayProps.secondsInQuarter;
for(var i = offset; i < offset + howManyRowsToSelect; i++){
//detect collision
var collision = _detectCollision(dayRows[i]);
if(i === offset + 1){
_setHeader(dayRows[i], singleCourse, scope);
} else if (i === offset + 2) {
_setHours(dayRows[i], singleCourse, courseBeginning, scope);
} else if (i === offset + 3) {
_setRoom(dayRows[i], singleCourse, scope);
} else if (i === offset + howManyRowsToSelect - 2) {
_setLecturers(dayRows[i], singleCourse, scope);
} else {
var addClass = _getAdditionalClass(i, offset, howManyRowsToSelect);
_fillCourse(dayRows[i], singleCourse, addClass, scope);
}
if(collision){
var addClass = "border-top";
_setSwitchButton(dayRows[offset], singleCourse, scope, addClass);
}
}
}
var mobile = UISrv.isMobile();
if(lastWithinDay && !mobile) { //remove unnecessary cells
var firstBegining = DataTimeSrv.getCourseDataTime(theFirst);
var lastBegining = DataTimeSrv.getCourseDataTime(theLast);
var diff = DataTimeSrv.getCourseTime(firstBegining) - _dayProps.dayBeginning;
var offset = diff === 0 ? 0 :(diff / 1000 )/ _dayProps.secondsInQuarter;
for(var i = 0; i < offset; i++){
$(dayRows[i]).remove();
}
var lastDiff = DataTimeSrv.getCourseTime(lastBegining) - _dayProps.dayBeginning;
var lastRow = theLast.duration.seconds / _dayProps.secondsInQuarter;
var lastOffset = lastDiff === 0 ? 0 :(lastDiff / 1000 )/ _dayProps.secondsInQuarter;
for(var j = lastOffset + lastRow; j < _dayProps.quartersWithinDay; j++){
$(dayRows[j]).remove();
}
} else if (lastWithinDay && mobile) {
var index = 0;
while(true){
if($(dayRows[index]).hasClass("row break reset-margin timetable")){
$(dayRows[index]).remove();
} else {
break;
}
++index;
}
for(var i = offset + howManyRowsToSelect; i < _dayProps.quartersWithinDay; i++){
$(dayRows[i]).remove();
}
}
}
var _getHeader = function(dayNumber){
return DataTimeSrv.getDayName(dayNumber);
}
var _getAdditionalClass = function (i, offset, howManyRowsToSelect){
var addClass = '';
if(i === offset){
addClass = 'border-top';
} else if(i === offset + howManyRowsToSelect - 1) {
addClass = 'border-bottom';
}
return addClass;
}
// Renders a body cell of a course block: clears the row, applies the
// course-type color plus the caller-supplied border class, and appends a
// compiled placeholder span wired to getDetails(course) on the scope.
var _fillCourse = function(processingRow, course, addClass, scope){
    var color = UISrv.getColor({courseType : course.courseType}); // CSS color class per course type
    $(processingRow).empty(); //reset cell
    $(processingRow).attr("class", "row timetable reset-margin course " + color + " " + addClass);
    var element = "<span ng-click='getDetails(" + JSON.stringify(course) + ")'><p class='center reset-margin transparent-text'>" + " " + "</p></span>";
    var compiledElement = $compile(element)(scope);
    $(processingRow).append(compiledElement);
    // NOTE(review): this ng-click attribute is set after compilation, so Angular
    // never processes it -- presumably redundant with the span above; confirm.
    $(processingRow).attr("ng-click", 'getDetails(' + JSON.stringify(course) + ')');
}
// Renders the course-name cell: colored by course type; shows a truncated
// short name, or an invisible placeholder when the short name is blank.
var _setHeader = function(processingRow, course, scope){
    var color = UISrv.getColor({courseType : course.courseType});
    $(processingRow).empty(); //reset cell
    $(processingRow).attr("class", "row timetable reset-margin course " + color);
    if(_isEmptyString(course.name.shortName)){
        // blank name: keep the row height with a transparent placeholder
        var element = "<p ng-click='getDetails(" + JSON.stringify(course) + ")' class='center reset-margin transparent-text'>" + " " + "</p>";
    } else {
        var element = "<p ng-click='getDetails(" + JSON.stringify(course) + ")' class='center reset-margin'><span>" + _getMaxSubstring(course.name.shortName)+ "</span></p>";
    }
    var compiledElement = $compile(element)(scope);
    $(processingRow).append(compiledElement);
}
// Renders the time-range cell ("H:MM - H:MM") for a course, colored by
// course type, with getDetails(course) wired via ng-click.
var _setHours = function(processingRow, course, beginCourseDate, scope){
    // Zero-pad single-digit minutes so e.g. 9:05 is not rendered as "9:5".
    // (The previous code only special-cased exactly 0 minutes.)
    var formatTime = function(date){
        var minutes = date.getMinutes();
        return date.getHours() + ":" + (minutes < 10 ? "0" + minutes : minutes);
    };
    var color = UISrv.getColor({courseType : course.courseType});
    $(processingRow).empty(); //reset cell
    $(processingRow).attr("class", "row timetable reset-margin course " + color);
    var startTxt = formatTime(beginCourseDate);
    // End instant = start + course.duration.seconds
    var end = new Date(beginCourseDate);
    end.setTime(beginCourseDate.getTime() + (course.duration.seconds * 1000));
    var endTxt = formatTime(end);
    var element = "<p ng-click='getDetails(" + JSON.stringify(course) + ")' class='center reset-margin'><span>" + startTxt + " - " + endTxt + "</span></p>";
    var compiledElement = $compile(element)(scope);
    $(processingRow).append(compiledElement);
}
// Renders the classroom cell: comma-separated room list (truncated), or a
// transparent placeholder when the course has no rooms.
var _setRoom = function(processingRow, course, scope){
    var color = UISrv.getColor({courseType : course.courseType});
    $(processingRow).empty(); //reset cell
    $(processingRow).attr("class", "row timetable reset-margin course " + color);
    // Build "roomA, roomB" with map/join instead of manual forEach concat.
    var rooms = course.classrooms.map(function(classroom){
        return classroom.room;
    }).join(", ");
    if(_isEmptyString(rooms)){
        // Fixed malformed markup: this placeholder used to open with "<span)'>".
        var element = "<p ng-click='getDetails(" + JSON.stringify(course) + ")' class='center reset-margin transparent-text'><span>" + " " + "</span></p>";
    } else {
        var element = "<p ng-click='getDetails(" + JSON.stringify(course) + ")' class='center reset-margin'><span>" + _getMaxSubstring(rooms) + "</span></p>";
    }
    var compiledElement = $compile(element)(scope);
    $(processingRow).append(compiledElement);
}
// Renders the lecturers cell: comma-separated lecturer short names
// (truncated), or a transparent placeholder when the joined list is blank.
var _setLecturers = function(processingRow, course, scope){
    var color = UISrv.getColor({courseType : course.courseType});
    $(processingRow).empty(); //reset cell
    $(processingRow).attr("class", "row timetable reset-margin course " + color);
    // "" + shortName mirrors the string-concat coercion of the old loop.
    var lecturers = course.lecturers.map(function(lecturer){
        return "" + lecturer.shortName;
    }).join(", ");
    if(_isEmptyString(lecturers)){
        var element = "<p ng-click='getDetails(" + JSON.stringify(course) + ")' class='center reset-margin transparent-text'><span>" + " " + "</span></p>";
    } else {
        var element = "<p ng-click='getDetails(" + JSON.stringify(course) + ")' class='center reset-margin'><span>" + _getMaxSubstring(lecturers) + "</span></p>";
    }
    var compiledElement = $compile(element)(scope);
    $(processingRow).append(compiledElement);
}
// Renders the collapse/switch button cell shown when two courses collide:
// an arrow icon that calls hide(course) on the scope when clicked.
var _setSwitchButton = function (processingRow, course, scope, addClass) {
    var color = UISrv.getColor({courseType : course.courseType});
    $(processingRow).empty(); //reset cell
    $(processingRow).attr("class", "row timetable reset-margin course " + color + " " + addClass);
    var element = "<p class='reset-margin right'><a href='' ng-click='hide(" + JSON.stringify(course) +")'><i class='material-icons switch-course-button icon-white'>arrow_drop_down</i></a></p>";
    var compiledElement = $compile(element)(scope);
    $(processingRow).append(compiledElement);
}
// True when the grouped-courses collection is missing (null/undefined)
// or contains no entries.
var _isEmpty = function (groupedCourses) {
    if (groupedCourses === null || groupedCourses === undefined) {
        return true;
    }
    return groupedCourses.length == 0;
}
// True for the empty string or a string of only whitespace.
var _isEmptyString = function (string) {
    return string.trim().length === 0;
}
// Counts how many of the 7 day buckets contain at least one course.
var _howManyNotEmpty = function (groupedCourses) {
    var count = 0;
    [0, 1, 2, 3, 4, 5, 6].forEach(function(day){
        var bucket = groupedCourses[day];
        // != null covers both null and undefined
        if (bucket != null && bucket.length > 0) {
            count++;
        }
    });
    return count;
}
// Truncates display text to the module-wide limit (_maxSubstring characters).
var _getMaxSubstring = function(string){
    return string.substring(0, _maxSubstring);
}
// A row that already carries the "course" class means another course is
// rendered there, i.e. the course being placed collides with it.
var _detectCollision = function(row){
    return $(row).hasClass("course");
}
// Hides every "break" row in all 7 day columns of #timetable, resetting
// the grid before courses are (re)rendered.
var _resetStartingCells = function(){
    for(var i = 0; i < 7; i++){
        // day columns are <div id="0"> ... <div id="6"> under #timetable
        var dayColumn = $("#timetable").find("div#" + i);
        var dayRows = dayColumn.find("div.row.timetable");
        for(var j = 0; j < dayRows.length; j++){
            if($(dayRows[j]).hasClass('row break reset-margin timetable')){
                $(dayRows[j]).hide();
            }
        }
    }
}
return {
setTimetableGrid : _setTimetableGrid,
isEmpty : _isEmpty,
selectProperRow : _selectProperRow,
resetStartingCells : _resetStartingCells
}
}); |
<filename>cmd/compose/fake_parser_test.go
package compose
import (
"errors"
"testing"
)
// TestFakeParser verifies that FakeParser records every call made to it and
// propagates the configured mock errors for Load, SetService and String.
func TestFakeParser(t *testing.T) {
	var err error
	f := &FakeParser{}

	// Load must return the injected error and record the file it was given.
	f.MockLoadError = errors.New("load error")
	err = f.Load("compose")
	if err == nil {
		t.Error("expecting error on Load, got none")
	} else if err.Error() != "load error" {
		t.Errorf("expecting error 'load error' on Load, got %v", err)
	}
	if val, ok := f.CalledLoad["compose"]; !ok || !val {
		t.Error("failed calling Load")
	}

	// SetService must return the injected error and record service+content.
	f.MockSetServiceError = errors.New("set service error")
	err = f.SetService("service", "content")
	if err == nil {
		t.Error("expecting error on SetService, got none")
	} else if err.Error() != "set service error" {
		t.Errorf("expecting error 'set service error' on SetService, got %v", err)
	}
	if val, ok := f.CalledSetService["service"]["content"]; !ok || !val {
		t.Error("failed calling SetService")
	}

	// The removal calls have no error path; only bookkeeping is checked.
	f.RemoveService("service")
	if val, ok := f.CalledRemoveService["service"]; !ok || !val {
		t.Error("failed calling RemoveService")
	}
	f.RemoveVolume("volume")
	if val, ok := f.CalledRemoveVolume["volume"]; !ok || !val {
		t.Error("failed calling RemoveVolume")
	}

	// String must return the injected error and flag that it was invoked.
	f.MockStringError = errors.New("string error")
	_, err = f.String()
	if err == nil {
		t.Error("expecting error on String, got none")
	} else if err.Error() != "string error" {
		t.Errorf("expecting error 'string error' on String, got %v", err)
	}
	if !f.CalledString {
		t.Error("failed calling String")
	}
}
|
//
// INFViewLayout.h
// INFView
//
// Created by Alexander on 2/9/18.
// Copyright © 2018 Alexander. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "INFLayoutViewInfo.h"
#import "INFOrientation.h"
/// Computes and stores layout geometry for an infinitely-scrolling view:
/// per-view layout info, synthetic content size/offset, and the leading /
/// trailing view ranges used for wrap-around recycling.
@interface INFViewLayout : NSObject

/// Scrolling axis the layout is computed along.
@property (nonatomic) INFOrientation orientation;
/// Viewport size of the hosting scroll view.
@property (nonatomic) CGSize scrollViewSize;
/// Current scroll offset within the synthetic content.
@property (nonatomic) CGPoint contentOffset;
/// Total synthetic content size reported to the scroll view.
@property (nonatomic) CGSize contentSize;
/// Layout info for every arranged view, in order.
@property (strong, nonatomic) NSArray<INFLayoutViewInfo*>* viewsLayoutInfo;
/// Range of views duplicated before the first real view (wrap-around head).
@property (nonatomic) NSRange leadingViewsRange;
/// Range of views duplicated after the last real view (wrap-around tail).
@property (nonatomic) NSRange trailingViewsRange;
/// YES when there is enough content to enable infinite scrolling.
@property (nonatomic) BOOL canHaveInfiniteScrolling;

/// Scalar content offset along the active orientation.
- (CGFloat)getContentOffsetPosition;
/// Sets the scalar content offset along the active orientation.
- (void)setContentOffsetPosition:(CGFloat)position;
/// Sets the content length along the active orientation.
- (void)setContentLength:(CGFloat)contentLength;
/// Summed length of the views in `range` along the active orientation.
- (CGFloat)lengthOfViewsInRange:(NSRange)range;
/// Summed length of all views.
- (CGFloat)lengthOfAllViews;
/// Summed length of the leading (wrap-around) views.
- (CGFloat)lengthOfLeadingViews;
/// Summed length of the trailing (wrap-around) views.
- (CGFloat)lengthOfTrailingViews;
/// Lays out the views in `range` starting at `position`.
- (void)moveViewsInRange:(NSRange)range position:(CGFloat)position;
/// Shifts every view by `offset` along the active orientation.
- (void)shiftViewsWithOffset:(CGFloat)offset;
/// Records the measured size for the view at `index`.
- (void)setAccurateSize:(CGSize)size forViewAtIndex:(NSInteger)index;
/// Views intersecting the span [startPosition, endPosition].
- (NSArray<INFLayoutViewInfo*>*)getViewsInAreaFrom:(CGFloat)startPosition to:(CGFloat)endPosition;
/// Views intersecting the currently visible area.
- (NSArray<INFLayoutViewInfo*>*)getViewsInVisibleArea;

@end
|
<filename>imageEnhancement.py
# image enhancement for blood cell smear images
# author: <NAME>
#
#import libraries
#from matplotlib import pyplot as plt
import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import matplotlib.cm as cm
# Read the source blood-smear image from the working directory.
# NOTE(review): cv2.imread returns None when the file is missing; the calls
# below would then fail -- consider validating before processing.
image = cv2.imread("c1.png")

# Convert to a single-channel grayscale image.
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
cv2.imwrite('gray.png',gray)

# Median filter (5x5 kernel) for smoothing.
blurM = cv2.medianBlur(gray, 5)
cv2.imwrite('blurM.png',blurM)

# Gaussian filter (9x9 kernel) for smoothing.
blurG = cv2.GaussianBlur(gray,(9,9), 0)
cv2.imwrite('blurG.png',blurG)

# Global histogram equalization.
histoNorm = cv2.equalizeHist(gray)
cv2.imwrite('histoNorm.png',histoNorm)

# Contrast Limited Adaptive Histogram Equalization (CLAHE):
# local equalization on 8x8 tiles, clipped at 2.0.
clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8,8))
claheNorm = clahe.apply(gray)
cv2.imwrite('claheNorm.png',claheNorm)
#contrast stretching
# Function to map each intensity level to output intensity level.
def pixelVal(pix, r1, s1, r2, s2):
    """Piecewise-linear contrast-stretch mapping for one intensity value.

    Maps [0, r1] onto [0, s1], (r1, r2] onto (s1, s2], and anything above
    r2 onto (s2, 255] using three straight-line segments.
    """
    if 0 <= pix <= r1:
        return (s1 / r1) * pix
    if r1 < pix <= r2:
        return ((s2 - s1) / (r2 - r1)) * (pix - r1) + s1
    return ((255 - s2) / (255 - r2)) * (pix - r2) + s2
# Piecewise-linear stretch parameters: [0,r1]->[0,s1], (r1,r2]->(s1,s2],
# (r2,255]->(s2,255].
r1 = 70
s1 = 0
r2 = 200
s2 = 255

# Vectorize the function to apply it to each value in the Numpy array.
pixelVal_vec = np.vectorize(pixelVal)

# Apply contrast stretching to the plain and the median-filtered grayscale.
contrast_stretched = pixelVal_vec(gray, r1, s1, r2, s2)
contrast_stretched_blurM = pixelVal_vec(blurM, r1, s1, r2, s2)

# Save edited images
cv2.imwrite('contrast_stretch.png', contrast_stretched)
cv2.imwrite('contrast_stretch_blurM.png', contrast_stretched_blurM)

# Canny edge detection (thresholds 100/200) on the raw, Gaussian-filtered
# and median-filtered images.
edge = cv2.Canny(gray,100,200)
cv2.imwrite('edge.png',edge)
edgeG = cv2.Canny(blurG,100,200)
cv2.imwrite('edgeG.png',edgeG)
edgeM = cv2.Canny(blurM,100,200)
cv2.imwrite('edgeM.png',edgeM)
'''
#display
fig = plt.figure()
a = fig.add_subplot(3, 3, 1)
imgplot = plt.imshow(gray, cmap='Greys_r')
a.set_title('a. Gray Scale Image')
a = fig.add_subplot(3, 3, 2)
imgplot = plt.imshow(blurM, cmap='Greys_r')
a.set_title('b. Median Filtered Image')
plt.show()
'''
|
/** @ignore *//** */
import * as Test from './Test'
import * as A from './Any/_api'
import * as B from './Boolean/_api'
import * as C from './Class/_api'
import * as F from './Function/_api'
import * as I from './Iteration/_api'
import * as M from './Misc/_api'
import * as N from './Number/_api'
import * as O from './Object/_api'
import * as S from './String/_api'
import * as T from './List/_api'
import * as L from './List/_api'
import * as U from './Union/_api'
import * as Any from './Any/_api'
import * as Boolean from './Boolean/_api'
import * as Class from './Class/_api'
import * as Function from './Function/_api'
import * as Iteration from './Iteration/_api'
import * as Misc from './Misc/_api'
import * as Number from './Number/_api'
import * as Object from './Object/_api'
import * as String from './String/_api'
import * as Tuple from './List/_api'
import * as List from './List/_api'
import * as Union from './Union/_api'
// Public API surface: each sub-module is exported twice, once under a
// one-letter alias (A, B, ...) and once under its full name (Any, Boolean, ...).
// T/Tuple and L/List intentionally alias the same './List/_api' module.
export {
  Test,
  A,
  Any,
  B,
  Boolean,
  C,
  Class,
  F,
  Function,
  I,
  Iteration,
  L,
  List,
  M,
  Misc,
  N,
  Number,
  O,
  Object,
  S,
  String,
  T,
  Tuple,
  U,
  Union,
}
// ///////////////////////////////////////////////////////////////////////////////////////
// NOTES /////////////////////////////////////////////////////////////////////////////////
// ///////////////////////////////////////////////////////////////////////////////////////
// RULES /////////////////////////////////////////////////////////////////////////////////
// I regularly check that the project is respecting the following rules
// ---------------------------------------------------------------------------------------
// 1. Better computations
//
// search for `= \{\n?[ ]*?\[(.*?\n)*?\}` and add `& {}` for better computation
// ! we can only do this if the mapped type is not intended to go deep (recurse)
// ! because `& {}` forces computation, if we do it deeply => resolves to `any`
// ! this happens only when a type is nested within itself => infinite recursion
// ---------------------------------------------------------------------------------------
// 2. Avoid fall-through `never`
//
// do not forget to NOT do `X extends never` => do `[X] extends [never]`
// if the goal is to explicitly match `never` & not distribute the type
// ---------------------------------------------------------------------------------------
// 3. Ensure type distribution
//
// There are three families of types that do not distribute well (at all)
// - types that make use of `keyof`. `keyof` is a distribution breaker. search for `(?<! in )keyof(?! any)`
// - recursive iteration types, the ones that are of the `Concat` form. search for `(?<!\?)\n.*?extends infer X`
// (this happens because this is an unsupported feature, it's neither `extends` nor a mapped type)
// (it has the effect of not distributing/aggregate unions with `At`/`[]`, we must do it manually)
// - types that are used to compute keys that match certain conditions. search for `}[Keys<` | `}[keyof`
//
// => In those cases, we do the distribution manually by inserting `<type> extends unknown ? ... : never`
// => `keyof` statements are ok and can be used if they're distributed. search for `extends unknown ?`
// => Remember that simple mapped types distribute well over unions and preserve them (no problem)
//
// => For recursive types that re-use each other, we MUST NOT use the distributed version since they all do it
// We must import the version of the type that is named `type __<name>`. This is the non-distributed version
// (otherwise, we would distribute over something that is already distributed (pointless, it uses resources))
//
// => And if you wonder what the `type _<name>` means, it's a "step" in the implementation (bare implementation)
// ///////////////////////////////////////////////////////////////////////////////////////
// TODO //////////////////////////////////////////////////////////////////////////////////
|
import json
def validate_json_string(s):
    """Return True if ``s`` is a syntactically valid JSON document, else False.

    Only JSON syntax errors are treated as invalid; other exceptions (e.g.
    TypeError for non-string/bytes input) propagate unchanged, matching the
    original behavior.
    """
    try:
        # The parsed value itself is not needed; the old code bound it to an
        # unused variable.
        json.loads(s)
    except json.JSONDecodeError:
        return False
    return True
<reponame>Gesserok/Market<gh_stars>0
package utils;
import java.util.UUID;
public class TerminalUtils {
    /**
     * Generates a pseudo-random long identifier from a random (type-4) UUID.
     *
     * <p>The value is the UUID's most significant 64 bits, so it may be
     * negative and uniqueness is only probabilistic, not guaranteed.
     */
    public static long longIdGenerator(){
        return UUID.randomUUID().getMostSignificantBits();
    }
}
|
<reponame>whardier/serverless-plugin-ssm-parameterize
'use strict';

// Awaitable delay helper: resolves (with no value) after `wait` milliseconds.
const sleep = async (wait) =>
  new Promise((resolve) => {
    setTimeout(resolve, wait);
  });

module.exports = { sleep };
|
<reponame>devilry/devilry-django
import re
from devilry.devilry_account.cradminextensions import devilry_crmenu_account
from devilry.devilry_account.crapps import account
from devilry.devilry_cradmin import devilry_crinstance
class Menu(devilry_crmenu_account.Menu):
    """Cradmin menu for the account CrAdmin instance."""

    def build_menu(self):
        # Extend the base account menu with the active role menu item.
        super(Menu, self).build_menu()
        self.add_role_menuitem_object(active=True)
class CrAdminInstance(devilry_crinstance.BaseDevilryCrInstance):
    """CrAdmin instance serving the user's own account pages under /account/."""
    menuclass = Menu
    roleclass = None  # user-scoped instance; no role object
    id = 'devilry_account'
    rolefrontpage_appname = 'account'
    flatten_rolefrontpage_url = True
    apps = [
        ('account', account.App),
    ]

    def get_devilryrole_type(self):
        # This instance has no devilry role concept.
        return None

    def has_access(self):
        """
        We give any user access to this instance, including unauthenticated users.
        """
        # NOTE(review): despite the docstring, this returns
        # request.user.is_authenticated, so anonymous users are denied --
        # confirm which behavior is intended.
        return self.request.user.is_authenticated

    @classmethod
    def matches_urlpath(cls, urlpath):
        # This instance claims every URL under /account/.
        return re.match('^/account/.*$', urlpath)
|
<filename>gis-regression-analysis-core/src/main/java/com/katus/data/SimpleRecord.java
package com.katus.data;
import com.katus.exception.InvalidParamException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author <NAME>
* @version 1.0, 2021-10-07
*/
public class SimpleRecord extends AbstractRecord<String> {
    private static final Logger logger = LoggerFactory.getLogger(SimpleRecord.class);

    // Field separator used both when parsing (as a regex) and serializing.
    protected final String SEPARATOR;

    /**
     * @param str separator-delimited numeric values backing this record
     * @param sep the field separator; note it is passed to String#split, so
     *            it is interpreted as a regular expression there
     */
    public SimpleRecord(String str, String sep) {
        super();
        this.SEPARATOR = sep;
        init(str);
    }

    /**
     * Parses a separator-delimited line into an array of doubles.
     * NOTE(review): String#split never returns a zero-length array for any
     * input, so the length guard below can never trigger -- confirm intent.
     */
    @Override
    public double[] load(String s) {
        String[] items = s.split(SEPARATOR);
        if (items.length < 1) {
            logger.error("item length is too short");
            throw new InvalidParamException();
        }
        double[] data = new double[items.length];
        for (int i = 0; i < data.length; i++) {
            data[i] = Double.parseDouble(items[i]);
        }
        return data;
    }

    /**
     * Serializes the record as y followed by each x value, joined by the
     * separator. NOTE(review): SEPARATOR is used literally here but as a
     * regex in load(); separators containing regex metacharacters will not
     * round-trip -- verify callers only use plain separators.
     */
    @Override
    public String put() {
        StringBuilder sb = new StringBuilder();
        sb.append(y);
        for (int i = 0; i < xSize(); i++) {
            sb.append(SEPARATOR).append(x(i));
        }
        return sb.toString();
    }
}
}
|
<reponame>klimesf/a4m36sep-semestral-project
package cz.cvut.fel.sep.klimefi1.semestral.ws.customerDatabase;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for CustomerDetailType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="CustomerDetailType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="firstName" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="5"/>
* <element name="surname" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="5"/>
* <element name="address" type="{http://www.cvut.cz/FEL/}AddressType" maxOccurs="3"/>
* <element name="phoneNum" type="{http://www.cvut.cz/FEL/}PhoneType" maxOccurs="3"/>
* <element name="birthNum" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="countryOfOrigin" type="{http://www.w3.org/2001/XMLSchema}string"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "CustomerDetailType", propOrder = {
"firstName",
"surname",
"address",
"phoneNum",
"birthNum",
"countryOfOrigin"
})
public class CustomerDetailType {
@XmlElement(required = true)
protected List<String> firstName;
@XmlElement(required = true)
protected List<String> surname;
@XmlElement(required = true)
protected List<AddressType> address;
@XmlElement(required = true)
protected List<PhoneType> phoneNum;
@XmlElement(required = true)
protected String birthNum;
@XmlElement(required = true)
protected String countryOfOrigin;
/**
* Gets the value of the firstName property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the firstName property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getFirstName().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getFirstName() {
if (firstName == null) {
firstName = new ArrayList<String>();
}
return this.firstName;
}
/**
* Gets the value of the surname property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the surname property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getSurname().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getSurname() {
if (surname == null) {
surname = new ArrayList<String>();
}
return this.surname;
}
/**
* Gets the value of the address property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the address property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getAddress().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link AddressType }
*
*
*/
public List<AddressType> getAddress() {
if (address == null) {
address = new ArrayList<AddressType>();
}
return this.address;
}
/**
* Gets the value of the phoneNum property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the phoneNum property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getPhoneNum().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link PhoneType }
*
*
*/
public List<PhoneType> getPhoneNum() {
if (phoneNum == null) {
phoneNum = new ArrayList<PhoneType>();
}
return this.phoneNum;
}
/**
* Gets the value of the birthNum property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getBirthNum() {
return birthNum;
}
/**
* Sets the value of the birthNum property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setBirthNum(String value) {
this.birthNum = value;
}
/**
* Gets the value of the countryOfOrigin property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getCountryOfOrigin() {
return countryOfOrigin;
}
/**
* Sets the value of the countryOfOrigin property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setCountryOfOrigin(String value) {
this.countryOfOrigin = value;
}
}
|
#!/bin/bash
# Launch the MSE demo server from the videojs-flow demo directory.

# Abort instead of running ./mse from the wrong directory if cd fails.
cd /root/software/videojs-flow/demo || exit 1

# exec replaces the shell, so the script's exit status is mse's own.
exec ./mse
require 'rails_helper'
RSpec.describe "Users", type: :request do
describe "#index" do
it "returns success response" do
get "/api/v1/users"
expect(response.status).to eq(200)
end
describe "Pagination" do
before do
51.times do
User.create!(name: Faker::Name.name, email: Faker::Internet.unique.email, phone: Faker::Base.numerify('###-###-####'))
end
end
it "returns success response" do
get "/api/v1/users?page=1"
expect(response.status).to eq(200)
end
context "when requesting first page" do
it "returns paginated data" do
get "/api/v1/users?page=1"
expect(response_body["total_pages"]).to eq 3
expect(response_body["current_page"]).to eq 1
expect(response_body["next_page"]).to eq 2
expect(response_body["previous_page"]).to eq 1
expect(response_body["total_users"]).to eq 51
end
end
context "when requesting second page" do
it "returns paginated data" do
get "/api/v1/users?page=2"
expect(response_body["total_pages"]).to eq 3
expect(response_body["current_page"]).to eq 2
expect(response_body["next_page"]).to eq 3
expect(response_body["previous_page"]).to eq 1
expect(response_body["total_users"]).to eq 51
end
end
end
describe "Sort" do
before do
@a = User.create!(name: "AAA", email: "<EMAIL>", phone: "123-456-7890")
@b = User.create!(name: "BBB", email: "<EMAIL>", phone: "222-456-7890")
@c = User.create!(name: "CCC", email: "<EMAIL>", phone: "333-456-7890")
end
it "returns success response" do
get "/api/v1/users?sort=name"
expect(response.status).to eq(200)
end
context "when sorting by name" do
it "returns sorted asc data" do
get "/api/v1/users?sort=name"
user_names = response_body["users"].map { |user| user["name"] }
expect(user_names).to eq [@a.name, @b.name, @c.name]
end
it "returns sorted desc data" do
get "/api/v1/users?sort=-name"
user_names = response_body["users"].map { |user| user["name"] }
expect(user_names).to eq [@c.name, @b.name, @a.name]
end
end
context "when sorting by email" do
it "returns sorted asc data" do
get "/api/v1/users?sort=email"
user_names = response_body["users"].map { |user| user["email"] }
expect(user_names).to eq [@a.email, @b.email, @c.email]
end
it "returns sorted desc data" do
get "/api/v1/users?sort=-email"
user_names = response_body["users"].map { |user| user["email"] }
expect(user_names).to eq [@c.email, @b.email, @a.email]
end
end
context "when sorting by phone" do
it "returns sorted asc data" do
get "/api/v1/users?sort=phone"
user_names = response_body["users"].map { |user| user["phone"] }
expect(user_names).to eq [@a.phone, @b.phone, @c.phone]
end
it "returns sorted desc data" do
get "/api/v1/users?sort=-phone"
user_names = response_body["users"].map { |user| user["phone"] }
expect(user_names).to eq [@c.phone, @b.phone, @a.phone]
end
end
end
describe "Search" do
before do
@a = User.create!(name: "AAA", email: "<EMAIL>", phone: "123-456-7890")
@b = User.create!(name: "BBB", email: "<EMAIL>", phone: "123-456-7890")
@c = User.create!(name: "CCC", email: "<EMAIL>", phone: "123-456-7890")
end
it "returns success response" do
get "/api/v1/users?search=some-user"
expect(response.status).to eq(200)
end
context "when searching by name" do
it "returns search data" do
get "/api/v1/users?search=aaa"
expect(response_body["users"]).to include @a.as_json
end
end
context "when searching by email" do
it "returns search data" do
get "/api/v1/users?search=example2"
expect(response_body["users"]).to include @b.as_json
end
end
context "when searching by phone" do
it "returns search data" do
get "/api/v1/users?search=123"
expect(response_body["users"]).to include @a.as_json, @b.as_json, @c.as_json
end
end
end
end
describe "#show" do
  before do
    # A persisted user whose detail endpoint is requested below.
    @show_user = User.create!(name: Faker::Name.name, email: Faker::Internet.unique.email, phone: Faker::Base.numerify('###-###-####'))
  end
  it "returns success" do
    get "/api/v1/users/#{@show_user.id}"
    expect(response.status).to eq(200)
  end
end
describe "#create" do
  context "when successful" do
    it "returns success" do
      post "/api/v1/users", params: { user: { name: Faker::Name.name, email: Faker::Internet.unique.email, phone: Faker::Base.numerify('###-###-####') }}
      expect(response.status).to eq(200)
    end
  end
  context "when error" do
    before do
      # Pre-create a user with this email so the second create below
      # violates the uniqueness validation.
      @email = Faker::Internet.unique.email
      User.create!(name: Faker::Name.name, email: @email, phone: Faker::Base.numerify('###-###-####'))
    end
    it "returns error" do
      post "/api/v1/users", params: { user: { name: Faker::Name.name, email: @email, phone: Faker::Base.numerify('###-###-####') }}
      expect(response.status).to eq(422)
      expect(response_body["error"]).to include "email" => ["has already been taken"]
    end
  end
end
describe "#update" do
  before do
    # Target user for the PUT requests below.
    @update_user = User.create!(name: Faker::Name.name, email: Faker::Internet.unique.email, phone: Faker::Base.numerify('###-###-####'))
  end
  context "when successful" do
    it "returns success" do
      put "/api/v1/users/#{@update_user.id}", params: { user: { name: Faker::Name.name, email: Faker::Internet.unique.email, phone: Faker::Base.numerify('###-###-####') }}
      expect(response.status).to eq(200)
    end
  end
  context "when error" do
    it "returns error" do
      # Malformed email triggers the model's format validation.
      put "/api/v1/users/#{@update_user.id}", params: { user: { name: Faker::Name.name, email: "bad_email", phone: Faker::Base.numerify('###-###-####') }}
      expect(response.status).to eq(422)
    end
  end
end
describe "#destroy" do
  before do
    # User deleted by the request below.
    @destroy_user = User.create!(name: Faker::Name.name, email: Faker::Internet.unique.email, phone: Faker::Base.numerify('###-###-####'))
  end
  it "returns no content" do
    delete "/api/v1/users/#{@destroy_user.id}"
    expect(response.status).to eq(204)
  end
end
# Parsed JSON body of the most recent response.
def response_body
  JSON.parse(response.body)
end
end |
#!/bin/bash
# Pull the shared BiomarkerBenchmark helper scripts into the current
# directory and instantiate download.sh from its template.
set -e  # stop on the first failed copy

cp ../Helper/BiomarkerBenchmark/parse.py .
cp ../Helper/BiomarkerBenchmark/parse.sh .
cp ../Helper/BiomarkerBenchmark/cleanup.sh .

# Generate download.sh by substituting the dataset URL into the template.
# (The previous plain cp of download.sh was redundant: this sed immediately
# overwrote it.)
sed -e 's,{urlExpression},https://osf.io/bu6tk/download?version=4,g' \
  ../Helper/BiomarkerBenchmark/download.sh > download.sh
package com.example.movielingo.model;
/**
 * Flash card describing a movie or a series episode together with the
 * language pair and difficulty used for the vocabulary exercise.
 */
public class MovieFlashCard {
    private String movieName;
    // true for a movie; false for a series episode (season/episode then apply)
    private boolean isMovie;
    private int season;
    private int episode;
    private String difficulty;
    private String sourceLanguage;
    private String targetLanguage;

    public String getMovieName() {
        return movieName;
    }

    public boolean isMovie() {
        return isMovie;
    }

    public int getSeason() {
        return season;
    }

    public int getEpisode() {
        return episode;
    }

    public String getDifficulty() {
        return difficulty;
    }

    public void setMovieName(String movieName) {
        this.movieName = movieName;
    }

    // The setters below were missing: isMovie/season/episode/difficulty had
    // getters but could never be assigned. Adding them is backward-compatible.

    /** Marks this card as a movie (true) or a series episode (false). */
    public void setMovie(boolean movie) {
        this.isMovie = movie;
    }

    /** Season number; only meaningful when this card is not a movie. */
    public void setSeason(int season) {
        this.season = season;
    }

    /** Episode number; only meaningful when this card is not a movie. */
    public void setEpisode(int episode) {
        this.episode = episode;
    }

    /** Difficulty label for the exercise (e.g. "easy"). */
    public void setDifficulty(String difficulty) {
        this.difficulty = difficulty;
    }

    public String getSourceLanguage() {
        return sourceLanguage;
    }

    public void setSourceLanguage(String sourceLanguage) {
        this.sourceLanguage = sourceLanguage;
    }

    public String getTargetLanguage() {
        return targetLanguage;
    }

    public void setTargetLanguage(String targetLanguage) {
        this.targetLanguage = targetLanguage;
    }

    @Override
    public String toString() {
        return "MovieFlashCard{" +
                "movieName='" + movieName + '\'' +
                ", isMovie=" + isMovie +
                ", season=" + season +
                ", episode=" + episode +
                ", difficulty='" + difficulty + '\'' +
                ", sourceLanguage='" + sourceLanguage + '\'' +
                ", targetLanguage='" + targetLanguage + '\'' +
                '}';
    }
}
|
#include <iostream>
#include <vector>
#include <algorithm>
using namespace std;
int main() {
    vector<int> v = {1, 3, 4, 1, 3, 7, 8, 4};

    // Sort so equal values become adjacent, then drop the duplicate tail:
    // unique() compacts distinct values to the front and returns the new
    // logical end, which erase() trims off.
    sort(v.begin(), v.end());
    auto new_end = unique(v.begin(), v.end());
    v.erase(new_end, v.end());

    return 0;
}
#!/bin/bash
# Emit a Consul check definition (JSON) for a deliberately failing check,
# bound to the Docker container this script runs inside.

# Container id taken from the cgroup cpuset path of PID 1.
# NOTE(review): this relies on the cgroup v1 /proc/1/cpuset layout; on
# cgroup v2 hosts the file may contain just "/" -- verify on target hosts.
DOCKER_ID=$(awk -F/ '{ print $NF }' /proc/1/cpuset)
# Heredoc delimiter is unquoted on purpose so ${DOCKER_ID} expands into the JSON.
_CHECK=$(cat <<EOT
{
"check": {
"id": "docker-test-critical",
"name": "docker test critical",
"docker_container_id": "${DOCKER_ID}",
"shell": "/bin/bash",
"args": ["/usr/local/bin/dummy.sh", "failing"],
"interval": "10s"
}
}
EOT
)
echo "${_CHECK}"
|
// Shape of the persisted user-settings payload.
type UserSettings = {
  // Define the structure of UserSettings
  // Example:
  theme: string;
  language: string;
  // ... other settings fields
};

// Dispatched when the settings were fetched successfully.
type FetchSuccessAction = {
  type: typeof SettingsActionTypes.FETCHED;
  payload: { settings: UserSettings };
};

// Dispatched when the settings fetch failed.
type FetchFailedAction = {
  type: typeof SettingsActionTypes.FETCH_FAILED;
  payload: { error: Error };
};

// Dispatched to update the stored settings.
type UpdateAction = {
  type: typeof SettingsActionTypes.UPDATE;
  payload: { settings: UserSettings };
};

// Union of every settings-related action handled by the reducer.
type SettingsAction = FetchSuccessAction | FetchFailedAction | UpdateAction;
<gh_stars>0
/**
* @license
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
foam.CLASS({
  package: 'foam.u2',
  name: 'GoogleValidator',
  extends: 'foam.u2.DefaultValidator',

  documentation: 'Standard Google Element validator, which is stricter than the default.',

  constants: [
    // NOTE(review): this is an assignment expression inside an array literal;
    // it leaks DEFAULT_DISALLOWED_NODE_NAMES as a global (and would throw in
    // strict mode). Presumably FOAM expects {name, value} constant objects
    // here -- confirm against the FOAM constants convention before changing.
    DEFAULT_DISALLOWED_NODE_NAMES = {
      APPLET: true,
      EMBED: true,
      META: true,
      OBJECT: true,
      SCRIPT: true,
      STYLE: true,
      TEMPLATE: true
    }
  ],

  properties: [
    [
      // Node names rejected by validateNodeName(), keyed by upper-case tag.
      'disallowedNodeNames',
      function() { return this.DEFAULT_DISALLOWED_NODE_NAMES; }
    ]
  ],

  methods: [
    // Accepts a node name unless it appears in the disallowed map.
    function validateNodeName(name) {
      return ! this.disallowedNodeNames[name];
    }
  ]
});
|
import random
from tqdm import trange
class BisectionMethod:
    """Root finder based on repeated interval halving (bisection)."""

    def __init__(self, function, epsilon=1e-6):
        # function: callable whose real roots are sought
        # epsilon: interval width at which the search stops
        self.function = function
        self.epsilon = epsilon

    def find(self, ends):
        """Finds root within ends.

        Args:
            ends: a tuple containing the ends of interval where to check for roots.
        """
        if self.function(ends[0]) * self.function(ends[1]) > 0:
            raise ValueError('Sign of function at both the end points must be opposite.')
        lo, hi = min(ends), max(ends)
        while hi - lo > self.epsilon:
            mid = (lo + hi) / 2
            # Keep the half-interval whose endpoints still bracket a sign change.
            if self.function(mid) * self.function(lo) > 0:
                lo = mid
            else:
                hi = mid
        return (lo + hi) / 2

    def find_all(self, iterations=100, Range=10):
        """Finds all root of an equation.

        Args:
            iterations: number of iterations to initialise the ends
            Range: range of values used to initialise the ends
        """
        roots = []
        for _ in trange(iterations):
            # Sample random endpoints until they bracket a sign change.
            a = b = 0
            while self.function(a) * self.function(b) >= 0:
                a = (random.random() - 0.5) * Range
                b = (random.random() - 0.5) * Range
            root = self.find(ends=(min(a, b), max(a, b)))
            # Record the root only if no epsilon-close root is already known.
            if not any(abs(known - root) < self.epsilon for known in roots):
                roots.append(root)
        return roots
if __name__ == "__main__":
    # Demo: locate a root of the quintic on [0, 1000] with a tight tolerance.
    solver = BisectionMethod(lambda x: x ** 5 - 25 * x ** 4 + 184 * x ** 3 - 640 * x ** 2 - 2223 * x - 4905, 1e-10)
    # solver = BisectionMethod(lambda x: x ** 3 - 6 * (x ** 2) + 11 * x - 6, 1e-8)
    print (solver.find((0, 1000)))
    # print(solver.find_all(iterations=100))
|
package domaine.dto;
import domaine.bizz.interfaces.BaseEntite;
/**
 * DTO for a cancellation ("annulation"): exposes the reason ("motif") and
 * the user who created it.
 */
public interface AnnulationDto extends BaseEntite {
  /**
   * @return the reason for the cancellation.
   */
  String getMotif();
  /**
   * @return the user who created the cancellation.
   */
  UserDto getCreateur();
  /**
   * Sets the reason for the cancellation.
   *
   * @param motif - the new reason.
   */
  void setMotif(String motif);
  /**
   * Sets the creator of the cancellation.
   *
   * @param createur - the new creator.
   */
  void setCreateur(UserDto createur);
}
|
#!/usr/bin/env sh
set -euf
# shellcheck source=scripts/commands/view.sh
. "${SCRIPT_DIR}/commands/view.sh"
# Prints the usage/help text for the `terraform` subcommand to stdout.
# NOTE(review): the heredoc delimiter is unquoted, so the body is subject to
# parameter/command expansion. The current text contains no expandable
# tokens, but a quoted delimiter (<<'EOF') would be safer — confirm before
# changing, since this text is user-visible output.
terraform_usage() {
cat <<EOF
helm secrets [ OPTIONS ] terraform <path to file>
Subcommand which is compatible with terraform external data source provider.
https://registry.terraform.io/providers/hashicorp/external/latest/docs/data-sources/data_source
Typical usage:
  $ helm secrets terraform secrets/myproject/nginx/secrets.yaml
Example output: {"content_base64":"<base64 coded content of value file>"}
EOF
}
# Decrypts the given values file and prints it as JSON suitable for the
# terraform "external" data source: {"content_base64":"<base64 of file>"}.
# $1 - path to the (encrypted) values file, or a help flag.
terraform() {
  # Show usage and abort when the caller asked for help.
  if is_help "$1"; then
    terraform_usage
    exit 1
  fi

  # view_helper decrypts/renders the file; propagate its failure.
  content=$(view_helper "$1") || exit 1

  # base64-encode without line wrapping (tr strips the newlines base64
  # inserts), then wrap it in the JSON envelope terraform expects.
  encoded=$(printf '%s' "${content}" | base64 | tr -d '\n')
  printf '{"content_base64":"%s"}' "${encoded}"
}
|
#!/bin/bash
# Build and publish the scrayorg/grafana-configurator image.
# Requires DOCKER_TOKEN and DOCKER_USERNAME in the environment.
# The token is piped via stdin so it never appears in the argument list.
echo "$DOCKER_TOKEN" | docker login -u "$DOCKER_USERNAME" --password-stdin
docker build -t scrayorg/grafana-configurator scray-examples/persistent-traffic-data/monitoring/grafana-configurator
docker push scrayorg/grafana-configurator
|
def filter_words(words, letter):
    """Return the words that do not contain ``letter``.

    Args:
        words: iterable of strings to filter.
        letter: substring whose presence disqualifies a word.

    Returns:
        A list of the words from ``words`` not containing ``letter``.
    """
    # A comprehension replaces the manual accumulator loop.
    return [word for word in words if letter not in word]
'use strict'
const {AUTHENTICATION_ERROR} = require('../../config/errors');
const Factory = require('../../common/classes/factory');
/**
 * Koa middleware factory that authenticates requests with a Bearer token.
 * On success it populates ctx.state.user, ctx.state.accessToken and
 * ctx.user.{User,Session}; on failure it throws 401 with AUTHENTICATION_ERROR.
 */
module.exports.requestAuthenticator = () => {
  // Extracts the raw token from an `Authorization: Bearer <token>` header;
  // returns undefined when the header is absent or malformed.
  const getTokenFromHeader = (ctx) => {
    if (ctx.header && ctx.header.authorization) {
      const parts = ctx.header.authorization.split(' ')
      if (parts.length === 2) {
        const scheme = parts[0]
        const credentials = parts[1]
        if (/^Bearer$/i.test(scheme)) {
          return credentials
        }
      }
    }
  }
  return async function (ctx, next) {
    try {
      const token = getTokenFromHeader(ctx)
      if (!token) {
        return ctx.throw(401, AUTHENTICATION_ERROR)
      }
      // Resolve the session for this token. NOTE(review): the client IP and
      // user-agent are passed along — presumably the session is validated
      // against them; confirm in UserSession.FindByAccessToken.
      const us = Factory.UserSession(ctx);
      const session = await us.FindByAccessToken(token, ctx.request.ip, ctx.request.header['user-agent']);
      if (!session) {
        return ctx.throw(401, AUTHENTICATION_ERROR)
      }
      ctx.state = ctx.state || {}
      const user = Factory.User(ctx);
      ctx.state.user = await user.FindById(session.userId);
      if (!ctx.state.user) {
        return ctx.throw(401, AUTHENTICATION_ERROR)
      }
      // Expose accessor objects and the token to downstream middleware.
      ctx.user = ctx.user || {}
      ctx.user.User = user;
      ctx.user.Session = us;
      ctx.state.accessToken = token;
      return next()
    } catch (err) {
      // ctx.throw raises, so 401s above also land here; re-throw with the
      // original status when present, 500 otherwise.
      ctx.throw(err.status || 500, err.message)
    }
  }
}
|
<gh_stars>0
package cn.st.test;
import cn.st.domain.User;
import cn.st.mapper.UserMapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStream;
import java.util.Date;
import java.util.List;
/**
 * @description: Smoke tests for the MyBatis UserMapper: PageHelper paging
 *               (test3), single-row lookup (test2) and insert (test1).
 * @author: st
 * @create: 2021-02-07 11:58
 **/
public class MyBatisTest {
    @Test
    public void test3() throws IOException {
        // Build a SqlSessionFactory from the XML config on the classpath.
        InputStream resourceAsStream = Resources.getResourceAsStream("sqlMapConfig.xml");
        SqlSessionFactory sqlSessionFactory = new SqlSessionFactoryBuilder().build(resourceAsStream);
        SqlSession sqlSession = sqlSessionFactory.openSession();
        UserMapper mapper = sqlSession.getMapper(UserMapper.class);
        // Configure paging: current page number + page size. PageHelper
        // intercepts the next mapper query executed on this thread.
        PageHelper.startPage(3,3);
        List<User> userList = mapper.findAll();
        for (User user : userList) {
            System.out.println(user);
        }
        // Read back the paging metadata computed for the query above.
        PageInfo<User> pageInfo = new PageInfo<User>(userList);
        System.out.println("当前页:"+pageInfo.getPageNum());
        System.out.println("每页显示条数:"+pageInfo.getPageSize());
        System.out.println("总条数:"+pageInfo.getTotal());
        System.out.println("总页数:"+pageInfo.getPages());
        System.out.println("上一页:"+pageInfo.getPrePage());
        System.out.println("下一页:"+pageInfo.getNextPage());
        System.out.println("是否是第一个:"+pageInfo.isIsFirstPage());
        System.out.println("是否是最后一个:"+pageInfo.isIsLastPage());
        sqlSession.close();
    }
    @Test
    public void test2() throws IOException {
        InputStream resourceAsStream = Resources.getResourceAsStream("sqlMapConfig.xml");
        SqlSessionFactory sqlSessionFactory = new SqlSessionFactoryBuilder().build(resourceAsStream);
        SqlSession sqlSession = sqlSessionFactory.openSession();
        UserMapper mapper = sqlSession.getMapper(UserMapper.class);
        // Fetch a single user by primary key (id 23 assumed to exist).
        User user = mapper.findById(23);
        System.out.println("user中的birthday:"+user.getBirthday());
        // NOTE(review): commit() after a read-only query is a no-op here —
        // confirm it was not meant for a write path.
        sqlSession.commit();
        sqlSession.close();
    }
    @Test
    public void test1() throws IOException {
        InputStream resourceAsStream = Resources.getResourceAsStream("sqlMapConfig.xml");
        SqlSessionFactory sqlSessionFactory = new SqlSessionFactoryBuilder().build(resourceAsStream);
        SqlSession sqlSession = sqlSessionFactory.openSession();
        UserMapper mapper = sqlSession.getMapper(UserMapper.class);
        // Create the user to insert.
        User user = new User();
        user.setUsername("aaaceshi");
        user.setPassword("<PASSWORD>");
        user.setBirthday(new Date());
        // Execute the save operation and commit the transaction.
        mapper.save(user);
        sqlSession.commit();
        sqlSession.close();
    }
}
|
-- Join table1 to table2 on col1, keeping only rows where table1.col2 is
-- 'value'. For an INNER JOIN the constant filter in the ON clause is
-- equivalent to placing it in a WHERE clause.
SELECT *
FROM table1
INNER JOIN table2
ON table1.col1 = table2.col1
AND table1.col2 = 'value';
import WritingForm from './views/components/WritingForm.js'
import LinkList from './views/components/LinkList.js'
import About from './views/pages/About.js'
// Route table: maps URL paths to the page/component module that renders them.
const Routes = {
  '/': WritingForm,
  '/links': LinkList,
  '/about': About
};

export default Routes;
# Announce startup, then launch the web UI from its install prefix.
printf '%s\n' 'coctohug-flax web_launch working'
npm start --prefix /coctohug/web
<gh_stars>1-10
#include "duckdb/parser/parsed_expression.hpp"
#include "duckdb/parser/transformer.hpp"
using namespace duckdb;
using namespace std;
// FIXME: what is the difference between GroupBy and expression list?
// Transforms a Postgres GROUP BY clause (PGList) into parsed expressions,
// appending them to `result`. Returns false when no GROUP BY is present.
bool Transformer::TransformGroupBy(PGList *group, vector<unique_ptr<ParsedExpression>> &result) {
	if (group == nullptr) {
		return false;
	}
	// PGList is an intrusive singly-linked list; transform every entry.
	for (auto cell = group->head; cell != nullptr; cell = cell->next) {
		auto expression_node = reinterpret_cast<PGNode *>(cell->data.ptr_value);
		result.push_back(TransformExpression(expression_node));
	}
	return true;
}
|
<filename>Apps/HightechAngular.Web/ClientApp/src/app/shared/modules/builders/models/schema.ts
import {FieldInfo} from './field-info';
/**
 * Shape of a builder form: the ordered list of field definitions to render.
 */
export class Schema {
    // Field descriptors; see FieldInfo for the per-field metadata.
    public fields: FieldInfo[]
}
|
// pre deploy commands...
|
def is_contained(list_a, list_b):
for x in list_b:
if x not in list_a:
return False
return True |
#!/usr/bin/env bash
# Build and install Vim from source with LuaJIT and Ruby support.
# Clones upstream into a temp dir, builds, installs, and cleans up on exit.

# Abort on command failures and unset variables. Previously a failed clone
# or configure step was silently ignored and the script would carry on to
# `sudo make install`.
set -eu

GITDIR="$(mktemp -d)"
GITURL="https://github.com/vim/vim"

#################################################################################
# setup exit handler
#################################################################################
onexit() {
    echo "Script is terminating -- cleaning up"
    rm -rf "$GITDIR"
    exit
}
trap onexit EXIT
trap '' INT TERM # Ignore SigINT and SigTERM

sudo apt-get update
sudo apt-get install -y liblua5.1-0-dev luajit libluajit-5.1-dev build-essential

git clone "$GITURL" "$GITDIR"
# Guarded cd: under set -e a failure here aborts instead of building in the
# wrong directory.
cd "$GITDIR"

# Build with a predictable C locale.
export LC_ALL=C
export LANG=C
./configure --with-features=huge --enable-rubyinterp --enable-luainterp --with-luajit
make -j4
sudo make install

cd "$HOME"
# Redundant with the EXIT trap, but harmless; kept for clarity.
rm -rf "$GITDIR"
|
// Geological-disaster ("dizai") subsystem menus served by the mock menu API.
// Entries follow a vue-router-like shape: { path, component, name, title,
// redirect?, alwaysShow?, meta: { title, icon? }, children? }. Component
// strings are presumably resolved to view modules by the frontend router —
// confirm against the router's dynamic-import logic.
const dizaiMenus = [
  {
    path: "/dzMap",
    component: "Layout",
    name: "DzMap",
    title: "综合监视",
    redirect: "/dzMap/dzMap",
    meta: { title: "综合监视", icon: "dc-zhjs" },
    children: [
      {
        path: "dzMap",
        component: "dz/monitor/index",
        name: "DzMapChild",
        title: "综合监视",
        meta: { title: "综合监视", icon: "tp-zhjs" }
      }
    ]
  },
  // System administration
  {
    path: "/system",
    component: "Layout",
    redirect: "/system/user",
    name: "UtilSystem",
    alwaysShow: true,
    title: "系统管理",
    meta: {
      title: "系统管理",
      icon: "hzz_xtgl"
    },
    children: [
      {
        path: "user",
        component: "system/user/index",
        name: "SystemUser",
        title: "用户管理",
        meta: { title: "用户管理" }
      },
      {
        path: "role",
        component: "system/role/index",
        name: "SystemRole",
        title: "角色管理",
        meta: { title: "角色管理" }
      },
      {
        path: "menu",
        // NOTE(review): leading slash differs from the sibling component
        // paths ("system/...") — confirm it is intentional.
        component: "/system/menu/index",
        name: "SystemMenu",
        title: "菜单管理",
        meta: { title: "菜单管理" }
      },
      {
        path: "dict",
        component: "system/dict/index",
        name: "SystemDict",
        title: "字典管理",
        meta: { title: "字典管理" }
      },
      {
        path: "shp",
        component: "system/shp/index",
        name: "Systemshp",
        title: "图层管理",
        meta: { title: "图层管理" }
      },
      {
        path: "dept",
        component: "system/dept/index",
        name: "SystemDept",
        title: "部门管理",
        meta: { title: "部门管理" }
      },
      {
        path: "associationReservoir",
        component: "system/AssociationReservoir/index",
        name: "AssociationReservoir",
        title: "关联水库",
        meta: { title: "关联水库" }
      },
      {
        path: "associationSite",
        component: "system/AssociationSite/index",
        name: "AssociationSite",
        title: "关联站点",
        meta: { title: "关联站点" }
      }
    ]
  },
  // Big-screen dashboard
  {
    path: "/dzMap2",
    component: "Layout",
    redirect: "/dzMap2/dzMap",
    name: "DzMapdaping",
    title: "大屏",
    meta: { title: "大屏", icon: "tp-zhjs" },
    children: [
      {
        path: "dzMap",
        component: "DIZAI/monitoring/index",
        name: "DzMapChilddaping",
        title: "大屏",
        meta: { title: "大屏", icon: "tp-zhjs" }
      }
    ]
  }
];
// Reservoir ("shuiku") subsystem menus.
// NOTE(review): several route names/paths here ("UtilSystem"/"/system",
// "DzMapdaping"/"/dzMap2") are also defined in dizaiMenus above; when both
// arrays are merged (see the mock response below) the duplicates may break
// router registration — confirm.
const skMenus = [
  // Workbench
  {
    path: "/workbench",
    component: "Layout",
    name: "workbench",
    title: "工作台",
    redirect: "/workbench/workbench",
    meta: { title: "工作台", icon: "dc-zhjs" },
    children: [
      {
        path: "workbench",
        component: "shuiku/workbench/index",
        name: "sk-workbench",
        title: "工作台",
        meta: { title: "工作台", icon: "tp-zhjs" }
      }
    ]
  },
  {
    path: "/skMap",
    component: "Layout",
    name: "skMap",
    title: "综合调度",
    redirect: "/skMap/skMap",
    meta: { title: "综合调度", icon: "dc-zhjs" },
    children: [
      {
        path: "skMap",
        component: "shuiku/monitor/index",
        name: "skMapChild",
        title: "水库一张图",
        meta: { title: "水库一张图", icon: "tp-zhjs" }
      }
    ]
  },
  // Reservoir sub-system (disabled routes kept below for reference)
  // {
  //   path: "/subskMap",
  //   component: "Layout",
  //   name: "subskMap",
  //   title: "实时监控",
  //   redirect: "/subskMap/subskMap",
  //   meta: { title: "实时监控", icon: "dc-zhjs" },
  //   children: [
  //     {
  //       path: "subskMap",
  //       component: "subShuiku/monitor/index",
  //       name: "subskMapChild",
  //       title: "水库一张图",
  //       meta: { title: "水库一张图", icon: "tp-zhjs" }
  //     }
  //   ]
  // },
  {
    path: "/damSafety",
    component: "Layout",
    name: "damSafety",
    title: "大坝",
    redirect: "/subShuiku/damSafety",
    meta: { title: "大坝安全", icon: "dc-zhjs" },
    children: [
      {
        path: "DamMonitoring",
        component: "subShuiku/damSafety/DamMonitoring/index",
        name: "DamMonitoring",
        title: "大坝监测",
        meta: { title: "大坝监测", icon: "dc-tzgg" }
      },
      {
        path: "MeasuringPointManagement",
        component: "subShuiku/damSafety/MeasuringPointManagement/index",
        name: "MeasuringPointManagement",
        title: "测点管理",
        meta: { title: "测点管理", icon: "dc-tzgg" }
      },
      {
        path: "siteReport-sub",
        component: "subShuiku/damSafety/siteReport/index",
        name: "siteReport-sub",
        title: "测点报表",
        // NOTE(review): meta.title ("测点管理") does not match title
        // ("测点报表") above — confirm which is intended.
        meta: { title: "测点管理", icon: "dc-tzgg" }
      },
    ]
  },
  {
    path: "/subvideoSurveillance",
    component: "Layout",
    redirect: "/subvideoSurveillance",
    name: "subvideoSurveillance",
    alwaysShow: true,
    title: "视频监测",
    meta: {
      title: "视频监测",
      icon: "dc-bg"
    },
    children: [
      {
        path: "video",
        component: "subShuiku/subvideoSurveillance/video/index",
        name: "subvideo",
        title: "视频监控",
        meta: { title: "视频监控", icon: "dc-tzgg" }
      },
      {
        path: "platform",
        component: "subShuiku/subvideoSurveillance/platform/index",
        name: "subplatform",
        title: "平台配置",
        meta: { title: "平台配置", icon: "dc-tzgg" }
      },
    ]
  },
  // Operations management
  {
    path: "/operationManagement",
    component: "Layout",
    redirect: "/operationManagement/inspectionTour",
    name: "operationManagement",
    alwaysShow: true,
    title: "运行管理",
    meta: {
      title: "运行管理",
      icon: "dc-bg"
    },
    children: [
      {
        path: "inspectionTour",
        component: "shuiku/operationManagement/inspectionTour/index",
        name: "inspectionTour-sk",
        title: "巡视检查",
        meta: { title: "巡视检查", icon: "dc-tzgg" }
      },
      {
        path: "dailyMaintenance",
        component: "shuiku/operationManagement/dailyMaintenance/index",
        name: "dailyMaintenance-sk",
        title: "日常养护",
        meta: { title: "日常养护", icon: "dc-tzgg" }
      },
      {
        path: "equipmentOperation",
        component: "shuiku/operationManagement/equipmentOperation/index",
        name: "equipmentOperation-sk",
        title: "设备操作",
        meta: { title: "设备操作", icon: "dc-tzgg" }
      },
      {
        path: "dailyInspection",
        component: "shuiku/operationManagement/dailyInspection/index",
        name: "dailyInspection-sk",
        title: "日常检测",
        meta: { title: "日常检测", icon: "dc-tzgg" }
      },
      {
        path: "reportRecords",
        component: "shuiku/operationManagement/reportRecords/index",
        name: "reportRecords-sk",
        title: "上报记录",
        meta: { title: "上报记录", icon: "dc-tzgg" }
      },
      {
        path: "taskDistribution",
        component: "shuiku/operationManagement/taskDistribution/index",
        name: "taskDistribution-sk",
        title: "任务下发",
        meta: { title: "任务下发", icon: "dc-tzgg" }
      },
      {
        path: "taskList",
        component: "shuiku/operationManagement/taskList/index",
        name: "taskList-sk",
        title: "任务列表",
        meta: { title: "任务列表", icon: "dc-tzgg" }
      },
      {
        path: "timedTask",
        component: "shuiku/operationManagement/timedTask/index",
        name: "timedTask-sk",
        title: "定时任务",
        meta: { title: "定时任务", icon: "dc-tzgg" }
      }
    ]
  },
  // //运行管理-子系统
  // {
  //   path: "/operationManagement-sub",
  //   component: "Layout",
  //   redirect: "/operationManagement-sub/inspectionTour-sub",
  //   name: "operationManagement-sub",
  //   alwaysShow: true,
  //   title: "运行管理",
  //   meta: {
  //     title: "运行管理",
  //     icon: "dc-bg"
  //   },
  //   children: [
  //     {
  //       path: "inspectionTour-sub",
  //       component: "subShuiku/operationManagement/inspectionTour/index",
  //       name: "inspectionTour-sub-sk",
  //       title: "巡视检查",
  //       meta: { title: "巡视检查", icon: "dc-tzgg" }
  //     },
  //     {
  //       path: "dailyMaintenance",
  //       component: "subShuiku/operationManagement/dailyMaintenance/index",
  //       name: "dailyMaintenance-sub-sk",
  //       title: "日常养护",
  //       meta: { title: "日常养护", icon: "dc-tzgg" }
  //     },
  //     {
  //       path: "equipmentOperation",
  //       component: "subShuiku/operationManagement/equipmentOperation/index",
  //       name: "equipmentOperation-sub-sk",
  //       title: "设备操作",
  //       meta: { title: "设备操作", icon: "dc-tzgg" }
  //     },
  //     {
  //       path: "dailyInspection",
  //       component: "subShuiku/operationManagement/dailyInspection/index",
  //       name: "dailyInspection-sub-sk",
  //       title: "日常检测",
  //       meta: { title: "日常检测", icon: "dc-tzgg" }
  //     },
  //     {
  //       path: "reportRecords",
  //       component: "subShuiku/operationManagement/reportRecords/index",
  //       name: "reportRecords-sub-sk",
  //       title: "上报记录",
  //       meta: { title: "上报记录", icon: "dc-tzgg" }
  //     },
  //     {
  //       path: "taskDistribution",
  //       component: "subShuiku/operationManagement/taskDistribution/index",
  //       name: "taskDistribution-sub-sk",
  //       title: "任务下发",
  //       meta: { title: "任务下发", icon: "dc-tzgg" }
  //     },
  //     {
  //       path: "taskList",
  //       component: "subShuiku/operationManagement/taskList/index",
  //       name: "taskList-sub-sk",
  //       title: "任务列表",
  //       meta: { title: "任务列表", icon: "dc-tzgg" }
  //     },
  //     {
  //       path: "timedTask",
  //       component: "subShuiku/operationManagement/timedTask/index",
  //       name: "timedTask-sub-sk",
  //       title: "定时任务",
  //       meta: { title: "定时任务", icon: "dc-tzgg" }
  //     }
  //   ]
  // },
  // Reservoir management
  {
    path: "/reservoirManagement",
    component: "Layout",
    redirect: "/reservoirManagement/listReservoirs",
    name: "reservoirManagement",
    alwaysShow: true,
    title: "水库管理",
    meta: {
      title: "水库管理",
      icon: "dc-bg"
    },
    children: [
      {
        path: "listReservoirs",
        component: "shuiku/reservoirManagement/listReservoirs/index",
        name: "listReservoirs-sk",
        title: "水库列表",
        meta: { title: "水库列表", icon: "dc-tzgg" }
      },
      {
        path: "reservoirPersonnel",
        component: "shuiku/reservoirManagement/reservoirPersonnel/index",
        name: "reservoirPersonnel-sk",
        title: "水库人员",
        meta: { title: "水库人员", icon: "dc-tzgg" }
      },
      {
        path: "maintenancePoint",
        component: "shuiku/reservoirManagement/maintenancePoint/index",
        name: "maintenancePoint-sk",
        title: "维护点位",
        meta: { title: "维护点位", icon: "dc-tzgg" }
      },
      {
        path: "maintenanceStandards",
        component: "shuiku/reservoirManagement/maintenanceStandards/index",
        name: "maintenanceStandards-sk",
        title: "维护标准",
        meta: { title: "维护标准", icon: "dc-tzgg" }
      },
      {
        path: "monitorProject",
        component: "shuiku/reservoirManagement/monitorProject/index",
        name: "monitorProject-sk",
        title: "监测项目",
        meta: { title: "监测项目", icon: "dc-tzgg" }
      },
      {
        path: "engineeringParts",
        component: "shuiku/reservoirManagement/engineeringParts/index",
        name: "engineeringParts-sk",
        title: "工程部位",
        meta: { title: "工程部位", icon: "dc-tzgg" }
      },
      {
        path: "siteManagement",
        component: "shuiku/reservoirManagement/siteManagement/index",
        name: "siteManagement-sk",
        title: "站点管理",
        meta: { title: "站点管理", icon: "dc-tzgg" }
      }
    ]
  },
  // Video surveillance
  {
    path: "/videoSurveillance",
    component: "Layout",
    redirect: "/videoSurveillance/videoWatch",
    name: "videoSurveillance",
    alwaysShow: true,
    title: "视频监控",
    meta: {
      title: "视频监控",
      icon: "dc-bg"
    },
    children: [
      {
        path: "videoWatch",
        component: "shuiku/videoSurveillance/videoWatch/index",
        name: "videoWatch-sk",
        title: "视频监视",
        meta: { title: "视频监视", icon: "dc-tzgg" }
      },
      {
        path: "partformConfig",
        component: "shuiku/videoSurveillance/partformConfig/index",
        name: "partformConfig-sk",
        title: "平台配置",
        meta: { title: "平台配置", icon: "dc-tzgg" }
      }
    ]
  },
  // Early-warning management
  {
    path: "/warningManagement",
    component: "Layout",
    redirect: "/warningManagement/warnInfo",
    name: "warningManagement-sk",
    alwaysShow: true,
    title: "预警管理-sk",
    meta: {
      title: "预警管理-sk",
      icon: "dc-bg"
    },
    children: [
      {
        path: "warnInfo",
        component: "shuiku/warningManagement/warnInfo/index",
        name: "warnInfo-sk",
        title: "预警信息",
        meta: { title: "预警信息", icon: "dc-tzgg" }
      },
      {
        path: "threshold",
        component: "shuiku/warningManagement/threshold/index",
        name: "threshold-sk",
        title: "预警阈值",
        meta: { title: "预警阈值", icon: "dc-tzgg" }
      }
    ]
  },
  // //预警管理-----子系统
  // {
  //   path: "/warningManagement-sub",
  //   component: "Layout",
  //   redirect: "/warningManagement-sub/warnInfo",
  //   name: "warningManagement-sub-sk",
  //   alwaysShow: true,
  //   title: "预警管理-sk",
  //   meta: {
  //     title: "预警管理-sk",
  //     icon: "dc-bg"
  //   },
  //   children: [
  //     {
  //       path: "warnInfo",
  //       component: "subShuiku/warningManagement/warnInfo/index",
  //       name: "warnInfo-sub-sk",
  //       title: "预警信息",
  //       meta: { title: "预警信息", icon: "dc-tzgg" }
  //     },
  //     {
  //       path: "threshold",
  //       component: "subShuiku/warningManagement/threshold/index",
  //       name: "threshold-sub-sk",
  //       title: "预警阈值",
  //       meta: { title: "预警阈值", icon: "dc-tzgg" }
  //     }
  //   ]
  // },
  // Reservoir assets
  {
    path: "/reservoirAssets",
    component: "Layout",
    redirect: "/reservoirAssets/listReservoirs",
    name: "reservoirAssets",
    alwaysShow: true,
    title: "水库资产",
    meta: {
      title: "水库资产",
      icon: "dc-bg"
    },
    children: [
      {
        path: "reservoirFacilities",
        component: "shuiku/reservoirAssets/reservoirFacilities/index",
        name: "reservoirFacilities-sk",
        title: "感知设备",
        meta: { title: "感知设备", icon: "dc-tzgg" }
      },
      {
        path: "trainingRecords",
        component: "shuiku/reservoirAssets/trainingRecords/index",
        name: "trainingRecords-sk",
        title: "培训记录",
        meta: { title: "培训记录", icon: "dc-tzgg" }
      },
      {
        path: "perceptionDevice",
        component: "shuiku/reservoirAssets/perceptionDevice/index",
        name: "perceptionDevice-sk",
        title: "防汛物资",
        meta: { title: "防汛物资", icon: "dc-tzgg" }
      },
      {
        path: "archives",
        component: "shuiku/reservoirAssets/archives/index",
        name: "archives-sk",
        title: "档案资料",
        meta: { title: "档案资料", icon: "dc-tzgg" }
      },
      {
        path: "inspectionBriefing",
        component: "shuiku/reservoirAssets/inspectionBriefing/index",
        name: "inspectionBriefing-sk",
        title: "巡查简报",
        meta: { title: "巡查简报", icon: "dc-tzgg" }
      }
    ]
  },
  // System administration (duplicates the dizaiMenus "/system" entry)
  {
    path: "/system",
    component: "Layout",
    redirect: "/system/user",
    name: "UtilSystem",
    alwaysShow: true,
    title: "系统管理",
    meta: {
      title: "系统管理",
      icon: "hzz_xtgl"
    },
    children: [
      {
        path: "user",
        component: "system/user/index",
        name: "SystemUser",
        title: "用户管理",
        meta: { title: "用户管理" }
      },
      {
        path: "role",
        component: "system/role/index",
        name: "SystemRole",
        title: "角色管理",
        meta: { title: "角色管理" }
      },
      {
        path: "menu",
        // NOTE(review): leading slash differs from sibling component paths —
        // confirm it is intentional.
        component: "/system/menu/index",
        name: "SystemMenu",
        title: "菜单管理",
        meta: { title: "菜单管理" }
      },
      {
        path: "dict",
        component: "system/dict/index",
        name: "SystemDict",
        title: "字典管理",
        meta: { title: "字典管理" }
      },
      {
        path: "shp",
        component: "system/shp/index",
        name: "Systemshp",
        title: "图层管理",
        meta: { title: "图层管理" }
      },
      {
        path: "dept",
        component: "system/dept/index",
        name: "SystemDept",
        title: "部门管理",
        meta: { title: "部门管理" }
      },
      {
        path: "associationReservoir",
        component: "system/AssociationReservoir/index",
        name: "AssociationReservoir",
        title: "关联水库",
        meta: { title: "关联水库" }
      },
      {
        path: "associationSite",
        component: "system/AssociationSite/index",
        name: "AssociationSite",
        title: "关联站点",
        meta: { title: "关联站点" }
      }
    ]
  },
  // Big-screen dashboard (duplicates the dizaiMenus "/dzMap2" entry)
  {
    path: "/dzMap2",
    component: "Layout",
    redirect: "/dzMap2/dzMap",
    name: "DzMapdaping",
    title: "大屏",
    meta: { title: "大屏", icon: "tp-zhjs" },
    children: [
      {
        path: "dzMap",
        component: "DIZAI/monitoring/index",
        name: "DzMapChilddaping",
        title: "大屏",
        meta: { title: "大屏", icon: "tp-zhjs" }
      }
    ]
  },
  // New: organization-structure based system administration
  {
    path: "/systemManagement",
    component: "Layout",
    redirect: "/systemManagement",
    name: "SystemManagement",
    alwaysShow: true,
    title: "系统管理new",
    meta: {
      title: "系统管理new",
      icon: "xtgl"
    },
    children: [
      {
        path: "organization",
        component: "systemManagement/organization/index",
        name: "Organization",
        title: "组织机构",
        meta: { title: "组织机构" }
      },
      {
        path: "userM",
        component: "systemManagement/user/index",
        name: "Userm",
        title: "用户管理new",
        meta: { title: "用户管理new" }
      },
      {
        path: "roleM",
        component: "systemManagement/role/index",
        name: "Rolem",
        title: "角色管理new",
        meta: { title: "角色管理new" }
      },
      {
        path: "subsystem",
        component: "systemManagement/subsystem/index",
        name: "Subsystem",
        title: "子系统管理",
        meta: { title: "子系统管理" }
      },
      {
        path: "menuM",
        // NOTE(review): leading slash differs from sibling component paths —
        // confirm it is intentional.
        component: "/systemManagement/menu/index",
        name: "menuM",
        title: "菜单管理new",
        meta: { title: "菜单管理new" }
      }
      // {
      //   path: "dictM",
      //   component: "systemManagement/dict/index",
      //   name: "dictM",
      //   title: "字典管理",
      //   meta: { title: "字典管理" }
      // },
    ]
  }
];
// Mock endpoint definition: answers GET /user/getMenus* with the merged
// menu tree for the frontend router.
export default [
  {
    url: "/user/getMenus.*",
    type: "get",
    response: config => {
      return {
        status: true,
        code: 20000,
        autoLogin: true,
        // data: dizaiMenus
        // NOTE(review): dizaiMenus and skMenus both define routes named
        // "UtilSystem" ("/system") and "DzMapdaping" ("/dzMap2"); the merged
        // list therefore contains duplicates — confirm the router tolerates
        // this.
        data: [...dizaiMenus, ...skMenus]
        // data: shuikuMenus
        // data: [...shuikuMenus,
        // ...floodSIM_COMMON]
      };
    }
  }
];
|
<gh_stars>0
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_autorenew_outline = void 0;
var ic_autorenew_outline = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M12 6v3l4-4-4-4v3c-4.42 0-8 3.58-8 8 0 1.57.46 3.03 1.24 4.26L6.7 14.8c-.45-.83-.7-1.79-.7-2.8 0-3.31 2.69-6 6-6zm6.76 1.74L17.3 9.2c.44.84.7 1.79.7 2.8 0 3.31-2.69 6-6 6v-3l-4 4 4 4v-3c4.42 0 8-3.58 8-8 0-1.57-.46-3.03-1.24-4.26z"
},
"children": []
}]
};
exports.ic_autorenew_outline = ic_autorenew_outline; |
#!/bin/bash
# Thin wrapper around javac that announces each invocation.
echo "Super cool javac wrapper tool is being called!! Hurrah!!"
# Quote "$@" so arguments containing spaces or glob characters reach javac
# intact; the unquoted $@ re-split and glob-expanded every argument.
javac "$@"
|
#
# Copyright 2021 Merck & Co., Inc. Kenilworth, NJ, USA.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#!/bin/bash
# Open an interactive psql shell inside the dp-postgres pod on minikube.

# Point this shell's docker CLI at minikube's docker daemon. Quoting the
# command substitution keeps the emitted export statements intact.
eval "$(minikube docker-env)"

# Resolve the pod name in a single awk pass (previously grep | awk), taking
# only the first match, and quote it so exec receives exactly one argument
# even if multiple pods ever matched.
pod="$(kubectl get pods | awk '/dp-postgres/ {print $1; exit}')"
kubectl exec -it "$pod" -c dp-postgres -- psql -U postgres -d rules_of_use
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.