text stringlengths 1 1.05M |
|---|
function isElementVisible(scrollProps: ScrollProperties, elementTop: number, elementBottom: number): boolean {
const containerTop = 0;
const containerBottom = scrollProps.clientHeight;
return (elementTop >= containerTop && elementTop <= containerBottom) || (elementBottom >= containerTop && elementBottom <= containerBottom);
} |
import React, { useRef, useEffect, useState } from 'react';
import { RouteComponentProps } from 'react-router-dom';
import { Tooltip, makeStyles, createStyles } from '@material-ui/core';
import { useTracking } from 'react-tracking';
import { motion } from 'framer-motion';
import { upperCut } from '../../../services/animations';
import { GiftCard, CardConfig } from '../../../services/gift-card.types';
import { wait } from '../../../services/utils';
import { resizeToFitPage } from '../../../services/frame';
import { launchNewTab } from '../../../services/browser';
import { redeemGiftCard, getLatestBalance } from '../../../services/gift-card';
import LineItems from '../../components/line-items/line-items';
import CardHeader from '../../components/card-header/card-header';
import CodeBox from '../../components/code-box/code-box';
import CardMenu from '../../components/card-menu/card-menu';
import ActionButton from '../../components/action-button/action-button';
import { trackComponent } from '../../../services/analytics';
import './card.scss';
// Gift-card details page: renders the card header, purchase line items, the
// claim code box, and a status-dependent action footer (redeem / pending /
// failure). The card and its brand config arrive via router location state.
const Card: React.FC<RouteComponentProps & {
  purchasedGiftCards: GiftCard[];
  updateGiftCard: (card: GiftCard) => void;
}> = ({ location, history, purchasedGiftCards, updateGiftCard }) => {
  const tracking = useTracking();
  // NOTE(review): makeStyles is invoked inside the component body, so a fresh
  // hook is created on every render; hoisting it to module scope is the usual
  // pattern — confirm before changing.
  const useStyles = makeStyles(() =>
    createStyles({
      tooltipStyles: {
        borderRadius: '6px',
        color: 'white',
        backgroundColor: '#303133',
        maxWidth: 200,
        padding: '12px 15px',
        fontWeight: 400,
        fontSize: '11px',
        textAlign: 'center',
        top: '10px'
      }
    })
  );
  const classes = useStyles();
  const tooltipStyles = { tooltip: classes.tooltipStyles };
  const ref = useRef<HTMLDivElement>(null);
  // Flipped to false on unmount so in-flight async work skips state updates.
  const mountedRef = useRef(true);
  const { card: giftCard, cardConfig } = location.state as { card: GiftCard; cardConfig: CardConfig };
  const { invoiceId } = giftCard;
  const [card, setCard] = useState(giftCard);
  const [fetchingClaimCode, setFetchingClaimCode] = useState(false);
  const initiallyArchived = giftCard.archived;
  const redeemUrl = `${cardConfig.redeemUrl}${card.claimCode}`;
  // Open the brand's redemption page (or the card's direct claim link) in a
  // new tab and record the click.
  const launchClaimLink = (): void => {
    const url = cardConfig.defaultClaimCodeType === 'link' ? (card.claimLink as string) : redeemUrl;
    launchNewTab(url);
    tracking.trackEvent({
      action: 'clickedRedeemButton',
      gaAction: `clickedRedeemButton:${cardConfig.name}`,
      brand: cardConfig.name
    });
  };
  const shouldShowRedeemButton = (): boolean => !!(cardConfig.redeemUrl || cardConfig.defaultClaimCodeType === 'link');
  // Persist the updated card upstream and mirror it in local state.
  const updateCard = async (cardToUpdate: GiftCard): Promise<void> => {
    updateGiftCard(cardToUpdate);
    setCard(cardToUpdate);
  };
  // Resize the host frame to fit content, but only while still mounted.
  const resizeFrame = (paddingBottom = 60): void => {
    if (mountedRef.current) resizeToFitPage(ref, paddingBottom);
  };
  const archive = async (): Promise<void> => {
    await updateCard({ ...card, archived: true });
    // If the card was opened already archived, stay on the page and refit;
    // otherwise return to the previous view.
    initiallyArchived ? resizeToFitPage(ref, 75, 100) : history.goBack();
    tracking.trackEvent({ action: 'archivedGiftCard' });
  };
  const unarchive = async (): Promise<void> => {
    // NOTE(review): updateGiftCard(card) immediately followed by
    // updateCard({ ...card, archived: false }) writes upstream twice; the
    // first call looks redundant — confirm before removing.
    updateGiftCard(card);
    resizeFrame();
    updateCard({ ...card, archived: false });
    tracking.trackEvent({ action: 'unarchivedGiftCard' });
  };
  const menuItems = ['Edit Balance', card.archived ? 'Unarchive' : 'Archive', 'Help'];
  // Dispatch a card-menu selection to the matching action.
  const handleMenuClick = (item: string): void => {
    switch (item) {
      case 'Edit Balance':
        history.push({
          pathname: `/card/${card.invoiceId}/balance`,
          state: { card, cardConfig, updateType: 'Amount Spent' }
        });
        break;
      case 'Archive':
        archive();
        break;
      case 'Unarchive':
        unarchive();
        break;
      case 'Help':
        tracking.trackEvent({ action: 'clickedHelp' });
        return launchNewTab('https://bitpay.com/request-help');
      default:
        console.log('Unknown Menu Option Selected');
    }
  };
  // On the failure footer: if the invoice was actually paid (or is missing),
  // send the user to support; otherwise reopen the unpaid invoice.
  const handleErrorButtonClick = (): void => {
    const hasValidPayment = card.invoice && ['paid', 'confirmed', 'complete'].includes(card.invoice.status);
    hasValidPayment || !card.invoice ? handleMenuClick('Help') : launchNewTab(card.invoice.url);
  };
  // Ask the server for the claim code and merge the response into local
  // state. Bails out of state updates if the component unmounted mid-flight.
  const redeem = async (): Promise<void> => {
    setFetchingClaimCode(true);
    const updatedGiftCard = await redeemGiftCard(card);
    if (!mountedRef.current) return;
    if (updatedGiftCard.status === 'PENDING') {
      // Brief pause so the spinner is perceptible before reverting to pending.
      await wait(700);
    }
    setFetchingClaimCode(false);
    const fullCard = { ...card, ...updatedGiftCard };
    await updateCard(fullCard);
    resizeFrame();
  };
  // Keep local card state in sync with the latest copy arriving via props.
  useEffect(() => {
    const updatedCard = purchasedGiftCards.find(c => c.invoiceId === invoiceId) as GiftCard;
    setCard(updatedCard);
    resizeFrame();
  }, [purchasedGiftCards, invoiceId]);
  // NOTE(review): a ref object is stable across renders, so [ref] never
  // retriggers this effect — it runs once on mount; confirm that is intended.
  useEffect(() => {
    resizeFrame();
  }, [ref]);
  // On mount: auto-redeem pending cards and retry failures newer than 24h.
  // On unmount: drop the mounted flag so pending redeems bail out.
  useEffect(() => {
    const createdLessThan24HoursAgo = Date.now() - new Date(card.date).getTime() < 1000 * 60 * 60 * 24;
    if (card.status === 'PENDING' || (card.status === 'FAILURE' && createdLessThan24HoursAgo)) redeem();
    return (): void => {
      mountedRef.current = false;
    };
  // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);
  return (
    <div className="card-details">
      <div className="card-details__content" ref={ref}>
        <CardMenu items={menuItems} onClick={handleMenuClick} />
        <CardHeader amount={getLatestBalance(card)} cardConfig={cardConfig} card={card} />
        <LineItems cardConfig={cardConfig} card={card} />
        {card.status === 'SUCCESS' && cardConfig.defaultClaimCodeType !== 'link' && (
          <motion.div
            variants={upperCut}
            custom={1}
            animate="visible"
            initial="hidden"
            className="card-details__content__code-box"
          >
            <CodeBox label="Claim Code" code={card.claimCode} />
            {card.pin && <CodeBox label="Pin" code={card.pin} />}
          </motion.div>
        )}
        {card.status === 'SUCCESS' && !card.archived && shouldShowRedeemButton() && (
          <motion.div
            variants={upperCut}
            custom={2}
            animate="visible"
            initial="hidden"
            className="action-button__footer"
          >
            <ActionButton onClick={launchClaimLink}>Redeem Now</ActionButton>
          </motion.div>
        )}
        {card.status === 'PENDING' && (
          <Tooltip
            title="We’ll update your claim code here when your payment confirms"
            placement="top"
            classes={tooltipStyles}
            arrow
          >
            <motion.div
              variants={upperCut}
              custom={2}
              animate="visible"
              initial="hidden"
              className="action-button__footer"
            >
              <ActionButton onClick={redeem} flavor="warn">
                {fetchingClaimCode ? (
                  <>
                    <img className="action-button__spinner" src="../../assets/icons/spinner-warn.svg" alt="spinner" />
                    Fetching Claim Code
                  </>
                ) : (
                  <>Pending Confirmation</>
                )}
              </ActionButton>
            </motion.div>
          </Tooltip>
        )}
        {card.status === 'FAILURE' && (
          <Tooltip
            title="Could not get claim code. Please contact BitPay Support."
            placement="top"
            classes={tooltipStyles}
            arrow
          >
            <motion.div
              variants={upperCut}
              custom={2}
              animate="visible"
              initial="hidden"
              className="action-button__footer"
            >
              <ActionButton onClick={handleErrorButtonClick} flavor="danger">
                Something Went Wrong
              </ActionButton>
            </motion.div>
          </Tooltip>
        )}
      </div>
    </div>
  );
};
export default trackComponent(Card, { page: 'card' });
|
// dva-style model holding the shared dictionary list.
const DictModel = {
  namespace: 'dict',
  state: {
    dictList: [],
  },
  reducers: {
    // Replace the dictionary list with the action payload, or clear it when
    // no payload is supplied.
    refreshDict(state, action) {
      const dictList = action.dictList || [];
      return { ...state, dictList };
    }
  },
};
export default DictModel;
|
def insertInAscendingOrder(arr, num):
    """Insert num into the ascending-sorted list arr in place and return arr.

    Precondition: arr is already sorted ascending (as the name implies).
    Uses bisect.insort for an O(n) insertion instead of the original
    append-then-sort, which re-sorted the whole list in O(n log n).
    """
    from bisect import insort
    insort(arr, num)
    return arr
<reponame>pick4er/bookstore<gh_stars>0
import Vue from 'vue';
import App from 'client/App.vue';
import createRouter from 'client/router';
import createStore from 'client/flux';
// Build a fresh Vue application wired to its own store and router instances.
// Returning all three lets server and client entry points hydrate and mount
// them separately.
export default function createApp(initialState) {
  const store = createStore(initialState);
  const router = createRouter();
  return {
    app: new Vue({
      store,
      router,
      render: h => h(App),
    }),
    store,
    router,
  };
}
|
<filename>tests/manual/pipeline_quality_measures.py<gh_stars>10-100
import argparse
from fmridenoise.interfaces import PipelinesQualityMeasures
from nipype import Node, Workflow
from nipype.interfaces.utility import IdentityInterface
from numpy import array
# Synthetic fixture data for a two-pipeline comparison ('24HMP8PhysSpikeReg'
# vs 'Null'), fed into PipelinesQualityMeasures via an IdentityInterface.

# Per-pipeline group edge weights.
edges_weight = [
    {'24HMP8PhysSpikeReg': array([0.70710678, 0.48339738, 0.70710678, 0.2699669, 0.64224707, 0.70710678])},
    {'Null': array([0.70710678, 0.7526871, 0.70710678, 0.6626035, 0.81092814, 0.70710678])}
]
# Edge weights after subject exclusion; identical here since n_excluded is 0.
edges_weight_clean = [
    {'24HMP8PhysSpikeReg': array([0.70710678, 0.48339738, 0.70710678, 0.2699669, 0.64224707, 0.70710678])},
    {'Null': array([0.70710678, 0.7526871, 0.70710678, 0.6626035, 0.81092814, 0.70710678])}]
# Per-pipeline FC-FD correlation values (raw and cleaned are identical here).
fc_fd_corr_values = [
    {'24HMP8PhysSpikeReg': array([0.0, 0.7071686637722878, 0.0, 0.937709680044116, 0.6133452558083143, 0.0])},
    {'Null': array([0.0, 0.7071686637722878, 0.0, 0.937709680044116, 0.6133452558083143, 0.0])}
]
fc_fd_corr_values_clean = [
    {'24HMP8PhysSpikeReg': array([0.0, 0.7071686637722878, 0.0, 0.937709680044116, 0.6133452558083143, 0.0])},
    {'Null': array([0.0, 0.7071686637722878, 0.0, 0.937709680044116, 0.6133452558083143, 0.0])}
]
# Quality-measure summaries: one inner list per pipeline, with one dict for
# each setting of the 'all' flag.
fc_fd_summary = [
    [
        {'all': True,
         'distance_dependence': -0.04001843958201733,
         'n_excluded': 0,
         'n_subjects': 3,
         'pearson_fc_fd': 0.2837382791803038,
         'perc_fc_fd_uncorr': 4.248756218905473,
         'pipeline': '24HMP8PhysSpikeReg',
         'tdof_loss': 38.333333333333336},
        {'all': False,
         'distance_dependence': -0.04001843958201733,
         'n_excluded': 0,
         'n_subjects': 3,
         'pearson_fc_fd': 0.2837382791803038,
         'perc_fc_fd_uncorr': 4.248756218905473,
         'pipeline': '24HMP8PhysSpikeReg',
         'tdof_loss': 38.333333333333336}
    ],
    [
        # NOTE(review): this entry uses 'median_pearson_fc_fd' while every
        # other entry uses 'pearson_fc_fd' — possibly a typo; confirm against
        # the keys PipelinesQualityMeasures expects.
        {'all': True,
         'distance_dependence': -0.06304964769937552,
         'n_excluded': 0,
         'n_subjects': 3,
         'median_pearson_fc_fd': 0.4903626077607741,
         'perc_fc_fd_uncorr': 5.1393034825870645,
         'pipeline': 'Null',
         'tdof_loss': 0.0},
        {'all': False,
         'distance_dependence': -0.06304964769937552,
         'n_excluded': 0,
         'n_subjects': 3,
         'pearson_fc_fd': 0.4903626077607741,
         'perc_fc_fd_uncorr': 5.1393034825870645,
         'pipeline': 'Null',
         'tdof_loss': 0.0}
    ]
]
# BIDS task label the fixture data pretends to come from.
task = 'prlrew'
def run(output_dir: str):
    """Wire the module-level fixture data into a PipelinesQualityMeasures
    node through an IdentityInterface and execute the workflow.

    output_dir serves both as the nipype working directory and as the
    quality-measures interface's output directory.
    """
    workflow = Workflow(name="test_workflow", base_dir=output_dir)
    # BUG FIX: the field list previously contained typos
    # ("fc_df_corr_values_clean", "fc_fc_summary") that matched neither the
    # inputs assigned below nor the connections made to the quality node.
    identity_node = Node(
        IdentityInterface(fields=["edges_weight", "edges_weight_clean", "fc_fd_corr_values",
                                  "fc_fd_corr_values_clean", "fc_fd_summary", "task"]),
        name="SomeInputSource")
    identity_node.inputs.edges_weight = edges_weight
    identity_node.inputs.edges_weight_clean = edges_weight_clean
    identity_node.inputs.fc_fd_summary = fc_fd_summary
    identity_node.inputs.task = task
    identity_node.inputs.fc_fd_corr_values = fc_fd_corr_values
    identity_node.inputs.fc_fd_corr_values_clean = fc_fd_corr_values_clean
    quality_node = Node(PipelinesQualityMeasures(
        output_dir=output_dir),
        name="PipelineQualitMeasures")
    workflow.connect([(identity_node, quality_node, [
        ("edges_weight_clean", "edges_weight_clean"),
        ("edges_weight", "edges_weight"),
        ("fc_fd_corr_values", "fc_fd_corr_values"),
        ("fc_fd_corr_values_clean", "fc_fd_corr_values_clean"),
        ("fc_fd_summary", "fc_fd_summary"),
        ("task", "task")
    ])])
    workflow.run()
if __name__ == '__main__':
    # CLI entry point: require an output/working directory, then launch the
    # test workflow.
    parser = argparse.ArgumentParser()
    parser.add_argument("-o", "--output_dir", help="Output data/working directory", required=True)
    run(parser.parse_args().output_dir)
-- Largest single-order quantity per product, highest first.
SELECT product_id, MAX(quantity) as qty
FROM orders
GROUP BY product_id
ORDER BY qty DESC;
<reponame>butsoleg/simpleworkflow<filename>lib/simple_workflow/test_helper.rb
# frozen_string_literal: true
# Utility methods to ease testing.
# Utility methods to ease testing.
module SimpleWorkflow::TestHelper
  # Seed the test session's detour stack with a single stored location.
  # Defaults to a bogus controller/action pair for tests that only need the
  # detour mechanism primed.
  def add_stored_detour(location = { controller: :bogus, action: :location })
    detours = [location]
    @request.session[:detours] = detours
  end
end
|
import numpy as np
from keras.layers import Dense
from keras.models import Sequential
# Training data: one three-feature sample with a single binary label.
X = np.array([[1, 0, 1]])
y = np.array([0])
# Tiny binary classifier: one hidden ReLU layer feeding a sigmoid output.
model = Sequential([
    Dense(3, input_dim=3, activation='relu'),
    Dense(1, activation='sigmoid'),
])
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(X, y)
# Run the trained model back over the training sample and show the score.
pred = model.predict(X)
print(pred)
#!/bin/bash
# This script is an example checker.
# It will be called by taskgrader in the following manner:
# checker.sh test.in test.solout test.out
# Thus $1 will contain the input number, $2 the output of the solution.
# The checker will give a grade of 100 if we got the expected result,
# a grade of 0 if the solution gave a wrong answer.
#
# BUG FIX: the original computed RESULT from $2 and SOLOUTDATA from $1,
# i.e. the input and the solution output were swapped relative to the
# calling convention documented above. The expected answer is double the
# input number.
RESULT=$((`cat $1`*2))
SOLOUTDATA=`cat $2`
# The 2>/dev/null swallows the comparison error for non-numeric output, so a
# malformed solution output also grades 0.
if ! [ "$SOLOUTDATA" -eq "$RESULT" ] 2> /dev/null
then
    echo "0"
    exit 0
else
    echo "100"
    exit 0
fi
|
import React, { useEffect, useState } from 'react';
import { StyleSheet, Image, View, Text, TouchableOpacity } from 'react-native';
import { getDataFromApi } from "../api/api";
// Home screen: loads a list from the API once on mount and renders each item
// as a tappable row with an image and a name label.
export default function HomeScreen() {
  const [data, setData] = useState([]);
  // Fetch the list a single time after the first render.
  useEffect(() => {
    async function fetchData() {
      const responseData = await getDataFromApi();
      setData(responseData);
    }
    fetchData();
  }, []);
  return (
    <View style={styles.container}>
      <Text style={{ fontSize: 22, fontWeight: 'bold' }}>Data from API</Text>
      {data.map((item) => (
        <TouchableOpacity
          key={item.id}
          style={styles.list}
          onPress={() => {
            //handle on press event
          }}>
          <Image
            source={{ uri: item.image }}
            style={styles.image} />
          <Text style={{ fontSize: 15 }}>
            {item.name}
          </Text>
        </TouchableOpacity>
      ))}
    </View>
  );
}
// Static styles for HomeScreen.
const styles = StyleSheet.create({
  // Full-screen column, items centered horizontally.
  container: {
    flex: 1,
    alignItems: 'center',
    paddingTop: 40,
    backgroundColor: '#fefefe',
  },
  // One bordered row per list item.
  list: {
    flexDirection: 'row',
    alignItems: 'center',
    borderWidth: 1,
    borderColor: '#cfcfcf',
    padding: 20,
    marginTop: 5,
    width: 320,
  },
  // Circular avatar (borderRadius = half of width/height).
  image: {
    width: 50,
    height: 50,
    borderRadius: 25,
  },
});
package com.weibo.lbzone.justrockit.presenter;
/**
 * Presenter contract for the activity list view.
 *
 * Created by smile on 16/9/9.
 *
 * NOTE(review): "Persenter" looks like a typo for "Presenter", but renaming
 * the interface would break existing implementors — confirm before changing.
 */
public interface IActivityPersenter {
    // Display all activities. (translated from: 显示所有活动)
    void displayAll();
}
|
<filename>src/api.js
/* Api for managing all requests through the /api/ parameter */
const express = require('express')
const axios = require('axios')
const fs = require('fs')
const path = require('path')
const csv = require('fast-csv')
var formidable = require('formidable')
// The Blue Alliance API key; '<KEY>' is a redacted placeholder — supply a
// real key before deploying.
const TBA_AUTH = '<KEY>'
const router = express.Router()
// Parse JSON request bodies for every route on this router.
router.use(express.json())
// Health-check endpoint.
router.get('/', (req, res) => {
  res.status(200).json({ 'success': 'API online' })
})
// Upload a file
router.post('/upload-schedule', (req, res, next) => {
var form = formidable.IncomingForm()
form.uploadDir = 'data'
form.keepExtensions = true
form.parse(req)
form
.on('file', (_, file) => {
fs.rename(file.path, path.join(form.uploadDir, 'schedules', file.name), (err) => {
if (err) { next(err) }
})
})
.on('error', (err) => { next(err) })
.on('end', () => { res.status(200).json({ 'success': `File ${req.file.name} was saved successfully` }) })
})
// Delete, Get, or Post file contents
// Delete, Get, or Post file contents under the data/ directory.
router.route('/file/:file')
  // GET: return a CSV file as parsed rows, or a JSON file as an object.
  .get((req, res, next) => {
    let loc = path.join('data', req.params.file)
    let fileType = req.params.file.split('.')[1]
    if (fileType === 'csv') {
      let results = []
      fs.createReadStream(loc)
        .on('error', () => { next(Error(`The file ${req.params.file} could not be retrieved`)) })
        .pipe(csv.parse({ headers: true, ignoreEmpty: true }))
        .on('data', data => results.push(data))
        .on('error', (err) => { next(err) })
        .on('end', () => res.status(200).json(results))
    } else {
      console.log('Location', loc)
      fs.readFile(loc, (err, rawData) => {
        if (err) {
          // BUG FIX: return after reporting the error — previously execution
          // fell through and JSON.parse ran on undefined data.
          return next(Error(`The file ${req.params.file} could not be retrieved`))
        }
        if (fileType === 'json') {
          res.status(200).json(JSON.parse(rawData))
        } else {
          // BUG FIX: unsupported extensions previously hung the request with
          // no response at all; fail fast instead.
          next(Error(`The file ${req.params.file} has an unsupported type`))
        }
      })
    }
  })
  // POST: write the request body to the file (raw, JSON, or appended CSV).
  .post((req, res, next) => {
    console.log('Got', req.body, req.params.file)
    let fileType = req.params.file.split('.')[1]
    let loc = path.join('data', req.params.file)
    // NOTE(review): the raw/json branches respond success before the async
    // write finishes, so a write error arrives after the response — confirm
    // whether that best-effort behavior is intended.
    if (req.body.params && req.body.params.rawData === true) {
      fs.writeFile(loc, req.body.data, err => { if (err) { next(err) } })
      res.status(200).json({ 'success': `File ${req.params.file} was saved successfully` })
    } else if (fileType === 'json') {
      fs.writeFile(loc, JSON.stringify(req.body, null, 2), (err) => {
        if (err) { next(err) }
      })
      res.status(200).json({ 'success': `File ${req.params.file} was saved successfully` })
    } else if (fileType === 'csv') {
      // Append rows; only emit a header row when the file is new.
      let exists = fs.existsSync(loc)
      let ws = fs.createWriteStream(loc, { flags: 'a' })
      ws.on('close', () => res.status(200).json({ 'success': `File ${req.params.file} was saved successfully` }))
      ws.write((exists) ? '\n' : '')
      csv.write(req.body, { headers: !exists })
        .pipe(ws)
        .on('error', (e) => { next(e) })
    }
  })
  // DELETE: remove the file.
  .delete((req, res, next) => {
    let loc = path.join('data', req.params.file)
    fs.unlink(loc, (err) => {
      // BUG FIX: previously next(err) was called unconditionally, so a
      // successful delete invoked next(undefined) and never sent a response.
      if (err) { return next(err) }
      res.status(200).json({ 'success': `File ${req.params.file} was deleted successfully` })
    })
  })
// Stream a file from data/ to the client as a download.
router.get('/file/:file/download', (req, res, next) => {
  let loc = path.join('data', req.params.file)
  res.download(loc, (err) => {
    // BUG FIX: res.download invokes its callback on success with err
    // undefined; forward only actual errors instead of next(undefined).
    if (err) { next(err) }
  })
})
// Fetch the qualification-match schedule for an event from The Blue Alliance
// and save it, sorted by match number, to data/schedules/<event>.json.
router.post('/download-schedule/:event', (req, res, next) => {
  let url = `https://www.thebluealliance.com/api/v3/event/${req.params.event}/matches/simple`
  console.log(url)
  axios.get(url, { headers: { 'X-TBA-Auth-Key': TBA_AUTH } }).then(result => {
    let loc = path.join('data', 'schedules', req.params.event + '.json')
    // Keep only qualification matches, reduced to the alliances and number.
    let values = result.data
      .filter(val => val.comp_level === 'qm')
      .map(val => ({
        'red': val.alliances.red.team_keys,
        'blue': val.alliances.blue.team_keys,
        'match': parseInt(val.match_number)
      }))
    values.sort((a, b) => (a.match > b.match) ? 1 : -1)
    let stream = fs.createWriteStream(loc)
    stream.write(JSON.stringify(values, null, 2))
    stream.on('error', (err) => { next(err) })
    res.status(200).json({ 'success': `The schedule for event ${req.params.event} was saved.` })
  }).catch(() => {
    next(new Error(`Could not find event ${req.params.event}. Make sure it includes the year and is spelled correctly!`))
  })
})
// Centralized error handler: every next(err) from the routes above lands
// here. The unused 'next' parameter is required — Express recognizes error
// middleware by its four-argument signature.
router.use((err, req, res, next) => {
  res.status(500).json({ 'error': err.message })
})
module.exports = router
|
#!/usr/bin/bash
####! /usr/local/bin/zsh
####! /bin/bash
# Run app_para_searching over three datasets (SIFT, GIST, DEEP10M) with a
# doubling thread count, printing one banner per dataset.
if [ $# -ne 1 ]; then
    echo "Usage: $0 <data_directory>"
    # BUG FIX: exit non-zero on a usage error (plain `exit` returned 0).
    exit 1
fi
data_dir=$1
export KMP_AFFINITY="granularity=fine,compact,0,0"
cd ../cmake-build-release || exit
#####################
## Simple Search
#####################
bin=./app_para_searching
num_t_max=40
#num_t_max=64

# Run one dataset sweep.
# $1 = data_path, $2 = data_name, $3 = k, $4 = l, $5 = ground-truth file.
# The thread count doubles from 1 up to num_t_max for each dataset.
run_dataset() {
    local data_path=$1 data_name=$2 k=$3 l=$4 truth=$5
    echo "----${data_name}----"
    for ((num_t = 1; num_t <= num_t_max; num_t *= 2)); do
        ${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs ${truth} ${num_t}
    done
}

# SIFT
run_dataset ${data_dir}/sift1m sift 200 200 ${data_dir}/sift1m/sift.true-100_NN.q-10000.binary
# GIST
run_dataset ${data_dir}/gist1m gist 400 400 ${data_dir}/gist1m/gist.true-100_NN.q-1000.binary
# DEEP10M
run_dataset ${data_dir}/deep1b deep10M 400 400 ${data_dir}/deep1b/deep10M.true-100_NN.q-10000.binary
##set -x
### OpenMP Affinity for ICC
#export KMP_AFFINITY="granularity=fine,compact,1,0"
#
#cd ../cmake-build-release || exit
##bin_panns=./app_para_searching
#bin_panns=./profile_seq_searching_BitVector
#
#num_t_max=1
#
##
### SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=298
#echo "----${data_name}----"
#for ((num_t = 1; num_t <= num_t_max; num_t *= 2)); do
# ${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs ${data_path}/${data_name}.true-100_NN.q-10000.binary ${num_t}
#done
#
### GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=477
#echo "----${data_name}----"
#for ((num_t = 1; num_t <= num_t_max; num_t *= 2)); do
# ${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs ${data_path}/${data_name}.true-100_NN.q-1000.binary ${num_t}
#done
#
### DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=489
#echo "----${data_name}----"
#for ((num_t = 1; num_t <= num_t_max; num_t *= 2)); do
# ${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs ${data_path}/${data_name}.true-100_NN.q-10000.binary ${num_t}
#done
###
#./profile_top_m_seq_search /scratch/zpeng/sift1m/sift_base.fvecs /scratch/zpeng/sift1m/sift_query.fvecs /scratch/zpeng/sift1m/sift.nsg 200 200 output.ivecs 128 /scratch/zpeng/sift1m/sift.true-100_NN.q-10000.binary 1
#./profile_top_m_seq_search /scratch/zpeng/gist1m/gist_base.fvecs /scratch/zpeng/gist1m/gist_query.fvecs /scratch/zpeng/gist1m/gist.nsg 400 400 output.ivecs 128 /scratch/zpeng/gist1m/gist.true-100_NN.q-1000.binary 1
#./profile_top_m_seq_search /scratch/zpeng/deep1b/deep10M_base.fvecs /scratch/zpeng/deep1b/deep10M_query.fvecs /scratch/zpeng/deep1b/deep10M.nsg 400 400 output.ivecs 128 /scratch/zpeng/deep1b/deep10M.true-100_NN.q-10000.binary 1
## Distance Computation
##echo "----SimpleSearching----"
#./app_seq_searching /scratch/zpeng/sift1m/sift_base.fvecs /scratch/zpeng/sift1m/sift_query.fvecs /scratch/zpeng/sift1m/sift.nsg 298 200 output.ivecs /scratch/zpeng/sift1m/sift.true-100_NN.q-10000.binary
#./app_seq_searching /scratch/zpeng/gist1m/gist_base.fvecs /scratch/zpeng/gist1m/gist_query.fvecs /scratch/zpeng/gist1m/gist.nsg 477 400 output.ivecs /scratch/zpeng/gist1m/gist.true-100_NN.q-1000.binary
#./app_seq_searching /scratch/zpeng/deep1b/deep10M_base.fvecs /scratch/zpeng/deep1b/deep10M_query.fvecs /scratch/zpeng/deep1b/deep10M.nsg 489 400 output.ivecs /scratch/zpeng/deep1b/deep10M.true-100_NN.q-10000.binary
#echo "----Top-M-Searching----"
#./app_top_m_seq_search /scratch/zpeng/sift1m/sift_base.fvecs /scratch/zpeng/sift1m/sift_query.fvecs /scratch/zpeng/sift1m/sift.nsg 200 200 output.ivecs 128 /scratch/zpeng/sift1m/sift.true-100_NN.q-10000.binary
#./app_top_m_seq_search /scratch/zpeng/gist1m/gist_base.fvecs /scratch/zpeng/gist1m/gist_query.fvecs /scratch/zpeng/gist1m/gist.nsg 400 400 output.ivecs 128 /scratch/zpeng/gist1m/gist.true-100_NN.q-1000.binary
#./app_top_m_seq_search /scratch/zpeng/deep1b/deep10M_base.fvecs /scratch/zpeng/deep1b/deep10M_query.fvecs /scratch/zpeng/deep1b/deep10M.nsg 400 400 output.ivecs 128 /scratch/zpeng/deep1b/deep10M.true-100_NN.q-10000.binary
### Find value L for seq searching to achive the same accuracy as PANNS-TMS.
#echo "----SIFT1M----"
#./profile_seq_searching_find_value_L /scratch/zpeng/sift1m/sift_base.fvecs /scratch/zpeng/sift1m/sift_query.fvecs /scratch/zpeng/sift1m/sift.nsg 200 1000 200 output.ivecs /scratch/zpeng/sift1m/sift.true-100_NN.q-10000.binary 0.997461
#
#echo "----GIST1M----"
#./profile_seq_searching_find_value_L /scratch/zpeng/gist1m/gist_base.fvecs /scratch/zpeng/gist1m/gist_query.fvecs /scratch/zpeng/gist1m/gist.nsg 400 1000 400 output.ivecs /scratch/zpeng/gist1m/gist.true-100_NN.q-1000.binary 0.985630
#
#echo "----DEEP10M----"
#./profile_seq_searching_find_value_L /scratch/zpeng/deep1b/deep10M_base.fvecs /scratch/zpeng/deep1b/deep10M_query.fvecs /scratch/zpeng/deep1b/deep10M.nsg 400 1000 400 output.ivecs /scratch/zpeng/deep1b/deep10M.true-100_NN.q-10000.binary 0.996748
#
#echo "---- SIFT1M ----"
#python3 ../scripts/module_run.py ./app_seq_searching sift
#python3 ../scripts/module_run.py ./test_nsg_optimized_search sift
#echo "---- GIST1M ----"
#python3 ../scripts/module_run.py ./app_seq_searching gist
#python3 ../scripts/module_run.py ./test_nsg_optimized_search gist
#echo "---- DEEP10M ----"
#python3 ../scripts/module_run.py ./app_seq_searching deep
#python3 ../scripts/module_run.py ./test_nsg_optimized_search deep
#echo "---- SIFT1M ----"
#python3 ../scripts/module_run.py ./profile_top_m_batch_search_value_m sift 128 256 /scratch/zpeng/sift1m/sift.true-100_NN.q-10000.binary 0.993029
#echo "---- GIST1M ----"
#python3 ../scripts/module_run.py ./profile_top_m_batch_search_value_m gist 256 256 /scratch/zpeng/gist1m/gist.true-100_NN.q-1000.binary 0.980970
#echo "---- DEEP10M ----"
#python3 ../scripts/module_run.py ./profile_top_m_batch_search_value_m deep 256 256 /scratch/zpeng/deep1b/deep10M.true-100_NN.q-10000.binary 0.995195
#echo "---- SIFT1M ----"
#python3 ../scripts/module_run.py ./profile_top_m_batch_search_find_L sift 256 256 /scratch/zpeng/sift1m/sift.true-100_NN.q-10000.binary 0.993029
#echo "---- GIST1M ----"
#python3 ../scripts/module_run.py ./profile_top_m_batch_search_find_L gist 512 256 /scratch/zpeng/gist1m/gist.true-100_NN.q-1000.binary 0.980970
#echo "---- DEEP10M ----"
#python3 ../scripts/module_run.py ./profile_top_m_batch_search_find_L deep 512 256 /scratch/zpeng/deep1b/deep10M.true-100_NN.q-10000.binary 0.995195
#
## PANNS Performane vs. value M
#bin_panns=/home/zpeng/pppp/clion/batch_searching/cmake-build-release/profile_top_m_search
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-10000.binary 128 2
##${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-10000.binary 128 2
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 1000 ${data_path}/${data_name}.true-100_NN.q-1000.binary 128 2
##${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 1000 ${data_path}/${data_name}.true-100_NN.q-1000.binary 128 2
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-10000.binary 128 2
##${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-10000.binary 128 2
## PANNS sequential searching vs. NSG sequential searching
#bin_panns=/home/zpeng/pppp/clion/batch_searching/cmake-build-release/app_seq_searching
#bin_nsg=/home/zpeng/pppp/clion/batch_searching/cmake-build-release/test_nsg_optimized_search
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin_nsg} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000
#${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000
#
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin_nsg} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 1000
#${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 1000
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin_nsg} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000
#${bin_panns} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000
## Do computation with trace so without graph traverse
#bin=/home/zpeng/pppp/clion/searching_mac/cmake-build-release/tests/app_search_in_batch
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 | tee ${output}
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 1000 | tee ${output}
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 | tee ${output}
## Do computation with trace so without graph traverse
#bin=/home/zpeng/pppp/clion/searching_mac/cmake-build-release/tests/test_nsg_optimized_search
##bin=/home/zpeng/pppp/clion/searching_mac/cmake-build-release/tests/profile_search_to_record_trace
#bin_search=/home/zpeng/pppp/clion/searching_mac/cmake-build-release/tests/profile_search_on_recorded_trace
#
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 | tee ${output}
#${bin_search} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.trace.q-10000.binary | tee -a ${output}
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 1000 | tee ${output}
#${bin_search} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 1000 ${data_path}/${data_name}.trace.q-1000.binary | tee -a ${output}
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 | tee ${output}
#${bin_search} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.trace.q-10000.binary | tee -a ${output}
## Graph Degree Distribution
#bin=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_graph_degree_distribution
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "Original Graph" | tee ${output}
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}.nsg | tee -a ${output}
#echo "Reversed Graph" | tee -a ${output}
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}.reversed.nsg | tee -a ${output}
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "Original Graph" | tee ${output}
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}.nsg | tee -a ${output}
#echo "Reversed Graph" | tee -a ${output}
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}.reversed.nsg | tee -a ${output}
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "Original Graph" | tee ${output}
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}.nsg | tee -a ${output}
#echo "Reversed Graph" | tee -a ${output}
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}.reversed.nsg | tee -a ${output}
## Measure shared candidates
#bin=/home/zpeng/benchmarks/clion/nsg_mac/cmake-build-release/tests/profile_search_shared_tops
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
##${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k 40
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k 40
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
##${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k 40
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k 40
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
##${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k 40
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k 40
## Measure shared top-ranked
#bin=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_search_shared_tops
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k 10000
##${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k 10000
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k 1000
##${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k 1000
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k 10000
##${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k 10000
## Try Reversed NSG searching
#bin=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_precision
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=100
#l=100
##k=200
##l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "Original_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-10000.binary
#echo "Reversed_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-10000.binary
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=100
#l=100
##k=400
##l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "Original_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-1000.binary
#echo "Reversed_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-1000.binary
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=100
#l=100
##k=400
##l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "Original_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-10000.binary
#echo "Reversed_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-10000.binary
## Generate Reversed NSG
#bin=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_reversed_nsg_index
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_${k}nn.graph 40 50 500 ${data_path}/${data_name}.reversed.nsg
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_${k}nn.graph 60 70 500 ${data_path}/${data_name}.reversed.nsg
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_${k}nn.graph 60 70 500 ${data_path}/${data_name}.reversed.nsg
#
## Try Reversed NSG searching
#bin=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/test_nsg_optimized_search
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "Original_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000
#echo "Reversed_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k output.ivecs 10000
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "Original_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000
#echo "Reversed_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k output.ivecs 10000
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "Original_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000
#echo "Reversed_NSG"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.reversed.nsg $l $k output.ivecs 10000
#
## Batched Joint Traversal Search, and Sequential Search
#bin=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_precision
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-10000.binary
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-1000.binary
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 ${data_path}/${data_name}.true-100_NN.q-10000.binary
## Batched Joint Traversal Search, and Sequential Search
#bin_joint_searching=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_batched_joint_traversal_search
#bin_normal_searching=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/test_nsg_optimized_search
### Breakdown optimized searching
##bin=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_breakdown_optimized_searching
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "---- Sequntial Search ----" | tee ${output}
#${bin_normal_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 10000 | tee -a ${output}
#echo "---- Batched Joint Search ----" | tee -a ${output}
#${bin_joint_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 10000 | tee -a ${output}
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "---- Sequntial Search ----" | tee ${output}
#${bin_normal_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 10000 | tee -a ${output}
#echo "---- Batched Joint Search ----" | tee -a ${output}
#${bin_joint_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 10000 | tee -a ${output}
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "---- Sequntial Search ----" | tee ${output}
#${bin_normal_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 10000 | tee -a ${output}
#echo "---- Batched Joint Search ----" | tee -a ${output}
#${bin_joint_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 10000 | tee -a ${output}
## Normal searching, Joint searching, and Parallel searching
## Generate paired queries
#bin_normal_searching=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/test_nsg_optimized_search
#bin_joint_searching=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_joint_traversal_search
#bin_parallel_searching=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_parallel_optimized_search
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "normal_searching:" > ${output}
#${bin_normal_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 | tee -a ${output}
#echo "joint_searching" >> ${output}
#${bin_joint_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 | tee -a ${output}
#echo "parallel_searching" >> ${output}
#${bin_parallel_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 32 | tee -a ${output}
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "normal_searching:" > ${output}
#${bin_normal_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 | tee -a ${output}
#echo "joint_searching" >> ${output}
#${bin_joint_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 | tee -a ${output}
#echo "parallel_searching" >> ${output}
#${bin_parallel_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 32 | tee -a ${output}
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#echo "normal_searching:" > ${output}
#${bin_normal_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 | tee -a ${output}
#echo "joint_searching" >> ${output}
#${bin_joint_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 | tee -a ${output}
#echo "parallel_searching" >> ${output}
#${bin_parallel_searching} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_all_same_query.fvecs ${data_path}/${data_name}.nsg $l $k output.ivecs 10000 32 | tee -a ${output}
## Generate paired queries
#bin=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_pair_queries
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}_paired_query.fvecs 500
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}_paired_query.fvecs 500
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}_paired_query.fvecs 500
#
#
## Measure percentage of shared candidates
#bin=/home/zpeng/benchmarks/clion/nsg_th107b4/cmake-build-release/tests/profile_search_shared_candidates
#
## SIFT
#data_path=/scratch/zpeng/sift1m
#data_name=sift
#k=200
#l=200
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 20 | tee ${output}
##${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 20 | tee ${output}
#
## GIST
#data_path=/scratch/zpeng/gist1m
#data_name=gist
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 20 | tee ${output}
##${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 20 | tee ${output}
#
## DEEP10M
#data_path=/scratch/zpeng/deep1b
#data_name=deep10M
#k=400
#l=400
#output=output.${data_name}.txt
#echo "---- ${data_path}/${data_name} ----"
#${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 20 | tee ${output}
##${bin} ${data_path}/${data_name}_base.fvecs ${data_path}/${data_name}_paired_query.fvecs ${data_path}/${data_name}.nsg $l $k output.${data_name}.ivecs 20 | tee ${output}
## FAKE
#data_path=/scratch/zpeng/fake
#data_name=fake
#vol_start=1000
#vol_bound=10000000
##vol_step=20000
#dim_start=64
#dim_bound=512
##dim_step=64
#K=400
#L=400
#echo "---- ${data_path}/${data_name} ----"
##for ((vol = vol_start; vol <= vol_bound; vol += vol_step)); do
## for ((dim = dim_start; dim <= dim_bound; dim += dim_step)); do
#for ((vol = vol_start; vol <= vol_bound; vol *= 10)); do
# for ((dim = dim_start; dim <= dim_bound; dim *= 2)); do
# ./test_nsg_optimized_search ${data_path}/${data_name}_v${vol}d${dim}_base.fvecs ${data_path}/${data_name}_v${vol}d${dim}_query.fvecs ${data_path}/${data_name}_v${vol}d${dim}.nsg $L $K fake.ivecs 10000
# done
#done
# Turn command echoing back off now that the (entirely commented-out)
# benchmark invocations above are done.
set +x
|
# Factory producing a BrNfe PIS item-tax record for specs.
FactoryGirl.define do
  factory :product_item_tax_pis, class: BrNfe::Product::Nfe::ItemTax::Pis do
    # Default CST code for the PIS tax item (lazy block form).
    codigo_cst { '04' }
  end
end
<filename>dhcpv6/option_archtype_test.go
package dhcpv6
import (
"testing"
"github.com/talos-systems/dhcp/iana"
"github.com/stretchr/testify/require"
)
// TestParseOptClientArchType checks that a 2-byte arch-type payload
// (0x0006 = EFI_IA32) is decoded into the option's ArchTypes slice.
func TestParseOptClientArchType(t *testing.T) {
	data := []byte{
		0, 6, // EFI_IA32
	}
	opt, err := ParseOptClientArchType(data)
	require.NoError(t, err)
	require.Equal(t, iana.EFI_IA32, opt.ArchTypes[0])
}
// TestParseOptClientArchTypeInvalid verifies that a truncated (1-byte)
// payload is rejected with an error.
func TestParseOptClientArchTypeInvalid(t *testing.T) {
	truncated := []byte{42}
	_, err := ParseOptClientArchType(truncated)
	require.Error(t, err)
}
// TestOptClientArchTypeParseAndToBytes checks that parsing a payload and
// serializing it back with ToBytes round-trips to identical bytes.
func TestOptClientArchTypeParseAndToBytes(t *testing.T) {
	data := []byte{
		0, 8, // EFI_XSCALE
	}
	opt, err := ParseOptClientArchType(data)
	require.NoError(t, err)
	require.Equal(t, data, opt.ToBytes())
}
// TestOptClientArchType checks the option's code constant and that
// String() renders the human-readable arch-type name.
func TestOptClientArchType(t *testing.T) {
	opt := OptClientArchType{
		ArchTypes: []iana.Arch{iana.EFI_ITANIUM},
	}
	require.Equal(t, OptionClientArchType, opt.Code())
	require.Contains(t, opt.String(), "archtype=EFI Itanium", "String() should contain the correct ArchType output")
}
|
<filename>src/vuejsclient/ts/components/supervision/dashboard/item/SupervisionDashboardItemComponent.ts
import Component from 'vue-class-component';
import { Prop, Watch } from 'vue-property-decorator';
import Dates from '../../../../../../shared/modules/FormatDatesNombres/Dates/Dates';
import ModuleFormatDatesNombres from '../../../../../../shared/modules/FormatDatesNombres/ModuleFormatDatesNombres';
import ISupervisedItem from '../../../../../../shared/modules/Supervision/interfaces/ISupervisedItem';
import ISupervisedItemController from '../../../../../../shared/modules/Supervision/interfaces/ISupervisedItemController';
import SupervisionController from '../../../../../../shared/modules/Supervision/SupervisionController';
import VueComponentBase from '../../../../../ts/components/VueComponentBase';
import { ModuleSupervisionAction } from '../SupervisionDashboardStore';
import './SupervisionDashboardItemComponent.scss';
@Component({
template: require('./SupervisionDashboardItemComponent.pug'),
components: {}
})
export default class SupervisionDashboardItemComponent extends VueComponentBase {
@ModuleSupervisionAction
private set_selected_item: (selected_item: ISupervisedItem) => void;
@Prop()
private item: ISupervisedItem;
@Prop({ default: false })
private noclick: boolean;
private state_classname: string = 'STATE_UNKNOWN';
private fa_class_name: string = null;
private formatted_date: string = null;
private formatted_last_value: string = null;
@Watch('item', { immediate: true })
private onchange_item() {
this.set_state_classname();
this.set_fa_class_name();
this.set_formatted_date();
this.set_formatted_last_value();
}
private set_state_classname() {
if (!this.item) {
this.state_classname = "STATE_UNKNOWN";
return;
}
let state_classname: string = null;
switch (this.item.state) {
case SupervisionController.STATE_ERROR:
state_classname = "STATE_ERROR";
break;
case SupervisionController.STATE_ERROR_READ:
state_classname = "STATE_ERROR_READ";
break;
case SupervisionController.STATE_OK:
state_classname = "STATE_OK";
break;
case SupervisionController.STATE_PAUSED:
state_classname = "STATE_PAUSED";
break;
case SupervisionController.STATE_UNKOWN:
state_classname = "STATE_UNKOWN";
break;
case SupervisionController.STATE_WARN:
state_classname = "STATE_WARN";
break;
case SupervisionController.STATE_WARN_READ:
state_classname = "STATE_WARN_READ";
break;
default:
break;
}
this.state_classname = state_classname;
}
private set_fa_class_name() {
if (!this.state_classname) {
this.fa_class_name = "";
return;
}
let fa_class_name: string = null;
switch (this.state_classname) {
case "STATE_ERROR":
fa_class_name = "fa-exclamation-triangle";
break;
case "STATE_ERROR_READ":
fa_class_name = "fa-exclamation-triangle";
break;
case "STATE_OK":
fa_class_name = "fa-check";
break;
case "STATE_PAUSED":
fa_class_name = "fa-pause";
break;
case "STATE_UNKOWN":
fa_class_name = "fa-question";
break;
case "STATE_WARN":
fa_class_name = "fa-exclamation";
break;
case "STATE_WARN_READ":
fa_class_name = "fa-exclamation";
break;
default:
break;
}
this.fa_class_name = fa_class_name;
}
private set_formatted_date() {
if (!this.item) {
this.formatted_date = null;
return;
}
this.formatted_date = this.item.last_update ? Dates.format(this.item.last_update, ModuleFormatDatesNombres.FORMAT_YYYYMMDD_HHmmss) : "-";
}
private set_formatted_last_value() {
if (!this.item) {
this.formatted_last_value = null;
return;
}
this.formatted_last_value = this.item.last_value == null ? "-" : this.item.last_value.toLocaleString();
}
get supervised_item_controller(): ISupervisedItemController<any> {
return SupervisionController.getInstance().registered_controllers[this.item._type];
}
get router_to() {
return {
name: SupervisionController.ROUTE_NAME_DASHBOARD_ITEM,
params: {
dashboard_key: this.$route.params.dashboard_key,
supervised_item_vo_type: this.item._type,
supervised_item_vo_id: this.item.id.toString(),
}
};
}
} |
#!/bin/bash
# Stop the led/button demo processes and remove their shared semaphore.
#
# Note: the original tests were written as `[ `pgrep x` > 0 ]`, but inside
# `[ ]` the `>` is an output redirection, so each test created a file named
# "0" and always succeeded. Use pgrep's exit status instead.

if pgrep led_process > /dev/null; then
    echo "Killing led_process"
    kill $(pgrep led_process)
fi

if pgrep button_process > /dev/null; then
    echo "Killing button_process"
    kill $(pgrep button_process)
fi

# Find the System V semaphore created with key 1234 (column 2 of ipcs -s
# is the semaphore id) and remove it if present.
ID=$(ipcs -s | grep $(printf 0x"%08x\n" 1234) | awk -F' ' '{print $2}')
if [ -n "$ID" ]; then
    echo "Removing semaphore ID=${ID}"
    ipcrm -s "$ID"
fi
import heapq
def huffman_encode(data):
    """Build a Huffman code for the symbols in ``data`` and encode ``data``.

    Parameters
    ----------
    data : iterable of hashable symbols (characters, ints, ...).

    Returns
    -------
    str
        Concatenated '0'/'1' code words, one per input symbol.
        Empty string for empty input; a single distinct symbol is coded '0'.
    """
    if not data:
        return ""

    # Count symbol frequencies.
    frequency = {}
    for symbol in data:
        frequency[symbol] = frequency.get(symbol, 0) + 1

    # Heap of (total_frequency, tuple_of_symbols).  Keeping symbols in a
    # tuple (instead of concatenating keys) makes this work for non-string
    # symbols such as ints; the original iterated over the raw key and
    # crashed with TypeError on its own integer sample data.
    heap = [(freq, (symbol,)) for symbol, freq in frequency.items()]
    heapq.heapify(heap)

    encoding = {}
    # Degenerate case: one distinct symbol still needs a 1-bit code,
    # otherwise the merge loop below never runs and the dict stays empty.
    if len(heap) == 1:
        encoding[heap[0][1][0]] = '0'

    while len(heap) > 1:
        freq_1, symbols_1 = heapq.heappop(heap)
        freq_2, symbols_2 = heapq.heappop(heap)
        # Symbols in the lighter subtree gain a leading '0', the other '1'.
        for symbol in symbols_1:
            encoding[symbol] = '0' + encoding.get(symbol, "")
        for symbol in symbols_2:
            encoding[symbol] = '1' + encoding.get(symbol, "")
        heapq.heappush(heap, (freq_1 + freq_2, symbols_1 + symbols_2))

    return "".join(encoding[symbol] for symbol in data)
# Demo: encode a small integer sequence and print the bit string.
data = [3, 4, 5, 6, 7, 8, 8]
encoded_data = huffman_encode(data)
print(encoded_data)
# NOTE(review): the exact bit string depends on heap tie-breaking; the value
# below is the author's recorded run - verify before relying on it.
# Output: 1001000111
# Copy the public SSH key to the clipboard (pbcopy is macOS-only).
alias myssh="pbcopy < ~/.ssh/id_rsa.pub"
|
#!/bin/sh
# arm_attr_merge.sh -- test ARM attributes merging.
# Copyright (C) 2010-2017 Free Software Foundation, Inc.
# Written by Doug Kwan <dougkwan@google.com>
# This file is part of gold.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston,
# MA 02110-1301, USA.
# This file goes with the assembler source files arm_attr_merge*.s
# check FILE PATTERN - exit the test with failure if PATTERN is absent
# from FILE.
check()
{
    target_file=$1
    expected=$2
    if ! grep -q "$expected" "$target_file"; then
        echo "pattern \"$expected\" not found in file $target_file."
        exit 1
    fi
}
# This is a bit crude.
# Each readelf dump must show that MP extension use survived the merge.
check arm_attr_merge_6.stdout "Tag_MPextension_use: Allowed"
check arm_attr_merge_6r.stdout "Tag_MPextension_use: Allowed"
check arm_attr_merge_7.stdout "Tag_MPextension_use: Allowed"
exit 0
|
package models_test
import (
"encoding/json"
"testing"
"github.com/aaawoyucheng/wayne/src/backend/common"
"github.com/aaawoyucheng/wayne/src/backend/models"
)
// TestListApp lists up to 10 non-deleted apps in namespace 2 (page 1)
// and logs the JSON-encoded result for inspection.
func TestListApp(t *testing.T) {
	apps, err := models.AppModel.List(&common.QueryParam{
		PageNo:   1,
		PageSize: 10,
		Query: map[string]interface{}{
			"namespace_id": 2,
			"deleted":      false,
		},
	}, true, 1)
	if err != nil {
		t.Error(err)
	}
	// Marshal purely so the full result shows up in the test log.
	app, err := json.Marshal(apps)
	if err != nil {
		t.Error(err)
	}
	t.Log(string(app))
}
// TestCreatApp inserts a sample App row and logs the generated id.
func TestCreatApp(t *testing.T) {
	id, err := models.AppModel.Add(&models.App{
		Name: "example",
		Namespace: &models.Namespace{
			Id: 1,
		},
	})
	if err != nil {
		t.Error(err)
	}
	// The original logged string(id), which converts the numeric id to a
	// rune (e.g. 65 -> "A") instead of formatting the number; log the
	// value directly.
	t.Log(id)
}
|
-- fn_untank_parent_txn
-- Releases ("untanks") the tanked accounting entries (balance_upd = 'T')
-- of one transaction P_TRN_REF_NO in branch PBRANCH.  Each entry row is
-- locked with retries; customer-account entries are sent to the child
-- untanking routine and flagged 'U', GL entries are flagged 'R' (online GL
-- update on) or 'D' (deferred).  Commits after every entry.
-- Returns TRUE on success, FALSE (with perrcode set) on locking failure.
CREATE OR REPLACE FUNCTION fn_untank_parent_txn
(
pbranch IN sttms_branch.branch_code%TYPE,
plcy IN cytms_ccy_defn.ccy_code%TYPE,
P_TRN_REF_NO IN ACTBS_DAILY_LOG.TRN_REF_NO%TYPE,
perrcode OUT ertbs_msgs.err_code%TYPE,
pdate IN DATE,
pparams OUT VARCHAR2 -- NOTE(review): never assigned in this body - confirm callers ignore it
)
RETURN BOOLEAN IS
-- Tanked, not-deleted entries of the transaction, joined with their period
-- record so pc_start_date/pc_end_date are available per row.
CURSOR
cr_tank(pbranch IN sttms_branch.branch_code%TYPE) IS
SELECT l.*, p.pc_start_date, p.pc_end_date
FROM actbs_daily_log l, sttms_period_codes p
WHERE balance_upd = 'T'
AND ac_branch = pbranch
AND p.period_code=l.period_code
AND p.fin_cycle=l.financial_cycle
And NVL(delete_stat,'X') <> 'D'
AND TRN_REF_NO =p_TRN_REF_NO
ORDER BY ac_no;
l_balrec sttms_account_bal_tov%ROWTYPE; -- NOTE(review): declared but unused below
l_gl_flag sttms_bank.online_gl_update%TYPE; -- 'Y' => GLs updated online
err_upd EXCEPTION; -- raised when child untanking fails
no NUMBER; -- NOTE(review): declared but unused below
l_freq NUMBER; -- NOTE(review): declared but unused below
l_period sttms_branch.current_period%TYPE; -- branch's current period code
l_cycle sttms_branch.current_cycle%TYPE; -- branch's current financial cycle
l_locked BOOLEAN := FALSE; -- TRUE once the entry row is locked
l_retry NUMBER; -- lock attempts so far
l_maxtry NUMBER :=16; -- max lock attempts before giving up
i actbs_daily_log%ROWTYPE ; --Oracle 8 change
BEGIN
-- Select flag from on line update
SELECT
online_gl_update
INTO
l_gl_flag
FROM sttms_bank;
--
-- select the current period and cycle
SELECT
current_period,
current_cycle
INTO
l_period,
l_cycle
FROM
sttms_branch
WHERE
branch_code = pbranch;
Debug.pr_debug('AC','The values selected are :
Online update: '||l_gl_flag||
'Current period :'||l_period||
'Current Cycle: '||l_cycle);
--
FOR DUMMY IN cr_tank(pbranch)
LOOP
BEGIN
-- Savepoint so a failed child untank undoes only this entry's changes.
SAVEPOINT save_upd;
l_retry:=0;
l_locked:=FALSE;
-- Lock the entry row, retrying on ORA-00054 up to l_maxtry times.
WHILE (l_locked = FALSE)
LOOP
BEGIN
SELECT * INTO i
FROM actbs_daily_log
WHERE
ac_entry_sr_no = DUMMY.ac_entry_sr_no
FOR UPDATE NOWAIT;
l_locked:=TRUE;
EXCEPTION
WHEN OTHERS THEN
IF SQLCODE=-54 THEN -- ORA-00054: row busy, locked by another session
l_retry := l_retry + 1;
IF l_retry = l_maxtry THEN
Debug.pr_debug('AC','Account already locked : Exitting after
'||TO_CHAR(l_retry)|| ' attempts');
perrcode:='AC-UPD02';
RETURN FALSE;
END IF;
ELSE
Debug.pr_debug('AC','Error '||SQLERRM||' while locking the account: '
||i.ac_branch ||' '||i.ac_no);
perrcode:='AC-UPD02';
RETURN FALSE;
END IF;
END;
END LOOP;
Debug.pr_debug('AC','Will untank the account :'||i.ac_no);
-- Customer account (cust_gl <> 'G'): untank child balances, then flag the
-- entry 'U'.  The DECODEs roll period/cycle to the branch's current values
-- unless the entry was initiated after its period's end date (SIGN = -1),
-- in which case they are left unchanged.
IF i.cust_gl <> 'G' THEN
IF NOT acpkss_eod.fn_untank_child(i,pdate,plcy) THEN
RAISE err_upd;
ELSE
UPDATE actbs_daily_log
SET
balance_upd = 'U',
trn_dt = pdate,
period_code = DECODE(SIGN(DUMMY.pc_end_date-i.txn_init_date),-1,period_code,l_period),
financial_cycle = DECODE(SIGN(DUMMY.pc_end_date-i.txn_init_date),-1,financial_cycle,l_cycle)
WHERE
ac_entry_sr_no = i.ac_entry_sr_no;
END IF;
ELSE
-- GL entry: no child untanking here, just advance the flag so downstream
-- GL processing picks it up ('R' when online GL update is on, else 'D').
IF l_gl_flag = 'Y' THEN
UPDATE actbs_daily_log
SET
balance_upd = 'R',
trn_dt = pdate,
period_code = DECODE(SIGN(DUMMY.pc_end_date-i.txn_init_date),-1,period_code,l_period),
financial_cycle = DECODE(SIGN(DUMMY.pc_end_date-i.txn_init_date),-1,financial_cycle,l_cycle)
WHERE
ac_entry_sr_no = i.ac_entry_sr_no;
ELSE
UPDATE actbs_daily_log
SET
balance_upd = 'D',
trn_dt = pdate,
period_code = DECODE(SIGN(DUMMY.pc_end_date-i.txn_init_date),-1,period_code,l_period),
financial_cycle = DECODE(SIGN(DUMMY.pc_end_date-i.txn_init_date),-1,financial_cycle,l_cycle)
WHERE
ac_entry_sr_no = i.ac_entry_sr_no;
END IF;
END IF;
EXCEPTION
WHEN err_upd THEN
-- Child untanking failed: undo this entry's changes and continue the loop.
ROLLBACK TO save_upd;
END;
Debug.pr_debug('AC','Will commit now...');
-- Per-entry commit: a later failure does not undo already-untanked entries.
COMMIT;
END LOOP;
Debug.pr_debug('AC','Finished all. Will exit successfully');
RETURN TRUE;
EXCEPTION WHEN OTHERS THEN
-- NOTE(review): swallows every unexpected error without setting perrcode -
-- confirm callers can cope with FALSE plus a null error code.
RETURN FALSE;
END fn_untank_parent_txn;
/
sho err
|
<filename>src/com/client/listener/ChatListener.java
package com.client.listener;
import com.client.ClientMain;
import com.client.window.WindowController;
import io.netty.channel.Channel;
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
/**
 * Created by form on 2017-07-20.
 *
 * Sends the contents of the chat window's "message" text field to the
 * server channel when the user presses Enter.
 */
public class ChatListener implements KeyListener {

    /** Last text read from the input field; written to the server channel. */
    private String message;

    @Override
    public void keyPressed(KeyEvent event) {
        if (KeyEvent.VK_ENTER == event.getKeyCode()) {
            try {
                // Find the text field named "message" in the chat panel,
                // take its contents and clear it.
                JPanel panel = WindowController.getPanel();
                for (Component component : panel.getComponents()) {
                    String name = component.getName();
                    if ("message".equals(name)) {
                        // Renamed from "message": the original local
                        // JTextField shadowed the String field above.
                        JTextField field = (JTextField) component;
                        this.message = field.getText();
                        field.setText("");
                        break;
                    }
                }
                ClientMain client = ClientMain.getInstance();
                Channel channel = client.getChannel();
                // Null check first, then emptiness. The original tested
                // !"".equals(message) before message != null, which only
                // worked because "".equals(null) happens to be false.
                if (message != null && !message.isEmpty()) {
                    channel.writeAndFlush(message);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    @Override
    public void keyTyped(KeyEvent e) {
    }

    @Override
    public void keyReleased(KeyEvent e) {
    }
}
|
<filename>src/templates/team-administrators.js
import React from "react";
// nodejs library that concatenates classes
import classnames from "classnames";
import {graphql} from 'gatsby'
import Layout from '../components/Layout'
// reactstrap components
import {
Modal,
Badge,
Button,
Card,
CardBody,
CardHeader,
CardTitle,
CardImg,
CardImgOverlay,
FormGroup,
Input,
InputGroupAddon,
InputGroupText,
InputGroup,
Container,
Row,
Col
} from "reactstrap";
// core components
import DemoNavbar from "../components/Navbars/DemoNavbar.jsx";
import CardsFooter from "../components/Footers/CardsFooter.jsx";
import PortfolioCardStyled from "../components/PortfolioCardStyled";
const clinicAdminData = {
data:[
{
id:0,
name:"<NAME>",
role:"Administrator",
image:"https://piano.uottawa.ca/mwc/img/raina.jpeg"
},
{
id:1,
name:"<NAME>",
role:"Administrator",
image:"https://piano.uottawa.ca/mwc/img/jeff.jpeg"
},
{
id:2,
name:"<NAME>",
role:"Web Developer",
image:"https://piano.uottawa.ca/mwc/img/lilian.jpeg"
}
]
}
const researchAdminData = {
data:[
{
id:0,
name:"<NAME>",
role:"Research Coordinator",
image:"https://piano.uottawa.ca/mwc/img/mikael.jpeg"
},
{
id:1,
name:"<NAME>",
role:"Administrator",
image:"https://piano.uottawa.ca/mwc/img/nicole.jpeg"
},
{
id:2,
name:"<NAME>",
role:"Technicial / Web Developer",
image:"https://piano.uottawa.ca/mwc/img/chen.jpeg"
}
]
}
class TeamAdministratorsTemplate extends React.Component {
state = {
exampleModal: false,
masterProgramModal: false,
fourCourseModal:false,
workshopsModal:false,
masterclassModal:false
};
toggleModal = state => {
this.setState({
[state]: !this.state[state]
});
};
componentDidMount() {
document.documentElement.scrollTop = 0;
document.scrollingElement.scrollTop = 0;
}
render() {
return (
<Layout>
<DemoNavbar/>
<main ref="main">
<div className="position-relative">
{/* shape Hero */}
<section className="section section-shaped ext-large" style={{
backgroundImage: 'linear-gradient(rgba(114, 105, 80, 0.37), rgba(0, 0, 0, 0.5)), url("https://piano.uottawa.ca/mwc/img/admin.jpeg")',
}}>
<Container className="py-lg-md d-flex">
<div className="col px-0">
<Row>
<Col lg="10">
<h1 className="display-3 text-white">
Administrators{" "}
</h1>
<h3 className="text-white">Our centre is supported by an enthusiastic team of administrators.</h3>
<p className="lead text-white">
All of our administrators are students and employees of the University of Ottawa who are passionate about musicians’ wellness.
</p>
</Col>
</Row>
</div>
</Container>
</section>
</div>
<div>
<section className="section section-lg bg-dark">
<Container >
<h1 className="display-5 text-center text-white">Clinic Administrators</h1>
<hr/>
<br/>
<Row className="justify-content-center hover-zoom">
{clinicAdminData.data.map(administrator =>
<Col md={"4"} xs={"7"}>
<PortfolioCardStyled name={administrator.name} image={administrator.image} qualification={administrator.role}/>
</Col>
)}
</Row>
</Container>
</section>
</div>
<div>
<section className="section section-lg bg-lighter">
<Container >
<h1 className="display-5 text-center">Research Administrators</h1>
<hr className="bg-dark"/>
<br/>
<Row className="justify-content-center hover-zoom" >
{researchAdminData.data.map(administrator =>
<Col md={"4"} xs={"7"}>
<PortfolioCardStyled name={administrator.name} image={administrator.image} qualification={administrator.role}/>
</Col>
)}
</Row>
</Container>
</section>
</div>
</main>
{/*<CardsFooter/>*/}
</Layout>
);
}
}
// Gatsby page wrapper: receives the GraphQL query result as `data`.
// NOTE(review): `data` is currently unused — the template renders only
// hard-coded administrator data; confirm whether a page query belongs here.
const TeamAdministrators = ({data}) => {
return (
<TeamAdministratorsTemplate/>
)
}
export default TeamAdministrators
|
#!/bin/bash
# Regenerate variables for the 'cncf' project only. The GHA2DB_* flags
# presumably select ElasticSearch-only mode and skip the TSDB/Postgres
# writes — confirm against the devstats documentation.
clear; ONLY=cncf GHA2DB_USE_ES_ONLY=1 GHA2DB_USE_ES=1 GHA2DB_SKIPTSDB=1 GHA2DB_SKIPPDB=1 ./devel/vars_all.sh
|
# Build then install (run from the project root; install may require root).
make
make install
|
# Resolve the project root from this script's own location.
ROOT=$(cd "$(dirname $0)"; pwd)
# Classpath: compiled Scala classes plus OpenCV/Breeze/netlib and friends
# from the local ivy2 cache. (Backslash continuations — keep as one block.)
CLASS_PATH=$ROOT/target/scala-2.11/classes/:\
$HOME/.ivy2/cache/nu.pattern/opencv/jars/opencv-2.4.9-7.jar:\
$HOME/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.8.jar:\
$HOME/.ivy2/cache/org.scalanlp/breeze_2.11/jars/breeze_2.11-0.12.jar:\
$HOME/.ivy2/cache/org.scalanlp/breeze-macros_2.11/jars/breeze-macros_2.11-0.12.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/core/jars/core-1.1.2.jar:\
$HOME/.ivy2/cache/net.sourceforge.f2j/arpack_combined_all/jars/arpack_combined_all-0.1.jar:\
$HOME/.ivy2/cache/net.sf.opencsv/opencsv/jars/opencsv-2.3.jar:\
$HOME/.ivy2/cache/com.github.rwl/jtransforms/jars/jtransforms-2.4.0.jar:\
$HOME/.ivy2/cache/junit/junit/jars/junit-4.8.2.jar:\
$HOME/.ivy2/cache/org.apache.commons/commons-math3/jars/commons-math3-3.2.jar:\
$HOME/.ivy2/cache/org.spire-math/spire_2.11/jars/spire_2.11-0.7.4.jar:\
$HOME/.ivy2/cache/org.spire-math/spire-macros_2.11/jars/spire-macros_2.11-0.7.4.jar:\
$HOME/.ivy2/cache/org.slf4j/slf4j-api/jars/slf4j-api-1.7.5.jar:\
$HOME/.ivy2/cache/com.chuusai/shapeless_2.11/bundles/shapeless_2.11-2.0.0.jar:\
$HOME/.ivy2/cache/org.scalanlp/breeze-natives_2.11/jars/breeze-natives_2.11-0.12.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_ref-osx-x86_64/jars/netlib-native_ref-osx-x86_64-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/native_ref-java/jars/native_ref-java-1.1.jar:\
$HOME/.ivy2/cache/com.github.fommil/jniloader/jars/jniloader-1.1.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_ref-linux-x86_64/jars/netlib-native_ref-linux-x86_64-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_ref-linux-i686/jars/netlib-native_ref-linux-i686-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_ref-win-x86_64/jars/netlib-native_ref-win-x86_64-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_ref-win-i686/jars/netlib-native_ref-win-i686-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_ref-linux-armhf/jars/netlib-native_ref-linux-armhf-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_system-osx-x86_64/jars/netlib-native_system-osx-x86_64-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/native_system-java/jars/native_system-java-1.1.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_system-linux-x86_64/jars/netlib-native_system-linux-x86_64-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_system-linux-i686/jars/netlib-native_system-linux-i686-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_system-linux-armhf/jars/netlib-native_system-linux-armhf-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_system-win-x86_64/jars/netlib-native_system-win-x86_64-1.1-natives.jar:\
$HOME/.ivy2/cache/com.github.fommil.netlib/netlib-native_system-win-i686/jars/netlib-native_system-win-i686-1.1-natives.jar:\
$HOME/.ivy2/cache/org.scalanlp/breeze-viz_2.11/jars/breeze-viz_2.11-0.12.jar:\
$HOME/.ivy2/cache/jfree/jcommon/jars/jcommon-1.0.16.jar:\
$HOME/.ivy2/cache/jfree/jfreechart/jars/jfreechart-1.0.13.jar:\
$HOME/.ivy2/cache/org.apache.xmlgraphics/xmlgraphics-commons/jars/xmlgraphics-commons-1.3.1.jar:\
$HOME/.ivy2/cache/commons-io/commons-io/jars/commons-io-1.3.1.jar:\
$HOME/.ivy2/cache/commons-logging/commons-logging/jars/commons-logging-1.0.4.jar:\
$HOME/.ivy2/cache/com.lowagie/itext/jars/itext-2.1.5.jar:\
$HOME/.ivy2/cache/bouncycastle/bcmail-jdk14/jars/bcmail-jdk14-138.jar:\
$HOME/.ivy2/cache/bouncycastle/bcprov-jdk14/jars/bcprov-jdk14-138.jar
# Input/output paths for the pencil-drawing renderer.
PYTHON_DIR_PATH=$ROOT/python
IMG_PATH=$ROOT/images/img/flower.png
OUT_PATH=$ROOT/images/pencil_result.jpg
TEXTURE_IMG_PATH=$ROOT/images/textures/texture.jpg
# set to 0 generates the pencil drawing,
# set to 1 generates the colourful pencil drawing
IS_DRAW_COLOR=1
# set to 1 will output the middle results
# set to 0 just output the final result
IS_SHOW_STEP=1
# NOTE(review): $LIBRARY_PATH is never set in this script — it must come
# from the environment for the OpenCV native library to load; confirm.
java -Xmx4G -cp $CLASS_PATH \
-Djava.library.path=$LIBRARY_PATH \
PencilStyle $IMG_PATH $OUT_PATH $TEXTURE_IMG_PATH $IS_DRAW_COLOR $PYTHON_DIR_PATH $IS_SHOW_STEP
|
<gh_stars>0
/**
 * Give keyboard focus to `target` unless it already has it.
 * (When nothing is focused, `document.activeElement !== target` also holds,
 * so the original's extra null check collapses into a single comparison.)
 */
const focus = (target: HTMLElement) => {
  if (document.activeElement !== target) {
    target.focus();
  }
};
/**
 * Type guard: true only when `element` really is a <textarea>.
 *
 * Fix: the previous body returned `element.contentEditable !== "true"`,
 * which claims that ANY non-contenteditable element is a textarea — a lying
 * type predicate. Callers then read `.selectionStart` / `.value` off plain
 * elements. An instanceof check narrows truthfully; non-textarea elements
 * now fall through to the Selection/Range code path instead.
 */
const isHTMLTextAreaElement = (
  element: HTMLElement | HTMLTextAreaElement
): element is HTMLTextAreaElement => {
  return element instanceof HTMLTextAreaElement;
};
// Caret (text cursor) helper for both <textarea> and contenteditable
// elements. Returns a {get, set} pair bound to `element`.
const caret = (element: HTMLElement | HTMLTextAreaElement) => {
const isContentEditable = element.contentEditable === "true";
return {
// Current caret offset in characters, or undefined when no selection exists.
get: () => {
// textarea branch: the native selectionStart is the caret position
if (isHTMLTextAreaElement(element)) {
return element.selectionStart;
}
focus(element);
const selection = window.getSelection();
if (!selection) return;
// Measure the offset by cloning the selection range, extending it over the
// element's contents up to the caret, and counting the spanned text.
const range1 = selection.getRangeAt(0);
const range2 = range1.cloneRange();
range2.selectNodeContents(element);
range2.setEnd(range1.endContainer, range1.endOffset);
return range2.toString().length;
},
// Move the caret to `position`; -1 means "end of content".
set: (position: number) => {
// move to last
if (position === -1) {
if (isHTMLTextAreaElement(element)) {
position = element.value.length;
} else {
position = element.innerText.length;
}
}
// textarea: collapse the native selection to a single point
if (isHTMLTextAreaElement(element)) {
element.setSelectionRange(position, position);
focus(element);
}
// contenteditable
else {
focus(element);
// NOTE(review): collapse() targets element.firstChild, which assumes the
// content is a single text node — confirm for multi-node content.
window.getSelection()?.collapse(element.firstChild, position);
}
},
};
};
export default caret;
|
/**
 * Strip a trailing "/with/<id>" or "/in/<context>" path segment from a
 * flickr.com photo URL; any other URL (or a non-matching one) is returned
 * unchanged.
 *
 * Named instead of an anonymous default export so stack traces, debuggers
 * and unit tests can refer to the function; the default export is kept.
 */
const stripFlickrPathSuffix = (url: string): string => {
  const result = /^(https:\/\/(?:www\.)?flickr\.com\/.*?)(?:\/with\/\d+\/?|\/in\/[^\s/]+\/?)?$/.exec(
    url
  )
  return result ? result[1] : url
}

export default stripFlickrPathSuffix
|
#!/bin/bash -e
# Upserts awardees, organizations, and/or sites from CSV input files in the data dir
USAGE="tools/import_organizations.sh [--account <USER>@pmi-ops.org --project <PROJECT>] [--dry_run]"

while true; do
  case "$1" in
    --account) ACCOUNT=$2; shift 2;;
    --project) PROJECT=$2; shift 2;;
    --dry_run) DRY_RUN=--dry_run; shift 1;;
    --use_fixture_data) USE_FIXTURES=true; shift 1;;
    -- ) shift; break ;;
    * ) break ;;
  esac
done

# --project without --account is invalid (credentials are needed for a project).
if [ -z "${ACCOUNT}" ] && [ "${PROJECT}" ];
then
  echo "Usage: $USAGE"
  exit 1
fi

TMP_GEOCODE_DIR=$(mktemp -d)
TMP_GEOCODE_INFO_FILE=${TMP_GEOCODE_DIR}/geocode_key.json

function cleanup {
  :
}

# Fix: register the cleanup trap immediately after creating the temp dir.
# The original only set the trap at the very end of the script, so any
# earlier failure (this script runs with -e) leaked the temp geocode key.
function finish {
  cleanup
  rm -rf ${TMP_GEOCODE_DIR}
  rm -f ${TMP_GEOCODE_INFO_FILE}
}
trap finish EXIT

# Fetch the geocoding API key into the temp file and export it.
function get_geocode_key {
  echo "Getting geocode api key ..."
  (tools/install_config.sh --key geocode_key --account "${ACCOUNT}" \
    --project "pmi-drc-api-test" --config_output "$TMP_GEOCODE_INFO_FILE")
  export API_KEY=$(cat $TMP_GEOCODE_INFO_FILE | python -c 'import json,sys;obj=json.load(sys.stdin);print(obj["'api_key'"])')
}

# NOTE(review): CREDS_ACCOUNT looks like it is consumed by a sourced helper
# (presumably auth_setup.sh) — confirm before removing.
CREDS_ACCOUNT="${ACCOUNT}"

if [ -z "${ACCOUNT}" ]
then
  echo "Using stub geocoding when --account is not specified"
  GEOCODE_FLAG=--stub_geocoding
else
  get_geocode_key
fi

EXTRA_ARGS="$@"

if [ "${PROJECT}" ]
then
  echo "Getting credentials for ${PROJECT}..."
  source tools/auth_setup.sh
  run_cloud_sql_proxy
  set_db_connection_string
  EXTRA_ARGS+=" --creds_file ${CREDS_FILE} --instance ${INSTANCE} --project ${PROJECT}"
else
  if [ -z "${DB_CONNECTION_STRING}" ]
  then
    source tools/setup_local_vars.sh
    set_local_db_connection_string
  fi
fi

source tools/set_path.sh

# Fixture data (tests) vs. the real data directory.
if [[ ${USE_FIXTURES} = true ]];
then DATA_DIR=test/test-data/fixtures;
else DATA_DIR=data;
fi

python tools/import_organizations.py \
  --awardee_file ${DATA_DIR}/awardees.csv \
  --organization_file ${DATA_DIR}/organizations.csv \
  --site_file ${DATA_DIR}/sites.csv \
  $EXTRA_ARGS $DRY_RUN $GEOCODE_FLAG
<filename>beginer/3998202.cpp
// 输入:5
//
//输出:
//
// A
// ABA
// ABCBA
// ABCDCBA
// ABCDEDCBA
//
//以下程序实现了这一功能,请你填补空白处内容:
//
//#include <stdio.h>
//#include <iostream>
//using namespace std;
//int main()
//{
// int N;
// cin >> N;
// for (int i = 0; i < N; i++)
// {
// for (int j = 0; j < N - i; j++)
// printf(" ");
// ______________________;
// printf("\n");
// }
// return 0;
//}
//
// Created by 鹄思鹄想_bit森 on 2022/5/4.
#include <stdio.h>
#include <iostream>
using namespace std;
// Print an N-row centred letter pyramid read from stdin: row r is indented
// N-r spaces, then shows A..(r-1) ascending followed by r..A descending
// (e.g. row 2 prints "ABCBA" in its column).
int main()
{
    int N;
    std::cin >> N;
    for (int row = 0; row < N; ++row) {
        int pad = N - row;
        while (pad-- > 0) {
            printf(" ");
        }
        for (int k = 0; k < row; ++k) {
            printf("%c", static_cast<char>('A' + k));
        }
        for (int k = row; k >= 0; --k) {
            printf("%c", static_cast<char>('A' + k));
        }
        printf("\n");
    }
    return 0;
}
|
<reponame>altermarkive/interview-training
package hackerrank.sherlock_and_array;
/**
* https://www.hackerrank.com/challenges/sherlock-and-array
*/
public final class SolutionCore {
    private SolutionCore() {
    }

    /**
     * Returns true iff some index i splits the array into equal sums:
     * sum(a[0..i-1]) == sum(a[i+1..end]).
     *
     * Improvement: single pass with two running sums instead of the original
     * O(n)-space prefix/suffix arrays — identical results, O(1) extra space.
     *
     * @param a input array; an empty array yields false
     * @return whether an equilibrium index exists
     */
    protected static boolean equilibrium(final int[] a) {
        int total = 0;
        for (final int value : a) {
            total += value;
        }
        int left = 0;
        for (final int value : a) {
            // Sum of elements strictly to the right of the current index.
            final int right = total - left - value;
            if (left == right) {
                return true;
            }
            left += value;
        }
        return false;
    }
}
|
def print_dict(my_dict):
    """Print the (key, value) item view of *my_dict* to stdout."""
    print(my_dict.items())


if __name__ == "__main__":
    # Fix: the original called print_dict(my_dict) at module level with
    # ``my_dict`` never defined, raising NameError on import. Demonstrate
    # with an example dict under a main guard instead.
    print_dict({"example": 1})
#!/usr/bin/env bash
### every exit != 0 fails the script
set -e
set -u
# NOTE(review): $NO_VNC_HOME and $INST_SCRIPTS must be set by the caller
# (presumably the Dockerfile ENV); set -u aborts if they are missing.
echo "Install noVNC - HTML5 based VNC viewer"
mkdir -p $NO_VNC_HOME/utils/websockify
# Download noVNC 1.0.0 and unpack it directly into $NO_VNC_HOME.
wget -O- https://github.com/novnc/noVNC/archive/v1.0.0.tar.gz | tar xz --strip 1 -C $NO_VNC_HOME
# use older version of websockify to prevent hanging connections on offline containers, see https://github.com/ConSol/docker-headless-vnc-container/issues/50
wget -O- https://github.com/novnc/websockify/archive/v0.9.0.tar.gz | tar xz --strip 1 -C $NO_VNC_HOME/utils/websockify
chmod +x -v $NO_VNC_HOME/utils/*.sh
# Serve the lite client as the default index page.
cp $INST_SCRIPTS/vnc_lite.html $NO_VNC_HOME/index.html
|
<gh_stars>0
import React from 'react'
import { shallow } from 'enzyme'
import { AddToLayoutButton } from '../AddToLayoutButton'
describe('The AddToLayoutButton component ', () => {
let props
let shallowButton
const getShallowAddToLayoutButton = () => {
if (!shallowButton) {
shallowButton = shallow(<AddToLayoutButton {...props} />)
}
return shallowButton
}
// TODO remove
console.log('getShallowAddToLayoutButton', getShallowAddToLayoutButton)
beforeEach(() => {
props = {
classes: {},
closeDialog: jest.fn(),
dimensionIdsInLayout: ['dx', 'pe', 'ou'],
dialogId: '',
onAddDimension: jest.fn(),
ui: { type: 'COLUMN' },
}
shallowButton = undefined
})
it('new test below must be fixed', () => {
expect(true).toBe(true)
})
// TODO: Add new tests, https://jira.dhis2.org/browse/DHIS2-7809
// it('renders an update button if dialogid exists in layout', () => {
// props.dialogId = 'dx';
// const button = getShallowAddToLayoutButton();
// expect(button.find('button').attr('data-test')).toEqual(
// 'update-button'
// );
// });
// deprecated tests
/*it('renders two buttons, (DropDownIcon and "Add to series") if state buttonType is equal to -1 ', () => {
const button = getShallowAddToLayoutButton();
button.setState({ buttonType: -1 });
const fragmentWrapper = button.find('div');
expect(fragmentWrapper.children().length).toBeGreaterThan(1);
});
it('renders only an "Add to filter" button if current chart type is year on year', () => {
props.layoutType = 'YEAR_OVER_YEAR_LINE';
const button = getShallowAddToLayoutButton();
button.setState({ buttonType: -1 });
const addToFilterButton = button.find(Button).first();
expect(addToFilterButton.find('div').length).toEqual(0);
expect(addToFilterButton.length).toEqual(1);
});*/
})
|
/*
* Copyright 2017 Wultra s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.getlime.security.powerauth.lib.nextstep.model.entity.enumeration;
/**
* Enumeration representing hashing algorithms.
*
* @author <NAME>, <EMAIL>
*/
public enum HashAlgorithm {
/**
* Algorithm argon2d.
*/
ARGON_2D("argon2d", 0),
/**
* Algorithm argon2i.
*/
ARGON_2I("argon2i", 1),
/**
* Algorithm argon2id.
*/
ARGON_2ID("argon2id", 2);
// Identifier used in Modular Crypt Format hash strings (e.g. "$argon2id$...").
private final String name;
// Numeric algorithm ID as used by the Bouncy Castle library.
private final int id;
/**
* Hash algorithm constructor.
* @param name Algorithm name for Modular Crypt Format.
* @param id Algorithm ID in Bouncy Castle library.
*/
HashAlgorithm(String name, int id) {
this.name = name;
this.id = id;
}
/**
* Get algorithm name for Modular Crypt Format.
* @return Algorithm name.
*/
public String getName() {
return name;
}
/**
* Get algorithm ID in Bouncy Castle library.
* @return Algorithm ID.
*/
public int getId() {
return id;
}
}
|
#!/bin/bash
# set python executable
PYTHON_BIN=/home/user1/pythonenv/python27/scripts/python
# base path, it is the parent of root package path
BASE_PATH=/home/user1/trunk/workspace/Edwin
# set your python script file. Please be noticed to trim the tailed space of file name
MY_PY_SCRIPT=runserver.py
#=================================
# do not change the following code
#=================================
PYTHONPATH=$BASE_PATH
SCRIPT_PATH=$BASE_PATH/edwinServer/web
BIN_PATH=$BASE_PATH/edwinServer/bin
LOCK_FILE=$BIN_PATH/locks/$MY_PY_SCRIPT.lock
MY_PY_FILE=$SCRIPT_PATH/$MY_PY_SCRIPT

# Run only when no other instance holds the lock file.
# Fixes: the original existence check (`ls $LOCK_FILE | wc -l`) printed an
# error to stderr whenever the lock was absent — `-e` is the direct test —
# and a killed run left a stale lock behind; the EXIT trap now guarantees
# the lock is removed however the script terminates.
mkdir -p "$BIN_PATH/locks"
if [ ! -e "$LOCK_FILE" ]
then
    touch "$LOCK_FILE"
    trap 'rm -f "$LOCK_FILE"' EXIT
    $PYTHON_BIN $MY_PY_FILE
fi
|
#!/bin/bash
# Runs las.pytorch/eval.py on a dataset chosen by $1, with the full or the
# small ("few") test split chosen by the presence of $2.
MAIN_DIR=${0%/*}
cd $MAIN_DIR/..

TARGET_CODE=las.pytorch/eval.py
MODEL_PATH=models

if [ ! -f $TARGET_CODE ]; then
    echo "[ERROR] TARGET_CODE($TARGET_CODE) not found."
    exit
fi
if [ ! -d $MODEL_PATH ]; then
    mkdir $MODEL_PATH
fi
# NOTE(review): LOG_PARENT_PATH is never set in this script, so this block
# is effectively dead — confirm whether it should come from the environment.
if [ ! -d $LOG_PARENT_PATH ]; then
    mkdir $LOG_PARENT_PATH
fi
################################################################
## Careful while modifying lines above.
################################################################
DATA=$1
# Fix: the original test was `[ -n {$2} ]`, which checks the literal string
# "{$2}" and is therefore always true — the "_few" branch was unreachable.
if [ -n "$2" ]; then
    TEST_FILE=data/${DATA}/test_${DATA}.json
else
    TEST_FILE=data/${DATA}/test_${DATA}_few.json
fi
LABEL_FILE=data/kor_syllable.json

if [ ${DATA} = "AIhub" ]; then
    DATASET_PATH=data/${DATA}/KsponSpeech
else
    DATASET_PATH=data/${DATA}/clean
fi

CUDA_DEVICE_ID=1
# Default
RNN_TYPE=LSTM
# LAS
ENCODER_LAYERS=3
ENCODER_SIZE=512
DECODER_LAYERS=2
DECODER_SIZE=512
GPU_SIZE=1
CPU_SIZE=4
################################################################
## Careful while modifying lines below.
################################################################
CUDA_VISIBLE_DEVICES=$CUDA_DEVICE_ID \
python -u $TARGET_CODE \
--num_workers $CPU_SIZE \
--num_gpu $GPU_SIZE \
--rnn-type $RNN_TYPE \
--encoder_layers $ENCODER_LAYERS --encoder_size $ENCODER_SIZE \
--decoder_layers $DECODER_LAYERS --decoder_size $DECODER_SIZE \
--test-file-list $TEST_FILE \
--labels-path $LABEL_FILE \
--dataset-path $DATASET_PATH \
--model-path models/final.pth
|
<reponame>zhangyut/wolf<filename>Billiard_2D/app/src/main/java/com/bn/d2/bill/Cue.java
package com.bn.d2.bill;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
// 2D billiard cue: draws the cue-stick bitmap rotated around the cue ball,
// draws a yellow aiming guide line, and animates the pull-back/strike motion.
public class Cue {
// draw position of the cue bitmap, recomputed each frame in drawSelf
float x;
float y;
// rotation pivot, relative to the bitmap's top-left corner
float rotateX;
float rotateY;
// current aim angle in degrees
private float angdeg=0;
// cached bitmap dimensions
float width;
float height;
// gap between the cue tip and the ball; animated during the strike
float disWithBall=Constant.DIS_WITH_BALL;
Bitmap bitmap;
Ball mainBall;
// whether the cue (and its aim line) is drawn at all
private boolean showCueFlag=true;
// rotation step sizes for fine and coarse aiming
private final float angleSpanSlow=0.2f;
private final float angleSpanFast=1f;
// when false, calcuAngle aims away from the touch point instead of toward it
private boolean aimFlag=true;
// length of the aiming guide line
private final float lineLength=Table.tableAreaWidth;
// pull-back / forward step sizes and maximum pull distance for the animation
private final float backSpan=3;
private final float forwardSpan=10;
private final float maxDis=50;
private float span=backSpan;
private boolean showingAnimFlag=false;
// Caches the bitmap, the cue ball, and the bitmap's dimensions.
public Cue(Bitmap bitmap,Ball mainBall)
{
this.bitmap=bitmap;
this.mainBall=mainBall;
this.width=bitmap.getWidth();
this.height=bitmap.getHeight();
}
// Draws the rotated cue bitmap and the aiming line (clipped to the table area).
public void drawSelf(Canvas canvas,Paint paint)
{
if(!showCueFlag){
return;
}
// pivot sits at the cue tip plus the current gap and the ball radius
this.rotateX=this.width+this.disWithBall+Ball.r;
this.rotateY=this.height/2;
// place the bitmap to the left of the cue ball, vertically centred on it
x=mainBall.getX()-width-disWithBall;
y=mainBall.getY()+Ball.r-height/2;
// translate into screen space, then rotate around the pivot
Matrix m1=new Matrix();
m1.setTranslate(x+Constant.X_OFFSET,y+Constant.Y_OFFSET);
Matrix m2=new Matrix();
m2.setRotate(angdeg, rotateX, rotateY);
Matrix mz=new Matrix();
mz.setConcat(m1, m2);
canvas.drawBitmap(bitmap, mz,paint);
// aiming line, clipped to the playable table rectangle
canvas.save();
canvas.clipRect(Table.lkx+Constant.X_OFFSET, Table.ady+Constant.Y_OFFSET, Table.efx+Constant.X_OFFSET, Table.jgy+Constant.Y_OFFSET);
float angrad=(float) Math.toRadians(angdeg);
// start the line at the ball's edge along the current aim direction
float startX=(float) (mainBall.getX()+Constant.X_OFFSET+Ball.r+Ball.r*Math.cos(angrad));
float startY=(float) (mainBall.getY()+Constant.Y_OFFSET+Ball.r+Ball.r*Math.sin(angrad));
float stopX=startX+(float)(lineLength*Math.cos(angrad));
float stopY=startY+(float)(lineLength*Math.sin(angrad));
paint.setColor(Color.YELLOW);
paint.setAlpha(240);
canvas.drawLine(startX, startY, stopX, stopY, paint);
canvas.restore();
// restore full opacity for subsequent drawing with this shared paint
paint.setAlpha(255);
}
// Recomputes the aim angle from a touch point, relative to the ball centre.
public void calcuAngle(float pressX,float pressY)
{
float dirX=pressX-(mainBall.getX()+Ball.r+Constant.X_OFFSET);
float dirY=pressY-(mainBall.getY()+Ball.r+Constant.Y_OFFSET);
// aim-away mode inverts the direction vector
if(!aimFlag){
dirX = -dirX;
dirY = -dirY;
}
// map atan into the full 0..360 degree range based on the vertical sign
if(dirY>=0)
{
angdeg=(float) Math.toDegrees((Math.atan(-dirX/dirY)+Math.PI/2));
}
else if(dirY<0)
{
angdeg=(float) Math.toDegrees((Math.atan(-dirX/dirY)+Math.PI*3/2));
}
}
public void rotateLeftSlowly(){
angdeg+=angleSpanSlow;
}
public void rotateRightSlowly(){
angdeg-=angleSpanSlow;
}
public void rotateLeftFast(){
angdeg+=angleSpanFast;
}
public void rotateRightFast(){
angdeg-=angleSpanFast;
}
// Advances the strike animation: pulls back until maxDis, then snaps forward.
// Returns the new cue-to-ball distance.
public float changeDisWithBall()
{
if(disWithBall>=maxDis){
span=-forwardSpan;
}
disWithBall+=span;
return disWithBall;
}
// Resets the strike animation to its resting state.
public void resetAnimValues(){
disWithBall=Constant.DIS_WITH_BALL;
span=backSpan;
}
public float getAngle() {
return angdeg;
}
public boolean isShowCueFlag() {
return showCueFlag;
}
public void setShowCueFlag(boolean showCueFlag) {
this.showCueFlag = showCueFlag;
}
public boolean isAimFlag() {
return aimFlag;
}
public void setAimFlag(boolean aimFlag) {
this.aimFlag = aimFlag;
}
public boolean isShowingAnimFlag() {
return showingAnimFlag;
}
public void setShowingAnimFlag(boolean showingAnimFlag) {
this.showingAnimFlag = showingAnimFlag;
}
}
|
#!/bin/sh
# Installs the AMQ distribution into the filesystem.
set -e

SOURCES_DIR=/tmp/artifacts/
DISTRIBUTION_VERSION="jboss-a-mq-6.3.0.redhat-416"
ACTIVEMQ_VERSION="apache-activemq-5.11.0.redhat-630416"
AMQ_HOME=/opt/amq

unzip -q "$SOURCES_DIR/${DISTRIBUTION_VERSION}.zip"
ls $SOURCES_DIR

# Fix: pushd/popd are bash builtins, not POSIX, and fail under #!/bin/sh
# on dash-like shells. A subshell changes directory without affecting the
# caller, which is all the pushd/popd pair was used for.
(
  cd "$DISTRIBUTION_VERSION/extras"
  unzip -q "${ACTIVEMQ_VERSION}-bin.zip"
  mv "$ACTIVEMQ_VERSION" "$AMQ_HOME"
)
rm -rf $DISTRIBUTION_VERSION
|
package Hamming_Distance;
public class Solution {
    /**
     * Number of bit positions at which x and y differ.
     * Clears the lowest set bit of the XOR repeatedly (Kernighan's method),
     * which yields the same count as a popcount of x ^ y for all ints.
     */
    public int hammingDistance(int x, int y) {
        int diff = x ^ y;
        int distance = 0;
        while (diff != 0) {
            diff &= diff - 1; // drop the lowest set bit
            distance++;
        }
        return distance;
    }

    public static void main(String[] args) {
        Solution s = new Solution();
    }
}
<reponame>zhangxinlei-cn/pyfbx
import pytest
import numpy as np
import tatsu
import logging
class TestGrammar():
    """Parses the first bytes of a binary FBX file with a tatsu grammar."""

    class FBXSemantics(object):
        """tatsu semantics: decode raw FBX field bytes into numpy values."""
        logger = logging.getLogger(__name__)

        def int_64(self, ast):
            self.logger.debug(ast.value)
            # NOTE(review): np.int_ is platform-dependent (C long); for a
            # true 64-bit field np.int64 may be intended — confirm.
            return np.frombuffer(ast.value, dtype=np.int_)

        def int_32(self, ast):
            # Consistency fix: was a bare print(); every other handler logs
            # through the class logger.
            self.logger.debug(ast.value)
            return np.frombuffer(ast.value, dtype=np.intc)

        def int_8(self, ast):
            self.logger.debug(ast.value)
            return np.frombuffer(ast.value, dtype=np.byte)

        def char(self, ast):
            self.logger.debug(ast.value)
            # FIXME: np.char is a numpy module, not a dtype — this call
            # raises TypeError if this rule is ever reached. Probably
            # dtype='S1' was intended; confirm against the grammar before
            # changing behavior.
            return np.frombuffer(ast.value, dtype=np.char)

        def _default(self, ast, *args, **kwargs):
            # Rules without a dedicated handler need no conversion.
            pass

    logger = logging.getLogger(__name__)

    def test_grammar(self):
        """Compile the EBNF grammar and parse the FBX header of a fixture."""
        grammar = ""
        with open('grammar/fbx_binary.ebnf') as ebnf:
            grammar = ebnf.read()
        parser = tatsu.compile(grammar, semantics=self.FBXSemantics())
        with open('tests/resources/Zombie.binary.fbx', 'rb') as zombie:
            # Only the first 32 bytes (the FBX header region) are parsed.
            model = parser.parse(zombie.read(32).decode(encoding='ascii', errors='ignore'))
            print(model)


if __name__ == "__main__":
    TestGrammar().test_grammar()
<filename>src/main/scala/ru/tinkoff/gatling/amqp/checks/AmqpCheckSupport.scala
package ru.tinkoff.gatling.amqp.checks
import java.nio.charset.Charset
import io.gatling.commons.validation._
import io.gatling.core.check._
import io.gatling.core.check.extractor.bytes.BodyBytesCheckType
import io.gatling.core.check.extractor.string.BodyStringCheckType
import io.gatling.core.check.extractor.xpath.XmlParsers
import io.gatling.core.json.JsonParsers
import ru.tinkoff.gatling.amqp.AmqpCheck
import ru.tinkoff.gatling.amqp.checks.AmqpResponseCodeCheckBuilder.{AmqpMessageCheckType, ExtendedDefaultFindCheckBuilder, _}
import ru.tinkoff.gatling.amqp.request.AmqpProtocolMessage
import scala.annotation.implicitNotFound
import scala.util.Try
// Gatling check-DSL support for AMQP: implicit conversions and materializers
// that turn generic check builders into AmqpChecks over AmqpProtocolMessage.
trait AmqpCheckSupport {
def messageCheck: AmqpMessageCheck.type = AmqpMessageCheck
// Entry point for checks against the response code of an AMQP reply.
val responseCode: ExtendedDefaultFindCheckBuilder[AmqpMessageCheckType, AmqpProtocolMessage, String] = ResponseCode
// Lift each of Gatling's three builder shapes into an AmqpCheck.
@implicitNotFound("Could not find a CheckMaterializer. This check might not be valid for AMQP.")
implicit def checkBuilder2AmqpCheck[A, P, X](checkBuilder: CheckBuilder[A, P, X])(
implicit materializer: CheckMaterializer[A, AmqpCheck, AmqpProtocolMessage, P]): AmqpCheck =
checkBuilder.build(materializer)
@implicitNotFound("Could not find a CheckMaterializer. This check might not be valid for AMQP.")
implicit def validatorCheckBuilder2AmqpCheck[A, P, X](validatorCheckBuilder: ValidatorCheckBuilder[A, P, X])(
implicit materializer: CheckMaterializer[A, AmqpCheck, AmqpProtocolMessage, P]): AmqpCheck =
validatorCheckBuilder.exists
@implicitNotFound("Could not find a CheckMaterializer. This check might not be valid for AMQP.")
implicit def findCheckBuilder2AmqpCheck[A, P, X](findCheckBuilder: FindCheckBuilder[A, P, X])(
implicit materializer: CheckMaterializer[A, AmqpCheck, AmqpProtocolMessage, P]): AmqpCheck =
findCheckBuilder.find.exists
// Materializers for XPath / JsonPath checks over the message body.
implicit def amqpXPathMaterializer(implicit xmlParsers: XmlParsers): AmqpXPathCheckMaterializer =
new AmqpXPathCheckMaterializer(xmlParsers)
implicit def amqpJsonPathMaterializer(implicit jsonParsers: JsonParsers): AmqpJsonPathCheckMaterializer =
new AmqpJsonPathCheckMaterializer(jsonParsers)
// Body-as-string checks: decode the payload with the message's declared
// content encoding, falling back to the platform default charset; an empty
// payload yields the empty string.
implicit def amqpBodyStringMaterializer: AmqpCheckMaterializer[BodyStringCheckType, String] =
new CheckMaterializer[BodyStringCheckType, AmqpCheck, AmqpProtocolMessage, String] {
override protected def preparer: Preparer[AmqpProtocolMessage, String] = replyMessage => {
val bodyCharset = Try(Charset.forName(replyMessage.amqpProperties.getContentEncoding))
.getOrElse(Charset.defaultCharset())
if (replyMessage.payload.length > 0) {
new String(replyMessage.payload, bodyCharset).success
} else "".success
}
override protected def specializer: Specializer[AmqpCheck, AmqpProtocolMessage] = identity
}
// Body-as-bytes checks: an empty payload yields an empty array.
implicit def amqpBodyByteMaterializer: AmqpCheckMaterializer[BodyBytesCheckType, Array[Byte]] =
new CheckMaterializer[BodyBytesCheckType, AmqpCheck, AmqpProtocolMessage, Array[Byte]] {
override protected def preparer: Preparer[AmqpProtocolMessage, Array[Byte]] = replyMessage => {
if (replyMessage.payload.length > 0) {
replyMessage.payload.success
} else Array.emptyByteArray.success
}
override protected def specializer: Specializer[AmqpCheck, AmqpProtocolMessage] = identity
}
// Checks that inspect the raw AmqpProtocolMessage itself (e.g. responseCode).
implicit val httpStatusCheckMaterializer: AmqpCheckMaterializer[AmqpMessageCheckType, AmqpProtocolMessage] =
new AmqpCheckMaterializer[AmqpMessageCheckType, AmqpProtocolMessage] {
override val specializer: Specializer[AmqpCheck, AmqpProtocolMessage] = identity
override val preparer: Preparer[AmqpProtocolMessage, AmqpProtocolMessage] = _.success
}
}
|
// Submits a vote for a governance proposal on the given network via a
// REST call: POST /networks/{networkId}/proposals/{proposalId}/votes.
// NOTE(review): this snippet is illustrative pseudo-code — ServiceConfig(...),
// `let eventLoop = ...` and `let logger = ...` are placeholders that will not
// compile as written, and `client` is assumed to exist in an enclosing scope.
func voteOnProposal(networkId: String, proposalId: String, vote: String) -> EventLoopFuture<Void> {
// Assuming the existence of a client object and its execute method
let input: [String: String] = ["networkId": networkId, "proposalId": proposalId, "vote": vote]
let serviceConfig = ServiceConfig(...) // Initialize service configuration
let eventLoop = ... // Initialize event loop
let logger = ... // Initialize logger
return client.execute(operation: "VoteOnProposal", path: "/networks/\(networkId)/proposals/\(proposalId)/votes", httpMethod: .POST, serviceConfig: serviceConfig, input: input, on: eventLoop, logger: logger)
}
#!/bin/sh
# Sun Grid Engine job script: the '#$' lines below are qsub directives,
# not plain comments.
# request Bourne shell as shell for job
#$ -S /bin/sh
# The name of the job, can be anything, simply used when displaying the list of running jobs
#$ -N mcce
# assume current working directory as paths
#$ -cwd
# Giving the name of the output log file
#$ -o run.log
# log of running errors
#$ -e error.log
# Now comes the commands to be executed
# (alternative, currently disabled MCCE build kept for reference)
#/home/cai/mcce3.5_enum_ms/mcce
../../Stable-MCCE/bin/mcce
|
package com.github.maracas.forges.github;
import com.github.maracas.forges.Commit;
import com.github.maracas.forges.ForgeException;
import com.github.maracas.forges.PullRequest;
import com.github.maracas.forges.Repository;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.kohsuke.github.GitHub;
import org.kohsuke.github.GitHubBuilder;
import java.io.IOException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
class GitHubForgeTest {
  GitHubForge github;

  /**
   * Builds a GitHub client from environment credentials before each test.
   *
   * Fix: the original caught IOException and only printed the stack trace,
   * leaving {@code github} null — every test then failed with an unrelated
   * NullPointerException that hid the real connection problem. Declaring
   * the exception lets JUnit fail the setup with the actual cause.
   */
  @BeforeEach
  void setUp() throws IOException {
    GitHub gh = GitHubBuilder.fromEnvironment().build();
    github = new GitHubForge(gh);
  }

  @Test
  void fetchRepository_unknown() {
    assertThrows(ForgeException.class, () -> github.fetchRepository("alien-tools", "unknown"));
  }

  @Test
  void fetchRepository_valid() {
    Repository repo = github.fetchRepository("alien-tools", "maracas");
    assertEquals("alien-tools", repo.owner());
    assertEquals("maracas", repo.name());
    assertEquals("alien-tools/maracas", repo.fullName());
    assertEquals("https://github.com/alien-tools/maracas.git", repo.remoteUrl());
    assertEquals("main", repo.branch());
  }

  @Test
  void fetchRepository_branch_valid() {
    Repository repo = github.fetchRepository("alien-tools", "maracas", "main");
    assertEquals("alien-tools", repo.owner());
    assertEquals("maracas", repo.name());
    assertEquals("alien-tools/maracas", repo.fullName());
    assertEquals("https://github.com/alien-tools/maracas.git", repo.remoteUrl());
    assertEquals("main", repo.branch());
  }

  @Test
  void fetchRepository_branch_unknown() {
    assertThrows(ForgeException.class, () -> github.fetchRepository("alien-tools", "maracas", "unknown"));
  }

  @Test
  void fetchPullRequest_unknown() {
    Repository repo = github.fetchRepository("alien-tools", "comp-changes");
    assertThrows(ForgeException.class, () -> github.fetchPullRequest(repo, -1));
  }

  @Test
  void fetchPullRequest_opened() {
    Repository repo = github.fetchRepository("alien-tools", "comp-changes");
    PullRequest pr = github.fetchPullRequest(repo, 2);
    assertEquals(2, pr.number());
    assertEquals(new Commit(repo, "43463c9c73933ae0e791dbf8d1d6e152101a4ba9"), pr.head());
    assertEquals(new Commit(repo, "00dde47b0bf583c4a9320e2968d5fbad0af81265"), pr.prBase());
    assertEquals("main", pr.baseBranch());
    assertEquals("prepare-v2", pr.headBranch());
    assertEquals("alien-tools", pr.repository().owner());
    assertEquals("comp-changes", pr.repository().name());
    assertEquals("main", pr.repository().branch());
  }

  @Test
  void fetchPullRequest_closed() {
    Repository repo = github.fetchRepository("INRIA", "spoon");
    PullRequest pr = github.fetchPullRequest(repo, 4625);
    assertEquals(4625, pr.number());
    assertEquals(new Commit(repo, "1ef7b095d58ff671b74f5eef7186c96aa573304e"), pr.head());
    assertEquals(new Commit(repo, "3f3557e2ec95d4a2a552bdf0cc322c4e6d054725"), pr.prBase());
    assertEquals("master", pr.baseBranch());
    assertEquals("regression-resource", pr.headBranch());
    assertEquals("INRIA", pr.repository().owner());
    assertEquals("spoon", pr.repository().name());
    assertEquals("master", pr.repository().branch());
  }

  @Test
  void fetchCommit_unknown() {
    assertThrows(ForgeException.class, () -> github.fetchCommit("alien-tools", "maracas", "unknown"));
  }

  @Test
  void fetchCommit_valid() {
    Commit c = github.fetchCommit("alien-tools", "maracas", "655f99bad85435c145fc816018962dc7644edb1f");
    assertEquals("655f99bad85435c145fc816018962dc7644edb1f", c.sha());
    assertEquals("alien-tools", c.repository().owner());
    assertEquals("maracas", c.repository().name());
    assertEquals("alien-tools/maracas", c.repository().fullName());
    assertEquals("https://github.com/alien-tools/maracas.git", c.repository().remoteUrl());
    assertEquals("main", c.repository().branch());
  }
}
|
<gh_stars>0
import en from './assets/locales/en/messages.json';
import { Locale } from './definitions';
import { pathLocalePrefixRegex } from './utils';
// Locale code -> flat message bundle. Fix: was typed `any`, which disabled
// all checking on lookups; the element type now matches the `bundle` field
// declared on DocsLocalization below.
export const MESSAGES_ALL: { [locale: string]: { [id: string]: string } } = {
  [Locale.EN]: en,
};

/**
 * Resolves the active docs locale from the URL path prefix and serves
 * localized strings, falling back to English for missing ids.
 */
export class DocsLocalization {
  private readonly locale: string;
  private readonly bundle: { [id: string]: string };

  constructor() {
    // e.g. "/fr/docs/..." -> "fr"; null when the path has no locale prefix.
    const regexRes = pathLocalePrefixRegex.exec(window.location.pathname);
    const language = regexRes ? regexRes[1] : null;
    // Only locales with a loaded bundle are honoured; everything else is 'en'.
    this.locale = language && Boolean(MESSAGES_ALL[language]) ? language : 'en';
    this.bundle = MESSAGES_ALL[this.locale];
  }

  getLocale = () => this.locale;

  // Fall back to the English bundle when the id is missing in this locale
  // (same object as the former `(en as any)[id]`, but without the cast).
  getString = (id: string) => this.bundle[id] || MESSAGES_ALL[Locale.EN][id];
}

export const l10n = new DocsLocalization();
|
TERMUX_PKG_HOMEPAGE=https://gitlab.com/opennota/findimagedupes
TERMUX_PKG_DESCRIPTION="Find visually similar or duplicate images"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=0.20190114
TERMUX_PKG_REVISION=11
_COMMIT=237ed2ef4bbb91c79eee0f5ee84a1adad9c014ff
TERMUX_PKG_SRCURL=https://gitlab.com/opennota/findimagedupes/-/archive/${_COMMIT}/findimagedupes-${_COMMIT}.tar.gz
TERMUX_PKG_SHA256=7eb4fbab38c8c1965dafd1d0fddbfac58ba6e1a3d52cd1220df488a0a338abb0
TERMUX_PKG_DEPENDS="file, libc++, libjpeg-turbo, libpng, libtiff"
TERMUX_PKG_CONFLICTS="findimagedupes"
TERMUX_PKG_REPLACES="findimagedupes"
# Build step: cross-compile the Go binary with cgo against Termux's
# libpng/libjpeg. Go requires the source to live at its canonical import path
# under GOPATH/src, hence the symlink before building.
termux_step_make() {
	termux_setup_golang
	export GOPATH=$TERMUX_PKG_BUILDDIR
	# -D__GLIBC__ : presumably papers over glibc-only assumptions in the cgo
	# headers when building against Bionic — TODO confirm.
	export CGO_CFLAGS="$CFLAGS $CPPFLAGS -I$TERMUX_PREFIX/include/libpng16 -D__GLIBC__"
	export CGO_CXXFLAGS="$CXXFLAGS $CPPFLAGS -I$TERMUX_PREFIX/include/libpng16 -D__GLIBC__"
	export CGO_LDFLAGS="$LDFLAGS"
	mkdir -p "$GOPATH"/src/gitlab.com/opennota
	ln -sf "$TERMUX_PKG_SRCDIR" "$GOPATH"/src/gitlab.com/opennota/findimagedupes
	cd "$GOPATH"/src/gitlab.com/opennota/findimagedupes
	go build .
}
# Install step: copy the built binary into the Termux prefix
# (install -D creates intermediate directories; mode 700 = owner-only).
termux_step_make_install() {
	install -Dm700 \
		"$GOPATH"/src/gitlab.com/opennota/findimagedupes/findimagedupes \
		"$TERMUX_PREFIX"/bin/findimagedupes
}
|
-- Students whose marks exceed the class-wide average.
SELECT StudentID,
       Marks
FROM Students
WHERE Marks > (
    SELECT AVG(Marks)
    FROM Students
);
/*:
* @plugindesc This plugin adds undo functionality to Yanfly Grid Free Doodads plugin <GFDCtrlZAddon>
* @author biud436
*
* @param maxCounts
* @type number
* @desc
* @default 5
*
* @help
* Ctrl + Z - Undo
* Ctrl + Alt - Grid Lock
* =============================================================================
* Version Log
* =============================================================================
* 2016.12.05 (v1.0.0) - First Release.
* 2016.12.06 (v1.0.1) - Added New Key and description.
*/
/*:ko
* @plugindesc YEP_GridFreeDoodads 플러그인에 되돌리기 기능을 추가합니다. <GFDCtrlZAddon>
* @author 러닝은빛
*
* @param maxCounts
* @text 되돌리기 가능 횟수
* @type number
* @desc 공간을 적게 사용할 수록 좋습니다!
* @default 5
*
* @help
* =============================================================================
* 플러그인 동작 환경
* =============================================================================
* 플러그인 관리를 열고 다음 플러그인 밑에 추가합니다.
*
* YEP_GridFreeDoodads
* YEP_X_ExtDoodadPack1
*
* 추가 순서가 맞지 않으면 동작하지 않습니다. 또한 버전 업데이트에 대응하지 못할 수
* 있습니다.
*
* 플러그인이 갑자기 삭제될 수도 있습니다. 사용 가능 키는 다음과 같습니다.
*
* Ctrl + Z - 취소
* Ctrl + Alt - 그리드 잠금 모드 설정
*
* =============================================================================
* 변경 기록
* =============================================================================
* 2016.12.05 (v1.0.0) - First Release.
* 2016.12.06 (v1.0.1) - 새로운 키에 대한 설명을 추가하였습니다.
*/
// Registration flags shared with the Yanfly plugin suite: mark this addon as
// imported and make sure the Yanfly parameter namespace exists.
var Imported = Imported || {};
Imported.GFDCtrlZAddon = true;
var Yanfly = Yanfly || {};
Yanfly.Param = Yanfly.Param || {};
(function() {

    // Locate this plugin's parameters via the <GFDCtrlZAddon> marker in its
    // description, so the plugin file can be renamed freely.
    var parameters = $plugins.filter(function (i) {
        return i.description.contains('<GFDCtrlZAddon>');
    });
    // NOTE(review): if no plugin entry matches, `parameters` becomes `false`
    // and the maxCounts lookup below would throw — confirm install order.
    parameters = (parameters.length > 0) && parameters[0].parameters;

    var gDoodadsStack = [];   // most recently placed doodads (undo stack)
    var keyboardManager = {}; // keyCode -> currently pressed?
    var isClicked = false;    // debounce: at most one undo per Ctrl+Z press
    var isGridLockMode;
    var maxCounts = parseInt(parameters['maxCounts'] || 5);

    //============================================================================
    // DoodadManager
    //============================================================================

    // Record every newly placed doodad for undo; the stack is capped at
    // maxCounts entries (oldest dropped first).
    var alias_DoodadManager_addNew = DoodadManager.addNew;
    DoodadManager.addNew = function(doodad) {
        if(gDoodadsStack.length >= maxCounts) gDoodadsStack.shift();
        gDoodadsStack.push(doodad);
        alias_DoodadManager_addNew.call(this, doodad);
    };

    // Reset all addon state whenever the doodad map is cleared.
    var alias_DoodadManager_clearMap = DoodadManager.clearMap;
    DoodadManager.clearMap = function() {
        alias_DoodadManager_clearMap.call(this);
        // Empty Data
        gDoodadsStack = [];
        keyboardManager = {};
        isClicked = false;
        isGridLockMode = Yanfly.Param.GFDGridSnap;
    };

    DoodadManager.revertOneStep = function() {
        // This executes when you press Ctrl + Z key (17 = Ctrl, 90 = Z):
        // delete the most recently placed doodad, once per key press.
        if(keyboardManager[90] && keyboardManager[17] && !isClicked) {
            var d = gDoodadsStack.pop();
            if(d) DoodadManager.delete(d);
            isClicked = true;
        }
        // This executes when you press Ctrl + Alt key (18 = Alt):
        // grid-lock mode stays on only while the keys are held (see keyup).
        if(keyboardManager[17] && keyboardManager[18] && !isGridLockMode) {
            isGridLockMode = true;
            DoodadManager.setGridLockMode(isGridLockMode);
        }
    };

    //============================================================================
    // Scene_Map
    //============================================================================

    // Track raw key state; keyup re-arms the undo debounce and releases
    // grid-lock mode.
    document.addEventListener('keydown', function(e) {
        keyboardManager[e.keyCode] = true;
    }, false);
    document.addEventListener('keyup', function(e) {
        keyboardManager[e.keyCode] = false;
        isClicked = false;
        if(isGridLockMode) {
            DoodadManager.setGridLockMode(false);
            isGridLockMode = false;
        }
    }, false);

    // While the doodad editor is active, poll for the addon hotkeys instead
    // of running the normal scene update.
    var alias_Scene_Map_update = Scene_Map.prototype.updateScene;
    Scene_Map.prototype.updateScene = function() {
        if ($gameTemp._modeGFD) {
            DoodadManager.revertOneStep();
            return;
        }
        alias_Scene_Map_update.call(this);
    };

    //============================================================================
    // Window_GFD_Canvas (Override)
    //============================================================================

    // The addon adds extra hint lines, so the help panel grows to 7 lines.
    Window_GFD_Canvas.prototype.getMaxLines = function () {
        return 7;
    };

    // Redraws the original Yanfly help text (key hints + cursor X/Y readout).
    Window_GFD_Canvas.prototype.__originalRefresh = function (dx, dy, dw, dh) {
        this.drawDarkRect(0, dy, this.contents.width, dh);
        dy += this.lineHeight() / 2;
        var text = 'Q E - Layer -/+';
        this.drawText(text, dx, dy + this.lineHeight() * 0, dw);
        var text = 'Layer: ' + this.currentLayer();
        this.drawText(text, dx, dy + this.lineHeight() * 0, dw, 'right');
        if (DoodadManager._editMode) {
            var text = 'G - Grid Settings';
        } else {
            var text = 'T - Tweak Settings';
        }
        this.drawText(text, dx, dy + this.lineHeight() * 1, dw);
        var text = 'W A S D - Move Screen';
        this.drawText(text, dx, dy + this.lineHeight() * 2, dw);
        var text = 'X: ' + Yanfly.Util.toGroup(this.currentDoodadX());
        this.drawText(text, dx, dy + this.lineHeight() * 2, dw, 'right');
        var text = '↑←↓→ - Precision Move';
        this.drawText(text, dx, dy + this.lineHeight() * 3, dw);
        var text = 'Y: ' + Yanfly.Util.toGroup(this.currentDoodadY());
        this.drawText(text, dx, dy + this.lineHeight() * 3, dw, 'right');
        var text = 'Z X - Place / Return';
        this.drawText(text, dx, dy + this.lineHeight() * 4, dw);
    };

    // Draws this addon's extra hints (undo / grid lock) into the help panel.
    Window_GFD_Canvas.prototype.__multiplyText = function (dx, dy, dw, dh) {
        dy += this.lineHeight() / 2;
        var text = 'Ctrl + Z - Undo';
        this.drawText(text, dx, dy + this.lineHeight() * 1, dw, 'right');
        var text = 'Ctrl + Alt - Grid Lock';
        this.drawText(text, dx, dy + this.lineHeight() * 5, dw, 'left');
    };

    // Full refresh = original help text + addon hints.
    Window_GFD_Canvas.prototype.refresh = function() {
        this.contents.clear();
        var dh = this.lineHeight() * this.getMaxLines();
        var dy = this.contents.height - dh;
        var dx = Window_Base._faceWidth;
        var dw = this.contents.width - dx * 2;
        this.__originalRefresh(dx, dy, dw, dh);
        this.__multiplyText(dx, dy, dw, dh);
    };

    // Fade the canvas window when the mouse or the manual-move cursor is over
    // the help-panel area at the bottom of the screen.
    Window_GFD_Canvas.prototype.isReduceOpacity = function() {
        var y = this.contents.height - this.lineHeight() * this.getMaxLines();
        if (TouchInput._mouseOverY > y) return true;
        if (DoodadManager._manualMove && DoodadManager._manualY > y) return true;
        return false;
    };
})();
|
require "spec_helper"
# Round-trip spec: JKF -> KI2 text (converter) -> JKF (parser) must be lossless.
describe Jkf::Converter::Ki2 do
  let(:ki2_converter) { Jkf::Converter::Ki2.new }
  let(:ki2_parser) { Jkf::Parser::Ki2.new }

  # Re-parse the converter's output; compared against the original JKF below.
  subject { ki2_parser.parse(ki2_converter.convert(jkf)) }

  shared_examples(:parse_file) do |filename|
    let(:str) do
      # .ki2 fixtures are Shift_JIS encoded; everything else is read as-is.
      if File.extname(filename) == ".ki2"
        File.read(filename, encoding: "Shift_JIS").toutf8
      else
        File.read(filename).toutf8
      end
    end
    let(:jkf) { ki2_parser.parse(str).to_json }
    it "should be parse #{File.basename(filename)}" do
      is_expected.to eq JSON.parse(jkf)
    end
  end

  fixtures(:ki2).each do |fixture|
    it_behaves_like :parse_file, fixture
  end

  describe "#csa2relative(relative)" do
    # CSA relative-move codes and the Japanese notation each must map to.
    let(:pairs) do
      {
        "L" => "左",
        "C" => "直",
        "R" => "右",
        "U" => "上",
        "M" => "寄",
        "D" => "引",
        "H" => "打"
      }
    end
    it "should convert csa to relative string" do
      pairs.each do |csa, relative_str|
        expect(ki2_converter.send(:csa2relative, csa)).to eq relative_str
      end
      # Unrecognized codes map to the empty string.
      expect(ki2_converter.send(:csa2relative, "UNKOWN")).to eq ""
    end
  end
end
|
#!/bin/bash -ex
# Boot a liquid-cloud VM through `factory` with a freshly created USB storage
# image, forward HTTP (host 10080 -> guest 80) and SSH (10022 -> 22), and run
# the in-guest test script, passing through any extra CLI arguments.
./create-usb-storage
factory/factory run \
  --image liquid-cloud-x86_64 \
  --smp 4 \
  --memory 4096 \
  --share guest-scripts:/mnt/scripts \
  --share setup:/opt/setup \
  --tcp 10080:80 \
  --tcp 10022:22 \
  --usb-storage usb.raw \
  /mnt/scripts/liquid_vm_test.sh \
  "$@"
|
# Build and run the initialization-order demo (file name spelling is upstream's).
g++ InitalizationOrder.cpp -o InitalizationOrder.out
./InitalizationOrder.out
#!/usr/bin/env bash
# Fixed shebang: `#!/usr/bin/env` named no interpreter, so direct execution
# failed; bash is required for the [[ ]] tests and $OSTYPE used below.
# Notes
# File for finding the correct os type, then running the setup file for that os
# Gets the os type from $OSTYPE and normalizes it
# Normalizes bash's $OSTYPE into a short OS identifier: linux, mac or win.
# Unrecognized values are echoed through unchanged so callers can report them.
keg_get_os_type(){
  local OS_TYPE="$OSTYPE"
  case "$OSTYPE" in
    # cygwin and freebsd are treated as linux for setup purposes
    linux-gnu*|cygwin|freebsd*) OS_TYPE="linux" ;;
    darwin*) OS_TYPE="mac" ;;
    msys|win32) OS_TYPE="win" ;;
  esac
  # Quoted to avoid word-splitting of any unusual $OSTYPE value
  echo "$OS_TYPE"
}
# Gets the os type, and runs the setup script for it
# Example on mac
# OS_TYPE === mac
# SETUP_FILE === ~/keg/keg-cli/setup/scripts/mac-init.sh
keg_run_os_setup_file(){
  # Resolve the OS, then run the matching <os>-init.sh under $KEG_CLI_PATH;
  # print an error (but do not exit non-zero) when no script exists.
  local OS_TYPE="$(keg_get_os_type)"
  local SETUP_FILE=$KEG_CLI_PATH/setup/scripts/$OS_TYPE-init.sh
  if [[ -f "$SETUP_FILE" ]]; then
    /bin/bash $SETUP_FILE
  else
    echo "[ KEG-CLI ] ERROR: Could not find setup script for $OS_TYPE."
    echo "[ KEG-CLI ] ERROR: Please ensure your Operating System is supported!"
  fi
}
# Run the setup script
keg_run_os_setup_file |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// THIS IS A GENERATED FILE. DO NOT MODIFY MANUALLY. @see scripts/compile-icons.js
import * as React from 'react';
// Props added by SVGR so consumers can pass an accessible title.
interface SVGRProps {
  title?: string;
  titleId?: string;
}

// 16x16 "method" token icon (generated by SVGR — see header). Renders an
// optional <title> for screen readers and spreads extra SVG props onto the
// root element.
const EuiIconTokenMethod = ({
  title,
  titleId,
  ...props
}: React.SVGProps<SVGSVGElement> & SVGRProps) => (
  <svg
    xmlns="http://www.w3.org/2000/svg"
    width={16}
    height={16}
    viewBox="0 0 16 16"
    aria-labelledby={titleId}
    {...props}
  >
    {title ? <title id={titleId}>{title}</title> : null}
    <path
      fillRule="evenodd"
      d="M3.333 11.027V5.05h2.059v1.136h.063c.25-.747.891-1.214 1.728-1.214.848 0 1.524.483 1.65 1.214h.063c.204-.731.927-1.214 1.822-1.214 1.155 0 1.949.798 1.949 2.023v4.03h-2.169V7.542c0-.521-.29-.84-.738-.84s-.723.319-.723.84v3.486H6.963V7.54c0-.521-.29-.84-.739-.84-.447 0-.722.319-.722.84v3.486H3.333z"
    />
  </svg>
);

export const icon = EuiIconTokenMethod;
|
<gh_stars>10-100
package com.peony.core.data.cache;
import java.util.List;
import java.util.Map;
/**
 * Client abstraction over a remote cache backend.
 * Redundant {@code public} modifiers were removed (interface members are
 * implicitly public) and the Chinese comments were translated to English.
 * Expiry semantics of {@code exp} follow the backing cache; presumably
 * seconds — confirm against the implementation.
 */
public interface IRemoteCacheClient {
    /** Initialize the cache client. */
    void init();

    /** Shut down the cache client and release its resources. */
    void close();

    /** Store {@code obj} under {@code key} with expiry {@code exp}; true on success. */
    boolean set(String key, int exp, Object obj);

    /** Fire-and-forget set: does not wait for the server's reply. */
    void setWithNoReply(String key, int exp, Object obj);

    /**
     * Add {@code obj} under {@code key} only if the key is absent; returns
     * false when the key already exists. Useful as a distributed-lock primitive.
     *
     * @param key cache key
     * @param exp expiry
     * @param obj value to store
     * @return true when the key was newly added
     */
    boolean add(String key, int exp, Object obj);

    /** @return the cached value, or null when absent. */
    Object get(String key);

    /** Delete {@code key}; true on success. */
    boolean delete(String key);

    /** Fire-and-forget delete: does not wait for the server's reply. */
    void deleteWithNoReply(String key);

    /** Bulk lookup of several keys at once. */
    Map<String, Object> getBulk(String[] keys);

    Map<String, Object> getBulk(List<String> keys);

    /** Atomically increment the counter at {@code key} by {@code by}. */
    long incr(String key, int by);

    /** Atomically decrement the counter at {@code key} by {@code by}. */
    long decr(String key, int by);

    /** Flush the entire cache. */
    void flush();
}
|
<filename>dev/sbt-plugin/src/main/scala/com/lightbend/lagom/sbt/Internal.scala
/*
* Copyright (C) 2016 Lightbend Inc. <http://www.lightbend.com>
*/
package com.lightbend.lagom.sbt
import sbt._
import play.sbt.PlayInteractionMode
/** sbt-internal configuration scopes and keys for Lagom's dev mode. */
object Internal {
  object Configs {
    // Hidden ivy configurations extending Runtime, so dev-mode-only
    // dependencies stay out of published artifacts.
    val DevRuntime = config("dev-mode").hide extend Runtime
    val CassandraRuntime = config("dev-mode-cassandra").hide extend Runtime
  }
  object Keys {
    val interactionMode = SettingKey[PlayInteractionMode]("interactionMode", "Hook to configure how a service blocks when running")
    val stop = TaskKey[Unit]("stop", "Stop services, if have been started in non blocking mode")
  }
}
<filename>src/sockets/events.js
/**
 * Namespaced event names emitted by the zmq socket wrappers.
 * All names follow the `zmq::socket::<event>` convention.
 */
const SocketEvent = {
  CONNECT: 'zmq::socket::connect',
  RECONNECT: 'zmq::socket::reconnect',
  // Fixed: was 'zmq::socket:reconnect-failure' (single colon), the only entry
  // breaking the double-colon convention. Safe as long as all emitters and
  // listeners reference this constant rather than the raw string.
  RECONNECT_FAILURE: 'zmq::socket::reconnect-failure',
  DISCONNECT: 'zmq::socket::disconnect',
  CONNECT_DELAY: 'zmq::socket::connect-delay',
  CONNECT_RETRY: 'zmq::socket::connect-retry',
  LISTEN: 'zmq::socket::listen',
  BIND_ERROR: 'zmq::socket::bind-error',
  ACCEPT: 'zmq::socket::accept',
  ACCEPT_ERROR: 'zmq::socket::accept-error',
  CLOSE: 'zmq::socket::close',
  CLOSE_ERROR: 'zmq::socket::close-error'
}
export default SocketEvent
|
<filename>app/src/main/java/com/codernauti/sweetie/registration/RegisterContract.java
package com.codernauti.sweetie.registration;
import android.net.Uri;
/** MVP contracts for the registration flow. */
interface RegisterContract {

    // SetInfoFragment
    /** View for the "set info" registration step. */
    interface RegisterView {
        void setPresenter(RegisterPresenter presenter);
        void showNextScreen();
    }

    /** Presenter for the "set info" step. */
    interface RegisterPresenter {
        // Parameter renamed phonNumber -> phoneNumber (typo); interface
        // parameter names are documentation only, so this is fully compatible.
        void saveUserData(String uid, String email, String username, String phoneNumber, boolean gender);
    }

    // StepThree
    /** View for the "set user image" registration step. */
    interface SetUserImageView {
        void setPresenter(SetUserImagePresenter presenter);
        void setProgressViewsVisible(boolean visible);
        void showNextScreen();
        void showImage(String imageUrl);
    }

    /** Presenter that uploads the chosen profile image. */
    interface SetUserImagePresenter {
        void uploadImage(Uri imgLocalUri);
    }
}
|
import { all } from 'redux-saga/effects';
import setupClient from './setupClient';
import auth from './auth';
// Supervisor saga: runs every child saga concurrently and restarts the whole
// batch when one throws; gives up after 10 restarts so a persistent failure
// is not masked forever.
// NOTE(review): with every child saga commented out, `yield all([])`
// completes immediately and the `while (true)` spins without yielding —
// confirm before shipping in this state.
function* runLoop(client) {
  let restarts = 0;
  while (true) {
    try {
      yield all(
        [
          // bookmarks(client),
          // clientSaga(client),
          // files(client),
          // forms(client),
          // rooms(client),
          // messages(client),
          // presence(client),
          // user(client),
          // settings()
        ]
      );
    } catch (e) {
      console.error('Caught error in saga, restarting:');
      console.error(e);
      restarts += 1;
      if (restarts > 10) {
        throw new Error('Already restarted 10 times');
      }
    }
  }
}
function* xmppSaga() {
const client = setupClient();
yield all([runLoop(client), auth(client)]);
}
export default xmppSaga;
|
<reponame>sraashis/coinstac
'use strict';

// One-shot maintenance script: deletes the default COINSTAC app data
// directory using promisified rimraf, logging progress to the console.
const pify = require('util').promisify;
const CoinstacClient = require('coinstac-client-core');
const rimraf = require('rimraf');
const dir = CoinstacClient.getDefaultAppDirectory();
/* eslint-disable no-console */
console.log(`Removing ${dir} …`);
pify(rimraf)(dir)
  .then(() => console.log('Removed!'))
  .catch(console.error);
/* eslint-enable no-console */
|
# Append a marker to the scratch file. The context manager guarantees the
# handle is closed and flushed (the original leaked an open file object).
with open('test.txt', 'a') as file1:
    file1.write('student')

# Print only the positive numbers from the sample list.
for number in [1, -2, 3, -4]:
    if number > 0:
        print(number)
<html>
<head>
<title>List of Books</title>
</head>
<body>
<h1>List of Books</h1>
<ul>
<li>
<strong>Title:</strong> War and Peace
<strong>Author:</strong> Leo Tolstoy
<strong>Genre:</strong> Historical Fiction
</li>
<li>
<strong>Title:</strong> Pride and Prejudice
<strong>Author:</strong> Jane Austen
<strong>Genre:</strong> Romance
</li>
<li>
<strong>Title:</strong> 1984
<strong>Author:</strong> George Orwell
<strong>Genre:</strong> Dystopian
</li>
</ul>
</body>
</html> |
<gh_stars>10-100
package com.nolanlawson.keepscore.serialization;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.zip.GZIPInputStream;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlPullParserFactory;
import org.xmlpull.v1.XmlSerializer;
import android.content.ContentResolver;
import android.net.Uri;
import android.util.Xml;
import com.nolanlawson.keepscore.db.Delta;
import com.nolanlawson.keepscore.db.Game;
import com.nolanlawson.keepscore.db.PlayerScore;
import com.nolanlawson.keepscore.helper.SdcardHelper.Format;
import com.nolanlawson.keepscore.helper.PlayerColor;
import com.nolanlawson.keepscore.helper.XmlHelper;
import com.nolanlawson.keepscore.util.CollectionUtil;
import com.nolanlawson.keepscore.util.Pair;
import com.nolanlawson.keepscore.util.StringUtil;
import com.nolanlawson.keepscore.util.UtilLogger;
/**
* helper classes for serializing/deserializing Games and PlayerScores.
*
* @author nolan
*
*/
public class GamesBackupSerializer {

    /** Version without "automatic" - i.e., everything was manual */
    public static final int VERSION_ONE = 1;
    /** Version where "automatic" was added, to distinguish automatic backups from manual backups */
    public static final int VERSION_TWO = 2;
    /**
     * Version where "backupFilename" was added, for cases where the filename could not easily be determined
     * (e.g. from Gmail attachments)
     */
    public static final int VERSION_THREE = 3;
    /**
     * Version where the lastUpdate was added.
     */
    public static final int VERSION_FOUR = 4;
    /**
     * Version where colors and player history timestamps were added.
     */
    public static final int VERSION_FIVE = 5;
    public static final int CURRENT_VERSION = VERSION_FIVE;

    // XML attributes that distinguish null from empty-string element values,
    // which would otherwise be indistinguishable in the serialized text.
    private static final String ATTRIBUTE_NULL = "isNull";
    private static final String ATTRIBUTE_EMPTY = "isEmpty";

    private static UtilLogger log = new UtilLogger(GamesBackupSerializer.class);

    // Every element name that may appear in a backup document; element names
    // are mapped back via Tag.valueOf() during parsing.
    private static enum Tag {
        PlayerScore, Game, GamesBackup, gameCount, version, automatic, backupFilename, dateGameSaved,
        dateBackupSaved, dateGameStarted, gameName, playerName, score, playerNumber, history, historyTimestamps,
        lastUpdate, color, Games, PlayerScores;
    }

    /**
     * Don't read the entire file; just read the game count and other basic, summarized information.
     *
     * @param backupFilename
     * @return
     */
    @SuppressWarnings("incomplete-switch")
    public static GamesBackupSummary readGamesBackupSummary(Uri uri, Format format, ContentResolver contentResolver) {
        GamesBackupSummary result = new GamesBackupSummary();

        // Counts the summary fields collected so far; parsing stops as soon
        // as all 5 are known, so large backups are never fully read.
        int infoReceived = 0;
        try {
            XmlPullParser parser = null;
            BufferedReader reader = null;
            int parserEvent = -1;
            try {
                XmlPullParserFactory parserFactory = XmlPullParserFactory.newInstance();
                parser = parserFactory.newPullParser();
                InputStream inputStream = contentResolver.openInputStream(uri);
                if (format == Format.GZIP) { // new, gzipped format
                    inputStream = new GZIPInputStream(inputStream);
                }
                reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"), 0x1000);
                parser.setInput(reader);
                parserEvent = parser.getEventType();
                Tag tag = null;
                String text = null;
                while (parserEvent != XmlPullParser.END_DOCUMENT) {
                    parserEvent = parser.next();
                    switch (parserEvent) {
                    case XmlPullParser.START_TAG:
                        tag = Tag.valueOf(parser.getName());
                        break;
                    case XmlPullParser.TEXT:
                        text = parser.getText();
                        break;
                    case XmlPullParser.END_TAG:
                        // Field values are only committed at the closing tag,
                        // using the last TEXT event seen.
                        switch (tag) {
                        case gameCount:
                            result.setGameCount(Integer.parseInt(text));
                            infoReceived++;
                            break;
                        case version:
                            result.setVersion(Integer.parseInt(text));
                            infoReceived++;
                            if (result.getVersion() < VERSION_TWO) {
                                // no automatic vs. manual distinction in version one
                                result.setAutomatic(false);
                                infoReceived++;
                            }
                            if (result.getVersion() < VERSION_THREE) {
                                // filename not stored in XML file itself until version three
                                result.setFilename(uri.getLastPathSegment());
                                infoReceived++;
                            }
                            break;
                        case automatic:
                            result.setAutomatic(Boolean.parseBoolean(text));
                            infoReceived++;
                            break;
                        case dateBackupSaved:
                            result.setDateSaved(Long.parseLong(text));
                            infoReceived++;
                            break;
                        case backupFilename:
                            result.setFilename(text);
                            infoReceived++;
                            break;
                        }
                        break;
                    }
                    if (infoReceived == 5) {
                        // this is all the info required to create a summary
                        return result;
                    }
                }
            } finally {
                if (reader != null) {
                    reader.close();
                }
            }
        } catch (NumberFormatException e) {
            log.e(e, "unexpected exception for " + uri);
            throw new RuntimeException(e);
        } catch (IOException e) {
            log.e(e, "unexpected exception for " + uri);
            throw new RuntimeException(e);
        } catch (XmlPullParserException e) {
            log.e(e, "unexpected exception for " + uri);
            throw new RuntimeException(e);
        }
        throw new RuntimeException("failed to find summary for " + uri);
    }

    /**
     * Parses a full backup document into a GamesBackup, then applies fix-ups
     * for documents written by older versions.
     */
    @SuppressWarnings("incomplete-switch")
    public static GamesBackup deserialize(String xmlData) {
        int parserEvent = -1;
        XmlPullParser parser = null;
        Tag tag = null;
        GamesBackup gamesBackup = new GamesBackup();
        gamesBackup.setGames(new ArrayList<Game>());
        Game game = null;
        PlayerScore playerScore = null;
        Map<String, String> attributes = null;
        try {
            // calls service (referenced in url) to request XML serialized data
            parser = XmlHelper.loadData(xmlData);
            parserEvent = parser.getEventType();
            while (parserEvent != XmlPullParser.END_DOCUMENT) {
                switch (parserEvent) {
                case XmlPullParser.START_TAG:
                    tag = Tag.valueOf(parser.getName());
                    switch (tag) {
                    case Game:
                        game = new Game();
                        game.setPlayerScores(new ArrayList<PlayerScore>());
                        break;
                    case PlayerScore:
                        playerScore = new PlayerScore();
                        break;
                    }
                    // null or empty marker
                    if (parser.getAttributeCount() != -1) {
                        attributes = getAttributes(parser);
                    }
                    break;
                case XmlPullParser.END_TAG:
                    tag = Tag.valueOf(parser.getName());
                    switch (tag) {
                    case Game:
                        gamesBackup.getGames().add(game);
                        break;
                    case PlayerScore:
                        game.getPlayerScores().add(playerScore);
                        break;
                    }
                    break;
                case XmlPullParser.TEXT:
                    String text = parser.getText();
                    // Whitespace-only text is pretty-printing noise; skip it.
                    if (!StringUtil.isEmptyOrWhitespace(text)) {
                        handleText(text, tag, attributes, gamesBackup, game, playerScore);
                    }
                    break;
                }
                parserEvent = parser.next();
            }
        } catch (XmlPullParserException e) {
            log.e(e, "unexpected");
        } catch (IOException e) {
            log.e(e, "unexpected");
        }
        applyVersionFixes(gamesBackup);
        // return de-serialized game backup
        return gamesBackup;
    }

    // Backfills fields that older backup versions did not serialize.
    private static void applyVersionFixes(GamesBackup gamesBackup) {
        // fix for older versions of keepscore without colors
        if (gamesBackup.getVersion() < VERSION_FIVE) {
            for (Game game : gamesBackup.getGames()) {
                for (PlayerScore playerScore : game.getPlayerScores()) {
                    playerScore.setPlayerColor(PlayerColor.BUILT_INS[playerScore.getPlayerNumber()]);
                }
            }
        }
    }

    /**
     * Routes one element's text content into the object currently being built
     * (backup, game, or player score) based on the enclosing tag.
     */
    @SuppressWarnings("incomplete-switch")
    private static void handleText(String text, Tag tag, Map<String, String> attributes, GamesBackup gamesBackup,
            Game game, PlayerScore playerScore) {
        switch (tag) {
        case gameCount:
            gamesBackup.setGameCount(Integer.parseInt(text));
            break;
        case backupFilename:
            gamesBackup.setFilename(text);
            break;
        case version:
            gamesBackup.setVersion(Integer.parseInt(text));
            break;
        case automatic:
            gamesBackup.setAutomatic(Boolean.parseBoolean(text));
            break;
        case dateBackupSaved:
            gamesBackup.setDateSaved(Long.parseLong(text));
            break;
        case dateGameSaved:
            game.setDateSaved(Long.parseLong(text));
            break;
        case dateGameStarted:
            game.setDateStarted(Long.parseLong(text));
            break;
        case gameName:
            game.setName(getTextOrNullOrEmpty(attributes, text));
            break;
        case playerName:
            playerScore.setName(getTextOrNullOrEmpty(attributes, text));
            break;
        case playerNumber:
            playerScore.setPlayerNumber(Integer.parseInt(text));
            break;
        case history:
            playerScore.setHistory(Delta.fromJoinedScores(getTextOrNullOrEmpty(attributes, text)));
            break;
        case historyTimestamps:
            // update the existing list; assume this tag always comes after the "history" tag
            List<Long> timestamps = CollectionUtil.stringsToLongs(
                    StringUtil.split(getTextOrNullOrEmpty(attributes, text), ','));
            for (int i = 0, len = playerScore.getHistory().size(); i < len; i++) {
                playerScore.getHistory().get(i).setTimestamp(timestamps.get(i));
            }
            break;
        case color:
            playerScore.setPlayerColor(PlayerColor.deserialize(text));
            break;
        case score:
            playerScore.setScore(Long.parseLong(text));
            break;
        case lastUpdate:
            playerScore.setLastUpdate(Long.parseLong(text));
            break;
        }
    }

    // Reverses the isNull / isEmpty attribute convention written by addTag():
    // those markers win over whatever text content the element carried.
    private static String getTextOrNullOrEmpty(Map<String, String> attributes, String text) {
        if (Boolean.parseBoolean(attributes.get(ATTRIBUTE_NULL))) {
            return null;
        } else if (Boolean.parseBoolean(attributes.get(ATTRIBUTE_EMPTY))) {
            return "";
        }
        return text;
    }

    /** Serializes a backup to pretty-printed XML. */
    public static String serialize(GamesBackup gamesBackup) {
        String rawXml = serializeAsRawXml(gamesBackup);
        // pretty-print the xml
        return XmlHelper.prettyPrint(rawXml);
    }

    // Writes the backup as a single unformatted XML document:
    // GamesBackup > (header fields, Games > Game > PlayerScores > PlayerScore).
    private static String serializeAsRawXml(GamesBackup gamesBackup) {
        XmlSerializer serializer = Xml.newSerializer();
        StringWriter writer = new StringWriter();
        try {
            serializer.setOutput(writer);
            serializer.startDocument("UTF-8", true);
            serializer.startTag("", Tag.GamesBackup.name());
            addTag(serializer, Tag.gameCount, gamesBackup.getGameCount());
            addTag(serializer, Tag.version, gamesBackup.getVersion());
            addTag(serializer, Tag.automatic, gamesBackup.isAutomatic());
            addTag(serializer, Tag.backupFilename, gamesBackup.getFilename());
            addTag(serializer, Tag.dateBackupSaved, gamesBackup.getDateSaved());
            serializer.startTag("", Tag.Games.name());
            for (Game game : gamesBackup.getGames()) {
                serializer.startTag("", Tag.Game.name());
                addTag(serializer, Tag.dateGameSaved, game.getDateSaved());
                addTag(serializer, Tag.dateGameStarted, game.getDateStarted());
                addTag(serializer, Tag.gameName, game.getName());
                serializer.startTag("", Tag.PlayerScores.name());
                for (PlayerScore playerScore : game.getPlayerScores()) {
                    serializer.startTag("", Tag.PlayerScore.name());
                    addTag(serializer, Tag.playerName, playerScore.getName());
                    addTag(serializer, Tag.score, playerScore.getScore());
                    addTag(serializer, Tag.playerNumber, playerScore.getPlayerNumber());
                    Pair<String,String> historyAsStrings = Delta.toJoinedStrings(playerScore.getHistory());
                    addTag(serializer, Tag.history, historyAsStrings.getFirst());
                    addTag(serializer, Tag.historyTimestamps, historyAsStrings.getSecond());
                    addTag(serializer, Tag.lastUpdate, Long.toString(playerScore.getLastUpdate()));
                    addTag(serializer, Tag.color, PlayerColor.serialize(playerScore.getPlayerColor()));
                    serializer.endTag("", Tag.PlayerScore.name());
                }
                serializer.endTag("", Tag.PlayerScores.name());
                serializer.endTag("", Tag.Game.name());
            }
            serializer.endTag("", Tag.Games.name());
            serializer.endTag("", Tag.GamesBackup.name());
            serializer.endDocument();
            return writer.toString();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Convenience method for adding tags
     *
     * @param serializer
     * @param tag
     * @param value
     * @throws IOException
     */
    private static void addTag(XmlSerializer serializer, Tag tag, Object value) throws IOException {
        serializer.startTag("", tag.name());
        if (value == null) {
            // explicitly mark nulls with an attribute
            serializer.attribute("", ATTRIBUTE_NULL, Boolean.TRUE.toString());
        } else if (value.equals("")) {
            // explicitly mark empty strings
            serializer.attribute("", ATTRIBUTE_EMPTY, Boolean.TRUE.toString());
        }
        // NOTE(review): for null/empty values this still writes the literal
        // text "null"; the deserializer relies on the isNull/isEmpty
        // attributes and ignores the text, so this is harmless — confirm
        // before changing.
        serializer.text(String.valueOf("".equals(value) ? null : value));
        serializer.endTag("", tag.name());
    }

    // Snapshot the current element's attributes into a plain map.
    private static Map<String, String> getAttributes(XmlPullParser parser) {
        Map<String, String> attrs = new HashMap<String, String>();
        for (int i = 0; i < parser.getAttributeCount(); i++) {
            attrs.put(parser.getAttributeName(i), parser.getAttributeValue(i));
        }
        return attrs;
    }
}
|
<gh_stars>1-10
/**
 * Error subtype for command failures.
 * @param {string} message - human-readable failure description
 * @param {boolean} [showUsage=false] - when true, the catcher should print
 *   the command's usage/help text alongside the message
 */
module.exports = class CommandError extends Error {
  constructor (message, showUsage = false) {
    super(message)
    this.showUsage = showUsage
  }
}
|
/* Copyright (c) 2001-2011, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb.test;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import junit.framework.TestCase;
import junit.framework.TestResult;
/**
* HSQLDB TestBug778213 Junit test case. <p>
*
* Test to ensure that DDL can be executed through the
* HSQLDB PreparedStatement interface implementation and
* that the behaviour of the prepared statement object is
* nominally correct under "prepared" DDL.
*
* @author <EMAIL>
* @version 1.7.2
* @since 1.7.2
*/
public class TestBug778213 extends TestBase {

    public TestBug778213(String name) {
        super(name);
    }

    /* Implements the TestBug778213_Part3 test */
    public void test() throws Exception {
        Connection conn = newConnection();
        PreparedStatement pstmt;
        int updateCount;

        // 1) Prepared DDL must execute and report an update count of zero.
        try {
            pstmt = conn.prepareStatement("drop table test if exists");
            pstmt.executeUpdate();
            pstmt = conn.prepareStatement("create table test(id int)");
            updateCount = pstmt.executeUpdate();
            assertTrue("expected update count of zero", updateCount == 0);
            pstmt = conn.prepareStatement("drop table test");
            updateCount = pstmt.executeUpdate();
            assertTrue("expected update count of zero", updateCount == 0);
        } catch (Exception e) {
            assertTrue("unable to prepare or execute DDL", false);
        } finally {
            conn.close();
        }

        // 2) execute() on prepared DDL must report "no result set" (false).
        conn = newConnection();
        try {
            pstmt = conn.prepareStatement("create table test(id int)");
            assertTrue("got data expecting update count", !pstmt.execute());
        } catch (Exception e) {
            assertTrue("unable to prepare or execute DDL", false);
        } finally {
            conn.close();
        }

        // 3) executeQuery() on DDL must throw SQLException.
        // (The flag is inverted: it stays true only if NO exception was thrown.)
        conn = newConnection();
        boolean exception = true;
        try {
            pstmt = conn.prepareStatement("drop table test");
            pstmt.executeQuery();
        } catch (SQLException e) {
            exception = false;
        } finally {
            conn.close();
        }
        if (exception) {
            assertTrue("no exception thrown for executeQuery(DDL)", false);
        }

        // 4) A prepared stored-procedure call must work.
        conn = newConnection();
        try {
            pstmt = conn.prepareStatement("call identity()");
            pstmt.execute();
        } catch (Exception e) {
            assertTrue("unable to prepare or execute call", false);
        } finally {
            conn.close();
        }

        // 5) Batching prepared DDL must NOT throw.
        exception = false;
        conn = newConnection();
        try {
            pstmt = conn.prepareStatement("create table test(id int)");
            pstmt.addBatch();
        } catch (SQLException e) {
            exception = true;
        } finally {
            conn.close();
        }
        if (exception) {
            assertTrue("not expected exception batching prepared DDL", false);
        }

        // 6) Prepared DDL exposes no result-set metadata...
        conn = newConnection();
        try {
            pstmt = conn.prepareStatement("create table test(id int)");
            assertTrue("expected null ResultSetMetadata for prepared DDL",
                       null == pstmt.getMetaData());
        } finally {
            conn.close();
        }

        // 7) ...and no parameters.
        conn = newConnection();
        try {
            pstmt = conn.prepareStatement("create table test(id int)");
            assertTrue("expected zero parameter for prepared DDL",
                       0 == pstmt.getParameterMetaData().getParameterCount());
        } finally {
            conn.close();
        }
    }

    /* Runs TestBug778213_Part3 test from the command line*/
    public static void main(String[] args) throws Exception {
        TestResult result;
        TestCase test;
        java.util.Enumeration failures;
        int count;
        result = new TestResult();
        test = new TestBug778213("test");
        test.run(result);
        count = result.failureCount();
        System.out.println("TestBug778213 failure count: " + count);
        failures = result.failures();
        while (failures.hasMoreElements()) {
            System.out.println(failures.nextElement());
        }
    }
}
|
import pandas as pd
import pickle
class DataHandler:
    """Persist and restore a pandas DataFrame via pickle on disk.

    NOTE(review): pickle.load executes arbitrary code from the file —
    only load files this application wrote itself.
    """

    def __init__(self, file_name):
        # Path of the pickle file backing this handler.
        self.file_name = file_name
        self.data = pd.DataFrame()

    def load_data(self):
        """Unpickle and return the stored data; None (with a message) if absent."""
        try:
            fh = open(self.file_name, 'rb')
        except FileNotFoundError:
            print("File not found.")
            return None
        with fh:
            self.data = pickle.load(fh)
        return self.data

    def save_data(self, data):
        """Remember *data* and pickle it to the backing file."""
        self.data = data
        with open(self.file_name, 'wb') as sink:
            pickle.dump(data, sink)
# Demonstration: round-trip a small DataFrame through DataHandler.
pickle_path = 'data.pkl'
store = DataHandler(pickle_path)
# Build a small sample frame, persist it, then read it back and show it.
frame = pd.DataFrame({'A': [1, 2, 3], 'B': [4, 5, 6]})
store.save_data(frame)
restored = store.load_data()
print(restored)
#!/bin/sh
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Code-signs the app bundle, then extracts each embedded Mach-O signature
# into a detached .sign file (plus the CodeResources plists) and packs
# everything into ${OUT} so signatures can be re-applied elsewhere.
set -e
ROOTDIR=dist
BUNDLE="${ROOTDIR}/Viacoin-Qt.app"
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature.tar.gz
OUTROOT=osx
if [ ! -n "$1" ]; then
    echo "usage: $0 <codesign args>"
    echo "example: $0 -s MyIdentity"
    exit 1
fi
rm -rf ${TEMPDIR} ${TEMPLIST}
mkdir -p ${TEMPDIR}
# Sign the bundle; --file-list records every file codesign touched.
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"
# For each signed binary (everything except CodeResources), carve the
# signature blob out of the Mach-O using the offset/size that pagestuff
# reports for the code-signature load command.
grep -v CodeResources < "${TEMPLIST}" | while read i; do
    TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
    SIZE=`pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
    OFFSET=`pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
    SIGNFILE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}.sign"
    DIRNAME="`dirname "${SIGNFILE}"`"
    mkdir -p "${DIRNAME}"
    echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
    # bs=1 keeps skip/count in bytes, matching pagestuff's units.
    dd if="$i" of="${SIGNFILE}" bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done
# CodeResources files are plain plists — copy them verbatim.
grep CodeResources < "${TEMPLIST}" | while read i; do
    TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
    RESOURCE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}"
    DIRNAME="`dirname "${RESOURCE}"`"
    mkdir -p "${DIRNAME}"
    echo "Adding resource for: "${TARGETFILE}""
    cp "${i}" "${RESOURCE}"
done
rm ${TEMPLIST}
tar -C "${TEMPDIR}" -czf "${OUT}" .
rm -rf "${TEMPDIR}"
echo "Created ${OUT}"
|
<gh_stars>0
package com.lepao.ydcgkf.mvp.model;
public class FingerModel {
/**
* msg : 匹配成功
* code : 200
* data : {"serialNumber":"123","memberId":0}
*/
private String msg;
private int code;
private DataBean data;
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
public int getCode() {
return code;
}
public void setCode(int code) {
this.code = code;
}
public DataBean getData() {
return data;
}
public void setData(DataBean data) {
this.data = data;
}
public static class DataBean {
/**
* serialNumber : 123
* memberId : 0
*/
private String serialNumber;
private int memberId;
public String getSerialNumber() {
return serialNumber;
}
public void setSerialNumber(String serialNumber) {
this.serialNumber = serialNumber;
}
public int getMemberId() {
return memberId;
}
public void setMemberId(int memberId) {
this.memberId = memberId;
}
}
}
|
#!/bin/bash
# CI build script for caffe2: sets up compiler caching, picks CMake flags
# from ${BUILD_ENVIRONMENT}, builds/installs, and (on Jenkins) symlinks the
# results into the system Python path.
set -ex
LOCAL_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
ROOT_DIR=$(cd "$LOCAL_DIR"/.. && pwd)
# Setup sccache if SCCACHE_BUCKET is set
if [ -n "${SCCACHE_BUCKET}" ]; then
    mkdir -p ./sccache
    SCCACHE="$(which sccache)"
    if [ -z "${SCCACHE}" ]; then
        echo "Unable to find sccache..."
        exit 1
    fi
    # Setup wrapper scripts
    for compiler in cc c++ gcc g++ x86_64-linux-gnu-gcc; do
        (
            echo "#!/bin/sh"
            echo "exec $SCCACHE $(which $compiler) \"\$@\""
        ) > "./sccache/$compiler"
        chmod +x "./sccache/$compiler"
    done
    # CMake must find these wrapper scripts
    export PATH="$PWD/sccache:$PATH"
fi
# Setup ccache if configured to use it (and not sccache)
if [ -z "${SCCACHE}" ] && which ccache > /dev/null; then
    mkdir -p ./ccache
    ln -sf "$(which ccache)" ./ccache/cc
    ln -sf "$(which ccache)" ./ccache/c++
    ln -sf "$(which ccache)" ./ccache/gcc
    ln -sf "$(which ccache)" ./ccache/g++
    ln -sf "$(which ccache)" ./ccache/x86_64-linux-gnu-gcc
    export CCACHE_WRAPPER_DIR="$PWD/ccache"
    export PATH="$CCACHE_WRAPPER_DIR:$PATH"
fi
CMAKE_ARGS=("-DBUILD_BINARY=ON")
CMAKE_ARGS+=("-DUSE_OBSERVERS=ON")
CMAKE_ARGS+=("-DUSE_ZSTD=ON")
# Run build script from scripts if applicable
if [[ "${BUILD_ENVIRONMENT}" == *-android* ]]; then
    export ANDROID_NDK=/opt/ndk
    "${ROOT_DIR}/scripts/build_android.sh" ${CMAKE_ARGS[*]} "$@"
    exit 0
fi
if [[ "${BUILD_ENVIRONMENT}" == conda* ]]; then
    # click (required by onnx) wants these set
    export LANG=C.UTF-8
    export LC_ALL=C.UTF-8
    # SKIP_CONDA_TESTS refers to only the 'test' section of the meta.yaml
    export SKIP_CONDA_TESTS=1
    export CONDA_INSTALL_LOCALLY=1
    "${ROOT_DIR}/scripts/build_anaconda.sh" "$@"
    exit 0
fi
# Run cmake from ./build directory
mkdir -p ./build
cd ./build
INSTALL_PREFIX="/usr/local/caffe2"
CMAKE_ARGS+=("-DCMAKE_INSTALL_PREFIX=${INSTALL_PREFIX}")
# Explicitly set Python executable.
# On Ubuntu 16.04 the default Python is still 2.7.
PYTHON="$(which python)"
if [[ "${BUILD_ENVIRONMENT}" == py3* ]]; then
    PYTHON=/usr/bin/python3
    CMAKE_ARGS+=("-DPYTHON_EXECUTABLE=${PYTHON}")
fi
case "${BUILD_ENVIRONMENT}" in
    *-mkl*)
        CMAKE_ARGS+=("-DBLAS=MKL")
        ;;
    *-cuda*)
        CMAKE_ARGS+=("-DUSE_CUDA=ON")
        CMAKE_ARGS+=("-DCUDA_ARCH_NAME=Maxwell")
        CMAKE_ARGS+=("-DUSE_NNPACK=OFF")
        # Add ccache symlink for nvcc
        # NOTE(review): assumes ccache is installed and CCACHE_WRAPPER_DIR was
        # set above — under sccache-only setups this symlink would fail.
        ln -sf "$(which ccache)" "${CCACHE_WRAPPER_DIR}/nvcc"
        # Explicitly set path to NVCC such that the symlink to ccache is used
        CMAKE_ARGS+=("-DCUDA_NVCC_EXECUTABLE=${CCACHE_WRAPPER_DIR}/nvcc")
        # Ensure FindCUDA.cmake can infer the right path to the CUDA toolkit.
        # Setting PATH to resolve to the right nvcc alone isn't enough.
        # See /usr/share/cmake-3.5/Modules/FindCUDA.cmake, block at line 589.
        export CUDA_PATH="/usr/local/cuda"
        # Ensure the ccache symlink can still find the real nvcc binary.
        export PATH="/usr/local/cuda/bin:$PATH"
        ;;
esac
# Try to include Redis support for Linux builds
if [ "$(uname)" == "Linux" ]; then
    CMAKE_ARGS+=("-DUSE_REDIS=ON")
fi
# Currently, on Jenkins mac os, we will use custom protobuf. Mac OS
# contbuild at the moment is minimal dependency - it doesn't use glog
# or gflags either.
if [ "$(uname)" == "Darwin" ]; then
    CMAKE_ARGS+=("-DBUILD_CUSTOM_PROTOBUF=ON")
fi
# We test the presence of cmake3 (for platforms like Centos and Ubuntu 14.04)
# and use that if so.
if [[ -x "$(command -v cmake3)" ]]; then
    CMAKE_BINARY=cmake3
else
    CMAKE_BINARY=cmake
fi
# Configure
${CMAKE_BINARY} "${ROOT_DIR}" ${CMAKE_ARGS[*]} "$@"
# Build
if [ "$(uname)" == "Linux" ]; then
    make "-j$(nproc)" install
else
    echo "Don't know how to build on $(uname)"
    exit 1
fi
# Install ONNX into a local directory
ONNX_INSTALL_PATH="/usr/local/onnx"
pip install "${ROOT_DIR}/third_party/onnx" -t "${ONNX_INSTALL_PATH}"
# Symlink the caffe2 base python path into the system python path,
# so that we can import caffe2 without having to change $PYTHONPATH.
# Run in a subshell to contain environment set by /etc/os-release.
#
# This is only done when running on Jenkins! We don't want to pollute
# the user environment with Python symlinks and ld.so.conf.d hacks.
#
if [ -n "${JENKINS_URL}" ]; then
    (
        source /etc/os-release
        function python_version() {
            "$PYTHON" -c 'import sys; print("python%d.%d" % sys.version_info[0:2])'
        }
        # Debian/Ubuntu
        if [[ "$ID_LIKE" == *debian* ]]; then
            python_path="/usr/local/lib/$(python_version)/dist-packages"
            sudo ln -sf "${INSTALL_PREFIX}/caffe2" "${python_path}"
            sudo ln -sf "${ONNX_INSTALL_PATH}/onnx" "${python_path}"
        fi
        # RHEL/CentOS
        if [[ "$ID_LIKE" == *rhel* ]]; then
            python_path="/usr/lib64/$(python_version)/site-packages/"
            sudo ln -sf "${INSTALL_PREFIX}/caffe2" "${python_path}"
            sudo ln -sf "${ONNX_INSTALL_PATH}/onnx" "${python_path}"
        fi
        # /etc/ld.so.conf.d is used on both Debian and RHEL
        echo "${INSTALL_PREFIX}/lib" | sudo tee /etc/ld.so.conf.d/caffe2.conf
        sudo ldconfig
    )
fi
|
package abstractfactory;
import products.dumplings.Dumplings;
import products.dumplings.Salmon;
import products.pasta.Carbonara;
import products.pasta.Pasta;
import products.pizza.Italiana;
import products.pizza.Pizza;
/**
 * Abstract-factory implementation that produces Italian dishes.
 *
 * Lazily instantiated singleton: getInstance() is synchronized so the
 * instance is created at most once even under concurrent access.
 */
public class ItalianRestaurant implements AbstractFactory {

    private static ItalianRestaurant instance;

    /** Instances are only handed out via {@link #getInstance()}. */
    private ItalianRestaurant() {
    }

    /** Returns the single shared factory, creating it on first use. */
    public static synchronized ItalianRestaurant getInstance() {
        if (instance != null) {
            return instance;
        }
        instance = new ItalianRestaurant();
        return instance;
    }

    @Override
    public Pizza getPizza() {
        return new Italiana();
    }

    @Override
    public Pasta getPasta() {
        return new Carbonara();
    }

    @Override
    public Dumplings getDumplings() {
        return new Salmon();
    }
}
|
#!/bin/sh
# Unpack the packaged source tarball into the current directory.
# -o makes extracted files owned by the invoking user; the glob matches any
# compression suffix (.tar.gz, .tar.bz2, ...).
# NOTE(review): assumes ${PKG_NAME} is exported by the caller (build
# environment) and that exactly one matching tarball exists — confirm.
tar xof ${PKG_NAME}-*.tar.*
|
def search(list_tmp, value):
    """Return the index of the first item equal to ``value``, or -1 if absent.

    Works on any iterable (comparison via ==), matching the original
    linear-scan behavior.
    """
    return next(
        (index for index, item in enumerate(list_tmp) if item == value),
        -1,
    )
package io.tedy.spotbugsplugin.visiblefortestingdetector;
/**
 * Test fixture for the VisibleForTesting detector.
 *
 * Each call below deliberately abuses a method that was widened only for
 * testing; the "Failure N" comments mark the findings the detector is
 * expected to report. Do NOT "fix" these calls — they are the test data.
 */
public class ClassWhichAbusesExposedMethods {

    // Abuse from an instance context.
    public void abuseExposedMethods() {
        ClassWithExposedMethods sut = new ClassWithExposedMethods();
        sut.privateMethodThatIsExposedForTesting(); // Failure 1
        ClassWithExposedMethods.privateStaticMethodThatIsExposedForTesting(); // Failure 2
    }

    // Abuse from a static context — exercises the detector's static path.
    public static void abuseExposedMethodsStatically() {
        ClassWithExposedMethods sut = new ClassWithExposedMethods();
        sut.privateMethodThatIsExposedForTesting(); // Failure 3
        ClassWithExposedMethods.privateStaticMethodThatIsExposedForTesting(); // Failure 4
    }
}
|
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available

# Name of this script, used as a prefix in status messages.
# "$0" is quoted so invocation paths containing spaces don't break basename.
cmdname=$(basename "$0")

# Print arguments to stderr unless --quiet was requested.
echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
# Print the usage text to stderr and exit non-zero.
usage()
{
    cat << USAGE >&2
Usage:
    $cmdname host:port [-s] [-t timeout] [-- command args]
    -h HOST | --host=HOST       Host or IP under test
    -p PORT | --port=PORT       TCP port under test
                                Alternatively, you specify the host and port as host:port
    -s | --strict               Only execute subcommand if the test succeeds
    -q | --quiet                Don't output any status messages
    -t TIMEOUT | --timeout=TIMEOUT
                                Timeout in seconds, zero for no timeout
    -- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}
# Poll $HOST:$PORT once a second until a TCP connection succeeds.
# Uses nc when running under busybox ($ISBUSY), otherwise bash's
# /dev/tcp pseudo-device. Returns the result of the last probe.
# NOTE(review): the loop itself never times out — the timeout is enforced
# externally by wait_for_wrapper.
wait_for()
{
    if [[ $TIMEOUT -gt 0 ]]; then
        echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT"
    else
        echoerr "$cmdname: waiting for $HOST:$PORT without a timeout"
    fi
    start_ts=$(date +%s)
    while :
    do
        if [[ $ISBUSY -eq 1 ]]; then
            nc -z $HOST $PORT
            result=$?
        else
            # Connect-only probe; output and errors are discarded.
            (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1
            result=$?
        fi
        if [[ $result -eq 0 ]]; then
            end_ts=$(date +%s)
            echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $result
}
# Re-invoke this script as a --child under timeout(1) so that the wait can
# be bounded while still being interruptible with Ctrl-C.
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
wait_for_wrapper()
{
    if [[ $QUIET -eq 1 ]]; then
        timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT &
    else
        timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT &
    fi
    PID=$!
    # Forward Ctrl-C to the child's process group (note the leading "-").
    trap "kill -INT -$PID" INT
    wait $PID
    RESULT=$?
    if [[ $RESULT -ne 0 ]]; then
        echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT"
    fi
    return $RESULT
}
# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        # host:port combined form — split on the colon.
        hostport=(${1//:/ })
        HOST=${hostport[0]}
        PORT=${hostport[1]}
        shift 1
        ;;
        --child)
        # Internal flag: we are the re-invoked child under timeout(1).
        CHILD=1
        shift 1
        ;;
        -q | --quiet)
        QUIET=1
        shift 1
        ;;
        -s | --strict)
        STRICT=1
        shift 1
        ;;
        -h)
        HOST="$2"
        if [[ $HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        HOST="${1#*=}"
        shift 1
        ;;
        -p)
        PORT="$2"
        if [[ $PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        PORT="${1#*=}"
        shift 1
        ;;
        -t)
        TIMEOUT="$2"
        if [[ $TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        # Everything after -- is the subcommand to exec once the port is up.
        shift
        CLI="$@"
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done

if [[ "$HOST" == "" || "$PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi

# Defaults.
TIMEOUT=${TIMEOUT:-120}
STRICT=${STRICT:-0}
CHILD=${CHILD:-0}
QUIET=${QUIET:-0}

# check to see if timeout is from busybox?
TIMEOUT_PATH=$(realpath $(which timeout))
if [[ $TIMEOUT_PATH =~ "busybox" ]]; then
    ISBUSY=1
    BUSYTIMEFLAG="-t"
else
    ISBUSY=0
    BUSYTIMEFLAG=""
fi

if [[ $CHILD -gt 0 ]]; then
    # Child mode: just wait and report back to the wrapper.
    wait_for
    RESULT=$?
    exit $RESULT
else
    if [[ $TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        RESULT=$?
    else
        wait_for
        RESULT=$?
    fi
fi

if [[ $CLI != "" ]]; then
    if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then
        echoerr "$cmdname: strict mode, refusing to execute subprocess"
        exit $RESULT
    fi
    # Replace this process with the requested subcommand.
    exec $CLI
else
    exit $RESULT
fi
# Prompt for a beneficiary account, validate it against the BML API, and
# add it as a contact. Expects $COOKIE, $BML_URL, $red and $reset to be set
# by the including script.
printf 'Account Number: '
read -r ACCOUNT_NUMBER

# Fetch the validation payload once and reuse it for every field (the
# previous version re-queried the endpoint three times).
VALIDATION=$(curl -s -b $COOKIE $BML_URL/validate/account/$ACCOUNT_NUMBER)
VALID_NUMBER=$(echo "$VALIDATION" | jq -r .success)
if [ "$VALID_NUMBER" = "true" ]
then
    ACCOUNT_NAME=$(echo "$VALIDATION" | jq -r '.["payload"] | .name')
    CURRENCY=$(echo "$VALIDATION" | jq -r '.["payload"] | .currency')
    echo "Account Name: $ACCOUNT_NAME"
    echo "Currency: $CURRENCY"
    echo ""
    printf 'Contact Name: '
    read -r CONTACT_NAME
    # Default the alias to the account holder's name when left blank.
    if [ "$CONTACT_NAME" = "" ]
    then
        CONTACT_NAME=$ACCOUNT_NAME
    fi
    # URL-encode spaces; the /g flag is required so EVERY space is encoded,
    # not just the first one (multi-word names were previously mangled).
    CONTACT_NAME=`echo "$CONTACT_NAME" | sed "s/ /%20/g"`
    ADDCONTACT=$(curl -s -b $COOKIE $BML_URL/contacts \
        --data-raw contact_type=IAT \
        --data-raw account=$ACCOUNT_NUMBER \
        --data-raw alias=$CONTACT_NAME \
        --compressed \
        | jq -r .success)
    if [ "$ADDCONTACT" = "true" ]
    then
        echo "Contact added successfully"
    else
        echo "${red}There was an error${reset}"
        source contactsmenu.sh
    fi
else
    echo "${red}Invalid Account${reset}" 1>&2
    source contactsmenu.sh
fi
source contactsmenu.sh
|
import Vue from 'vue'
import VueRouter from 'vue-router'
Vue.use(VueRouter)

// Router shared by every preview instance. History mode stays disabled
// because it needs backend support; scrollBehavior resets scrolling to the
// top on navigation.
const router = new VueRouter({
  // mode: 'history', // enable once the backend supports history-mode URLs
  scrollBehavior: () => ({y: 0}),
  routes: []
})

// Wrap a component in a minimal host object so it can be mounted standalone.
// https://vuejs.org/v2/guide/render-function.html
export default previewComponent => ({
  router,
  render: createElement => createElement(previewComponent)
})
|
<gh_stars>0
import keyMirror from 'react/lib/keyMirror';
export default {
  // Event name triggered from the store, listened to by views.
  CHANGE_EVENT: 'change',

  // One entry per action, named in the past tense. keyMirror maps each key
  // to a string equal to the key itself.
  ActionTypes: keyMirror({
    TASK_ADDED: null,
    SEARCHED: null,
    RECEIVED_RESULT: null,
    SAVED: null,
    SAVE_DONE: null,
    GOTALL: null,
    ALL_DONE: null
  }),

  // Where an action originated.
  ActionSources: keyMirror({
    SERVER_ACTION: null,
    VIEW_ACTION: null
  })
};
|
#!/bin/sh
# Emit the first five lines of /proc/meminfo wrapped in Markdown code fences.
# printf replaces `echo -en`: POSIX sh's echo has no portable -e/-n flags
# (dash, for example, would print the flags literally). The useless cat is
# dropped — sed reads the file directly.
printf '``` \n'
printf '%s```' "$(sed -n '1,5p' /proc/meminfo)"
|
#!/bin/sh
# Export a docker image by hash ($1) into kronos/docker/chronology, then
# build it with tag $2. The second step only runs if the export succeeds.
# NOTE(review): assumes the script is invoked from its own directory (the
# ../../docker_tools relative path) — confirm callers cd here first.
python ../../docker_tools/manage_docker.py export --hash $1 --export-path $(git rev-parse --show-toplevel)/kronos/docker/chronology --repository-path $(git rev-parse --show-toplevel) && \
python ../../docker_tools/manage_docker.py build --dockerfile-directory . --tag $2
|
<reponame>Shigawire/eui
import React from "react";

// 16x10 "KQL field" icon. The path data looks machine-generated from an SVG
// asset — confirm before hand-editing; regenerate from the source asset
// instead. Extra props are spread onto the root <svg> element.
const EuiIconKqlField = props => <svg width={16} height={10} viewBox="0 0 16 10" xmlns="http://www.w3.org/2000/svg" {...props}><path d="M8 9a5 5 0 1 1 0-8 5 5 0 1 1 0 8zm.75-.692a4 4 0 1 0 0-6.615A4.981 4.981 0 0 1 10 5a4.981 4.981 0 0 1-1.25 3.308zM4.133 8V5.559h2.496v-.625H4.133V2.996h2.719v-.633H3.43V8h.703z" /></svg>;

export const icon = EuiIconKqlField;
/**
 * @author <NAME>
 * First run "createRandomEmailList.js", THEN this one.
 * It runs the algorithm with production within the confines of this folder.
 */
const Alg = require('../doAlg.js');

// Run the algorithm over the generated email list, then persist the result.
// The .catch means a rejection anywhere in the chain is reported and flips
// the exit code, instead of dying as an unhandled promise rejection.
Alg.doAlg('./email-list.txt')
  .then((hash) => Alg.writeOutFile(hash, './email-list.txt'))
  .catch((err) => {
    console.error(err);
    process.exitCode = 1;
  });
<reponame>DonRumata710/NetCowork
#ifndef TYPES_H
#define TYPES_H

#include "enumutil.h"

// X-macro list of lexer tokens: DO(enum-name, source-spelling).
// ENUM_WITH_NAMES / STR_TO_ENUM_DECL (from enumutil.h) expand this into the
// enum definition plus string<->enum conversion declarations, so adding a
// token here automatically updates all of them.
#define Token(DO) \
    DO(TOKEN_IMPORT, "import") \
    DO(TOKEN_CLASS, "class") \
    DO(TOKEN_STRUCT, "struct") \
    DO(TOKEN_ENUM, "enum") \
    DO(TOKEN_FUNCTION, "function") \
    DO(TOKEN_GETTER, "get") \
    DO(TOKEN_SETTER, "set") \
    DO(TOKEN_OPENING_BRACE, "{") \
    DO(TOKEN_CLOSING_BRACE, "}") \
    DO(TOKEN_OPENING_PARENTHESIS, "(") \
    DO(TOKEN_CLOSING_PARENTHESIS, ")") \
    DO(TOKEN_COMMA, ",") \
    DO(TOKEN_ASSIGNMENT, "=") \
    DO(TOKEN_SEMICOLON, ";") \
    DO(TOKEN_COLON, ":") \
    DO(TOKEN_DOT, ".") \
    DO(TOKEN_NONE, "")

ENUM_WITH_NAMES(Token)
STR_TO_ENUM_DECL(Token)

// Superseded hand-written version, kept for reference.
//enum class Token
//{
//    CLASS,
//    STRUCT,
//    ENUM,
//    FUNCTION,
//    PROPERTY,
//    OPENING_BRACE,
//    CLOSING_BRACE,
//    OPENING_PARENTHESIS,
//    CLOSING_PARENTHESIS,
//    NONE
//};

// X-macro list of value types: DO(enum-type, enum-name, source-spelling).
// Note the extra first argument compared to Token — this list uses the
// ENUM_C_WITH_NAMES variant and also gets an enum-to-string declaration.
#define Type(DO) \
    DO(Type, NONE, "NONE") \
    DO(Type, UI64, "ui64") \
    DO(Type, UI32, "ui32") \
    DO(Type, UI16, "ui16") \
    DO(Type, UI8, "ui8") \
    DO(Type, I64, "i64") \
    DO(Type, I32, "i32") \
    DO(Type, I16, "i16") \
    DO(Type, I8, "i8") \
    DO(Type, FLOAT, "float") \
    DO(Type, DFLOAT, "double") \
    DO(Type, CHAR, "char") \
    DO(Type, STRING, "string") \
    DO(Type, FUNCTION, "function") \
    DO(Type, STRUCT, "struct") \
    DO(Type, CLASS, "class") \
    DO(Type, ENUM, "enum")

ENUM_C_WITH_NAMES(Type)
STR_TO_ENUM_DECL(Type)
ENUM_TO_STR_DECL(Type)

// Superseded hand-written version, kept for reference.
//enum class Type
//{
//    UI64,
//    UI32,
//    UI16,
//    UI8,
//    I64,
//    I32,
//    I16,
//    I8,
//    CHAR,
//    STRING,
//    FUNCTION,
//    STRUCT,
//    CLASS,
//    NONE
//};

#endif // TYPES_H
|
import functools
import datetime
from flask import request
from src.model.models import Token
from src import errors
def login_required(f):
    """Decorator that rejects the request unless it carries a valid access token.

    Returns errors.not_authorized() when the Authorization header is absent,
    errors.token_invalid() when the token is unknown or expired, and
    otherwise calls the wrapped view unchanged.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        if "Authorization" not in request.headers:
            return errors.not_authorized()
        actual_token = get_token_from_request()
        # Token must exist and still be inside its validity window.
        if actual_token is None or actual_token.valid_until <= datetime.datetime.now():
            return errors.token_invalid()
        # Forward *args as well as **kwargs — the previous version silently
        # dropped positional arguments passed to the view.
        return f(*args, **kwargs)
    return decorated_function
def get_token_from_request():
    """Look up the access-token row matching the request's Authorization header.

    Returns None when the header is absent or no matching token exists.
    """
    if "Authorization" not in request.headers:
        return None
    header_value = request.headers["Authorization"]
    return Token.get_or_none(
        (Token.token == header_value) & (Token.token_type == "access")
    )
def get_user_from_request():
    """Return the user owning the request's access token, or None."""
    token = get_token_from_request()
    return token.user if token else None
|
<reponame>nmuzychuk/room-service<gh_stars>0
package com.nmuzychuk.roomservice;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Spring Boot entry point for the room service. @SpringBootApplication
 * enables component scanning and auto-configuration for this package.
 */
@SpringBootApplication
public class RoomServiceApplication {

    public static void main(String[] args) {
        SpringApplication.run(RoomServiceApplication.class, args);
    }
}
|
<reponame>todinesh/MySpringQuartz
package com.dishatech.myspringquartz.config;

import java.io.IOException;
import java.util.Map;
import java.util.Properties;
import javax.sql.DataSource;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.SimpleTrigger;
import org.quartz.spi.JobFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.scheduling.quartz.CronTriggerFactoryBean;
import org.springframework.scheduling.quartz.JobDetailFactoryBean;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.scheduling.quartz.SimpleTriggerFactoryBean;

/**
 * Quartz scheduler wiring. Only active when the "quartz.enabled" property
 * is set. Beans configure a DB-backed scheduler whose jobs can be
 * autowired from the Spring context; the static helpers build trigger and
 * job-detail factory beans for callers.
 */
@Configuration
@ConditionalOnProperty(name = "quartz.enabled")
public class QuartzConfig {

    @Autowired
    private ApplicationContext applicationContext;

    /** Job factory that autowires Spring beans into Quartz job instances. */
    @Bean
    public JobFactory jobFactory() {
        AutowiringSpringBeanJobFactory jobFactory = new AutowiringSpringBeanJobFactory();
        jobFactory.setApplicationContext(applicationContext);
        return jobFactory;
    }

    /**
     * Scheduler backed by the given DataSource. Existing jobs in the DB are
     * kept (overwriteExistingJobs=false) and the scheduler starts
     * automatically.
     */
    @Bean
    public SchedulerFactoryBean schedulerFactoryBean(DataSource dataSource, JobFactory jobFactory) throws IOException {
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        factory.setOverwriteExistingJobs(false);
        factory.setAutoStartup(true);
        factory.setDataSource(dataSource);
        factory.setJobFactory(jobFactory);
        factory.setQuartzProperties(quartzProperties());
        return factory;
    }

    /** Loads quartz.properties from the classpath. */
    @Bean
    public Properties quartzProperties() throws IOException {
        PropertiesFactoryBean propertiesFactoryBean = new PropertiesFactoryBean();
        propertiesFactoryBean.setLocation(new ClassPathResource("/quartz.properties"));
        propertiesFactoryBean.afterPropertiesSet();
        return propertiesFactoryBean.getObject();
    }

    /** Fixed-interval trigger that fires immediately and repeats forever. */
    public static SimpleTriggerFactoryBean createTrigger(JobDetail jobDetail, long pollFrequencyMs) {
        SimpleTriggerFactoryBean factoryBean = new SimpleTriggerFactoryBean();
        factoryBean.setJobDetail(jobDetail);
        factoryBean.setStartDelay(0L);
        factoryBean.setRepeatInterval(pollFrequencyMs);
        factoryBean.setRepeatCount(SimpleTrigger.REPEAT_INDEFINITELY);
        // in case of misfire, ignore all missed triggers and continue :
        factoryBean.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_REMAINING_COUNT);
        return factoryBean;
    }

    // Use this method for creating cron triggers instead of simple triggers:
    public static CronTriggerFactoryBean createCronTrigger(JobDetail jobDetail, String cronExpression) {
        CronTriggerFactoryBean factoryBean = new CronTriggerFactoryBean();
        factoryBean.setJobDetail(jobDetail);
        factoryBean.setCronExpression(cronExpression);
        factoryBean.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_FIRE_NOW);
        return factoryBean;
    }

    /**
     * Durable JobDetail for the given job class.
     * NOTE(review): clazz is a raw Class — presumably always a Job
     * implementation; confirm callers before tightening the signature.
     */
    public static JobDetailFactoryBean createJobDetail(Class clazz, String description, Map<String, Object> params) {
        JobDetailFactoryBean factoryBean = new JobDetailFactoryBean();
        factoryBean.setJobClass(clazz);
        factoryBean.setDescription(description);
        factoryBean.setJobDataMap(new JobDataMap(params));
        // job has to be durable to be stored in DB:
        factoryBean.setDurability(true);
        return factoryBean;
    }
}
package com.company;
import java.util.Scanner;
/**
 * Lottery exercise: draw a two-digit number with distinct digits, read the
 * user's two-digit pick, and award prizes for exact match, digit
 * permutation, or single-digit match.
 */
public class Exercise_5_32 {

    public static void main(String[] args) {
        // generate lottery number with two DISTINCT digits (the do/while
        // redraws the second digit until it differs from the first)
        int lotteryDigit1 = (int)(Math.random()*10);
        int lotteryDigit2;
        do {
            lotteryDigit2 = (int) (Math.random() * 10);
        } while (lotteryDigit1 == lotteryDigit2);
        int lottery = lotteryDigit1*10+lotteryDigit2;

        // prompt the user to enter a guess
        Scanner input = new Scanner(System.in);
        System.out.print("Enter your lottery pick (two digits): ");
        int guess = input.nextInt();
        int guessDigit1 = guess / 10;
        int guessDigit2 = guess % 10;
        // String concatenation prints the two digits side by side, which
        // matches the two-digit lottery (e.g. digits 0 and 5 print as "05").
        System.out.println("The lottery number is "+ lotteryDigit1 + lotteryDigit2);

        // check the guess: exact match beats permutation beats single digit
        if (guess == lottery)
            System.out.println("Exact match: you win $10,000");
        else if (guessDigit2 == lotteryDigit1 && guessDigit1==lotteryDigit2)
            System.out.println("Match all digits: you win $3,000");
        else if (guessDigit1 == lotteryDigit1 || guessDigit1 == lotteryDigit2 ||
                 guessDigit2 == lotteryDigit1 || guessDigit2 == lotteryDigit2)
            System.out.println("Match one digit: you win $1,000");
        else
            System.out.println("Sorry, no match");
    }
}
|
<reponame>anticipasean/girakkafunc<gh_stars>0
package cyclops.reactor.stream.syncflux;

import cyclops.reactive.ReactiveSeq;
import cyclops.reactor.stream.CollectableTest;
import cyclops.reactor.stream.FluxReactiveSeq;
import reactor.core.publisher.Flux;

/**
 * Runs the shared CollectableTest suite against a synchronous
 * Flux-backed ReactiveSeq; the inherited tests exercise the sequence
 * built by {@link #of}.
 */
public class SyncRSCollectableTest extends CollectableTest {

    /** Builds the sequence under test from the given values. */
    public <T> ReactiveSeq<T> of(T... values) {
        return FluxReactiveSeq.reactiveSeq(Flux.just(values));
    }
}
|
# Import Oracle's package-signing keys (current and legacy) and register the
# VirtualBox apt repository for this distribution.
wget -q https://www.virtualbox.org/download/oracle_vbox_2016.asc -O- | sudo apt-key add -
wget -q https://www.virtualbox.org/download/oracle_vbox.asc -O- | sudo apt-key add -
echo "deb [arch=amd64] http://download.virtualbox.org/virtualbox/debian $(lsb_release -sc) contrib" | sudo tee /etc/apt/sources.list.d/virtualbox.list
sudo apt update
# Kernel headers + dkms are needed to build the VirtualBox kernel modules.
sudo apt install linux-headers-$(uname -r) dkms
sudo apt-get install virtualbox-6.0
cd ~/
# The extension pack's filename version must match its release directory:
# the 6.0.4 directory ships ...-6.0.4.vbox-extpack (the previous -6.0.0
# filename does not exist there, so the download 404'd).
wget https://download.virtualbox.org/virtualbox/6.0.4/Oracle_VM_VirtualBox_Extension_Pack-6.0.4.vbox-extpack
//===============================================================================
// @ Game.cpp
// ------------------------------------------------------------------------------
// Game core routines
//
// Copyright (C) 2008-2015 by <NAME> and <NAME>.
// All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Game::Orthographic() creates a orthographic projection matrix and sets it in
// the rendering pipeline. It does this based on the locations of a view window
// and the near and far planes.
//
// You can toggle between the orthographic and perspective projections (using
// Frustum(), which has a similar interface) and see the differences. Note how
// the view window affects how much is rendered depending on the projection, and
// the different effects on parallel lines into the distance.
//
// The key commands are:
//
// w, s - move near plane out and in
// e, d - move far plane out and in
// i, k - move view window up and down
// j, l - move view window left and right
// p - toggle between orthographic and perspective projection
// space - reset to start
//
//===============================================================================
//-------------------------------------------------------------------------------
//-- Dependencies ---------------------------------------------------------------
//-------------------------------------------------------------------------------
#include <math.h>
#include <IvClock.h>
#include <IvRenderer.h>
#include <IvEventHandler.h>
#include <IvMath.h>
#include <IvMatrix33.h>
#include <IvMatrix44.h>
#include <IvRendererHelp.h>
#include <IvVector4.h>
#include "Game.h"
//-------------------------------------------------------------------------------
//-- Static Members -------------------------------------------------------------
//-------------------------------------------------------------------------------
//-------------------------------------------------------------------------------
//-- Methods --------------------------------------------------------------------
//-------------------------------------------------------------------------------
//-------------------------------------------------------------------------------
// @ IvGame::Create()
//-------------------------------------------------------------------------------
// Static constructor
// Needs to be here so that IvGame::mGame variable can be created in IvMain.cpp
//-------------------------------------------------------------------------------
// Static factory: allocates the concrete Game into the framework's global
// IvGame::mGame slot; returns false on allocation failure.
bool
IvGame::Create()
{
    IvGame::mGame = new Game();
    return ( IvGame::mGame != 0 );
}   // End of IvGame::Create()
//-------------------------------------------------------------------------------
// @ Game::Game()
//-------------------------------------------------------------------------------
// Constructor
//-------------------------------------------------------------------------------
// Constructor: start with the view window centered at the origin and the
// orthographic projection selected.
Game::Game() : IvGame()
{
    // init variables
    mXCenter = 0.0f;
    mYCenter = 0.0f;
    mUseOrthographic = true;
}   // End of Game::Game()
//-------------------------------------------------------------------------------
// @ Game::~Game()
//-------------------------------------------------------------------------------
// Destructor
//-------------------------------------------------------------------------------
// Destructor: nothing to release; base class handles framework teardown.
Game::~Game()
{
}   // End of Game::~Game()
//-------------------------------------------------------------------------------
// @ Game::PostRendererInitialize()
//-------------------------------------------------------------------------------
// Set up internal subsystems
//-------------------------------------------------------------------------------
// Post-renderer setup: default lighting plus the initial near/far planes
// (3.5 and 25, matching the reset values in UpdateObjects).
bool
Game::PostRendererInitialize()
{
    // Set up base class
    if ( !IvGame::PostRendererInitialize() )
        return false;

    ::IvSetDefaultLighting();
    IvRenderer::mRenderer->SetNearPlane(3.5f);
    IvRenderer::mRenderer->SetFarPlane(25.0f);

    return true;
}   // End of Game::PostRendererInitialize()
//-------------------------------------------------------------------------------
// @ Game::Update()
//-------------------------------------------------------------------------------
// Main update loop
//-------------------------------------------------------------------------------
// Per-frame input handling. Keys (see file header): w/s move the near
// plane, e/d the far plane, i/k/j/l move the view window, p toggles
// orthographic vs. perspective, space resets everything. Plane movement is
// scaled by dt (units per second); window movement at 0.75 units/second.
void
Game::UpdateObjects( float dt )
{
    // set near plane
    if (mEventHandler->IsKeyDown('w'))
    {
        IvRenderer::mRenderer->SetNearPlane( IvRenderer::mRenderer->GetNearPlane() + dt );
    }
    if (mEventHandler->IsKeyDown('s'))
    {
        IvRenderer::mRenderer->SetNearPlane( IvRenderer::mRenderer->GetNearPlane() - dt );
    }

    // set far plane
    if (mEventHandler->IsKeyDown('e'))
    {
        IvRenderer::mRenderer->SetFarPlane( IvRenderer::mRenderer->GetFarPlane() + dt );
    }
    if (mEventHandler->IsKeyDown('d'))
    {
        IvRenderer::mRenderer->SetFarPlane( IvRenderer::mRenderer->GetFarPlane() - dt );
    }

    // set x position of window
    if (mEventHandler->IsKeyDown('j'))
    {
        mXCenter -= 0.75f*dt;
    }
    if (mEventHandler->IsKeyDown('l'))
    {
        mXCenter += 0.75f*dt;
    }

    // set y position of window
    if (mEventHandler->IsKeyDown('i'))
    {
        mYCenter += 0.75f*dt;
    }
    if (mEventHandler->IsKeyDown('k'))
    {
        mYCenter -= 0.75f*dt;
    }

    // toggle projection (IsKeyPressed, not IsKeyDown, so one toggle per tap)
    if (mEventHandler->IsKeyPressed('p'))
    {
        mUseOrthographic = !mUseOrthographic;
    }

    // reset to the startup values
    if (mEventHandler->IsKeyDown(' '))
    {
        IvRenderer::mRenderer->SetFOV( 45.0f );
        IvRenderer::mRenderer->SetNearPlane( 3.5f );
        IvRenderer::mRenderer->SetFarPlane(25.0f);
        mXCenter = 0.0f;
        mYCenter = 0.0f;
    }
}   // End of Game::Update()
//-------------------------------------------------------------------------------
// @ Game::Orthographic()
//-------------------------------------------------------------------------------
// Set orthographic projection matrix
//-------------------------------------------------------------------------------
// Build an orthographic projection for the view volume [left,right] x
// [bottom,top] x [nearZ,farZ] and hand it to the renderer. Two variants:
// the OpenGL branch maps depth via 2/(n-f) and (n+f)/(n-f) (presumably
// targeting a [-1,1] clip-space depth range), the other branch via
// 1/(f-n) (presumably a [0,1] range) — confirm against the renderer's
// clip-space conventions before changing signs.
void
Game::Orthographic( float left, float right,
                    float bottom, float top,
                    float nearZ, float farZ )
{
    IvMatrix44 ortho;
    if ( IvRenderer::mRenderer->GetAPI() == kOpenGL )
    {
        float recipX = 1.0f/(right-left);
        float recipY = 1.0f/(top-bottom);
        float recipZ = 1.0f/(nearZ-farZ);

        ortho(0,0) = 2.0f*recipX;
        ortho(0,3) = -(right+left)*recipX;
        ortho(1,1) = 2.0f*recipY;
        ortho(1,3) = -(top+bottom)*recipY;
        ortho(2,2) = 2.0f*recipZ;
        ortho(2,3) = (nearZ+farZ)*recipZ;
    }
    else
    {
        float recipX = 1.0f/(right-left);
        float recipY = 1.0f/(top-bottom);
        float recipZ = 1.0f/(farZ-nearZ);

        ortho(0,0) = 2.0f*recipX;
        ortho(0,3) = -(right+left)*recipX;
        ortho(1,1) = 2.0f*recipY;
        ortho(1,3) = -(top+bottom)*recipY;
        ortho(2,2) = recipZ;
        ortho(2,3) = -nearZ*recipZ;
    }

    // send to renderer
    ::IvSetProjectionMatrix( ortho );
}
//-------------------------------------------------------------------------------
// @ Game::Frustum()
//-------------------------------------------------------------------------------
// Set perspective projection matrix
//-------------------------------------------------------------------------------
// Build a perspective projection for the frustum defined by the near-plane
// window [left,right] x [bottom,top] at distance nearZ, extending to farZ,
// and hand it to the renderer. As in Orthographic(), the two branches use
// different depth mappings (and opposite signs in rows 0/1 column 2 and in
// element (3,2)) — presumably matching each API's clip-space and handedness
// conventions; confirm before editing.
void
Game::Frustum( float left, float right,
               float bottom, float top,
               float nearZ, float farZ )
{
    IvMatrix44 perspective;
    if (IvRenderer::mRenderer->GetAPI() == kOpenGL)
    {
        float recipX = 1.0f/(right-left);
        float recipY = 1.0f/(top-bottom);
        float recipZ = 1.0f/(nearZ-farZ);

        perspective(0,0) = 2.0f*nearZ*recipX;
        perspective(0,2) = (right+left)*recipX;
        perspective(1,1) = 2.0f*nearZ*recipY;
        perspective(1,2) = (top+bottom)*recipY;
        perspective(2,2) = (nearZ+farZ)*recipZ;
        perspective(2,3) = 2.0f*nearZ*farZ*recipZ;
        perspective(3,2) = -1.0f;
        perspective(3,3) = 0.0f;
    }
    else
    {
        float recipX = 1.0f/(right-left);
        float recipY = 1.0f/(top-bottom);
        float Q = farZ/(farZ-nearZ);

        perspective(0,0) = 2.0f*nearZ*recipX;
        perspective(0,2) = -(right+left)*recipX;
        perspective(1,1) = 2.0f*nearZ*recipY;
        perspective(1,2) = -(top+bottom)*recipY;
        perspective(2,2) = Q;
        perspective(2,3) = -nearZ*Q;
        perspective(3,2) = 1.0f;
        perspective(3,3) = 0.0f;
    }

    // send to renderer
    ::IvSetProjectionMatrix( perspective );
}
//-------------------------------------------------------------------------------
// @ Game::Render()
//-------------------------------------------------------------------------------
// Render stuff
//-------------------------------------------------------------------------------
// Draw the scene: choose the active projection (same view-window bounds for
// both so the toggle is directly comparable), set the viewer, then draw the
// axes, floor, and three reference objects.
void
Game::Render()
{
    // set projection matrix; the window is 6 units tall, widened by the
    // current aspect ratio, and offset by the user-controlled center
    float aspect = (float)IvRenderer::mRenderer->GetWidth()/(float)IvRenderer::mRenderer->GetHeight();
    if ( mUseOrthographic )
    {
        Orthographic( -aspect*3.0f+mXCenter, aspect*3.0f+mXCenter,
                      -3.0f+mYCenter, 3.0f+mYCenter,
                      IvRenderer::mRenderer->GetNearPlane(),
                      IvRenderer::mRenderer->GetFarPlane() );
    }
    else
    {
        Frustum( -aspect*3.0f+mXCenter, aspect*3.0f+mXCenter,
                 -3.0f+mYCenter, 3.0f+mYCenter,
                 IvRenderer::mRenderer->GetNearPlane(),
                 IvRenderer::mRenderer->GetFarPlane() );
    }

    // set viewer
    ::IvSetDefaultViewer( 6.0f, 0.0f, 5.f );

    // draw axes
    ::IvDrawAxes();

    // draw floor
    ::IvDrawFloor();

    // draw some objects at fixed world positions
    IvMatrix44 worldTransform;
    worldTransform.Translation( IvVector3( 0.1f, 0.0f, 1.0f ) );
    ::IvSetWorldMatrix( worldTransform );
    ::IvDrawCube();

    worldTransform.Translation( IvVector3( 0.1f, 3.0f, 0.0f ) );
    ::IvSetWorldMatrix( worldTransform );
    ::IvDrawTeapot();

    worldTransform.Translation( IvVector3( 0.1f, -2.5f, 1.0f ) );
    ::IvSetWorldMatrix( worldTransform );
    ::IvDrawSphere( 1.0f );
}
|
<gh_stars>1-10
# Generated by Django 2.2.17 on 2021-02-02 17:54
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: drops the ``single_offender_id`` field
    from ``PrisonerProfile`` in the ``security`` app.

    NOTE(review): the ``django.contrib.postgres.fields.jsonb`` import at the
    top of this file is unused by this operation — likely generator residue.
    """

    dependencies = [
        ('security', '0032_auto_20201007_1448'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='prisonerprofile',
            name='single_offender_id',
        ),
    ]
|
#!/bin/bash
# Downloads Morgan Stanley position files for a given trade date over sftp.
#
# usage: $0 YYYYMMDD [destDir]
#   YYYYMMDD  required date, e.g. 20210202
#   destDir   optional target directory; defaults to the dated positions dir
YYYYMMDD=$1
destDir=$2

# Require the date argument and sanity-check that it is exactly 8 digits.
if [ -z "$YYYYMMDD" ]; then
    echo "usage: $0 YYYYMMDD required"
    exit 1
fi
case "$YYYYMMDD" in
    [0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]) ;;
    *) echo "error: YYYYMMDD must be 8 digits, got '$YYYYMMDD'"; exit 1 ;;
esac

# Slice the date with bash substring expansion instead of spawning
# four echo|cut pipelines.
YYYY=${YYYYMMDD:0:4}
YY=${YYYYMMDD:2:2}
MM=${YYYYMMDD:4:2}
DD=${YYYYMMDD:6:2}
DDMMYY="$DD$MM$YY"

if [ -z "$destDir" ]; then
    destDir="/apps/logs/ase/data/morgan/positions/$YYYY"
fi

# Remote files are matched both by YYYYMMDD and by DDMMYY naming.
# SECURITY(review): the credentials below are hard-coded in plain text;
# move them to ~/.netrc or a credential store and rotate the exposed password.
lftp -e "lcd $destDir; set ftp:ssl-protect-fxp true;set ftp:ssl-protect-data true;mget *$YYYYMMDD* ; mget *$DDMMYY* ; quit" -uvaygcoco,OAcXL4iUG48dBRrh sftp://sftp.morganstanley.com
|
<filename>stopify/src/compile.ts
import 'source-map-support/register';
import * as fs from 'fs';
import { stopify } from './index';
import * as parseCli from '@stopify/continuations/dist/src/compiler/parse-compiler-opts';
/**
 * CLI entry: reads the source program named on the command line, runs it
 * through stopify, and emits the result — to the requested output file
 * when one was given, otherwise to stdout.
 */
function main() {
  const sourceText = fs.readFileSync(parseCli.srcPath, 'utf8');
  const compiled = stopify(sourceText, parseCli.compilerOpts);
  if (!parseCli.dstPath) {
    console.log(compiled);
    return;
  }
  fs.writeFileSync(parseCli.dstPath, compiled);
}
try {
  main();
} catch (reason) {
  // Report failures on stderr — stdout may carry the compiled program when
  // no dstPath is given, so error text must not be mixed into it — and
  // signal failure to the caller via a non-zero exit code.
  console.error(`Stopify error`);
  console.error(reason);
  process.exit(1);
}
|
import { Component, OnInit, ViewChild } from '@angular/core';
import { MatPaginator } from '@angular/material/paginator';
import { MatTableDataSource } from '@angular/material/table';
import { NgxSpinnerService } from 'ngx-spinner';
import { Router } from '@angular/router';
import { MatDialog } from '@angular/material';
import { TripService, NotificationService, Notification, DeviceService } from '../../services';
import { AppConstant, DeleteAlertDataModel } from '../../app.constants';
import { DeleteDialogComponent } from '..';
import { Subscription, Observable } from 'rxjs';
import { Message } from '@stomp/stompjs';
import { StompRService } from '@stomp/ng2-stompjs';
import * as moment from 'moment-timezone';
import { MessageDialogComponent } from '../common/message-dialog/message-dialog.component';
import { FormGroup, FormControl, Validators } from '@angular/forms';
@Component({
  selector: 'app-trips',
  templateUrl: './trips.component.html',
  styleUrls: ['./trips.component.css'],
  providers: [StompRService]
})
/**
 * Trip listing screen: paged/sorted/filtered trip table backed by
 * TripService, with live telemetry per trip streamed over STOMP so that
 * covered miles and device connectivity update in place.
 */
export class TripsComponent implements OnInit {
  // Columns rendered by the trips table.
  displayedColumns: string[] = ['tripId', 'fleetName', 'driverName', 'startDateTime', 'endDateTime', 'materialType', 'weight', 'coveredMiles', 'status', 'action'];
  // Server-side query state: paging, sorting and filter criteria.
  searchParameters = {
    startDate: '',
    endDate: '',
    status:'',
    driverGuid: '',
    fleetGuid:'',
    pageNumber: 0,
    pageSize: 10,
    searchText: '',
    sortBy: 'tripId asc'
  };
  tripForm: FormGroup;                 // date-range filter form
  checkSubmitStatus: boolean;          // set once the filter form has been submitted
  isFilterShow: boolean = false;       // toggles the filter panel
  topics: any[] = [];                  // STOMP topics subscribed so far (one per trip device)
  reportingData: any = {};             // latest telemetry payload received
  totalRecords = 0;
  tripList = [];
  isSearch = false;
  pageSizeOptions: number[] = [5, 10, 25, 100];
  deleteAlertDataModel: DeleteAlertDataModel;
  uniqueId: any;
  subscription: Subscription;
  messages: Observable<Message>;
  // NOTE(review): localStorage.getItem may return null; JSON.parse(null)
  // yields null and ngOnInit's property access would then throw — confirm a
  // route guard guarantees "currentUser" exists before this component loads.
  currentUser = JSON.parse(localStorage.getItem("currentUser"));
  deviceIsConnected = false;
  isConnected = false;
  cpId = '';
  // NOTE(review): 'subscribed' is checked in stompSubscribe() but never
  // assigned anywhere in this file, so that guard is currently a no-op.
  subscribed;
  // STOMP broker connection settings, populated by getStompConfig().
  stompConfiguration = {
    url: '',
    headers: {
      login: '',
      passcode: '',
      host: ''
    },
    heartbeat_in: 0,
    heartbeat_out: 2000,
    reconnect_delay: 5000,
    debug: true
  }
  lat: number;
  lng: number;
  totalMiles: number = 0;
  // NOTE(review): "remailingMiles" is presumably a typo for "remainingMiles";
  // left unchanged here since the template may bind to this name.
  remailingMiles: number = 0;
  coveredMiles: number = 0;
  progressMilesPerc: number = 0;
  isAdmin = false;
  constructor(
    private spinner: NgxSpinnerService,
    private router: Router,
    public dialog: MatDialog,
    public _service: TripService,
    private _notificationService: NotificationService,
    public _appConstant: AppConstant,
    public deviceService: DeviceService,
    private stompService: StompRService,) { }
  ngOnInit() {
    this.getTripList();
    this.createFilterForm();
    // Only admins see admin-only actions in the template.
    if (this.currentUser.userDetail.roleName == "Admin") {
      this.isAdmin = true;
    }
    else {
      this.isAdmin = false;
    }
  }
  /**
   * create Filter Form (start/end date, both required)
   */
  createFilterForm() {
    this.tripForm = new FormGroup({
      startDate: new FormControl('', Validators.required),
      endDate: new FormControl('', Validators.required)
    });
  }
  /**
   * Change page event: copy the paginator state into the query and reload.
   * @param pagechangeresponse
   */
  ChangePaginationAsPageChange(pagechangeresponse) {
    this.searchParameters.pageSize = pagechangeresponse.pageSize;
    this.searchParameters.pageNumber = pagechangeresponse.pageIndex;
    this.isSearch = true;
    this.getTripList();
  }
  /**
   * Search for text; resets to the first page.
   * @param filterText
   */
  searchTextCallback(filterText) {
    this.searchParameters.searchText = filterText;
    this.searchParameters.pageNumber = 0;
    this.isSearch = true;
    this.getTripList();
  }
  /**
   * Set sort order ("column direction") and reload.
   * @param sort
   */
  setOrder(sort: any) {
    if (!sort.active || sort.direction === '') {
      return;
    }
    this.searchParameters.sortBy = sort.active + ' ' + sort.direction;
    this.getTripList();
  }
  /**
   * Get trip list from the server; on success also (re)subscribes to the
   * live-telemetry topics for the listed trips.
   * */
  getTripList() {
    this.spinner.show();
    this._service.getTrip(this.searchParameters).subscribe(response => {
      this.spinner.hide();
      if (response.isSuccess === true) {
        this.totalRecords = response.data.count;
        this.tripList = response.data.items;
        if (this.tripList) {
          this.getStompConfig();
        }
      }
      else {
        this._notificationService.add(new Notification('error', response.message));
        this.tripList = [];
      }
    }, error => {
      this.spinner.hide();
      this._notificationService.add(new Notification('error', error));
    });
  }
  /**
   * Delete trip confirmation popup
   * @param tripModel
   */
  deleteModel(tripModel: any) {
    this.deleteAlertDataModel = {
      title: "Delete Trip",
      message: this._appConstant.msgConfirm.replace('modulename', "Trip"),
      okButtonName: "Confirm",
      cancelButtonName: "Cancel",
    };
    const dialogRef = this.dialog.open(DeleteDialogComponent, {
      width: '400px',
      height: 'auto',
      data: this.deleteAlertDataModel,
      disableClose: false
    });
    dialogRef.afterClosed().subscribe(result => {
      if (result) {
        this.deleteTrip(tripModel.guid);
      }
    });
  }
  /**
   * Delete trip by tripGuid, then refresh the list.
   * @param tripGuid
   */
  deleteTrip(tripGuid) {
    this.spinner.show();
    this._service.deleteTrip(tripGuid).subscribe(response => {
      this.spinner.hide();
      if (response.isSuccess === true) {
        this._notificationService.add(new Notification('success', this._appConstant.msgDeleted.replace("modulename", "Trip")));
        this.getTripList();
      }
      else {
        this._notificationService.add(new Notification('error', response.message));
      }
    }, error => {
      this.spinner.hide();
      this._notificationService.add(new Notification('error', error));
    });
  }
  /**
   * Get STOMP broker config for the 'LiveData' channel, then connect.
   * */
  getStompConfig() {
    this.deviceService.getStompConfig('LiveData').subscribe(response => {
      if (response.isSuccess) {
        this.stompConfiguration.url = response.data.url;
        this.stompConfiguration.headers.login = response.data.user;
        this.stompConfiguration.headers.passcode = response.data.password;
        this.stompConfiguration.headers.host = response.data.vhost;
        this.cpId = response.data.cpId;
        this.initStomp();
      }
    });
  }
  // Connect to the broker and subscribe to per-trip topics.
  // NOTE(review): stompSubscribe() is called immediately after
  // initAndConnect() — this relies on StompRService queuing subscriptions
  // until the connection is up; confirm.
  initStomp() {
    let config = this.stompConfiguration;
    this.stompService.config = config;
    this.stompService.initAndConnect();
    this.stompSubscribe();
  }
  /**
   * Stomp subscribe: one topic per trip device ('<cpId>-<uniqueId>'),
   * skipping devices already subscribed.
   * */
  public stompSubscribe() {
    if (this.subscribed) {
      return;
    }
    this.tripList.forEach(element => {
      if (!this.topics.find(x => x.uniqueId === element.uniqueId)) {
        this.messages = this.stompService.subscribe('/topic/' + this.cpId + '-' + element.uniqueId);
        this.subscription = this.messages.subscribe(this.on_next);
        this.topics.push({ uniqueId: element.uniqueId, destinationLatitude: element.destinationLatitude, destinationLongitude: element.destinationLongitude });
      }
    });
  }
  // Telemetry message handler: updates position, remaining/covered miles
  // for the matching ongoing trip, and device connectivity flags.
  public on_next = (message: Message) => {
    // Destination is '<cpId>-<uniqueId>'; index 1 is the device unique id.
    let uniqeId = (message.headers.destination).split("-");
    let obj: any = JSON.parse(message.body);
    let reporting_data = obj.data.data.reporting;
    this.isConnected = true;
    this.reportingData = reporting_data
    if (this.reportingData) {
      this.lat = this.reportingData.gps_lat;
      this.lng = this.reportingData.gps_lng;
      this.topics.forEach(element => {
        if (element.uniqueId == uniqeId[1]) {
          this.remailingMiles = this._service.calculateTotalMiles(this.lat, this.lng, element.destinationLatitude, element.destinationLongitude);
        }
      });
      for (const key in this.tripList) {
        if (this.tripList[key].uniqueId == uniqeId[1] && this.tripList[key].status == 'On Going') {
          this.tripList[key].coveredMiles = this.tripList[key].totalMiles - this.remailingMiles;
        }
      }
    }
    // NOTE(review): 'dates' is assigned but never used.
    let dates = obj.data.data.time;
    let now = moment();
    // NOTE(review): the two '!=' checks below are redundant once
    // msgType == 'telemetry' has matched.
    if (obj.data.data.status == undefined && obj.data.msgType == 'telemetry' && obj.data.msgType != 'device' && obj.data.msgType != 'simulator') {
      this.deviceIsConnected = true;
    } else if (obj.data.msgType === 'simulator' || obj.data.msgType === 'device') {
      if (obj.data.data.status === 'off') {
        this.deviceIsConnected = false;
      } else {
        this.deviceIsConnected = true;
      }
    }
    obj.data.data.time = now;
  }
  /**
   * it used to get the Date in local form
   * @param lDate UTC timestamp; returned formatted in the browser's zone
   */
  getLocalDate(lDate) {
    var stillUtc = moment.utc(lDate).toDate();
    var local = moment(stillUtc).local().format('MMM DD, YYYY hh:mm:ss A');
    return local;
  }
  /**
   * End trip confirmation popup
   * @param tripModel
   */
  endModel(tripModel: any) {
    this.deleteAlertDataModel = {
      title: "End Trip",
      message: "Are you sure you want to end this trip?",
      okButtonName: "Confirm",
      cancelButtonName: "Cancel",
    };
    const dialogRef = this.dialog.open(DeleteDialogComponent, {
      width: '400px',
      height: 'auto',
      data: this.deleteAlertDataModel,
      disableClose: false
    });
    dialogRef.afterClosed().subscribe(result => {
      if (result) {
        this.endTrip(tripModel);
      }
    });
  }
  /**
   * End trip: reports covered miles based on the latest telemetry-derived
   * remaining miles, stamped with the local time and UTC offset.
   * @param tripModel
   */
  endTrip(tripModel) {
    this.progressMilesPerc = tripModel.totalMiles - this.remailingMiles;
    let currentdatetime = moment().format('YYYY-MM-DD[T]HH:mm:ss');
    let timezone = moment().utcOffset();
    let data = {
      "tripGuid": tripModel.guid,
      "currentDate": currentdatetime,
      "timeZone": timezone,
      "coveredMiles": Math.floor(this.progressMilesPerc)
    }
    this._service.endTrip(data).subscribe(response => {
      if (response.isSuccess === true) {
        this.getTripList();
      }
    })
  }
  /**
   * Show hide filter panel
   * */
  showHideFilter() {
    this.isFilterShow = !this.isFilterShow;
  }
  /**
   * filter trip list by the validated date range
   */
  filterTripList() {
    this.checkSubmitStatus = true;
    if (this.tripForm.valid) {
      this.searchParameters.startDate = this.tripForm.get("startDate").value;
      this.searchParameters.endDate = this.tripForm.get("endDate").value;
      this.getTripList();
    }
  }
  // Clear the filter form and reload the unfiltered list.
  resetForm() {
    this.tripForm.reset();
    this.checkSubmitStatus = false;
    this.searchParameters.startDate = "";
    this.searchParameters.endDate = "";
    //this.showHideFilter();
    this.getTripList();
  }
}
|
#pragma once
#include <cmath>
#include <vector>
// Math constants and fuzzy floating-point comparison helpers.
namespace pvmath
{
    const double pi = std::acos(-1);
    const double epsilonDouble = 1e-9;
    const double epsilonFloat = 1e-5f; // NOTE(review): stored as double despite the 'f' suffix
    // Generic exact comparison; specialized below for float/double to
    // compare within an epsilon instead (definitions live elsewhere).
    template <typename _Type>
    bool isEqual( const _Type & value1, const _Type & value2 )
    {
        return ( value1 == value2 );
    }
    // Three-argument form: the extra value acts as an epsilon multiplier in
    // the float/double specializations; it is ignored for other types.
    template <typename _Type>
    bool isEqual( const _Type & value1, const _Type & value2, const _Type )
    {
        return ( value1 == value2 );
    }
    template <>
    bool isEqual<double>( const double & value1, const double & value2 );
    template <>
    bool isEqual<float>( const float & value1, const float & value2 );
    template <>
    bool isEqual<double>( const double & value1, const double & value2, const double epsilonMultiplier );
    template <>
    bool isEqual<float>( const float & value1, const float & value2, const float epsilonMultiplier );
    double toRadians(double angleDegree);  // degrees -> radians
    double toDegrees(double angleRadians); // radians -> degrees
    // Solves squareMatrix * roots = freeTerms (implementation elsewhere).
    void getMatrixRoots( const std::vector<double> & squareMatrix, const std::vector<double> freeTerms, std::vector<double> & roots );
}
// 2D point/vector with epsilon-aware equality (via pvmath::isEqual) and
// the usual component-wise arithmetic operators.
template <typename _Type>
struct PointBase2D
{
    PointBase2D( _Type _x = 0, _Type _y = 0 )
        : x( _x )
        , y( _y )
    { }
    // Component-wise fuzzy equality for floating-point types.
    bool operator == ( const PointBase2D & point ) const
    {
        return pvmath::isEqual( x, point.x ) && pvmath::isEqual( y, point.y );
    }
    bool operator != ( const PointBase2D & point ) const
    {
        return !( *this == point );
    }
    PointBase2D & operator += ( const PointBase2D & point )
    {
        x += point.x;
        y += point.y;
        return *this;
    }
    PointBase2D & operator -= ( const PointBase2D & point )
    {
        x -= point.x;
        y -= point.y;
        return *this;
    }
    PointBase2D operator + ( const PointBase2D & point ) const
    {
        return PointBase2D( x + point.x, y + point.y );
    }
    PointBase2D operator - ( const PointBase2D & point ) const
    {
        return PointBase2D( x - point.x, y - point.y );
    }
    // Scalar multiplication (point * scalar); see the free operator* below
    // for the scalar-first form.
    PointBase2D operator * ( const _Type & value ) const
    {
        return PointBase2D( value * x, value * y );
    }
    _Type x;
    _Type y;
};
// Scalar-first multiplication (scalar * point); the scalar is cast to the
// point's component type before multiplying.
template <typename _Type, typename T>
PointBase2D<_Type> operator * ( const T & value, const PointBase2D<_Type> & point )
{
    return PointBase2D<_Type>( static_cast<_Type>( value ) * point.x, static_cast<_Type>( value ) * point.y );
}
// 3D point/vector: extends PointBase2D with a z coordinate.
template <typename _Type>
struct PointBase3D : public PointBase2D<_Type>
{
    PointBase3D( _Type _x = 0, _Type _y = 0, _Type _z = 0 )
        : PointBase2D<_Type>( _x, _y )
        , z( _z )
    { }
    // Component-wise fuzzy equality including z.
    bool operator == ( const PointBase3D & point ) const
    {
        return PointBase2D<_Type>::operator==( point ) && pvmath::isEqual( z, point.z );
    }
    // Fix: without this overload, 'a != b' resolved to the inherited
    // PointBase2D::operator!=, which compares through the 2D operator==
    // and silently ignored the z coordinate.
    bool operator != ( const PointBase3D & point ) const
    {
        return !( *this == point );
    }
    PointBase3D & operator += ( const PointBase3D & point )
    {
        PointBase2D<_Type>::operator+=( point );
        z += point.z;
        return *this;
    }
    PointBase3D & operator -= ( const PointBase3D & point )
    {
        PointBase2D<_Type>::operator-=( point );
        z -= point.z;
        return *this;
    }
    PointBase3D operator + ( const PointBase3D & point ) const
    {
        return PointBase3D( PointBase2D<_Type>::x + point.x, PointBase2D<_Type>::y + point.y, z + point.z );
    }
    PointBase3D operator - ( const PointBase3D & point ) const
    {
        return PointBase3D( PointBase2D<_Type>::x - point.x, PointBase2D<_Type>::y - point.y, z - point.z );
    }
    _Type z; // third coordinate
};
// Infinite 2D line stored as a point on the line plus a unit direction.
template <typename _Type>
class LineBase2D
{
public:
    // Construct from two points; if they coincide, an arbitrary direction
    // (1, 0) is used rather than raising an error.
    LineBase2D( const PointBase2D<_Type> & point1 = PointBase2D<_Type>(), const PointBase2D<_Type> & point2 = PointBase2D<_Type>() )
        : _position( point1 )
    {
        if ( point1 == point2 ) {
            _direction = PointBase2D<_Type>( 1, 0 ); // we could raise an exception here instead
        }
        else {
            const _Type xDiff = point2.x - point1.x;
            const _Type yDiff = point2.y - point1.y;
            const _Type length = std::sqrt( xDiff * xDiff + yDiff * yDiff ); // here we might need more specific code for non-double cases
            _direction = PointBase2D<_Type>( xDiff / length, yDiff / length );
        }
    }
    // Angle is in radians
    LineBase2D( const PointBase2D<_Type> & position_, _Type angle_ )
        : _position( position_ )
        , _direction( std::cos(angle_), std::sin(angle_) )
    {
    }
    // Lines are equal when they are parallel and share a point.
    // Fix: the original called a nonexistent 'parallel( line )'; the member
    // is named isParallel, so instantiating operator== failed to compile.
    bool operator == ( const LineBase2D & line ) const
    {
        return isParallel( line ) && pvmath::isEqual<_Type>( distance(line._position), 0 );
    }
    // This is translation (shift) function
    LineBase2D operator + ( const PointBase2D<_Type> & offset ) const
    {
        return LineBase2D( _position + offset, angle() );
    }
    LineBase2D & operator += ( const PointBase2D<_Type> & offset )
    {
        _position += offset;
        return *this;
    }
    // Direction angle in radians, in (-pi, pi].
    _Type angle() const
    {
        return std::atan2( _direction.y, _direction.x );
    }
    PointBase2D<_Type> position() const
    {
        return _position;
    }
    // Computes the intersection point; returns false when the lines are
    // (nearly) parallel and leaves 'point' untouched in that case.
    bool intersection( const LineBase2D & line, PointBase2D<_Type> & point ) const
    {
        // based on Graphics Gems III, Faster Line Segment Intersection, p. 199-202
        // http://www.realtimerendering.com/resources/GraphicsGems/gems.html#gemsiii
        const _Type denominator = _direction.y * line._direction.x - _direction.x * line._direction.y;
        if ( pvmath::isEqual<_Type>( denominator, 0, 10 ) ) // 10x epsilon tolerance
            return false; // they are parallel
        const PointBase2D<_Type> offset = _position - line._position;
        const _Type na = (line._direction.y * offset.x - line._direction.x * offset.y) / denominator;
        point = _position + PointBase2D<_Type>( _direction.x * na, _direction.y * na );
        return true;
    }
    // True when the direction cross-product is (nearly) zero.
    bool isParallel( const LineBase2D & line ) const
    {
        const _Type denominator = _direction.y * line._direction.x - _direction.x * line._direction.y;
        return pvmath::isEqual<_Type>( denominator, 0, 10 );
    }
    bool isIntersect( const LineBase2D & line ) const
    {
        return !isParallel( line );
    }
    // Perpendicular distance from 'point' to the line (always >= 0).
    _Type distance( const PointBase2D<_Type> & point ) const
    {
        // Line equation in the Cartesian coordinate system is
        // y = a * x + b or A * x + B * y + C = 0
        // A distance from a point to a line can be calculated as:
        // |A * x0 + B * y0 + C| / sqrt(A * A + B * B)
        // (_direction is unit length, so no normalization is needed here)
        const _Type distanceToLine = _direction.y * (point.x - _position.x) + _direction.x * (_position.y - point.y);
        return (distanceToLine < 0 ? -distanceToLine : distanceToLine);
    }
    // Orthogonal projection of 'point' onto the line.
    PointBase2D<_Type> projection( const PointBase2D<_Type> & point ) const
    {
        const _Type dotProduct = _direction.x * ( point.x - _position.x ) + _direction.y * ( point.y - _position.y );
        const PointBase2D<_Type> offset( _direction.x * dotProduct, _direction.y * dotProduct );
        return _position + offset;
    }
    // Mirror image of 'point' across the line.
    PointBase2D<_Type> opposite( const PointBase2D<_Type> & point ) const
    {
        return 2 * projection( point ) - point;
    }
    // Least-squares line fit through 'points'; chooses the dominant axis to
    // keep the slope computation numerically stable.
    template <template <typename, typename...> class _container>
    static LineBase2D bestFittingLine( const _container< PointBase2D<_Type> > & points )
    {
        if ( points.size() < 2 )
            return LineBase2D();
        _Type sumX = 0;
        _Type sumY = 0;
        _Type sumXX = 0;
        _Type sumYY = 0;
        _Type sumXY = 0;
        for ( typename _container< PointBase2D<_Type> >::const_iterator point = points.begin(); point != points.end(); ++point ) {
            const _Type x = point->x;
            const _Type y = point->y;
            sumX += x;
            sumXX += x * x;
            sumY += y;
            sumYY += y * y;
            sumXY += x * y;
        }
        const _Type size = static_cast<_Type>( points.size() );
        sumX /= size;
        sumY /= size;
        sumXX /= size;
        sumYY /= size;
        sumXY /= size;
        const PointBase2D<_Type> position( sumX, sumY );
        const _Type sigmaX = sumXX - sumX * sumX;
        const _Type sigmaY = sumYY - sumY * sumY;
        PointBase2D<_Type> direction;
        if ( sigmaX > sigmaY ) {
            direction.y = sumXY - sumX * sumY; // covariance
            direction.x = sumXX - sumX * sumX; // variance along x
        }
        else {
            direction.x = sumXY - sumX * sumY;
            direction.y = sumYY - sumY * sumY;
        }
        return LineBase2D( position, std::atan2( direction.y, direction.x ) );
    }
private:
    PointBase2D<_Type> _position;  // a point on the line
    PointBase2D<_Type> _direction; // unit direction vector
};
// Convenience aliases for the common double-precision instantiations.
typedef PointBase2D<double> Point2d;
typedef PointBase3D<double> Point3d;
typedef LineBase2D<double> Line2d;
|
"""Routines for the pywwt notebook server extension.
In order to make files available to the WWT engine, we need to serve them over
HTTP. Here we extend the Notebook server to be able to serve both static
pywwt HTML/JS assets and custom local files specified by the user.
The tricky part is that the Notebook server and the kernel are in separate
processes, and there is no super convenient way for them to communicate. Here,
we assume that they share a home directory, and enable communications by
writing JSON to a file in $HOME.
The recently added code in :func:`_compute_notebook_server_base_url`
demonstrates how the kernel can figure out a URL by which to communicate with
the server. So, in principle, we could replace this local-file communication
with REST API calls. I'm not aware of any better way for the kernel and server
to talk to each other. (Note that the notebook "comms" API is for the kernel
to talk to the JavaScript frontend, not the notebook server.)
"""
import os
import json
import mimetypes
from hashlib import md5
from tornado import web
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
__all__ = [
'load_jupyter_server_extension',
'serve_file',
]
STATIC_DIR = os.path.join(os.path.dirname(__file__), 'web_static')
CONFIG = os.path.expanduser('~/.pywwt')
class WWTFileHandler(IPythonHandler):
    """Serves pywwt's static assets and user-registered local files.

    Requests first resolve against the bundled static directory; anything
    else is looked up in the ``~/.pywwt`` JSON mapping maintained by
    :func:`serve_file`.
    """

    def get(self, filename):
        # First we check if this is a standard file in the static directory
        static_path = os.path.join(STATIC_DIR, filename)
        if os.path.exists(static_path):
            if os.path.isdir(static_path):
                # Directory requests serve its index.html.
                path = os.path.join(static_path, 'index.html')
                filename = 'index.html'  # for mime-type guess below
            else:
                path = static_path
        else:
            # If not, we open the config file which should contain a JSON
            # dictionary with filenames and paths.
            # I believe that this transformation is not needed, but it's been in
            # place for a while ...
            filename = os.path.basename(filename)
            if not os.path.exists(CONFIG):
                raise web.HTTPError(404)
            with open(CONFIG) as f:
                config = json.load(f)
            if filename in config['paths']:
                path = config['paths'][filename]
            else:
                raise web.HTTPError(404)
        # Do our best to set an appropriate Content-Type.
        content_type = mimetypes.guess_type(filename)[0]
        if content_type is None:
            content_type = 'application/binary'
        self.set_header('Content-Type', content_type)
        # Add wide-open CORS headers to allow external WWT apps to access data.
        self.set_header('Access-Control-Allow-Origin', '*')
        self.set_header('Access-Control-Allow-Methods', 'GET,HEAD')
        self.set_header('Access-Control-Allow-Headers', 'Content-Disposition,Content-Encoding,Content-Length,Content-Type')
        # NOTE(review): the whole file is read into memory; fine for the
        # small assets this serves, but large files would be costly.
        with open(path, 'rb') as f:
            content = f.read()
        self.finish(content)
# January 2021: Derived from notebook.notebookapp.list_running_servers, with a
# fix for JupyterLab 3.x (or something recent??), where the JSON files start
# with `jpserver` not `nbserver`.
# January 2021: Derived from notebook.notebookapp.list_running_servers, with a
# fix for JupyterLab 3.x (or something recent??), where the JSON files start
# with `jpserver` not `nbserver`.
def _list_running_servers_jl3():
    """Yield info dicts for live Jupyter servers found in the runtime dir.

    Stale records (whose recorded PID is no longer running) are removed
    from disk as a side effect.
    """
    import io
    import json
    from notebook.utils import check_pid
    from jupyter_core.paths import jupyter_runtime_dir
    import os.path
    import re

    runtime_dir = jupyter_runtime_dir()
    if not os.path.isdir(runtime_dir):
        return

    for file_name in os.listdir(runtime_dir):
        # here is the fix: accept both the classic and JupyterLab-3 prefixes
        if re.match('nbserver-(.+).json', file_name) or re.match('jpserver-(.+).json', file_name):
            with io.open(os.path.join(runtime_dir, file_name), encoding='utf-8') as f:
                info = json.load(f)
            # Only yield records whose server process is still alive;
            # clean up dead ones (best-effort — ignore unlink races).
            if ('pid' in info) and check_pid(info['pid']):
                yield info
            else:
                try:
                    os.unlink(os.path.join(runtime_dir, file_name))
                except OSError:
                    pass
def _compute_notebook_server_base_url():
    """Figure out the base_url of the current Jupyter notebook server.

    Copied from
    https://github.com/jupyter/notebook/issues/3156#issuecomment-401119433
    with miniscule changes. This is gross, but appears to be the best
    available option right now.

    Raises an :class:`Exception` when no running server can be matched to
    the current kernel.
    """
    import ipykernel
    import json
    import re
    import requests

    # First, find our ID: the kernel id is embedded in the name of the
    # kernel's connection file.
    kernel_id = re.search(
        'kernel-(.*).json',
        ipykernel.connect.get_connection_file()
    ).group(1)

    # Now, check all of the running servers known on this machine. We have to
    # talk to each server to figure out if it's ours or somebody else's.
    running_server_info = list(_list_running_servers_jl3())

    for s in running_server_info:
        # We need an API token that in most cases is provided in the runtime
        # JSON files. In (recent versions of?) the JupyterHub single-user
        # server, it seems that the token is instead obtained from an
        # environment variable. Cf.
        # https://github.com/jupyterhub/jupyterhub/blob/master/jupyterhub/singleuser/mixins.py
        token = s.get('token', '')
        if not token:
            token = os.environ.get('JUPYTERHUB_API_TOKEN', '')
            if not token:
                token = os.environ.get('JPY_API_TOKEN', '')  # deprecated as of 0.7.2

        # Request/response paranoia due to "fun" figuring out how to fix the
        # JupyterHub single-user problem - the API call would fail due to auth
        # issues and break pywwt, even though there was only one running server
        # so we actually didn't even need the API call. In case something breaks
        # in the future, add a fallback mode.
        try:
            response = requests.get(
                requests.compat.urljoin(s['url'], 'api/sessions'),
                params={'token': token}
            )
            for n in json.loads(response.text):
                if n['kernel']['id'] == kernel_id:
                    return s['base_url']  # Found it!
        except Exception:
            pass

    # If we got here, we might have auth issues with the api/sessions request.
    # If there's only one server, just give it a try.
    if len(running_server_info) == 1:
        return running_server_info[0]['base_url']

    raise Exception('cannot locate our notebook server; is this code running in a Jupyter kernel?')
# Cached result of the (expensive, network-touching) base-URL discovery.
_server_base_url = None


def get_notebook_server_base_url():
    """Get the "base_url" of the current Jupyter notebook server.

    The value is computed at most once per process and memoized.
    """
    global _server_base_url
    if _server_base_url is not None:
        return _server_base_url
    _server_base_url = _compute_notebook_server_base_url()
    return _server_base_url
def serve_file(path, extension=''):
    """Given a path to a file on local disk, instruct the notebook server
    to serve it up over HTTP. Returns a relative URL that can be used to
    access the file.

    Parameters
    ----------
    path : str
        Path of the local file to expose.
    extension : str, optional
        Extension appended to the hashed name so the engine can guess a
        sensible MIME type (e.g. ``'.fits'``).

    Raises
    ------
    ValueError
        If ``path`` does not point to an existing file.
    """
    if not os.path.exists(path):
        raise ValueError("Path {0} does not exist".format(path))

    # The served name is a hash of the *path* (not the contents), so the
    # same file always maps to the same URL within a session.
    hash = md5(path.encode('utf-8')).hexdigest() + extension

    # The config file may not exist yet if the server extension has not run
    # (or was never enabled); start from an empty mapping instead of
    # crashing with FileNotFoundError.
    if os.path.exists(CONFIG):
        with open(CONFIG) as f:
            config = json.load(f)
    else:
        config = {'paths': {}}

    # Only rewrite the file when the mapping actually changed.
    if hash not in config['paths']:
        config['paths'][hash] = os.path.abspath(path)
        with open(CONFIG, 'w') as f:
            json.dump(config, f)

    return url_path_join(get_notebook_server_base_url(), '/wwt/' + hash)
def load_jupyter_server_extension(nb_server_app):
    """Entry point called by the notebook server to install the extension.

    Creates the ``~/.pywwt`` path-mapping file if needed, registers FITS
    MIME types, and routes ``<base_url>/wwt/*`` to :class:`WWTFileHandler`.
    """
    web_app = nb_server_app.web_app
    host_pattern = '.*$'

    # Seed the shared kernel<->server config file with an empty mapping.
    if not os.path.exists(CONFIG):
        config = {'paths': {}}
        with open(CONFIG, 'w') as f:
            json.dump(config, f)

    # Make sure FITS files are served with a useful Content-Type.
    mimetypes.add_type('image/fits', '.fits')
    mimetypes.add_type('image/fits', '.fts')
    mimetypes.add_type('image/fits', '.fit')

    route_pattern = url_path_join(web_app.settings['base_url'], '/wwt/(.*)')
    web_app.add_handlers(host_pattern, [(route_pattern, WWTFileHandler)])
|
// AMD module exporting a factory for a blank race-log record.
define((require, exports, module) => {
  // Each call returns a brand-new object (including a fresh nested
  // 'param'), so callers can mutate their copy freely.
  return function () {
    const record = {
      logId: null,
      finished_at: null,
      horse_id2: null,
      horse_id1: null,
      field_id1: null,
      field_id2: null,
      bout_id1: null,
      bout_id2: null,
      param: {
        horse_id1: null,
        horse_id2: null,
        field_id1: null,
        field_id2: null,
        bout_id1: null,
        bout_id2: null
      }
    };
    return record;
  };
})
// file : xsde/cxx/serializer/non-validating/int.hxx
// author : <NAME> <<EMAIL>>
// copyright : Copyright (c) 2005-2011 Code Synthesis Tools CC
// license : GNU GPL v2 + exceptions; see accompanying LICENSE file
#ifndef XSDE_CXX_SERIALIZER_NON_VALIDATING_INT_HXX
#define XSDE_CXX_SERIALIZER_NON_VALIDATING_INT_HXX
#include <xsde/cxx/serializer/non-validating/xml-schema-sskel.hxx>
namespace xsde
{
namespace cxx
{
namespace serializer
{
namespace non_validating
{
#ifdef XSDE_REUSE_STYLE_MIXIN
        // Virtual inheritance when the mixin reuse style is enabled.
        struct int_simpl: virtual int_sskel
#else
        struct int_simpl: int_sskel
#endif
        {
          // Serializer implementation for xsd:int that performs no
          // validation: pre() caches the value and _serialize_content()
          // writes it out.
          virtual void
          pre (int);

          virtual void
          _serialize_content ();

        protected:
          int value_; // value captured by pre() for serialization
        };
}
}
}
}
#endif // XSDE_CXX_SERIALIZER_NON_VALIDATING_INT_HXX
|
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* <EMAIL>
*/
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include "TLVEncoder.h"
#include "TLVDecoder.h"
#include "EpsMobileIdentity.h"
/* Forward declarations for the per-identity-type codec helpers defined
 * later in this file.  Each takes a pointer to the identity struct and the
 * raw octet buffer, and returns octets consumed/written or a negative
 * TLV_* error code. */
static int decode_guti_eps_mobile_identity (
  GutiEpsMobileIdentity_t * guti,
  uint8_t * buffer);

static int decode_imsi_eps_mobile_identity (
  ImsiEpsMobileIdentity_t * imsi,
  uint8_t * buffer);

static int decode_imei_eps_mobile_identity (
  ImeiEpsMobileIdentity_t * imei,
  uint8_t * buffer);

static int encode_guti_eps_mobile_identity (
  GutiEpsMobileIdentity_t * guti,
  uint8_t * buffer);

static int encode_imsi_eps_mobile_identity (
  ImsiEpsMobileIdentity_t * imsi,
  uint8_t * buffer);

static int encode_imei_eps_mobile_identity (
  ImeiEpsMobileIdentity_t * imei,
  uint8_t * buffer);
/*
 * Decodes an EPS mobile identity information element (TLV-shaped; appears
 * to follow the 3GPP EPS mobile identity IE layout — confirm against
 * TS 24.301 / 24.008).
 *
 * epsmobileidentity: output; the matching variant (imsi/guti/imei) is filled
 * iei:               expected IE identifier, or 0 when the IE carries no
 *                    IEI octet
 * buffer:            raw input octets
 * len:               octets available in buffer
 *
 * Returns total octets consumed, or a negative TLV_* error code.
 */
int
decode_eps_mobile_identity (
  EpsMobileIdentity * epsmobileidentity,
  uint8_t iei,
  uint8_t * buffer,
  uint32_t len)
{
  int decoded_rc = TLV_VALUE_DOESNT_MATCH;
  int decoded = 0;
  uint8_t ielen = 0;

  if (iei > 0) {
    /* Optional IE: first octet must match the expected IEI. */
    CHECK_IEI_DECODER (iei, *buffer);
    decoded++;
  }

  /* Length octet, then verify the claimed length fits the buffer. */
  ielen = *(buffer + decoded);
  decoded++;
  CHECK_LENGTH_DECODER (len - decoded, ielen);

  /* Low 3 bits of the first value octet select the identity type. */
  uint8_t typeofidentity = *(buffer + decoded) & 0x7;

  if (typeofidentity == EPS_MOBILE_IDENTITY_IMSI) {
    decoded_rc = decode_imsi_eps_mobile_identity (&epsmobileidentity->imsi, buffer + decoded);
  } else if (typeofidentity == EPS_MOBILE_IDENTITY_GUTI) {
    decoded_rc = decode_guti_eps_mobile_identity (&epsmobileidentity->guti, buffer + decoded);
  } else if (typeofidentity == EPS_MOBILE_IDENTITY_IMEI) {
    decoded_rc = decode_imei_eps_mobile_identity (&epsmobileidentity->imei, buffer + decoded);
  }

  if (decoded_rc < 0) {
    return decoded_rc;
  }
#if NAS_DEBUG
  dump_eps_mobile_identity_xml (epsmobileidentity, iei);
#endif
  return (decoded + decoded_rc);
}
/*
 * Encodes an EPS mobile identity IE into 'buffer'.  The variant to encode
 * is selected by whichever typeofidentity field matches; callers must have
 * filled in exactly one of imsi/guti/imei.
 *
 * Returns total octets written (optional IEI + length octet + value), or a
 * negative TLV_* error code.
 */
int
encode_eps_mobile_identity (
  EpsMobileIdentity * epsmobileidentity,
  uint8_t iei,
  uint8_t * buffer,
  uint32_t len)
{
  uint8_t *lenPtr;
  int encoded_rc = TLV_VALUE_DOESNT_MATCH;
  uint32_t encoded = 0;

  /*
   * Checking IEI and pointer
   */
  CHECK_PDU_POINTER_AND_LENGTH_ENCODER (buffer, EPS_MOBILE_IDENTITY_MINIMUM_LENGTH, len);
#if NAS_DEBUG
  dump_eps_mobile_identity_xml (epsmobileidentity, iei);
#endif

  if (iei > 0) {
    *buffer = iei;
    encoded++;
  }

  /* Reserve the length octet; its value is back-patched below once the
     value part has been written. */
  lenPtr = (buffer + encoded);
  encoded++;

  if (epsmobileidentity->imsi.typeofidentity == EPS_MOBILE_IDENTITY_IMSI) {
    encoded_rc = encode_imsi_eps_mobile_identity (&epsmobileidentity->imsi, buffer + encoded);
  } else if (epsmobileidentity->guti.typeofidentity == EPS_MOBILE_IDENTITY_GUTI) {
    encoded_rc = encode_guti_eps_mobile_identity (&epsmobileidentity->guti, buffer + encoded);
  } else if (epsmobileidentity->imei.typeofidentity == EPS_MOBILE_IDENTITY_IMEI) {
    encoded_rc = encode_imei_eps_mobile_identity (&epsmobileidentity->imei, buffer + encoded);
  }

  if (encoded_rc < 0) {
    return encoded_rc;
  }

  /* Length = total octets written minus the length octet itself and the
     IEI octet when one was emitted. */
  *lenPtr = encoded + encoded_rc - 1 - ((iei > 0) ? 1 : 0);
  return (encoded + encoded_rc);
}
/*
 * Debug helper: logs the decoded identity as XML-ish text via OAILOG.
 * The variant to print is chosen by the typeofidentity field, like in
 * encode_eps_mobile_identity().
 */
void
dump_eps_mobile_identity_xml (
  EpsMobileIdentity * epsmobileidentity,
  uint8_t iei)
{
  OAILOG_DEBUG (LOG_NAS, "<Eps Mobile Identity>\n");

  if (iei > 0)
    /*
     * Don't display IEI if = 0
     */
    OAILOG_DEBUG (LOG_NAS, " <IEI>0x%X</IEI>\n", iei);

  if (epsmobileidentity->imsi.typeofidentity == EPS_MOBILE_IDENTITY_IMSI) {
    ImsiEpsMobileIdentity_t *imsi = &epsmobileidentity->imsi;

    OAILOG_DEBUG (LOG_NAS, " <odd even>%u</odd even>\n", imsi->oddeven);
    OAILOG_DEBUG (LOG_NAS, " <Type of identity>IMSI</Type of identity>\n");
    OAILOG_DEBUG (LOG_NAS, " <digit1>%u</digit1>\n", imsi->digit1);
    OAILOG_DEBUG (LOG_NAS, " <digit2>%u</digit2>\n", imsi->digit2);
    OAILOG_DEBUG (LOG_NAS, " <digit3>%u</digit3>\n", imsi->digit3);
    OAILOG_DEBUG (LOG_NAS, " <digit4>%u</digit4>\n", imsi->digit4);
    OAILOG_DEBUG (LOG_NAS, " <digit5>%u</digit5>\n", imsi->digit5);
    OAILOG_DEBUG (LOG_NAS, " <digit6>%u</digit6>\n", imsi->digit6);
    OAILOG_DEBUG (LOG_NAS, " <digit7>%u</digit7>\n", imsi->digit7);
    OAILOG_DEBUG (LOG_NAS, " <digit8>%u</digit8>\n", imsi->digit8);
    OAILOG_DEBUG (LOG_NAS, " <digit9>%u</digit9>\n", imsi->digit9);
    OAILOG_DEBUG (LOG_NAS, " <digit10>%u</digit10>\n", imsi->digit10);
    OAILOG_DEBUG (LOG_NAS, " <digit11>%u</digit11>\n", imsi->digit11);
    OAILOG_DEBUG (LOG_NAS, " <digit12>%u</digit12>\n", imsi->digit12);
    OAILOG_DEBUG (LOG_NAS, " <digit13>%u</digit13>\n", imsi->digit13);
    OAILOG_DEBUG (LOG_NAS, " <digit14>%u</digit14>\n", imsi->digit14);
    OAILOG_DEBUG (LOG_NAS, " <digit15>%u</digit15>\n", imsi->digit15);
  } else if (epsmobileidentity->guti.typeofidentity == EPS_MOBILE_IDENTITY_GUTI) {
    GutiEpsMobileIdentity_t *guti = &epsmobileidentity->guti;

    OAILOG_DEBUG (LOG_NAS, " <odd even>%u</odd even>\n", guti->oddeven);
    OAILOG_DEBUG (LOG_NAS, " <Type of identity>GUTI</Type of identity>\n");
    OAILOG_DEBUG (LOG_NAS, " <MCC digit 1>%u</MCC digit 1>\n", guti->mccdigit1);
    OAILOG_DEBUG (LOG_NAS, " <MCC digit 2>%u</MCC digit 2>\n", guti->mccdigit2);
    OAILOG_DEBUG (LOG_NAS, " <MCC digit 3>%u</MCC digit 3>\n", guti->mccdigit3);
    OAILOG_DEBUG (LOG_NAS, " <MNC digit 1>%u</MNC digit 1>\n", guti->mncdigit1);
    OAILOG_DEBUG (LOG_NAS, " <MNC digit 2>%u</MNC digit 2>\n", guti->mncdigit2);
    OAILOG_DEBUG (LOG_NAS, " <MNC digit 3>%u</MNC digit 3>\n", guti->mncdigit3);
    OAILOG_DEBUG (LOG_NAS, " <MME group id>%u</MME group id>\n", guti->mmegroupid);
    OAILOG_DEBUG (LOG_NAS, " <MME code>%u</MME code>\n", guti->mmecode);
    OAILOG_DEBUG (LOG_NAS, " <M TMSI>%u</M TMSI>\n", guti->mtmsi);
  } else if (epsmobileidentity->imei.typeofidentity == EPS_MOBILE_IDENTITY_IMEI) {
    ImeiEpsMobileIdentity_t *imei = &epsmobileidentity->imei;

    OAILOG_DEBUG (LOG_NAS, " <odd even>%u</odd even>\n", imei->oddeven);
    OAILOG_DEBUG (LOG_NAS, " <Type of identity>IMEI</Type of identity>\n");
    OAILOG_DEBUG (LOG_NAS, " <digit1>%u</digit1>\n", imei->digit1);
    OAILOG_DEBUG (LOG_NAS, " <digit2>%u</digit2>\n", imei->digit2);
    OAILOG_DEBUG (LOG_NAS, " <digit3>%u</digit3>\n", imei->digit3);
    OAILOG_DEBUG (LOG_NAS, " <digit4>%u</digit4>\n", imei->digit4);
    OAILOG_DEBUG (LOG_NAS, " <digit5>%u</digit5>\n", imei->digit5);
    OAILOG_DEBUG (LOG_NAS, " <digit6>%u</digit6>\n", imei->digit6);
    OAILOG_DEBUG (LOG_NAS, " <digit7>%u</digit7>\n", imei->digit7);
    OAILOG_DEBUG (LOG_NAS, " <digit8>%u</digit8>\n", imei->digit8);
    OAILOG_DEBUG (LOG_NAS, " <digit9>%u</digit9>\n", imei->digit9);
    OAILOG_DEBUG (LOG_NAS, " <digit10>%u</digit10>\n", imei->digit10);
    OAILOG_DEBUG (LOG_NAS, " <digit11>%u</digit11>\n", imei->digit11);
    OAILOG_DEBUG (LOG_NAS, " <digit12>%u</digit12>\n", imei->digit12);
    OAILOG_DEBUG (LOG_NAS, " <digit13>%u</digit13>\n", imei->digit13);
    OAILOG_DEBUG (LOG_NAS, " <digit14>%u</digit14>\n", imei->digit14);
    OAILOG_DEBUG (LOG_NAS, " <digit15>%u</digit15>\n", imei->digit15);
  } else {
    OAILOG_DEBUG (LOG_NAS, " Wrong type of EPS mobile identity (%u)\n", epsmobileidentity->guti.typeofidentity);
  }

  OAILOG_DEBUG (LOG_NAS, "</Eps Mobile Identity>\n");
}
static int
decode_guti_eps_mobile_identity (
  GutiEpsMobileIdentity_t * guti,
  uint8_t * buffer)
{
  /*
   * Decode a GUTI EPS mobile identity from its wire representation.
   * Returns the number of octets consumed, or TLV_VALUE_DOESNT_MATCH
   * when the spare bits or the identity type do not identify a GUTI.
   *
   * NOTE(review): there is no buffer-length parameter, so the caller
   * must guarantee enough readable octets (11 for a GUTI) — confirm
   * at the call sites.
   */
  int offset = 0;

  guti->spare = (buffer[offset] >> 4) & 0xf;

  /* For the GUTI, bits 5 to 8 of octet 3 must be coded as "1111". */
  if (guti->spare != 0xf) {
    return (TLV_VALUE_DOESNT_MATCH);
  }

  guti->oddeven = (buffer[offset] >> 3) & 0x1;
  guti->typeofidentity = buffer[offset] & 0x7;

  if (guti->typeofidentity != EPS_MOBILE_IDENTITY_GUTI) {
    return (TLV_VALUE_DOESNT_MATCH);
  }

  offset++;
  /* PLMN identity: two BCD digits per octet, low nibble first
   * (MCC2|MCC1, MNC3|MCC3, MNC2|MNC1). */
  guti->mccdigit2 = (buffer[offset] >> 4) & 0xf;
  guti->mccdigit1 = buffer[offset] & 0xf;
  offset++;
  guti->mncdigit3 = (buffer[offset] >> 4) & 0xf;
  guti->mccdigit3 = buffer[offset] & 0xf;
  offset++;
  guti->mncdigit2 = (buffer[offset] >> 4) & 0xf;
  guti->mncdigit1 = buffer[offset] & 0xf;
  offset++;
  /* MME group id (16 bit), MME code (8 bit), M-TMSI (32 bit). */
  IES_DECODE_U16 (buffer, offset, guti->mmegroupid);
  guti->mmecode = buffer[offset];
  offset++;
  IES_DECODE_U32 (buffer, offset, guti->mtmsi);
  return offset;
}
static int
decode_imsi_eps_mobile_identity (
  ImsiEpsMobileIdentity_t * imsi,
  uint8_t * buffer)
{
  /*
   * Decode an IMSI EPS mobile identity (BCD coded, two digits per
   * octet, low nibble first, digit 1 sharing the first octet with the
   * odd/even flag and the identity type).  Returns the number of
   * octets consumed (8), or TLV_VALUE_DOESNT_MATCH on a type or
   * filler-nibble mismatch.
   */
  int offset = 0;

  imsi->typeofidentity = buffer[offset] & 0x7;

  if (imsi->typeofidentity != EPS_MOBILE_IDENTITY_IMSI) {
    return (TLV_VALUE_DOESNT_MATCH);
  }

  imsi->oddeven = (buffer[offset] >> 3) & 0x1;
  imsi->digit1 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imsi->digit2 = buffer[offset] & 0xf;
  imsi->digit3 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imsi->digit4 = buffer[offset] & 0xf;
  imsi->digit5 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imsi->digit6 = buffer[offset] & 0xf;
  imsi->digit7 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imsi->digit8 = buffer[offset] & 0xf;
  imsi->digit9 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imsi->digit10 = buffer[offset] & 0xf;
  imsi->digit11 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imsi->digit12 = buffer[offset] & 0xf;
  imsi->digit13 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imsi->digit14 = buffer[offset] & 0xf;
  imsi->digit15 = (buffer[offset] >> 4) & 0xf;

  /*
   * IMSI is coded using BCD coding.  If the number of identity digits
   * is even then bits 5 to 8 of the last octet shall be filled with an
   * end mark coded as "1111".
   */
  if ((imsi->oddeven == EPS_MOBILE_IDENTITY_EVEN) && (imsi->digit15 != 0x0f)) {
    return (TLV_VALUE_DOESNT_MATCH);
  }

  offset++;
  return offset;
}
static int
decode_imei_eps_mobile_identity (
  ImeiEpsMobileIdentity_t * imei,
  uint8_t * buffer)
{
  /*
   * Decode an IMEI EPS mobile identity.  Same BCD nibble layout as the
   * IMSI variant; no end-mark check is performed here (an IMEI carries
   * 15 digits, i.e. always an odd count).  Returns the number of
   * octets consumed (8), or TLV_VALUE_DOESNT_MATCH on a type mismatch.
   */
  int offset = 0;

  imei->typeofidentity = buffer[offset] & 0x7;

  if (imei->typeofidentity != EPS_MOBILE_IDENTITY_IMEI) {
    return (TLV_VALUE_DOESNT_MATCH);
  }

  imei->oddeven = (buffer[offset] >> 3) & 0x1;
  imei->digit1 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imei->digit2 = buffer[offset] & 0xf;
  imei->digit3 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imei->digit4 = buffer[offset] & 0xf;
  imei->digit5 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imei->digit6 = buffer[offset] & 0xf;
  imei->digit7 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imei->digit8 = buffer[offset] & 0xf;
  imei->digit9 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imei->digit10 = buffer[offset] & 0xf;
  imei->digit11 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imei->digit12 = buffer[offset] & 0xf;
  imei->digit13 = (buffer[offset] >> 4) & 0xf;
  offset++;
  imei->digit14 = buffer[offset] & 0xf;
  imei->digit15 = (buffer[offset] >> 4) & 0xf;
  offset++;
  return offset;
}
static int
encode_guti_eps_mobile_identity (
  GutiEpsMobileIdentity_t * guti,
  uint8_t * buffer)
{
  /*
   * Encode a GUTI EPS mobile identity into 'buffer'.  Every field is
   * masked to its bit width before insertion.  Returns the number of
   * octets written (11).  The caller must provide a large enough
   * buffer — no length parameter is available here.
   */
  uint32_t offset = 0;

  /* Octet 1: spare "1111" | odd/even flag | type of identity. */
  buffer[offset++] = 0xf0 | ((guti->oddeven & 0x1) << 3) | (guti->typeofidentity & 0x7);
  /* PLMN identity, two BCD digits per octet, low nibble first. */
  buffer[offset++] = ((guti->mccdigit2 & 0xf) << 4) | (guti->mccdigit1 & 0xf);
  buffer[offset++] = ((guti->mncdigit3 & 0xf) << 4) | (guti->mccdigit3 & 0xf);
  buffer[offset++] = ((guti->mncdigit2 & 0xf) << 4) | (guti->mncdigit1 & 0xf);
  /* MME group id (16 bit), MME code (8 bit), M-TMSI (32 bit). */
  IES_ENCODE_U16 (buffer, offset, guti->mmegroupid);
  buffer[offset++] = guti->mmecode;
  IES_ENCODE_U32 (buffer, offset, guti->mtmsi);
  return offset;
}
static int
encode_imsi_eps_mobile_identity (
  ImsiEpsMobileIdentity_t * imsi,
  uint8_t * buffer)
{
  /*
   * Encode an IMSI EPS mobile identity into 'buffer' (BCD, two digits
   * per octet, low nibble first).  Returns the number of octets
   * written (8).
   *
   * Fix: every field is now masked to its bit width before insertion,
   * matching encode_guti_eps_mobile_identity.  Without the masks an
   * out-of-range field value would corrupt the neighbouring nibble or
   * the odd/even and type bits of the first octet.
   */
  uint32_t encoded = 0;

  /* Octet 1: digit 1 | odd/even flag | type of identity. */
  *(buffer + encoded) = ((imsi->digit1 & 0xf) << 4) | ((imsi->oddeven & 0x1) << 3) | (imsi->typeofidentity & 0x7);
  encoded++;
  *(buffer + encoded) = ((imsi->digit3 & 0xf) << 4) | (imsi->digit2 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imsi->digit5 & 0xf) << 4) | (imsi->digit4 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imsi->digit7 & 0xf) << 4) | (imsi->digit6 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imsi->digit9 & 0xf) << 4) | (imsi->digit8 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imsi->digit11 & 0xf) << 4) | (imsi->digit10 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imsi->digit13 & 0xf) << 4) | (imsi->digit12 & 0xf);
  encoded++;

  if (imsi->oddeven != EPS_MOBILE_IDENTITY_EVEN) {
    *(buffer + encoded) = ((imsi->digit15 & 0xf) << 4) | (imsi->digit14 & 0xf);
  } else {
    /* Even number of digits: the upper nibble of the last octet
     * carries the "1111" end mark. */
    *(buffer + encoded) = 0xf0 | (imsi->digit14 & 0xf);
  }

  encoded++;
  return encoded;
}
static int
encode_imei_eps_mobile_identity (
  ImeiEpsMobileIdentity_t * imei,
  uint8_t * buffer)
{
  /*
   * Encode an IMEI EPS mobile identity into 'buffer' (BCD, two digits
   * per octet, low nibble first; 15 digits, so no end-mark case is
   * needed unlike the IMSI encoder).  Returns the number of octets
   * written (8).
   *
   * Fix: every field is now masked to its bit width before insertion,
   * matching encode_guti_eps_mobile_identity, so an out-of-range field
   * value cannot corrupt a neighbouring nibble or the odd/even and
   * type bits of the first octet.
   */
  uint32_t encoded = 0;

  /* Octet 1: digit 1 | odd/even flag | type of identity. */
  *(buffer + encoded) = ((imei->digit1 & 0xf) << 4) | ((imei->oddeven & 0x1) << 3) | (imei->typeofidentity & 0x7);
  encoded++;
  *(buffer + encoded) = ((imei->digit3 & 0xf) << 4) | (imei->digit2 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imei->digit5 & 0xf) << 4) | (imei->digit4 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imei->digit7 & 0xf) << 4) | (imei->digit6 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imei->digit9 & 0xf) << 4) | (imei->digit8 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imei->digit11 & 0xf) << 4) | (imei->digit10 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imei->digit13 & 0xf) << 4) | (imei->digit12 & 0xf);
  encoded++;
  *(buffer + encoded) = ((imei->digit15 & 0xf) << 4) | (imei->digit14 & 0xf);
  encoded++;
  return encoded;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.