text
stringlengths
1
1.05M
// Doxygen-generated navigation data: member index for the "Corr" group
// (CMSIS-DSP correlation functions). Each entry is [displayName, htmlAnchor, childEntries].
var group__Corr = [
  [ "arm_correlate_f32", "group__Corr.html#ga22021e4222773f01e9960358a531cfb8", null ],
  [ "arm_correlate_fast_opt_q15", "group__Corr.html#ga40a0236b17220e8e22a22b5bc1c53c6b", null ],
  [ "arm_correlate_fast_q15", "group__Corr.html#gac8de3da44f58e86c2c86156276ca154f", null ],
  [ "arm_correlate_fast_q31", "group__Corr.html#gabecd3d7b077dbbef43f93e9e037815ed", null ],
  [ "arm_correlate_opt_q15", "group__Corr.html#gad71c0ec70ec69edbc48563d9a5f68451", null ],
  [ "arm_correlate_opt_q7", "group__Corr.html#ga746e8857cafe33ec5d6780729c18c311", null ],
  [ "arm_correlate_q15", "group__Corr.html#ga5ec96b8e420d68b0e626df0812274d46", null ],
  [ "arm_correlate_q31", "group__Corr.html#ga1367dc6c80476406c951e68d7fac4e8c", null ],
  [ "arm_correlate_q7", "group__Corr.html#ga284ddcc49e4ac532d52a70d0383c5992", null ]
];
<filename>ods-main/src/main/java/cn/stylefeng/guns/onlineaccess/modular/mapper/UserMapper.java
package cn.stylefeng.guns.onlineaccess.modular.mapper;

import cn.stylefeng.guns.onlineaccess.modular.result.UserResult;
// NOTE(review): SysUser is imported but not referenced anywhere in this interface —
// presumably left over from an earlier signature; confirm before removing.
import cn.stylefeng.guns.sys.modular.user.entity.SysUser;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

import java.util.List;

/**
 * MyBatis-Plus mapper for user-related queries of the online-access module.
 * Query SQL lives in the corresponding XML mapper file (not visible here);
 * both methods take a MyBatis-Plus {@code Page} as the first argument, which
 * the pagination plugin uses to page the result set.
 */
public interface UserMapper extends BaseMapper<UserResult> {

    /**
     * Returns the user/project rows matching the given project and user ids, paged.
     *
     * @param page      MyBatis-Plus pagination carrier
     * @param projectId id of the project to filter by
     * @param userId    id of the user to filter by
     * @return matching rows as {@link UserResult} records
     */
    List<UserResult> getProjectByUserIdAndProjectIdResult(Page page, Long projectId, Long userId);

    /**
     * Returns the user rows for the given id, paged.
     *
     * @param page MyBatis-Plus pagination carrier
     * @param id   user id to look up
     * @return matching rows as {@link UserResult} records
     */
    List<UserResult> getUserById(Page page, Long id);
}
#!/bin/bash
# Creates a CloudFormation networking stack (VPC, three subnets, internet gateway,
# route table) from the template csye6225-cf-networking.json, then waits for the
# stack to finish creating and reports success or failure.
#
# Usage: <script> STACK_NAME VPC_CIDR SUBNET01_CIDR SUBNET02_CIDR SUBNET03_CIDR REGION

STACK_NAME=$1
VPC_CIDR=$2
CIDR_SUBNET01=$3
CIDR_SUBNET02=$4
CIDR_SUBNET03=$5
REGION=$6

# Resource names are derived from the stack name.
SUBNET_01=$STACK_NAME-subnet01
SUBNET_02=$STACK_NAME-subnet02
SUBNET_03=$STACK_NAME-subnet03
VPC_NAME=$STACK_NAME-vpc
INTERNETGATEWAY=$STACK_NAME-internetgateway
ROUTETABLE=$STACK_NAME-routetable

# All six positional parameters are required.
if [[ -z "$1" || -z "$2" || -z "$3" || -z "$4" || -z "$5" || -z "$6" ]]
then
    echo "Please enter all parameters in order ( Stack Name, VPC CIDR block, CIDR block for 3 subnets, Region )"
    exit 1
else
    # Quote every expansion so names/CIDRs can never be word-split or glob-expanded.
    # Checking the command directly replaces the fragile `$?` pattern.
    if aws cloudformation create-stack \
        --stack-name "$STACK_NAME" \
        --template-body file://csye6225-cf-networking.json \
        --region "$REGION" \
        --parameters \
            ParameterKey=VPCName,ParameterValue="$VPC_NAME" \
            ParameterKey=VPCCIDR,ParameterValue="$VPC_CIDR" \
            ParameterKey=Subnet01CIDR,ParameterValue="$CIDR_SUBNET01" \
            ParameterKey=Subnet02CIDR,ParameterValue="$CIDR_SUBNET02" \
            ParameterKey=Subnet03CIDR,ParameterValue="$CIDR_SUBNET03" \
            ParameterKey=Region,ParameterValue="$REGION" \
            ParameterKey=Subnet01Name,ParameterValue="$SUBNET_01" \
            ParameterKey=Subnet02Name,ParameterValue="$SUBNET_02" \
            ParameterKey=Subnet03Name,ParameterValue="$SUBNET_03" \
            ParameterKey=InternetGatewayName,ParameterValue="$INTERNETGATEWAY" \
            ParameterKey=RouteTableName,ParameterValue="$ROUTETABLE"
    then
        # Block until CloudFormation reports CREATE_COMPLETE (or failure/timeout).
        if aws cloudformation wait stack-create-complete --stack-name "$STACK_NAME" --region "$REGION"
        then
            echo "Stack created successfully"
        else
            echo "Stack creation unsuccessful"
        fi
    else
        echo "Stack creation unsuccessful"
    fi
fi
<reponame>telota/CIL-ACE
// -----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
// Vue 2 plugin (registered via Vue.use) that exposes `this.$handlers` for the Corpus Nummorum
// admin UI. It provides:
//   - create_item:     maps an API "coin"/"type" payload into a flat form item object
//   - copy_item_data:  copies a field from one item to another, tracking type->coin inheritance
//   - show_item_data:  renders a field of an item as a display string / small HTML fragment
//   - format / constant / rules: assorted formatters, shared constants and input-validation rules
// Module-private constants and helper functions (inheritance_keys, to_string, make_element,
// format_* helpers) follow after the plugin object.
// NOTE(review): the source is machine-collapsed onto very long lines; the code below is kept
// byte-identical in this documentation-only pass.
// NOTE(review): show_item_data checks for the key 'literatur' while create_item writes
// 'literature' — this looks like a typo that makes that display branch unreachable; confirm
// against callers before changing.
// NOTE(review): the axis validation message "Axis mus be between 1 and 12." contains a typo,
// but it is a runtime string and therefore left untouched here.
// -----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
export default { install(Vue, options) { Vue.prototype.$handlers = { create_item: (entity, data) => { const d = data const item = {}; // Coin and Type -------------------------------------------------------------------- if(['coins', 'types'].includes(entity)) { item.id = d?.id ? d.id : null item.public = d?.dbi?.public ? d.dbi.public : null item.description = d?.dbi?.description ? d.dbi.description : null item.source = d?.source?.link ? d.source.link : null item.comment_public = d?.comment ? d.comment : null item.comment_private = d?.dbi?.comment ? d.dbi.comment : null item.created_at = d?.created_at ? d.created_at : null item.updated_at = d?.updated_at ? d.updated_at : null item.creator = d?.dbi?.creator ? d.dbi.creator : null item.editor = d?.dbi?.editor ? d.dbi.editor : null item.mint = d?.mint?.id ? d.mint.id : null item.issuer = d?.issuer?.id ? d.issuer.id : null item.authority = d?.authority?.kind?.id ? d.authority.kind.id : null item.authority_person = d?.authority?.person?.id ? d.authority.person.id : null item.authority_group = d?.authority?.group?.id ? d.authority.group.id : null item.material = d?.material?.id ? d.material.id : null item.denomination = d?.denomination?.id ? d.denomination.id : null item.standard = d?.standard?.id ? d.standard.id : null item.period = d?.date?.period?.id ? d.date.period.id : null item.date_start = d?.dbi?.date_start ? d.dbi.date_start : null item.date_end = d?.dbi?.date_end ? d.dbi.date_end : null item.date_text_de = d?.date?.text?.de ? d.date.text.de : null item.date_text_en = d?.date?.text?.en ? d.date.text.en : null item.links = !d?.web_references?.[0] ? [] : d.web_references.map((v) => { return { link: v.link } }) item.groups = d?.dbi?.groups?.[0] ? d.dbi.groups : [] item.persons = !d?.persons?.[0] ? [] : d.persons.map((v) => { return { id: v.id, function: v.function.id } }); ['citations', 'literature'].forEach((key) => { item [key] = !d?.[key]?.[0] ? 
[] : d[key].map((v) => { return { id: v.id, page: v.quote.page, number: v.quote.number, plate: v.quote.plate, picture: v.quote.picture, annotation: v.quote.annotation, comment_de: v.quote.comment.de, comment_en: v.quote.comment.en, this: key == 'citations' ? 1 : 0 } }); }); ['obverse', 'reverse'].forEach ( (key) => { const s = key.slice(0, 1) item[s + '_design'] = d?.[key]?.design?.id ? d[key].design.id : null item[s + '_legend'] = d?.[key]?.legend?.id ? d[key].legend.id : null //item [s+'_legend_direction'] = !d ? null : (d[key] ? (d[key].legend.direction ? d[key].legend.direction.id : null) : null); item[s + '_monograms'] = !d?.[key]?.monograms ? [] : d[key].monograms.map((v) => { return { id: v.id, position: v.position.id, image: v.link, side: key == 'obverse' ? 0 : 1 } }) item[s + '_symbols'] = !d?.[key]?.symbols ? [] : d[key].symbols.map((v) => { return { id: v.id, position: v.position.id, side: key == 'obverse' ? 0 : 1 } }) if(entity === 'coins') { item[s + '_die'] = d?.[key]?.die?.id ? d[key].die.id : null item[s + '_controlmarks'] = !d?.[key]?.controlmarks ? [] : d[key].controlmarks.map ((v) => { return { id: v.id, name: v.name, image: v.link, count: v.count, side: key == 'obverse' ? 0 : 1 } }) item[s + '_countermark_en'] = d?.[key]?.countermark?.text?.en ? d[key].countermark.text.en : null item[s + '_countermark_de'] = d?.[key]?.countermark?.text?.de ? d[key].countermark.text.de : null item[s + '_undertype_en'] = d?.[key]?.undertype?.text?.en ? d[key].undertype.text.en : null item[s + '_undertype_de'] = d?.[key]?.undertype?.text?.de ? d[key].undertype.text.de : null } } ) item.public = !d?.dbi?.public ? 0 : d.dbi.public } // Coin specific -------------------------------------------------------------------- if(entity === 'coins') { item.types = !d?.types ? [] : d.types.map (v => { return { type: v.id } }) item.inherited = !d?.dbi?.inherited?.id_type ? inheritance_object() : d.dbi.inherited item.provenience = !d?.owner?.provenience ? 
null : d.owner.provenience item.owner_original = !d?.owner?.original?.id ? null : d.owner.original.id item.owner_unsure = !d?.owner?.original?.is_unsure ? 0 : 1 item.collection = !d?.owner?.original?.collection_nr ? null : d.owner.original.collection_nr item.owner_reproduction = !d?.owner?.reproduction?.id ? null : d.owner.reproduction.id item.plastercast = !d?.owner?.reproduction?.collection_nr ? null : d.owner.reproduction.collection_nr item.diameter_min = !d?.diameter?.value_min ? null : d.diameter.value_min.toFixed(2) item.diameter_max = !d?.diameter?.value_max ? null : d.diameter.value_max.toFixed(2) item.diameter_ignore = !d?.diameter?.ignore ? 0 : 1 item.weight = !d?.weight?.value ? null : d.weight.value.toFixed(2) item.weight_ignore = !d?.weight?.ignore ? 0 : 1 item.axis = !d?.axis ? null : d.axis item.centerhole = !d?.centerhole?.value ? 0 : d.centerhole.value item.images = !d?.dbi?.images ? [] : d.dbi.images /*item.literature_type = !d ? [] : (d.type_literature ? d.type_literature.map (v => { return { id: v.id, //string: 'Type ' + v.id_type + ':&ensp;' + v.title + ', ' + v.quote.text.de +'&emsp;( <a href="' + v.link + '" target="_blank">' + v.id + '</a> )', comment_de: v.quote.comment.de, comment_en: v.quote.comment.en }}) : []);*/ item.findspot = d?.findspot?.id ? d.findspot.id : null item.hoard = d?.hoard?.id ? d.hoard.id : null item.forgery = d?.is_forgery ? 1 : 0 } // Type specific -------------------------------------------------------------------- else if(entity === 'types') { item.coins = !d?.coins ? [] : d.coins.map ((v) => { return { coin: v.id } }) //item.inherited = {} item.name = !d?.dbi?.name ? null : d.dbi.name item.image = !d?.images?.[0]?.id ? null : d.images[0].id item.images = !d?.images ? [] : d.images item.variations = !d?.variations ? [] : d.variations.map((v) => { return { de: v.text.de, en: v.text.en, comment: v.comment } }) item.findspots = !d?.dbi?.findspots ? [] : d.dbi.findspots item.hoards = !d?.dbi?.hoards ? 
[] : d.dbi.hoards } return item }, copy_item_data: (item, copy, key) => { // Detect Copy Mode const mode = (item.types != undefined ? 'c' : 't') + (copy.types != undefined ? 'c' : 't') // Write Inheritance if(mode === 'ct' && inheritance_keys.includes(key)) { item.inherited[key] = 1 } // Copy Value if (key === 'date') { ['date_start', 'date_end', 'date_text_de', 'date_text_en'].forEach((k) => { item[k] = copy[k] })} else { item[key] = copy[key] } return item }, show_item_data: (language, entity, item_data, key, section) => { const d = item_data // Coin and Type -------------------------------------------------------------------- if(['coins', 'types'].includes(entity)) { // Production if (key === 'mint') { return to_string.mint(d?.mint, language) } else if (key === 'issuer') { return to_string.individual(d?.issuer) } else if (key === 'authority') { return to_string.basic(d?.authority?.kind, language) } else if (key === 'authority_person') { return to_string.individual(d?.authority?.person) } else if (key === 'authority_group') { return to_string.individual(d?.authority?.group) } else if (key === 'date') { return to_string.date(d?.date, d?.dbi?.date_start, d?.dbi?.date_end, language) } else if (key === 'period') { return to_string.basic(d?.date?.period, language) } // Basics else if (['material', 'denomination', 'standard'].includes(key)) { return to_string.basic(d?.[key], language) } else if (key === 'weight') { return to_string.weight(d?.weight, entity, language) } else if (key === 'diameter') { return to_string.diameter(d?.diameter, entity, language) } else if (key === 'axis') { return to_string.axis(entity === 'coins' ? d?.axis : d?.axes) } else if (key === 'centerhole') { return to_string.centerhole(d?.centerhole) } // Depiction else if (['design', 'legend'].includes(key)) { return to_string.design_legend(key, d?.[section]?.[key], language) } else if (['monograms', 'symbols'].includes(key)) { return !d?.[section]?.[key] ? 
'--' : d[section][key].map((data) => { return to_string.monogram_symbol(key, data, language) }).join('\n') } else if (key === 'controlmarks') { return !d?.[section]?.[key] ? '--' : d[section][key].map((data) => { return to_string.controlmark(data, language) }).join('\n') } else if (['countermark', 'undertype'].includes(key)){ return to_string.countermark_undertype(key, d?.[section], language) } // Owners else if (['owner_original', 'owner_reproduction'].includes(key)) { return to_string.owner(d?.owner?.[key.split('_').pop()], language) } else if (key === 'collection_id') { return to_string.simple_text(d?.owner?.original?.collection_nr) } else if (key === 'plastercast_id') { return to_string.simple_text(d?.owner?.reproduction?.collection_nr) } else if (key === 'provenience') { return to_string.simple_text(d?.owner?.provenience) } // Individuals else if (key === 'persons') { return !d?.persons ? '--' : d.persons.map((data) => { return to_string.individual(data, language) }).join('\n') } // References else if (['citations', 'literatur'].includes(key)) { return !d?.[key] ? '--' : d[key].map((data) => { return to_string.reference(data, language) }).join('\n') } else if (key === 'web_references') { return !d?.[key] ? '--' : d[key].map((data) => { return make_element.resource_link(data.link, language) }).join('\n')} else if (key === 'objectgroups') { return !d?.dbi?.groups ? '--' : d.dbi.groups.map((data) => { return to_string.objectgroup(data, language) }).join('\n')} // Hoard and Findsport else if (['hoards', 'findspots'].includes(key)) { return !d?.[key] ? 
'--' : d[key].map((data) => { return to_string.hoard_findspot(data, language) }).join('\n') } else if (['hoard', 'findspot'].includes(key)) { return to_string.hoard_findspot(d?.[key], language) } // About else if (key === 'comment_public') { return to_string.simple_text(d?.comment) } else if (key === 'comment_private') { return to_string.simple_text(d?.dbi?.comment) } else if (key === 'name_private') { return to_string.simple_text(d?.dbi?.name) } else if (key === 'description_private') { return to_string.simple_text(d?.dbi?.description) } else if (key === 'source') { return d?.source?.link ? make_element.resource_link(d.source.link, true) : '--'} // System History else if (key === 'system') { let color = 'grey' if (d?.dbi?.public === 1) { color = 'green' } else if (d?.dbi?.public === 2) { color = 'blue_sec' } else if (d?.dbi?.public === 3) { color = 'red' } const created = format_date(language, d?.created_at, true) + '&ensp;(&nbsp;' + (d?.dbi?.creator_name ? d.dbi.creator_name : '???') + '&nbsp;) ' const edited = format_date(language, d?.updated_at, true) + '&ensp;(&nbsp;' + (d?.dbi?.editor_name ? d.dbi.editor_name : '???') + '&nbsp;) ' const state = '<span class="' + color + '--text"> &#11044;</span>' return state + '&emsp;' + created + ',&ensp;' + edited } // Card Header & Footer else if (key === 'card_header') { const header = [] if (d?.diameter?.value_max) { let diameter = '&bigodot;&nbsp;' + format_decimal(language, d.diameter.value_max) + '&nbsp;mm' diameter += (d.diameter?.count ? (' (' + d.diameter.count + ')') : '') header.push(diameter) } if (d?.weight?.value) { let weight = '&#9878;&nbsp;' + format_decimal(language, d.weight.value) + '&nbsp;g' weight += (d.weight?.count ? (' (' + d.weight.count + ')') : '') header.push(weight) } return header.join('&ensp;') } else if (key === 'card_footer') { const footer = [] if (d?.date?.text?.[language] || d?.date?.text?.de) { footer.push(d?.date?.text?.[language] ? 
d.date.text[language] : d.date.text.de) } if (d?.material?.text?.[language]) { footer.push(d.material.text[language]) } if (d?.denomination?.text?.[language]) { footer.push(d.denomination.text[language].split('(')[0].trim()) } if (d?.owner?.city) { footer.push(d.owner.city.split('(')[0].trim()) } return footer.join('; ') } } else { return 'invalid entity' } }, // Formatters format: { resource_link: (link, showLink) => { return make_element.resource_link(link, showLink) }, image_link: (link, size, raw) => { return format_image_path(link, size, raw) }, digilib_link: (link, size) => { return link ? format_digilib_link(link, size)[size ? 'scaler' : 'viewer'] : '' }, date: (language, date, minutes) => { return format_date(language, date, minutes) }, number: (language, number) => { return format_number(language, number) }, year: (language, year) => { return format_year(language, year) }, file_format: (path, UpperCase) => { return file_format(path, UpperCase) }, img_placeholder:(link) => { return format_image_placeholder(link) }, counter: (array) => { return format_counter(array) }, inherited: () => { return inheritance_object() }, image_tile: (link, size) => { return make_element.image_tile(link, size) }, stringify_data: to_string, geonames_link: (link) => { return link ? (link + make_element.resource_link(urls.geonames + link)) : '--' }, nomisma_link: (link) => { return link ? (link + make_element.resource_link(urls.nomisma + link)) : '--' }, zotero_link: (link) => { return link ? (link + make_element.resource_link(urls.zotero + link)) : '--' }, cn_public_link: (item) => { if (item?.id && (item?.public === 1 || item?.dbi?.public === 1)) { return '<a href="' + urls.cn + (item.kind?.toLowerCase() === 'coin' ? 
'coins?id=' : 'types/') + item.id + '" target="_blank">&nbsp;&ast;</a>' } else { return '' } }, cn_coin_type_relation: (entity, item) => { if (entity === 'coins') { const length = item.types?.length if (length) { const inherited = item.types?.find(type => type.inherited === 1) return { id: inherited?.id ? inherited.id : item.types[0].id, inherited: inherited?.id ? 1 : 0 } } else { return null } } else if (entity === 'types') { const length = item.coins?.length return length ? length : 0 } }, creation: (language, d) => { const created = format_date(language, d?.created_at, true) + '&ensp;(' + (d?.creator ? d.creator : (d?.id_creator ? d.id_creator : '???')) + ')' const edited = format_date(language, d?.updated_at, true) + '&ensp;(' + (d?.editor ? d.editor : (d?.id_editor ? d.id_editor : '???')) + ')' return created + ',&ensp;' + edited }, cn_entity: (entity, id) => { entity = (entity.slice(-1) === 's' ? entity.slice(0, -1) : entity) return 'cn ' + entity + (id ? (' ' + id) : '') } }, // Constants constant: { inheritance_keys: inheritance_keys, baseURL: baseURL, digilib: digilib, image_formats: image_formats, placeholder_directory: placeholder_directory, placeholder_fallback: placeholder_fallback, url: urls }, // Rules rules: { link: (v) => (v ? (v.slice(0, 7) === 'http://' || v.slice(0, 8) === 'https://') : true) || 'A valid links starts with \'http\' or \'https\'.', required: (v) => !!v || 'Input required.', numeric: (v) => { const pattern = /^(0|[1-9][0-9]*)$/ return pattern.test(v) || 'Must be numeric.' }, id: (v) => { const pattern = /^(null|[1-9][0-9]*)$/ return (v !== null ? pattern.test(v) : true) || 'ID must be numeric.' }, numeric_nz: (v) => (v === null ? true : v > 0) || 'Must be numeric and not zero (use dots for decimals).', date: (v) => { const pattern = /^-?([1-9][0-9]*)$/ return v === null ? true : pattern.test(v) || 'Must be numeric and not zero (there is no Year 0).' }, axis: (v) => (v ? (v> 0 && v < 13) : true) || 'Axis mus be between 1 and 12.' 
// ---- end of the plugin object (closing braces below); module-private constants and helpers follow ----
} } } } // ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- // General Constants const inheritance_keys = [ 'id_type', 'mint', 'issuer', 'authority', 'authority_person', 'authority_group', 'date', 'period', 'material', 'denomination', 'standard', 'o_design', 'o_legend', 'o_symbols', 'o_monograms', 'r_design', 'r_legend', 'r_symbols', 'r_monograms', 'persons', 'comment_private', 'comment_public' ] const inheritance_object = () => { const item = {} inheritance_keys.forEach((key) => { item[key] = null }) return item } const image_formats = ['jpg', 'jpeg', 'png', 'gif', 'svg', 'tif', 'tiff'] const placeholder_directory = 'https://data.corpus-nummorum.eu/placeholder/' const placeholder_fallback = 'https://data.corpus-nummorum.eu/placeholder/placeholder/placeholder_not_found.png' const tiff = ['tiff', 'tif'] const baseURL = 'https://data.corpus-nummorum.eu/' const digilib = { viewer: 'https://digilib.bbaw.de/digitallibrary/digilib.html?fn=silo10/thrakien/', scaler: 'https://digilib.bbaw.de/digitallibrary/servlet/Scaler?fn=silo10/thrakien/' } const urls = { cn: 'https://www.corpus-nummorum.eu/', geonames: 'https://www.geonames.org/', nomisma: 'http://nomisma.org/id/', zotero: 'https://zotero.org/groups/163139/items/' } // Stringify object property const to_string = { mint: (data, language) => { return !data?.id ? '--' : (format_id(data.id) + data.text[language] + (data.region?.id ? (', ' + data.region.text[language]) : '') + make_element.resource_link(data.link)) }, individual: (data, language) => { // works for persons, issuer, authority_person, authority_group return !data?.id ? '--' : (format_id(data.id) + data.name + //(person.alias ? (' | ' + person.alias) : '') + make_element.resource_link(data.link)) + (!data.function?.id ? 
'' : ('<br />( ' + data.function.id + ' )&ensp;' + data.function.text[language])) }, date: (date, start, end, language) => { return '[&nbsp;' + (start ? start : '--') + '&nbsp;|&nbsp;' + (end ? end : '--') + '&nbsp;]&ensp;' + (!date?.text ? '--' : (date.text[language] ? date.text[language] : (date.text.de ? date.text.de : '--'))) }, basic: (data, language) => { // works for authority, period, material, denomination, standard return !data?.id ? '--' : (format_id(data.id) + data.text[language] + make_element.resource_link(data.link)) }, design_legend: (key, data, language) => { return !data?.id ? '--' : (format_id(data.id) + (key === 'design' ? data.text[language] : ('<span class="font-weight-thin">' + data.string + '</span>'))) }, monogram_symbol: (key, data, language) => { return !data?.id ? '--' : '<div class="d-flex component-wrap align-start pr-2">' + make_element.image_link(data.link, 40) + '<div>' + format_id(data.id) + data.position.text[language] + ': ' + (key === 'monograms' ? data.combination.replaceAll(' ', '') : data.text[language]) + '</div>' + '</div>' }, controlmark: (data, language) => { return !data?.id ? '--' : '<div class="d-flex component-wrap align-start pr-2">' + make_element.image_link(data.link, 40) + '<div>' + format_id(data.id) + data.count + 'x ' + (data.name ? data.name : '--') + '</div>' + '</div>' }, countermark_undertype (key, data, language) { return data?.[key]?.text?.[language === 'de' ? 'de' : 'en'] ? data[key].text[language === 'de' ? 'de' : 'en'] : '--' }, text: (data, language) => { return !data?.text ? '--' : ( data.text[language] ? data.text[language] : ( data.text.en ? data.text.en : '--')) }, simple_text: (string) => { return string ? string : '--' }, weight: (data, entity, language) => { const add = entity === 'types' ? ( data.count ? (' (&nbsp;' + data.count + '&nbsp;) ') : '' ) : ( data.ignore ? (' (&nbsp;<i>ignore</i>&nbsp;) ') : '' ) return !data?.value ? 
'--' : ( format_decimal(language, data.value) + '&nbsp;g' + add) }, diameter: (data, entity, language) => { if (!data?.value_min && !data?.value_max) { return '--' } else { const add = entity === 'types' ? ( data?.count ? (' (&nbsp;' + data.count + '&nbsp;) ') : '' ) : ( data?.ignore ? (' (&nbsp;<i>ignore</i>&nbsp;) ') : '' ) if ( data.value_min && data.value_max && data.value_min != data.value_max ) { return format_decimal(language, data.value_min) + '&ndash;' + format_decimal(language, data.value_max) + '&nbsp;mm' + add } else if ( data.value_min && data.value_max && data.value_min === data.value_max ) { return format_decimal(language, data.value_max) + '&nbsp;mm' + add } else { return format_decimal(language, (data.value_max ? data.value_max : data.value_min)) + '&nbsp;mm' + add } } }, axis: (data) => { if (data === null) { return '--' } else if (Number.isInteger(data)) { return '&#9719;&nbsp;' + data } else { return '&#9719;&nbsp;' + (data.map((v) => { return v.value + '&ensp;(&nbsp;' + v.count + '&nbsp;)' }).join(',&ensp;')) } }, centerhole: (data) => { return !data?.value ? '--' : (format_id(data.value) + (data.value === 1 ? 'O' : (data.value === 2 ? 'R' : 'O&nbsp;/&nbsp;R'))) }, owner: (data, language) => { if (!data.id) { return '--' } else { return format_id(data.id) + (data.name ? data.name : '--') + make_element.resource_link(data.link) + (data.is_unsure ? '&nbsp;(?)' : '') + (data.city ? (', ' + data.city) : '') + (data.country ? (', ' + data.country) : '') } }, reference: (data, language) => { if (!data.id) { return '--' } else { language = language === 'de' ? 'de' : 'en' return '<div>' + format_id(data.id) + (data.title ? data.title : '--') + make_element.resource_link(data.link) + (data.quote?.text?.[language] ? (', ' + data.quote.text[language]) : '') + '</div>' + (data.quote?.comment?.[language] ? 
('<div class="mt-1"><i>' + data.quote.comment[language] + '</i></div>') : '') } }, objectgroup: (data, language) => { if (!data.id) { return '--' } else { language = language === 'de' ? 'de' : 'en' return '<div>' + format_id(data.id) + (data.name ? data.name : '--') + '</div>' + (data.text?.[language] ? ('<div class="mt-1"><i>' + data.text[language] + '</i></div>') : '') } }, hoard_findspot: (data, language) => { if (!data) { return '--' } else { return format_id(data.id) + (data.name ? data.name : '--') + make_element.resource_link(data.link) + (data.country ? (', ' + data.country) : '') } }, rgew: (material, language) => { }, } // Handy HTML Generators for specific elements like a tags or images const make_element = { resource_link: (link, showLink) => { return !link ? '' : ((showLink ? '' : '&nbsp;') + '<a href="' + link + '" target="_blank">' + (showLink ? link.split('?')[0] + ('&nbsp;') : '') + '&#10064;</a>') }, image_link: (link, size) => { if (link) { const format = extract_file_format(link) const img_size = parseInt(size) - (format === 'svg' ? 4 : 0) return '<a href="' + format_image_path(link) + '" target="_blank" class="mr-2 mt-1">' + '<div class="white d-flex justify-center align-center" style="width: ' + size + 'px; height: ' + size + 'px;">' + '<img src="' + format_image_path(link, img_size) + '" loading="lazy" style="width: ' + img_size + 'px; height: ' + img_size + 'px; object-fit: contain">' + '</div>' + '</a>' } else { return '' } }, image_tile: (link, size) => { if (link) { const format = extract_file_format(link) const img_size = parseInt(size) - (format === 'svg' ? 
4 : 0) return '<div class="white d-flex justify-center align-center" style="width: ' + size + 'px; height: ' + size + 'px;">' + '<img src="' + format_image_path(link, img_size) + '" loading="lazy" style="width: ' + img_size + 'px; height: ' + img_size + 'px; object-fit: contain">' + '</div>' } else { return '' } } } // Formatters and other helpers const format_typeName = (id, mint) => { if (id) { return 'cn.' + (!mint ? 'unknown' : (mint.split('/').pop().toLowerCase())) + '.' + id } else { return 'Unknown Type' } } const format_coinName = (id, mint) => { if (id) { return (!mint ? 'unknown' : mint) + ' CN_' + id } else { return 'Unknown Coin' } } const format_id = (id) => { return !id ? '' : ('(&nbsp;' + id + '&nbsp;)&ensp;') } const format_date = (language, date, giveminutes) => { language = language ? language : 'en' const year = date ? date.slice(0,4) : '----' const month = date ? date.slice(5,7) : '--' const day = date ? date.slice(8,10) : '--' const hour = date && giveminutes ? date.slice(11,13) : '--' const minutes = date && giveminutes ? date.slice(14,16) : '--' if (language === 'de') { return day + '.' + month + '.' + year + (giveminutes ? (', ' + hour + ':' + minutes) : '') } else { return month + '/' + day + '/' + year + (giveminutes ? (', ' + hour + ':' + minutes) : '') } } const format_year = (language, year) => { if (year) { language = language ? language : 'en' year = String(year) if (year.slice(0, 1) === '-') { return year.slice(1) + '&nbsp;' + (language === 'de' ? 'v. Chr.' : 'BC') } else { return year + '&nbsp;' + (language === 'de' ? 'n. Chr.' : 'AD') } } else { return '--' } } const format_number = (language, number) => { language = language ? language : 'en' if (parseInt(number)) { number = String(number).split('').reverse().join('') const delimiter = language === 'de' ? '.' 
: ',' const splits = [] for (let i = 0; i <= number.length; i = i + 3) { const to_push = number.substr(i, (i+3)).split('').reverse().join('') if(to_push) { splits.push(to_push)} } return splits.reverse().join(delimiter) } else { return number } } const format_decimal = (language, number) => { if (language === 'de' && number) { number = number.toString().replaceAll('.', ',')} return number } const format_counter = (array) => { return '&ensp;(&nbsp;' + (array?.[0] ? array.length : '-') + '&nbsp;)' } const format_image_path = (link, size, raw) => { if(link) { link = link.trim() // Link is external if(link.slice(0,4) === 'http' && link.slice(0, baseURL.length) != baseURL) { return link } // Link is internal else { const format = extract_file_format(link) // file is TIFF and shall be given as digilib link if (tiff.includes(format) && !raw) { return link ? format_digilib_link(link, size)[size ? 'scaler' : 'viewer'] : '' } // file is no TIFF or shall be given as raw else { if (link.slice(0, baseURL.length) === baseURL) { return link } else { return baseURL + (!link.includes('storage/') ? 'storage/' : '') + link } } } } else { return '' } } const format_image_placeholder = (link) => { if (!link) { return placeholder_directory + 'placeholder_not_found.png' } else { const extension = extract_file_format(link) return placeholder_directory + 'placeholder_' + (image_formats.includes(extension) ? extension : 'not_supported') + '.png' } } const format_digilib_link = (link, scale) => { if (link) { if (link.includes('storage/')) { link = link.split('storage/').pop() } else if (link.slice(0, baseURL.length) === baseURL) { link = link.slice(baseURL.length) } scale = scale ? (Number.isInteger(scale) ? scale : 500) : 500 return { scaler: digilib.scaler + link.trim() + '&dw=' + scale + '&dh=' + scale, viewer: digilib.viewer + link.trim() } } } const extract_file_format = (path, UpperCase) => { path = path ? path.trim().split('.').pop() : null return !path ? null : (UpperCase ? 
path.toUpperCase() : path.toLowerCase()) }
<reponame>abhinitro/todo-backend const con= require("./../_helpers/db"); const { body } = require('express-validator'); const util = require('util'); let table='todos'; const query = util.promisify(con.query).bind(con); module.exports={ findOne, findAll, findById, create, update, destroy }; async function findOne(obj){ } /** * * @param {String} id */ async function findById(id){ let sql=`SELECT * FROM ${table} where id=${id}`; let model =await query(sql); console.log(typeof model); return model; } async function findAll(id){ let sql=`SELECT * FROM ${table} where create_user_id=${id}`; let model =await query(sql); return model; } async function create(obj){ let title=obj.title; let description=obj.description; let date=obj.date; let create_user_id=obj.create_user_id; let sql = `INSERT INTO ${table} (title,description,date,create_user_id) VALUES ('${title}', '${description}','${date}','${create_user_id}')`; let model =await query(sql); return model; } async function update(obj){ let id=obj.id; if(obj.hasOwnProperty("state_id")){ let sql=`UPDATE ${table} SET state_id="${1}"WHERE id=${id}`; let model =await query(sql); return model; } let title=obj.title; let description=obj.description; let date=obj.date; let sql=`UPDATE ${table} SET title="${title}",description="${description}",date="${date}" WHERE id=${id}`; let model =await query(sql); return model; } async function destroy(id){ let sql2=`DELETE FROM bucket_todos_lists WHERE todo_id=${id}`; let model2=await query(sql2); let sql=`DELETE FROM ${table} WHERE id=${id}`; let model =await query(sql); return model; }
/**
 * @author ooooo
 * @date 2021/1/15 18:53
 */

#ifndef CPP_0947__SOLUTION1_H_
#define CPP_0947__SOLUTION1_H_

#include <iostream>
#include <vector>
#include <queue>
#include <unordered_map>
#include <unordered_set>

using namespace std;

// LeetCode 947 "Most Stones Removed with Same Row or Column".
// Stones sharing a row or column form a connected component; from each component
// all stones but one can be removed, so the answer is (#stones - #components).
// BFS over stone indices counts this directly: each bfs() adds (componentSize - 1).
class Solution {
 public:
    int ans = 0;

    // Visit every stone reachable from rootIndex via shared row (x) or column (y).
    // x maps a row coordinate to the indices of stones in that row; y likewise for columns.
    void bfs(int rootIndex,
             unordered_set<int> &visited,
             unordered_map<int, vector<int>> &x,
             unordered_map<int, vector<int>> &y,
             vector<vector<int>> &stones) {
        queue<int> q;
        q.push(rootIndex);
        while (!q.empty()) {
            auto i = q.front();
            q.pop();
            if (visited.count(i)) continue;
            ans++;
            visited.insert(i);
            // Enqueue all stones in the same row.
            for (auto &j : x[stones[i][0]]) {
                if (!visited.count(j)) q.push(j);
            }
            // Enqueue all stones in the same column.
            for (auto &j : y[stones[i][1]]) {
                if (!visited.count(j)) q.push(j);
            }
        }
        // One stone per component must remain, so it cannot be removed.
        ans--;
    }

    int removeStones(vector<vector<int>> &stones) {
        // BUG FIX: reset the accumulator so calling removeStones() more than once
        // on the same Solution instance does not carry over the previous answer.
        ans = 0;
        unordered_map<int, vector<int>> x, y;
        for (int i = 0; i < stones.size(); ++i) {
            x[stones[i][0]].push_back(i);
            y[stones[i][1]].push_back(i);
        }
        unordered_set<int> visited;
        for (int i = 0; i < stones.size(); ++i) {
            if (visited.count(i)) continue;
            bfs(i, visited, x, y, stones);
        }
        return ans;
    }
};

#endif //CPP_0947__SOLUTION1_H_
using System; using System.Collections.Generic; public class ComponentRegistry { private static Dictionary<string, Type> components; public static void Init() { components = new Dictionary<string, Type>(); } public static void Register(string name, Type type) { if (components == null) throw new InvalidOperationException("Init must be called before registering components!"); components[name.ToLower()] = type; } public static Type Get(string name) { if (components == null) throw new InvalidOperationException("Init must be called before accessing component dictionary!"); if (components.TryGetValue(name.ToLower(), out Type type)) { return type; } else { throw new KeyNotFoundException($"Component with name '{name}' does not exist in the registry."); } } public static bool Exists(string name, out Type type) { if (components == null) throw new InvalidOperationException("Init must be called before accessing component dictionary!"); else return components.TryGetValue(name.ToLower(), out type); } }
(function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : typeof define === 'function' && define.amd ? define(factory) : (global.StarFilled16 = factory()); }(this, (function () { 'use strict'; var _16 = { elem: 'svg', attrs: { xmlns: 'http://www.w3.org/2000/svg', viewBox: '0 0 16 16', width: 16, height: 16, }, content: [ { elem: 'path', attrs: { d: 'M8 1L5.7 5.6l-5.1.7 3.7 3.6-.8 5.1L8 12.6l4.6 2.4-.9-5.1 3.7-3.6-5.1-.7L8 1z', }, }, ], name: 'star--filled', size: 16, }; return _16; })));
<reponame>quiram/java-util<filename>src/main/java/com/github/quiram/utils/ToPairsCollector.java package com.github.quiram.utils; import org.apache.commons.lang3.tuple.Pair; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.function.BiConsumer; import java.util.function.BinaryOperator; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collector; import static java.util.Collections.emptySet; class ToPairsCollector<T> implements Collector<T, ToPairsCollector<T>.Container, List<Pair<T, T>>> { public class Container { List<T> originalItems; List<Pair<T, T>> list; Optional<T> item; } @Override public Supplier<Container> supplier() { final Container container = new Container(); container.originalItems = new ArrayList<>(); container.list = new ArrayList<>(); container.item = Optional.empty(); return () -> container; } @Override public BiConsumer<Container, T> accumulator() { return ((container, item) -> { container.originalItems.add(item); if (container.item.isPresent()) { final Pair<T, T> pair = Pair.of(container.item.get(), item); container.list.add(pair); container.item = Optional.empty(); } else { container.item = Optional.of(item); } }); } @Override public Function<Container, List<Pair<T, T>>> finisher() { return container -> { if (!container.item.isPresent()) { return container.list; } throw new RuntimeException("An even number of elements was expected, but got " + container.originalItems); }; } @Override public BinaryOperator<Container> combiner() { return ($1, $2) -> { throw new RuntimeException("This collector doesn't support parallel streams."); }; } @Override public Set<Characteristics> characteristics() { return emptySet(); } }
#!/usr/bin/env bash MASON_NAME=freetype MASON_VERSION=2.5.5 MASON_LIB_FILE=lib/libfreetype.a MASON_PKGCONFIG_FILE=lib/pkgconfig/freetype2.pc . ${MASON_DIR}/mason.sh function mason_load_source { mason_download \ https://nongnu.askapache.com/freetype/freetype-old/freetype-${MASON_VERSION}.tar.bz2 \ c857bfa638b9c71e48baacd1cb12be446b62c333 mason_extract_tar_bz2 export MASON_BUILD_PATH=${MASON_ROOT}/.build/freetype-${MASON_VERSION} } function mason_compile { ./configure --prefix=${MASON_PREFIX} \ --enable-static \ --disable-shared ${MASON_HOST_ARG} \ --with-zlib=yes \ --with-bzip2=no \ --with-harfbuzz=no \ --with-png=no \ --with-quickdraw-toolbox=no \ --with-quickdraw-carbon=no \ --with-ats=no \ --with-fsref=no \ --with-fsspec=no \ make -j${MASON_CONCURRENCY} make install } function mason_ldflags { : # We're only using the full path to the archive, which is output in static_libs } function mason_cflags { echo "-I${MASON_PREFIX}/include/freetype2" } function mason_clean { make clean } mason_run "$@"
#!/bin/bash dconf load /org/pantheon/terminal/settings/ <<COLORS [/] name='Monokai Vivid' cursor-color='#fb0007' foreground='#f9f9f9' background='rgba(18,18,18,.95)' palette='#121212:#fa2934:#98e123:#fff30a:#0443ff:#f800f8:#01b6ed:#ffffff:#838383:#f6669d:#b1e05f:#fff26d:#0443ff:#f200f6:#51ceff:#ffffff' COLORS
import * as React from 'react'; import { ReactElement, ReactNode } from 'react'; import { MainLayout } from './layouts/main'; export interface MyViewProps { name: string; title: string; } const Homepage = ({ name, ...props }: MyViewProps): ReactElement => ( <div> <h1>this is {props.title}</h1> <div>Hello {name}</div> <div>next div</div> </div> ); export default Homepage;
<filename>src/main/java/org/xtwy/user/UserController.java package org.xtwy.user; import com.google.protobuf.ByteString; import org.springframework.stereotype.Controller; import org.xtwy.media.Remote; import org.xtwy.pb.protocol.EmailProbuf.Email; import org.xtwy.pb.protocol.ResponseMsgProbuf.ResponseMsg; import org.xtwy.pb.protocol.UserProbuf.User; import com.hzins.thrift.demo.Content; import com.hzins.thrift.demo.ThriftResponse; @Controller public class UserController { @Remote("saveUser") public Object saveUser(User user){ //使用mybatis把user数据插入到数据库 User.Builder newUser = user.newBuilder().setPhone("123456"); newUser.setId(12); newUser.setUserName("ssss"); return ResponseMsg.newBuilder().setResponse(newUser.build().toByteString()).build(); } @Remote("getEmailByUser") public Object getEmailByUser(User user){ //使用mybatis把user数据插入到数据库 Email.Builder email = Email.newBuilder().setContent("test").setFromUser("zhangsan").setId(12).setSubject("test"); ResponseMsg response = ResponseMsg.newBuilder().setResponse(email.build().toByteString()).build(); return response; } @Remote("httpGetEmailByUser") public Object getEmail(String email){ email = email+"hhhh"; ResponseMsg response = ResponseMsg.newBuilder().setResponse(ByteString.copyFromUtf8(email)).build(); return response; } @Remote("ThriftGetEmailByContent") public Object getEmail(Content content){ System.out.println(content.getPhone()); content.setId(1); content.setPhone("15626519062"); content.setIdIsSet(true); return content; } }
#!/bin/bash # # this comes from: https://github.com/python-zk/kazoo/blob/master/ensure-zookeeper-env.sh # set -e HERE=`pwd` ZOO_BASE_DIR="${HERE}/zookeeper" ZOOKEEPER_VERSION=${ZOOKEEPER_VERSION:-3.5.4-beta} ZOOKEEPER_PATH="${ZOO_BASE_DIR}/${ZOOKEEPER_VERSION}" ZOO_MIRROR_URL="https://archive.apache.org" function download_zookeeper(){ mkdir -p $ZOO_BASE_DIR cd $ZOO_BASE_DIR curl --silent -C - $ZOO_MIRROR_URL/dist/zookeeper/zookeeper-$ZOOKEEPER_VERSION/zookeeper-$ZOOKEEPER_VERSION.tar.gz | tar -zx mv zookeeper-$ZOOKEEPER_VERSION $ZOOKEEPER_VERSION chmod a+x $ZOOKEEPER_PATH/bin/zkServer.sh } if [ ! -d "$ZOOKEEPER_PATH" ]; then download_zookeeper echo "Downloaded zookeeper $ZOOKEEPER_VERSION to $ZOOKEEPER_PATH" else echo "Already downloaded zookeeper $ZOOKEEPER_VERSION to $ZOOKEEPER_PATH" fi export ZOOKEEPER_VERSION export ZOOKEEPER_PATH cd $HERE # Yield execution $*
/** * Copyright 2016 Floating Market B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.plotprojects.titanium; import java.util.*; import android.util.Log; import com.plotprojects.retail.android.*; @SuppressWarnings("unchecked") //required for Kroll public final class JsonUtil { private final static String LOG_TAG = "PLOT/Titanium"; private final static String KEY_ID = "identifier"; private final static String KEY_MATCH_ID = "matchIdentifier"; private final static String KEY_MESSAGE = "message"; private final static String KEY_DATA = "data"; private final static String KEY_GEOFENCE_LATITUDE = "geofenceLatitude"; private final static String KEY_GEOFENCE_LONGITUDE = "geofenceLongitude"; private final static String KEY_TRIGGER = "trigger"; private final static String KEY_DWELLING_MINUTES = "dwellingMinutes"; private final static String KEY_NAME = "name"; private final static String KEY_MATCH_RANGE = "matchRange"; private final static String KEY_HANDLER_TYPE = "notificationHandlerType"; private final static String KEY_DATE_OPENED = "dateOpened"; private final static String KEY_DATE_SENT = "dateSent"; private final static String KEY_DATE_HANDLED = "dateHandled"; private final static String KEY_IS_OPENED = "isOpened"; private final static String KEY_IS_HANDLED = "isHandled"; public static HashMap<String, Object> notificationToMap(FilterableNotification notification) { HashMap<String, Object> jsonNotification = new HashMap<String, Object>(); jsonNotification.put(KEY_ID, 
notification.getId()); jsonNotification.put(KEY_MESSAGE, notification.getMessage()); jsonNotification.put(KEY_DATA, notification.getData()); if (!Double.isNaN(notification.getGeofenceLatitude())) { jsonNotification.put(KEY_GEOFENCE_LATITUDE, notification.getGeofenceLatitude()); } else { jsonNotification.put(KEY_GEOFENCE_LATITUDE, null); } if (!Double.isNaN(notification.getGeofenceLongitude())) { jsonNotification.put(KEY_GEOFENCE_LONGITUDE, notification.getGeofenceLongitude()); } else { jsonNotification.put(KEY_GEOFENCE_LONGITUDE, null); } jsonNotification.put(KEY_TRIGGER, notification.getTrigger()); jsonNotification.put(KEY_DWELLING_MINUTES, notification.getDwellingMinutes()); jsonNotification.put(KEY_MATCH_RANGE, notification.getMatchRange()); jsonNotification.put(KEY_HANDLER_TYPE, notification.getHandlerType()); return jsonNotification; } public static HashMap<String, Object>[] notificationsToMap(List<FilterableNotification> notifications) { HashMap<String, Object>[] result = new HashMap[notifications.size()]; int i = 0; for (FilterableNotification notification : notifications) { result[i] = notificationToMap(notification); i++; } return result; } private static Map<String, FilterableNotification> indexFilterableNotification(List<FilterableNotification> notifications) { Map<String, FilterableNotification> result = new HashMap<String, FilterableNotification>(); for (FilterableNotification n : notifications) { result.put(n.getId(), n); } return result; } public static List<FilterableNotification> getNotifications(Object[] jsonNotifications, List<FilterableNotification> notifications) { Map<String, FilterableNotification> notificationsIndexed = indexFilterableNotification(notifications); List<FilterableNotification> result = new ArrayList<FilterableNotification>(); for (Object obj: jsonNotifications) { if (!(obj instanceof Map)) { throw new IllegalArgumentException("notifications must contains objects"); } Map<String, String> jsonNotification = (Map<String, String>) 
obj; String id = jsonNotification.get(KEY_ID); FilterableNotification notification = notificationsIndexed.get(id); if (notification == null) { Log.w(LOG_TAG, String.format("Couldn't find notification with id '%s' in Notification Filter", id)); continue; } notification.setMessage(jsonNotification.get(KEY_MESSAGE)); notification.setData(jsonNotification.get(KEY_DATA)); result.add(notification); } return result; } public static HashMap<String, Object> geotriggerToMap(Geotrigger geotrigger) { HashMap<String, Object> jsonGeotrigger = new HashMap<String, Object>(); jsonGeotrigger.put(KEY_ID, geotrigger.getId()); jsonGeotrigger.put(KEY_NAME, geotrigger.getName()); jsonGeotrigger.put(KEY_DATA, geotrigger.getData()); if (!Double.isNaN(geotrigger.getGeofenceLatitude())) { jsonGeotrigger.put(KEY_GEOFENCE_LATITUDE, geotrigger.getGeofenceLatitude()); } else { jsonGeotrigger.put(KEY_GEOFENCE_LATITUDE, null); } if (!Double.isNaN(geotrigger.getGeofenceLongitude())) { jsonGeotrigger.put(KEY_GEOFENCE_LONGITUDE, geotrigger.getGeofenceLongitude()); } else { jsonGeotrigger.put(KEY_GEOFENCE_LONGITUDE, null); } jsonGeotrigger.put(KEY_TRIGGER, geotrigger.getTrigger()); jsonGeotrigger.put(KEY_DWELLING_MINUTES, geotrigger.getDwellingMinutes()); jsonGeotrigger.put(KEY_MATCH_RANGE, geotrigger.getMatchRange()); return jsonGeotrigger; } public static HashMap<String, Object>[] geotriggersToMap(List<Geotrigger> geotriggers) { HashMap<String, Object>[] result = new HashMap[geotriggers.size()]; int i = 0; for (Geotrigger geotrigger : geotriggers) { result[i] = geotriggerToMap(geotrigger); i++; } return result; } private static Map<String, Geotrigger> indexGeotrigger(List<Geotrigger> geotriggers) { Map<String, Geotrigger> result = new HashMap<String, Geotrigger>(); for (Geotrigger geotrigger : geotriggers) { result.put(geotrigger.getId(), geotrigger); } return result; } public static List<Geotrigger> getGeotriggers(Object[] jsonGeotriggers, List<Geotrigger> geotriggers) { Map<String, Geotrigger> 
geotriggersIndexed = indexGeotrigger(geotriggers); List<Geotrigger> result = new ArrayList<Geotrigger>(); for (Object obj: jsonGeotriggers) { if (!(obj instanceof Map)) { throw new IllegalArgumentException("geotriggers must contain objects"); } Map<String, String> jsonGeotrigger = (Map<String, String>) obj; String id = jsonGeotrigger.get(KEY_ID); Geotrigger geotrigger = geotriggersIndexed.get(id); if (geotrigger == null) { Log.w(LOG_TAG, String.format("Couldn't find geotrigger with id '%s' in Geotrigger Handler", id)); continue; } result.add(geotrigger); } return result; } public static HashMap<String, Object> notificationTriggerToMap(NotificationTrigger notification) { HashMap<String, Object> jsonNotification = new HashMap<String, Object>(); jsonNotification.put(KEY_ID, notification.getId()); jsonNotification.put(KEY_MESSAGE, notification.getMessage()); jsonNotification.put(KEY_DATA, notification.getData()); if (!Double.isNaN(notification.getGeofenceLatitude())) { jsonNotification.put(KEY_GEOFENCE_LATITUDE, notification.getGeofenceLatitude()); } else { jsonNotification.put(KEY_GEOFENCE_LATITUDE, null); } if (!Double.isNaN(notification.getGeofenceLongitude())) { jsonNotification.put(KEY_GEOFENCE_LONGITUDE, notification.getGeofenceLongitude()); } else { jsonNotification.put(KEY_GEOFENCE_LONGITUDE, null); } jsonNotification.put(KEY_TRIGGER, notification.getTrigger()); jsonNotification.put(KEY_DWELLING_MINUTES, notification.getDwellingMinutes()); jsonNotification.put(KEY_MATCH_RANGE, notification.getMatchRange()); return jsonNotification; } public static HashMap<String, Object>[] notificationTriggersToMap(List<NotificationTrigger> notifications) { HashMap<String, Object>[] result = new HashMap[notifications.size()]; int i = 0; for (NotificationTrigger notification : notifications) { result[i] = notificationTriggerToMap(notification); i++; } return result; } public static HashMap<String, Object> sentNotificationToMap(SentNotification notification) { HashMap<String, 
Object> jsonNotification = new HashMap<String, Object>(); jsonNotification.put(KEY_ID, notification.getId()); jsonNotification.put(KEY_MATCH_ID, notification.getMatchId()); jsonNotification.put(KEY_MESSAGE, notification.getMessage()); jsonNotification.put(KEY_DATA, notification.getData()); if (!Double.isNaN(notification.getGeofenceLatitude())) { jsonNotification.put(KEY_GEOFENCE_LATITUDE, notification.getGeofenceLatitude()); } else { jsonNotification.put(KEY_GEOFENCE_LATITUDE, null); } if (!Double.isNaN(notification.getGeofenceLongitude())) { jsonNotification.put(KEY_GEOFENCE_LONGITUDE, notification.getGeofenceLongitude()); } else { jsonNotification.put(KEY_GEOFENCE_LONGITUDE, null); } jsonNotification.put(KEY_TRIGGER, notification.getTrigger()); jsonNotification.put(KEY_DWELLING_MINUTES, notification.getDwellingMinutes()); jsonNotification.put(KEY_MATCH_RANGE, notification.getMatchRange()); jsonNotification.put(KEY_HANDLER_TYPE, notification.getHandlerType()); jsonNotification.put(KEY_DATE_SENT, notification.getDateSent()); jsonNotification.put(KEY_DATE_OPENED, notification.getDateOpened()); jsonNotification.put(KEY_IS_OPENED, notification.isOpened()); return jsonNotification; } public static HashMap<String, Object>[] sentNotificationsToMap(List<SentNotification> notifications) { HashMap<String, Object>[] result = new HashMap[notifications.size()]; int i = 0; for (SentNotification notification : notifications) { result[i] = sentNotificationToMap(notification); i++; } return result; } public static HashMap<String, Object> sentGeotriggerToMap(SentGeotrigger geotrigger) { HashMap<String, Object> jsonGeotrigger = new HashMap<String, Object>(); jsonGeotrigger.put(KEY_ID, geotrigger.getId()); jsonGeotrigger.put(KEY_MATCH_ID, geotrigger.getMatchId()); jsonGeotrigger.put(KEY_DATA, geotrigger.getData()); if (!Double.isNaN(geotrigger.getGeofenceLatitude())) { jsonGeotrigger.put(KEY_GEOFENCE_LATITUDE, geotrigger.getGeofenceLatitude()); } else { 
jsonGeotrigger.put(KEY_GEOFENCE_LATITUDE, null); } if (!Double.isNaN(geotrigger.getGeofenceLongitude())) { jsonGeotrigger.put(KEY_GEOFENCE_LONGITUDE, geotrigger.getGeofenceLongitude()); } else { jsonGeotrigger.put(KEY_GEOFENCE_LONGITUDE, null); } jsonGeotrigger.put(KEY_TRIGGER, geotrigger.getTrigger()); jsonGeotrigger.put(KEY_DWELLING_MINUTES, geotrigger.getDwellingMinutes()); jsonGeotrigger.put(KEY_MATCH_RANGE, geotrigger.getMatchRange()); jsonGeotrigger.put(KEY_DATE_SENT, geotrigger.getDateSent()); jsonGeotrigger.put(KEY_DATE_HANDLED, geotrigger.getDateHandled()); jsonGeotrigger.put(KEY_IS_OPENED, geotrigger.isHandled()); return jsonGeotrigger; } public static HashMap<String, Object>[] sentGeotriggersToMap(List<SentGeotrigger> geotriggers) { HashMap<String, Object>[] result = new HashMap[geotriggers.size()]; int i = 0; for (SentGeotrigger geotrigger : geotriggers) { result[i] = sentGeotriggerToMap(geotrigger); i++; } return result; } }
int factorial(int n) { int fact = 1; for (int i=1; i<=n; i++) { fact *= i; } return fact; } int result = factorial(5); cout << result; // returns 120
from tkinter import* import website import tkinter.font as font from PIL import ImageTk,Image import os import sqlite3 import webbrowser def main(): cgnc=Tk() cgnc.title('Show') cgnc.iconbitmap("logo/spectrumlogo.ico") f=font.Font(family='Bookman Old Style',size=10,weight='bold') f1=font.Font(family='Bookman Old Style',size=10) db=sqlite3.connect("mark_list.db") #cursor c=db.cursor() #query the database c.execute("SELECT *,oid FROM mark_list") records=c.fetchall() l=len(c.fetchall()) ch=records[l-1][4] ma=records[l-1][5] co=records[l-1][6] us=records[l-1][0] #commit_changes db.commit() #close connection db.close() def cgpa(): cg1=((ch+ma+co)/3)/9.5 cg="{:.2f}".format(cg1) db=sqlite3.connect("mark_list.db") c=db.cursor() #query the database c.execute("SELECT *,oid FROM mark_list") records=c.fetchall() l=len(c.fetchall()) n6=records[l-1][1] c.execute("""UPDATE mark_list SET cgpa=? WHERE name=?""",(cg,n6)) #commit_changes db.commit() #close connection db.close() entry.delete(0,END) entry.insert(0,cg) def grad(): av=((ch+ma+co)/3) if av<=100 and av>=90: gr='O' elif av<90 and av>=80: gr='E' elif av<80 and av>=70: gr='A' elif av<70 and av>=60: gr='B' elif av<60 and av>=50: gr='C' elif av<50 and av>=40: gr='D' elif av<40: gr='F' db=sqlite3.connect("mark_list.db") c=db.cursor() #query the database c.execute("SELECT *,oid FROM mark_list") records=c.fetchall() l=len(c.fetchall()) n6=records[l-1][1] c.execute("""UPDATE mark_list SET grade=? 
WHERE name=?""",(gr,n6)) #commit_changes db.commit() #close connection db.close() entry.delete(0,END) entry.insert(0,gr) #buttons cgpa=Button(cgnc,text='CGPA',bg='yellow',fg='black',borderwidth=3,padx=25,pady=20,command=cgpa,font=f) cgpa.grid(row=0,column=0) grade=Button(cgnc,text='GRADE',bg='yellow',fg='black',borderwidth=3,padx=20,pady=20,command=grad,font=f) grade.grid(row=0,column=1) Label(cgnc,text="\n").grid(row=1) def new(): db=sqlite3.connect("mark_list.db") #cursor c=db.cursor() #insert into tabels c.execute("INSERT INTO mark_list VALUES(:user_name,:name,:registration_no,:branch,:chemistry,:math,:computer,:cgpa,:grade)", { 'user_name':us, 'name':' ', 'registration_no':' ', 'branch':' ', 'chemistry':0, 'math':0, 'computer':0, 'cgpa':0, 'grade':' ' }) #commit_changes db.commit() #close connection db.close() cgnc.destroy() import input_details input_details.main() def close(): os._exit(1) new_input=Button(cgnc,text='New Input',bg='yellow',fg='black',borderwidth=3,padx=10,pady=20,command=new,font=f) new_input.grid(row=2,column=0) close=Button(cgnc,text='Close',bg='yellow',fg='black',borderwidth=3,command=close,padx=20,pady=20,font=f) close.grid(row=2,column=1) Label(cgnc,text="\n").grid(row=3) entry=Entry(cgnc,borderwidth=3,width=44) entry.grid(row=4,column=0,columnspan=2,padx=20) def show_en(): show_ent=Toplevel() show_ent.geometry("600x450") db=sqlite3.connect("mark_list.db") #cursor c=db.cursor() #query the database c.execute("SELECT *,oid FROM mark_list") records=c.fetchall() f=font.Font(family='Bookman Old Style',size=10,weight='bold') l=len(c.fetchall()) Label(show_ent,text="Username",font=f,fg='red').grid(row=0,column=0) Label(show_ent,text="Name",font=f,fg='red').grid(row=0,column=1) Label(show_ent,text="Registration ID",font=f,fg='red').grid(row=0,column=2) Label(show_ent,text="Branch",font=f,fg='red').grid(row=0,column=3) Label(show_ent,text="Chemistry",font=f,fg='red').grid(row=0,column=4) 
Label(show_ent,text="Math",font=f,fg='red').grid(row=0,column=5) Label(show_ent,text="Computer",font=f,fg='red').grid(row=0,column=6) Label(show_ent,text="Cgpa",font=f,fg='red').grid(row=0,column=7) Label(show_ent,text="Grade",font=f,fg='red').grid(row=0,column=8) r=1 r1=0 for record in records: if(records[l-1][0]==record[0]): l1=list(record) for c in range(0,9): Label(show_ent,text=l1[c],fg='blue',font=f1).grid(row=r1+1,column=c) r+=1 r=r+1 r1=r1+1 #commit_changes db.commit() #close connection db.close() show=Button(cgnc,text='Show Entries',bg='yellow',fg='black',borderwidth=3,command=show_en,padx=84,pady=5,font=f) show.grid(row=5,column=0,columnspan=2,padx=40) fo=font.Font(family='36 DAYS',size=10) def call(url): webbrowser.open_new(url) Label(cgnc,text="\nVisit our club website:",fg='blue',font=fo).grid(row=6,column=0,columnspan=2) l=Label(cgnc,text="https://spectrumcet.com/",fg='blue',font=fo) l.bind("<Button-1>",lambda x:call('https://spectrumcet.com/')) l.grid(row=7,column=0,columnspan=2) mainloop() if __name__=='__main__': main()
#!/usr/bin/env bash # Copyright 2017 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -o errexit set -o nounset set -o pipefail CUSTOM_HEADER=${PWD}/hack/boilerplate.go.txt GOBIN=${PWD}/bin "${PWD}"/hack/generate-groups.sh \ client,lister,informer \ github.com/spaghettifunk/pinot-operator/pkg/client \ github.com/spaghettifunk/pinot-operator/api \ pinot:v1alpha1 \ --go-header-file "${CUSTOM_HEADER}"
<gh_stars>0 module Eye::Process::Data def logger_tag full_name end def app_name self[:application] end def group_name (self[:group] == '__default__') ? nil : self[:group] end def group_name_pure self[:group] end def full_name @full_name ||= [app_name, group_name, self[:name]].compact.join(':') end def status_data(opts = {}) p_st = self_status_data(opts) if children.present? p_st.merge(subtree: Eye::Utils::AliveArray.new(children.values).map { |c| c.status_data(opts) }) elsif self[:monitor_children] && self.up? p_st.merge(subtree: [{ name: '=loading children=' }]) else # common state p_st end end def self_status_data(opts) h = { name: name, state: state, type: (self.class == Eye::ChildProcess ? :child_process : :process), resources: Eye::SystemResources.resources(pid) } if @states_history h[:state_changed_at] = @states_history.last_state_changed_at.to_i h[:state_reason] = @states_history.last_reason.to_s end h[:debug] = debug_data if opts[:debug] h[:procline] = Eye::SystemResources.args(self.pid) if opts[:procline] h[:current_command] = scheduler_current_command if scheduler_current_command h end def debug_data { queue: scheduler_actions_list, watchers: @watchers.keys, timers: timers_data } end def timers_data if actor = Thread.current[:celluloid_actor] actor.timers.timers.map(&:interval) end rescue [] end def sub_object?(obj) return false if self.class == Eye::ChildProcess self.children.values.each { |child| return true if child == obj } false end def environment_string s = [] @config[:environment].each { |k, v| s << "#{k}=#{v}" } s * ' ' end def shell_string(dir = true) str = '' str += "cd #{self[:working_dir]} && " if dir str += environment_string str += ' ' str += self[:start_command] str += ' &' if self[:daemonize] str end end
def shift_words(s): shifted_s = "" for c in s: if c.isalpha(): shifted_s += chr(ord(c)+1) else: shifted_s += c return shifted_s print(shift_words("hello world")) # Output: ifmmp xpsme
const NYT_ID = '<-- Client ID -->'; const rootElement = document.getElementById('root'); const searchForm = document.getElementById('search-form'); const filterArticles = doc => { return (doc.document_type === 'article' && !!((doc.multimedia[0] || {}).url)); } function addArticles() { const data = JSON.parse(this.responseText); loader.style.display = 'none'; const docs = data.response.docs.filter(filterArticles).map(doc => ` <a class="${!!(doc.snippet.length >= 150) ? 'card-medium' : 'card-small'}" href="${doc.web_url}" target="_blank"> <article> <figure> <img src="//www.nytimes.com/${doc.multimedia[0].url}"> </figure> <p>${doc.snippet}</p> </article> </a> `); const docList = docs.join(''); rootElement.innerHTML = ` <div class="results"> ${docList.trim()} </div> <div class="copy"> <small>${data.copyright}</small> </div> `; } function articlesError(err) { loader.style.display = 'none'; alert('Error') } function searchForArticles(query) { loader.style.display = 'block'; rootElement.innerHTML = '' const articleRequest = new XMLHttpRequest(); articleRequest.onload = addArticles; articleRequest.onerror = articlesError; articleRequest.open('GET', `http://api.nytimes.com/svc/search/v2/articlesearch.json?q=${query}&api-key=${NYT_ID}`); articleRequest.send(); } function submitSearch(e) { e.preventDefault(); searchForArticles(e.target[0].value); } searchForm.addEventListener('submit', submitSearch);
package org.webutils.dao.impl; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.orm.ibatis.support.SqlMapClientDaoSupport; import org.springframework.stereotype.Repository; import org.webutils.beans.Person; import org.webutils.dao.PersonDao; import com.ibatis.sqlmap.client.SqlMapClient; @Repository public class SqlMapPersonDaoImpl extends SqlMapClientDaoSupport implements PersonDao { private static final String NAMESPACE = "person."; @Autowired @Qualifier("sqlMapClient1") public void injectSqlMapClient(SqlMapClient sqlMapClient) { setSqlMapClient(sqlMapClient); } @Override @SuppressWarnings("unchecked") public List<Person> selectAll() { return getSqlMapClientTemplate().queryForList(NAMESPACE + "selectAll"); } }
def base10_to_base2(number): base2_number = "" while number != 0: rem = number % 2 base2_number = str(rem) + base2_number number = number // 2 return base2_number
<filename>index.js import { memoFetch, setExpirationTime, setMaxMemo, setConfigPath, } from './lib/memoFetch.js'; export { memoFetch, setExpirationTime, setMaxMemo, setConfigPath };
<gh_stars>0 (window["webpackJsonp"] = window["webpackJsonp"] || []).push([[228],{ /***/ "../../src/components/icon/assets/sortLeft.js": /*!*******************************************************************************!*\ !*** /Users/chanderprall/projects/eui/src/components/icon/assets/sortLeft.js ***! \*******************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.icon = void 0; __webpack_require__(/*! core-js/modules/es6.object.assign */ "../../node_modules/core-js/modules/es6.object.assign.js"); var _react = _interopRequireDefault(__webpack_require__(/*! react */ "../../node_modules/react/index.js")); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); } var EuiIconSortLeft = function EuiIconSortLeft(props) { return _react.default.createElement("svg", _extends({ width: 16, height: 16, viewBox: "0 0 16 16", xmlns: "http://www.w3.org/2000/svg" }, props), _react.default.createElement("path", { d: "M4.308 7h8.136c.307 0 .556.224.556.5s-.249.5-.556.5H4.308l4.096 4.096a.5.5 0 0 1-.707.707L3.454 8.561a1.494 1.494 0 0 1-.433-.925.454.454 0 0 1 0-.272c.03-.338.175-.666.433-.925l4.243-4.242a.5.5 0 1 1 .707.707L4.308 7z", fillRule: "evenodd" })); }; var icon = EuiIconSortLeft; exports.icon = icon; EuiIconSortLeft.__docgenInfo = { "description": "", "methods": [], "displayName": "EuiIconSortLeft" }; /***/ }) }]); //# sourceMappingURL=228.bundle.js.map
/** * 类初始化、实例化是两个不同的东西 * <p> * 类初始化只有一次,并且相关信息是共享的。 * 类实例化可以多次。 * * 类初始化 * 1、new getstatic putstatic invokestatic,指令会导致类初始化,final修改的静态常量除外,因为是在编译器已经初始化完毕。 * 2、反射:通过 reflect进行方法调用的时候,发现类未初始化,需要进行一次初始化。 * 3、初始化一个类的时候,发现其父类未初始化,需要进行父类的初始化。 * 4、JVM启动的时候,用户需要指定一个Main(包含main()这个类),虚拟机会先初始化这个主类。 * 5、jdk1.7动态语言支持,如果一个java.lang.invoke.MethodHandler实例最后的解析结果是REF_getStatic REF_putstatic REF_invokestatic的方法句柄, * 如果这些方法句柄对应的类未初始化,则需要先触发初始化。 * * * JVM定义:有切只有这5中场景才能进行类初始化,统称为对一个类的主动引用。 * 其他场景下,所有引用类的方式都不会触发初始化,统称为被动引用。(子类直接调用父类的静态方法,不会触发子类的初始化,形如Son.Papa.PAPA_CONSTANTS) * * * ------------------------------------------------------------ * 类实例化顺序(类静态变量、静态代码块、非静态代码块、构造方法代码块) * * 如果有继承关系,需要进行递归操作。 * * ^ Object | * | 目标类的父类的父类的父类 | * | 目标类的父类的父类 | * | 目标类的父类 | * | 目标类 * * 左边:不断向上查找,一直找到Object超类。 * 右边:从超类开始向下实例化,一直到目标类。 * * * * @author yang.zhou 2019.11.28.10 */ package org.glamey.training.jvm.loader.init;
<reponame>bopopescu/drawquest-web from jinja2 import Markup, contextfunction from canvas.templatetags.jinja_base import jinja_context_tag @jinja_context_tag def viewer_is_following(context, user): viewer = context['request'].user return viewer.is_following(user)
<gh_stars>0 // <NAME>, Geometric Tools, Redmond WA 98052 // Copyright (c) 1998-2016 // Distributed under the Boost Software License, Version 1.0. // http://www.boost.org/LICENSE_1_0.txt // http://www.geometrictools.com/License/Boost/LICENSE_1_0.txt // File Version: 3.0.0 (2016/06/19) #include <GTEnginePCH.h> #include <Mathematics/GteEdgeKey.h> namespace gte { template<> EdgeKey<true>::EdgeKey(int v0, int v1) { V[0] = v0; V[1] = v1; } template<> EdgeKey<false>::EdgeKey(int v0, int v1) { if (v0 < v1) { // v0 is minimum V[0] = v0; V[1] = v1; } else { // v1 is minimum V[0] = v1; V[1] = v0; } } }
<reponame>gcusnieux/jooby package org.jooby.filewatcher; import org.junit.Test; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardWatchEventKinds; import java.nio.file.WatchEvent.Kind; import static org.junit.Assert.*; public class FileEventOptionsTest { public static class MyHandler implements FileEventHandler { @Override public void handle(final Kind<Path> kind, final Path path) throws IOException { } } @Test public void defaults() throws IOException { Path source = Paths.get("."); MyHandler handler = new MyHandler(); FileEventOptions options = new FileEventOptions(source, MyHandler.class); assertEquals(handler, options.handler(type -> handler)); assertEquals(source, options.path()); assertEquals("**/*", options.filter().toString()); assertEquals(true, options.filter().matches(null)); assertArrayEquals(new Object[]{StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY}, options.kinds()); assertEquals("HIGH", options.modifier().name()); assertEquals(true, options.recursive()); assertEquals( ". {kinds: [ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY], filter: **/*, recursive: true, modifier: HIGH}", options.toString()); } @Test public void withInstance() throws IOException { Path source = Paths.get("."); MyHandler handler = new MyHandler(); FileEventOptions options = new FileEventOptions(source, handler); assertEquals(handler, options.handler(type -> handler)); assertEquals(source, options.path()); assertEquals("**/*", options.filter().toString()); assertEquals(true, options.filter().matches(null)); assertArrayEquals(new Object[]{StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY}, options.kinds()); assertEquals("HIGH", options.modifier().name()); assertEquals(true, options.recursive()); assertEquals( ". 
{kinds: [ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY], filter: **/*, recursive: true, modifier: HIGH}", options.toString()); } @Test public void mkdir() throws IOException { Path source = Paths.get("target/" + FileEventOptions.class.getSimpleName()); new FileEventOptions(source, MyHandler.class); assertTrue(Files.exists(source)); } @Test public void filter() throws IOException { assertEquals("[**/*.java]", new FileEventOptions(Paths.get("."), MyHandler.class) .includes("**/*.java") .filter() .toString()); assertEquals("[**/*.java, **/*.kt]", new FileEventOptions(Paths.get("."), MyHandler.class) .includes("**/*.java") .includes("**/*.kt") .filter() .toString()); } @Test public void modifier() throws IOException { assertEquals("LOW", new FileEventOptions(Paths.get("."), MyHandler.class) .modifier(() -> "LOW") .modifier() .name()); } @Test public void recursive() throws IOException { assertEquals(false, new FileEventOptions(Paths.get("."), MyHandler.class) .recursive(false) .recursive()); } @Test public void kind() throws IOException { assertArrayEquals(new Object[]{StandardWatchEventKinds.ENTRY_CREATE}, new FileEventOptions(Paths.get("."), MyHandler.class) .kind(StandardWatchEventKinds.ENTRY_CREATE) .kinds()); assertArrayEquals(new Object[]{StandardWatchEventKinds.ENTRY_MODIFY}, new FileEventOptions(Paths.get("."), MyHandler.class) .kind(StandardWatchEventKinds.ENTRY_MODIFY) .kinds()); assertArrayEquals(new Object[]{StandardWatchEventKinds.ENTRY_DELETE}, new FileEventOptions(Paths.get("."), MyHandler.class) .kind(StandardWatchEventKinds.ENTRY_DELETE) .kinds()); assertArrayEquals( new Object[]{StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.ENTRY_CREATE}, new FileEventOptions(Paths.get("."), MyHandler.class) .kind(StandardWatchEventKinds.ENTRY_MODIFY) .kind(StandardWatchEventKinds.ENTRY_CREATE) .kinds()); } }
<reponame>InsideZhou/southern-quiet package me.insidezhou.southernquiet.util; import org.springframework.scheduling.annotation.Async; @Async public class AsyncRunner { public void run(Runnable runnable) { runnable.run(); } }
def validate_matriculation_number(student_matric, predefined_matricno):
    """Compare a student's matriculation number against the expected one.

    Returns a list of error messages — empty when the numbers match.  On a
    mismatch the function also emits an inline JavaScript ``alert`` snippet
    to stdout (the script appears to render a CGI-style HTML page).
    """
    if student_matric == predefined_matricno:
        return []

    message = "The matriculation numbers do not match"
    print("<script> alert('The matriculation numbers do not match'); </script>")
    return [message]
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-N-VB-ADJ-ADV/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-N-VB-ADJ-ADV/512+512+512-NER-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_named_entities_first_third_full --eval_function last_element_eval
#!/usr/bin/env bash # # Run the Docker image using the --regrid command line argument. Run this script from the root # directory of the input files. # # Identify the directory of this script, which is also the root directory of the project. SCRIPT=$(readlink -f "$0") SCRIPT_PATH=$(dirname "${SCRIPT}") SCRIPT_NAME=`basename "$0"` PROJECT_ROOT=$(readlink --canonicalize "${SCRIPT_PATH}/../..") docker container prune --force docker run \ -u $(id -u):$(id -g) \ --name "ereefs-ncaggregate" \ --memory=7.5GB \ -v `pwd`:/data/ \ --env EXECUTION_ENVIRONMENT=regrid \ --env DB_TYPE=file \ --env DB_PATH=/data/db \ ereefs-ncaggregate --regrid --input=/data/orig/ --output=/data/out/ --cache=/data/regrid-mapper.dat # Specifying variables is also supported. For example: # ereefs-ncaggregate --regrid --input=/data/orig --output=/data/out --cache=/data/regrid-mapper.dat --variables=temp,salt
import pandas as pd
from pandas import Series
import numpy as np
from pandas.tseries.offsets import DateOffset
from pandas.tseries.frequencies import to_offset


class CustomTimeSeries:
    """A fixed-frequency time series of consecutive integers.

    Builds a ``DatetimeIndex`` of ``periods`` timestamps starting at
    ``start_time``, spaced by ``freq`` (any pandas offset alias, e.g. ``"us"``
    for microseconds), and stores a ``Series`` with values ``0..periods-1``.
    """

    def __init__(self, start_time, freq, periods):
        # BUG FIX: the module used ``pd.`` throughout but only imported
        # ``Series`` from pandas — ``import pandas as pd`` was missing, so
        # every method raised NameError.  The import is now at the top.
        self.start_time = pd.to_datetime(start_time)
        self.freq = to_offset(freq)
        self.periods = periods
        self.rng = pd.date_range(start=self.start_time, periods=self.periods,
                                 freq=self.freq)
        self.data = Series(np.arange(len(self.rng)), index=self.rng)

    def subset_by_interval(self, start_time):
        """Return the slice of the series from ``start_time`` (inclusive) onward."""
        start_time = pd.to_datetime(start_time)
        return self.data[start_time:]

    def subset_by_timestamp(self, timestamp):
        """Return the single value stored at exactly ``timestamp``.

        Raises ``KeyError`` if the timestamp is not present in the index.
        """
        timestamp = pd.to_datetime(timestamp)
        return self.data[timestamp]


# Example usage
cts = CustomTimeSeries("2023-01-01 10:11:12", "us", 2000)
result_interval = cts.subset_by_interval("2023-01-01 10:11:12.001")
result_timestamp = cts.subset_by_timestamp("2023-01-01 10:11:12.001001")
print(result_interval)
print(result_timestamp)
#!/usr/bin/env bash set -e if [ $# -ne 1 ]; then echo "Usage: sh 21_relocate_WS_messages.sh <data-root>" echo "Unpacks labelled messages for reallocating WS data" exit fi DATA_DIR=$1 rm -rf "$DATA_DIR/21 WS Migration data for Coda" mkdir -p "$DATA_DIR/21 WS Migration data for Coda" cd ../relocate_ws_messages DATASET_NAMES=( "WUSC_KEEP_II_Aisha" "WUSC_KEEP_II_Aisha_Empirical_Expectations" "WUSC_KEEP_II_Aisha_Normative_Expectations" "WUSC_KEEP_II_Aisha_Parenthood" "WUSC_KEEP_II_Aisha_Reference_Groups" "WUSC_KEEP_II_Aisha_Reference_Groups_Others" "WUSC_KEEP_II_Aisha_Sanctions" "WUSC_KEEP_II_Amina" "WUSC_KEEP_II_Amina_Empirical_Expectations" "WUSC_KEEP_II_Amina_Normative_Expectations" "WUSC_KEEP_II_Amina_Parenthood" "WUSC_KEEP_II_Amina_Reference_Groups" "WUSC_KEEP_II_Amina_Reference_Groups_Others" "WUSC_KEEP_II_Amina_Sanctions" "WUSC_KEEP_II_Demogs_Age" "WUSC_KEEP_II_Demogs_Gender" "WUSC_KEEP_II_Demogs_Locations" "WUSC_KEEP_II_Mohamed" "WUSC_KEEP_II_Mohamed_Empirical_Expectations" "WUSC_KEEP_II_Mohamed_Normative_Expectations" "WUSC_KEEP_II_Mohamed_Parenthood" "WUSC_KEEP_II_Mohamed_Reference_Groups" "WUSC_KEEP_II_Mohamed_Reference_Groups_Others" "WUSC_KEEP_II_Mohamed_Sanctions" "WUSC_KEEP_II_Zamzam" "WUSC_KEEP_II_Zamzam_Empirical_Expectations" "WUSC_KEEP_II_Zamzam_Normative_Expectations" "WUSC_KEEP_II_Zamzam_Parenthood" "WUSC_KEEP_II_Zamzam_Reference_Groups" "WUSC_KEEP_II_Zamzam_Reference_Groups_Others" "WUSC_KEEP_II_Zamzam_Sanctions" ) for DATASET in "${DATASET_NAMES[@]}" do pipenv run python relocate.py "$DATA_DIR/20 Data for WS Migration/${DATASET}.json" "$DATA_DIR/21 WS Migration data for Coda" done
# Compile Game.cs with the Mono C# compiler, linking against the MonoGame
# framework and the Tao.Sdl bindings installed under /opt/monogame-pipeline.
mcs -reference:"/opt/monogame-pipeline/MonoGame.Framework.dll" -reference:"/opt/monogame-pipeline/Tao.Sdl.dll" Game.cs
#! /bin/sh LIB_URL=https://storage.googleapis.com/tensorio-build/ios/release/2.0/xcodebuild/12C33/tag/2.0.9/lib LIB_TENSORFLOW_URL=$LIB_URL/libtensorflow-core.a # -> tensorflow # LIB_PROTOBUF_URL=$LIB_URL/libprotobuf.a # -> libprotobuf.a # LIB_NSYNC_URL=$LIB_URL/nsync.a # -> libnsync.a SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" FRAMEWORK_DIR=tensorflow.framework echo "Downloading tensorflow static lib at $LIB_TENSORFLOW_URL to $FRAMEWORK_DIR/tensorflow" cd $SCRIPT_DIR/$FRAMEWORK_DIR curl -o tensorflow $LIB_TENSORFLOW_URL echo "Download complete"
#!/bin/bash TMP=$(mktemp -d) || exit 1 trap 'rc=$?; rm -rf $TMP; exit $rc' EXIT cd "$TMP" || exit 1 get_prop() { crudini --get "$1" '' "$2" 2>/dev/null || echo -n "$3"; } get_db_prop() { get_prop "/etc/xroad/db.properties" "$@"; } abort() { local rc=$?; echo -e "FATAL: $*" >&2; exit $rc; } while getopts "F" opt ; do case ${opt} in F) FORCE_RESTORE=true ;; \?) echo "Invalid option $OPTARG -- did you use the correct wrapper script?" exit 2 ;; esac done shift $(($OPTIND - 1)) DUMP_FILE=$1 USER=$(get_db_prop 'username' 'centerui') SCHEMA=$(get_db_prop 'schema' "$USER") PASSWORD=$(get_db_prop 'password' 'centerui') DATABASE=$(get_db_prop 'database' 'centerui_production') HOST=$(get_db_prop 'host' '127.0.0.1') PORT=$(get_db_prop 'port' 5432) MASTER_USER=postgres root_properties=/etc/xroad.properties export PGOPTIONS="-c client-min-messages=warning -c search_path=$SCHEMA,public" if [ "$SCHEMA" == "public" ]; then echo "FATAL: Restoring to the 'public' schema is not supported." >&2 exit 1 fi local_psql() { su -l -c "psql -qtA ${*@Q}" postgres } remote_psql() { psql -h "$HOST" -p "$PORT" -qtA "$@" } psql_dbuser() { PGDATABASE="$DATABASE" PGUSER="$USER" PGPASSWORD="$PASSWORD" remote_psql "$@" } detect_bdr() { [[ "$(psql_dbuser -c 'select bdr.bdr_version()' 2>/dev/null)" == "1.0."* ]]; } if [[ -f ${root_properties} && $(get_prop ${root_properties} postgres.connection.password) != "" ]]; then master_passwd=$(get_prop ${root_properties} postgres.connection.password) MASTER_USER=$(get_prop ${root_properties} postgres.connection.user 'postgres') function psql_master() { PGUSER="$MASTER_USER" PGPASSWORD="$master_passwd" remote_psql "$@" } else function psql_master() { local_psql "$@" } fi if psql_dbuser -c "\q" &>/dev/null; then echo "Database and user exists, skipping database creation." else psql_master <<EOF || abort "Creating database '$DATABASE' on '$HOST:$POST' failed." 
CREATE DATABASE "${DATABASE}" ENCODING 'UTF8'; REVOKE ALL ON DATABASE "${DATABASE}" FROM PUBLIC; DO \$\$ BEGIN CREATE ROLE "${USER}" LOGIN PASSWORD '${PASSWORD}'; EXCEPTION WHEN OTHERS THEN RAISE NOTICE 'user $USER already exists'; END\$\$; GRANT CREATE,TEMPORARY,CONNECT ON DATABASE "${DATABASE}" TO "${USER}"; \c "${DATABASE}" CREATE EXTENSION hstore; REVOKE CREATE ON SCHEMA public FROM PUBLIC; EOF fi if ! detect_bdr; then # restore dump { cat <<EOF BEGIN; DROP SCHEMA IF EXISTS "$SCHEMA" CASCADE; EOF cat "$DUMP_FILE" echo "COMMIT;" } | psql_dbuser >/dev/null || abort "Restoring database failed." else echo "BDR 1.0 detected. BDR 1.0 is deprecated and support will be removed in a future X-Road release." { cat <<EOF REVOKE CONNECT ON DATABASE "$DATABASE" FROM "$USER"; SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname='$DATABASE' and usename='$USER'; SET ROLE "$USER"; BEGIN; DROP SCHEMA IF EXISTS "$SCHEMA" CASCADE; EOF # Change statements like # CREATE SEQUENCE centerui.anchor_urls_id_seq # START WITH 1 # ... # USING bdr; # to # CREATE SEQUENCE <name> # USING bdr; # since BDR does not support most of the parameters (makes restore to fail) sed -r -e '/^CREATE SEQUENCE /{:a;/;$/!{;N;ba};P;iUSING bdr;' -e ';d}' "$DUMP_FILE" cat <<EOF COMMIT; -- wait for changes to propagate before updating sequences SELECT bdr.wait_slot_confirm_lsn(NULL, NULL); SELECT pg_sleep(5); BEGIN; SELECT "$SCHEMA".fix_sequence('$SCHEMA'); COMMIT; RESET ROLE; GRANT CONNECT ON DATABASE "$DATABASE" TO "$USER"; EOF } | psql_master -d "$DATABASE" >/dev/null || abort "Restoring database failed." fi
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;

/**
 * Prints today's date and the nine days before it, most recent first.
 */
public class MostRecentDate {

    private static final int DAY_COUNT = 10;

    public static void main(String[] args) {
        final List<LocalDate> recentDates = new ArrayList<>();
        for (int offset = 0; offset < DAY_COUNT; offset++) {
            recentDates.add(LocalDate.now().minusDays(offset));
        }

        System.out.println("10 Most Recent Dates:");
        for (LocalDate date : recentDates) {
            System.out.println(date);
        }
    }
}
# JSON-Schema fragment describing the tool's configuration: an optional
# boolean flag plus a list of output descriptors (name, xrandr arguments,
# and the workspaces assigned to that output).

_STRING_ARRAY = {
    "type": "array",
    "items": {"type": "string"},
}

_OUTPUT = {
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        # Raw arguments passed through to xrandr for this output.
        "xrandr": dict(_STRING_ARRAY),
        # Workspace names pinned to this output.
        "workspaces": dict(_STRING_ARRAY),
    },
}

schema = {
    "type": "object",
    "properties": {
        "disableEmptyOutputs": {"type": "boolean"},
        "outputs": {
            "type": "array",
            "items": _OUTPUT,
        },
    },
}
#!/bin/sh
# Run the PHPUnit suite for this directory, regardless of where the script
# is invoked from.

# Resolve the directory containing this script.
DIR=$(dirname "$0")

# BUG FIX: the path was unquoted (breaks on spaces) and a failed cd would
# silently run phpunit in the wrong directory; now quoted and fail-fast.
cd "$DIR" || exit 1

../../third_party/vendor/bin/phpunit --bootstrap bootstrap.php .
import Main from '@/components/system-setting/main'

/**
 * Recursively convert a backend-provided menu tree into Vue Router records.
 * Top-level entries (fid === 0) render inside the Main layout; deeper entries
 * are lazy-loaded from their `component` path.
 */
function map2Router(menus) {
  const routers = []
  for (let i = 0; i < menus.length; i++) {
    let sub_menus = []
    if (menus[i].children) {
      sub_menus = map2Router(menus[i].children)
    }
    const temp = {
      name: menus[i].name,
      // Ensure every route path is absolute.
      path: menus[i].path.charAt(0) !== "/" ? "/" + menus[i].path : menus[i].path,
      component: menus[i].fid === 0 ? Main : () => import('@/' + menus[i].component),
      icon: menus[i].icon,
      meta: { icon: menus[i].icon, title: menus[i].title, menuId: menus[i].id },
      children: sub_menus,
      redirect: menus[i].redirect ? menus[i].redirect : ''
    }
    routers.push(temp)
  }
  return routers
}

const state = {
  menus: []
}

const mutations = {
  // Append converted routes to the stored menu list.
  SET_USER_MENU: (state, menus) => {
    state.menus = state.menus.concat(menus)
  },
  // NOTE(review): name looks like a typo for RESET_USER_MENU; kept for
  // backward compatibility with any callers committing it directly.
  RSET_USER_MENU: (state, menus) => {
    state.menus = []
  }
}

const actions = {
  // Convert the raw menu tree and append the resulting routes.
  setUsermenu({ commit, state }, menus) {
    return new Promise((resolve, reject) => {
      commit('SET_USER_MENU', map2Router(menus))
      // BUG FIX: the promise was never settled, so any caller awaiting this
      // action (e.g. before router.addRoutes) would hang forever.
      resolve()
    })
  },
  resetUserMenu({ commit, state }, menus) {
    commit('RSET_USER_MENU')
  }
}

export default {
  namespaced: true,
  state,
  mutations,
  actions
}
val PlayVersion = "2.5.0" val AkkaVersion = "2.4.11" val branch = { val rev = "git rev-parse --abbrev-ref HEAD".!!.trim if (rev == "HEAD") { // not on a branch, get the hash "git rev-parse HEAD".!!.trim } else rev } lazy val docs = project .in(file(".")) .enablePlugins(LightbendMarkdown) .settings(forkedTests: _*) .settings( resolvers += Resolver.typesafeIvyRepo("releases"), scalaVersion := "2.11.7", libraryDependencies ++= Seq( "com.typesafe.akka" %% "akka-stream-testkit" % AkkaVersion % "test", "org.apache.cassandra" % "cassandra-all" % "3.0.2" % "test", "junit" % "junit" % "4.12" % "test", "com.novocode" % "junit-interface" % "0.11" % "test", "org.scalatest" %% "scalatest" % "2.2.4" % Test, "com.typesafe.play" %% "play-netty-server" % PlayVersion % Test, "com.typesafe.play" %% "play-logback" % PlayVersion % Test ), javacOptions ++= Seq("-encoding", "UTF-8", "-source", "1.8", "-target", "1.8", "-parameters", "-Xlint:unchecked", "-Xlint:deprecation"), testOptions in Test += Tests.Argument("-oDF"), testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"), // This is needed so that Java APIs that use immutables will typecheck by the Scala compiler compileOrder in Test := CompileOrder.JavaThenScala, markdownDocsTitle := "Lagom", markdownDocPaths += { // What I'd really like to do here is trigger the unidoc task in the root project externally, // however I tried that and for some reason it doesn't work. So instead we'll just depend on // it being run manually. val javaUnidocTarget = parentDir / "target" / "javaunidoc" streams.value.log.info(s"Serving javadocs from $javaUnidocTarget. 
Rerun unidoc in root project to refresh") javaUnidocTarget -> "api" }, markdownApiDocs := Seq( "api/index.html" -> "Java" ), markdownUseBuiltinTheme := false, markdownTheme := Some("lagom.LagomMarkdownTheme"), markdownGenerateTheme := Some("bare"), markdownGenerateIndex := true, markdownSourceUrl := Some(url(s"https://github.com/lagom/lagom/edit/$branch/docs/manual/")), markdownS3CredentialsHost := "downloads.typesafe.com.s3.amazonaws.com", markdownS3Bucket := Some("downloads.typesafe.com"), markdownS3Prefix := "rp/lagom/", markdownS3Region := awscala.Region0.US_EAST_1, excludeFilter in markdownS3PublishDocs ~= { _ || "*.scala" || "*.java" || "*.sbt" || "*.conf" || "*.md" || "*.toc" } ).dependsOn(serviceIntegrationTests, persistenceJdbc, kafkaBroker, immutables % "test->compile", theme % "run-markdown") lazy val parentDir = Path.fileProperty("user.dir").getParentFile // Depend on the integration tests, they should bring everything else in lazy val serviceIntegrationTests = ProjectRef(parentDir, "integration-tests-javadsl") lazy val persistenceJdbc = ProjectRef(parentDir, "persistence-jdbc-javadsl") lazy val kafkaBroker = ProjectRef(parentDir, "kafka-broker") // Needed to compile test classes using immutables annotation lazy val immutables = ProjectRef(parentDir, "immutables") // for forked tests, necessary for Cassandra def forkedTests: Seq[Setting[_]] = Seq( fork in Test := true, concurrentRestrictions in Global += Tags.limit(Tags.Test, 1), javaOptions in Test ++= Seq("-Xms256M", "-Xmx512M"), testGrouping in Test <<= definedTests in Test map singleTestsGrouping ) // group tests, a single test per group def singleTestsGrouping(tests: Seq[TestDefinition]) = { // We could group non Cassandra tests into another group // to avoid new JVM for each test, see http://www.scala-sbt.org/release/docs/Testing.html val javaOptions = Seq("-Xms256M", "-Xmx512M") tests map { test => new Tests.Group( name = test.name, tests = Seq(test), runPolicy = Tests.SubProcess(javaOptions)) } 
} lazy val theme = project .in(file("theme")) .enablePlugins(SbtWeb, SbtTwirl) .settings( name := "lagom-docs-theme", scalaVersion := "2.11.7", resolvers += Resolver.typesafeIvyRepo("releases"), libraryDependencies ++= Seq( "com.lightbend.markdown" %% "lightbend-markdown-server" % LightbendMarkdownVersion, "org.webjars" % "jquery" % "1.9.0", "org.webjars" % "prettify" % "4-Mar-2013" ), pipelineStages in Assets := Seq(uglify), LessKeys.compress := true )
package chantools import ( "fmt" "reflect" "testing" "unsafe" ) func TestLock(t *testing.T) { c := make(chan int, 10) c <- 1 c <- 2 c <- 3 ChanDebug(c) } func TestNoRecv(t *testing.T) { c := make(chan int, 10) c <- 1 c <- 2 c <- 3 data, length, _ := ChanRead(c, 10) if length != 3 { t.Fail() } if data != 0 { t.Fail() } } func TestRecv(t *testing.T) { c := make(chan int, 10) c <- 1 c <- 2 c <- 3 data, len, size := ChanRead(c, 0) if data == 0 { t.Fail() } if size != 4*3 { t.Fail() } // cast to Slice tmp := &reflect.SliceHeader{ Data: data, Cap: len, Len: len, } slice := *(*[]int)(unsafe.Pointer(tmp)) if slice[0] != 1 { t.Fail() } if slice[1] != 2 { t.Fail() } if slice[2] != 3 { t.Fail() } } type TestStruct struct { A byte B uint32 C uint64 } func TestRecvStruct(t *testing.T) { var ( one TestStruct two TestStruct ) one.A = 10 one.B = 20 one.C = 30 two.A = 40 two.B = 50 two.C = 60 c := make(chan TestStruct, 10) c <- one c <- two data, len, _ := ChanRead(c, 0) if data == 0 { t.Fail() } // cast to Slice tmp := &reflect.SliceHeader{ Data: data, Cap: len, Len: len, } slice := *(*[]TestStruct)(unsafe.Pointer(tmp)) oneR := slice[0] fmt.Println(oneR) if oneR.A != 10 || oneR.B != 20 || oneR.C != 30 { t.Fail() } twoR := slice[1] if twoR.A != 40 || twoR.B != 50 || twoR.C != 60 { t.Fail() } }
<gh_stars>100-1000 package sort import ( "fmt" "math/rand" "testing" "time" ) func TestShellSort(t *testing.T) { data1 := []int{1} ShellSort(data1) fmt.Println(data1) data2 := []int{3, 1} ShellSort(data2) fmt.Println(data2) data3 := []int{3, 9, 0, 1, 3, 2, 5, 7, 7, 4} ShellSort(data3) fmt.Println(data3) var data []int rand.Seed(time.Now().Unix()) for i := 0; i < length; i++ { data = append(data, rand.Intn(1000)) } ShellSort(data) fmt.Println(data) }
#!/bin/bash src=/lustre/atlas1/stf008/scratch/fwang2/xattr rm -rf $src mkdir -p $src lfs setstripe -c 1 $src mkdir -p $src/t1 lfs setstripe -c 2 $src/t1 mkdir -p $src/t2 lfs setstripe -c 3 $src/t2 echo "aaa" > $src/t1/a.txt echo "bbb" > $src/t2/b.txt echo "src ..." lfs getstripe $src lfs getstripe $src/t1/a.txt dst=/lustre/atlas2/stf008/scratch/fwang2 rm -rf $dst/xattr $HOME/pcircle/pcp.py -p $src $dst echo "dest ..." lfs getstripe $dst/xattr lfs getstripe $dst/xattr/t1/a.txt
// Worked examples of Array.prototype.filter.
// BUG FIX: removed a stray "<reponame>..." dataset artifact that preceded the
// first statement and made the script invalid.

var numeros = [10, 18, 1, 15];

// Keep only the values strictly greater than 10.
var maiorQue10 = function(item) {
    return item > 10;
}

var novosNumeros = numeros.filter(maiorQue10);
console.log(novosNumeros); // → [18, 15]

//-------------------------------------------------------------------------------

var nomes = ["Bruno", "Zezinho", "Fulano", "Douglas"];

// Keep only the names starting with "Z".
var convidados = nomes.filter(function(item){
    return item.charAt(0) == "Z";
});
console.log(convidados); // → ["Zezinho"]

//-------------------------------------------------------------------------------

var numeros = [10, 18, 1, 15, 2, 12, 21, 33, 100];

// Even numbers: remainder of division by 2 is falsy.
var pares = function(item) {
    return !(item % 2);
}

// Odd numbers: remainder of division by 2 is truthy.
var impares = function(item) {
    return item % 2;
}

var numerosPar = numeros.filter(pares);
var numerosImpar = numeros.filter(impares);
console.log(numerosPar);   // → [10, 18, 2, 12, 100]
console.log(numerosImpar); // → [1, 15, 21, 33]

//-------------------------------------------------------------------------------

var filmes = [
    {titulo: 'Titanic', duracao: 195, nota: 7.5},
    {titulo: 'The Avengers', duracao: 203, nota: 9.5},
    {titulo: 'Bean', duracao: 90, nota: 6.5}
]

// Split the films above or below the cutoff score.
var notaCorte = 8;

var bons = function(item) {
    return item.nota >= notaCorte;
}

var ruins = function(item) {
    return item.nota < notaCorte;
}

var filmesBons = filmes.filter(bons);
var filmesRuins = filmes.filter(ruins);
console.log(filmesBons);  // → [{titulo: "The Avengers", duracao: 203, nota: 9.5}]
console.log(filmesRuins); // → [{titulo: "Titanic", duracao: 195, nota: 7.5},
                          //    {titulo: "Bean", duracao: 90, nota: 6.5}]
import kue from 'kue';
import * as shell from 'shelljs';

// Shared kue job queue (backed by Redis with default connection settings).
const queue = kue.createQueue();

// Placeholder mail sender: logs the job payload and immediately acks the job.
const sendEmail = (data: any, done: any) => {
  console.log(data);
  done();
};

// Worker for 'email' jobs.
queue.process('email', function (job, done) {
  sendEmail(job.data, done);
});

// Worker for 'convertme' jobs: shells out to convertme.sh inside the upload
// directory and completes the job when the child process's stdout closes.
queue.process('convertme', function (job, done) {
  const { file, basedUploadUrl, userFolder } = job.data;
  const filename = file.filename.split('.')[0];
  shell.cd(basedUploadUrl);
  // NOTE(review): the literal `$(unknown)` below looks like a lost template
  // placeholder — `filename` is computed above but never used, so the intended
  // argument was presumably `${filename}`; confirm against convertme.sh.
  const proc = shell.exec(`./convertme.sh yonekura ${userFolder} $(unknown) 1`, { async: true }, (code: number, stdout: string, stderr: string) => {
    console.log(stdout);
  });
  // Ack the job on any terminal stdout event; done() may be invoked more than
  // once if several of these events fire — NOTE(review): kue tolerates this
  // only with a warning; verify.
  proc.stdout.on('end', done);
  proc.stdout.on('error', done);
  proc.stdout.on('close', done);
  // done();
});

// Expose kue's built-in dashboard/REST UI.
kue.app.listen(5555);

export default queue;
<reponame>levsthings/somnolence const HtmlWebpackPlugin = require('html-webpack-plugin') const CopyWebpackPlugin = require('copy-webpack-plugin') const webpack = require('webpack') const path = require('path') const babelConfig = require('./.babelrc.js') module.exports = { entry: { App: [ './src/js/index.js' ] }, output: { path: path.resolve(__dirname, './public'), publicPath: '/', filename: '[name].[hash].bundle.js' }, module: { rules: [ { test: /\.pug$/, use: [ { loader: 'html-loader' }, { loader: 'pug-html-loader' } ] }, { test: /\.jsx?$/, exclude: /node_modules/, use: { loader: 'babel-loader', options: { presets: babelConfig.presets, plugins: babelConfig.prod.plugins } } }, { test: /\.(png|svg|jpe?g|gif|cur)$/, use: [ { loader: 'file-loader', options: { name: '[name].[hash].[ext]', outputPath: 'img/' } }, { loader: 'image-webpack-loader', options: { optipng: { optimizationLevel: 7, interlaced: false }, mozjpeg: { quality: 60, progressive: true }, gifsicle: { optimizationLevel: 7, interlaced: false }, pngquant: { quality: '65-90', speed: 4 } } } ] } ] }, plugins: [ new CopyWebpackPlugin([ { from: './src/misc', to: './' } ]), new HtmlWebpackPlugin({ filename: 'index.html', template: './src/templates/index.pug', inject: 'body' }), new webpack.DefinePlugin({ 'process.env': { NODE_ENV: JSON.stringify('production') } }), new webpack.ContextReplacementPlugin(/moment[/\\]locale$/, /en/), new webpack.optimize.UglifyJsPlugin({ uglifyOptions: { ie8: false, ecma: 8, mangle: true, compress: { dead_code: true, passes: 3 }, output: { comments: false, beautify: false } } }) ], stats: 'errors-only' }
// https://www.codechef.com/FEB18/problems/CHEFCHR
#include <algorithm>
#include <iostream>
using namespace std;

int main() {
    int cases;
    cin >> cases;
    while (cases-- > 0) {
        string text;
        cin >> text;

        // Count length-4 windows that are an anagram of "chef"
        // ("cefh" is "chef" with its letters sorted).
        int anagrams = 0;
        for (size_t start = 0; start + 4 <= text.size(); ++start) {
            string window = text.substr(start, 4);
            sort(window.begin(), window.end());
            if (window == "cefh") {
                ++anagrams;
            }
        }

        if (anagrams > 0) {
            cout << "lovely " << anagrams << endl;
        } else {
            cout << "normal\n";
        }
    }
}
#define NO_EXTERN #include "global_config.hpp" #undef NO_EXTERN #if ENABLE_DEBUG bool debug_trace = false; unsigned int debug_x, debug_y; #endif #include <signal.h> #include <iostream> void assert_fail(std::string text,std::string file,int line,std::string function){ std::cerr << "Assertion failed." << std::endl; std::cerr << file << ":" << line << ", in " << function << std::endl; std::cerr << "because " << text << std::endl; raise(SIGABRT); }
/* * Copyright (c) 2018-present the original author or authors. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package org.reap.rbac.web; import org.junit.Test; import org.reap.BaseTest; import org.reap.CoreException; import org.reap.rbac.common.ErrorCodes; import org.reap.rbac.domain.User; import org.reap.rbac.web.AuthenticationController; import org.reap.support.Result; import org.springframework.beans.factory.annotation.Autowired; import static org.junit.Assert.*; /** * * @author 7cat * @since 1.0 */ public class AuthenticationControllerTest extends BaseTest { @Autowired private AuthenticationController authenticationController; @Test public void testAuthentication() { Result<User> success = authenticationController.authentication("user0000000001", "user0000000001"); assertTrue(success.isSuccess()); assertNotNull(success.getPayload().getOrg()); assertNotNull(success.getPayload().getRoles()); try { authenticationController.authentication("user0000000001", "user0000000002"); fail(); } catch (CoreException e) { assertEquals(ErrorCodes.USERNAME_OR_PASSWORD_IS_INCORRECT, e.getCode()); } } }
import { ChildEntity, Column } from 'typeorm';
import { Resource } from './resource.entity';
import { Field, ID, ObjectType, Int } from 'type-graphql';

// TypeORM single-table-inheritance child of Resource, also exposed as a
// GraphQL object type. All fields are optional/nullable.
// NOTE(review): the ID and Int imports are unused in this file.
@ChildEntity()
@ObjectType()
export class Music extends Resource {
  // Source URL or path of the audio, if known.
  @Column({type: 'text', nullable: true})
  @Field(type => String, {nullable: true})
  src: string;

  // Person who arranged the piece.
  @Column({nullable: true})
  @Field({nullable: true})
  arranger: string;

  // Person who wrote the lyrics.
  @Column({nullable: true})
  @Field({nullable: true})
  lyricist: string;
}
#!/bin/bash set -e if [ -z "$RELAY_NETS" ] then RELAY_NETS="$(ip addr show | awk '$1 == "inet" { print $2 ":" }' | xargs | sed 's/ //g')" fi opts=( dc_local_interfaces '0.0.0.0 ; ::0' dc_other_hostnames '' dc_relay_nets "${RELAY_NETS}" dc_eximconfig_configtype 'satellite' dc_readhost 'island.byu.edu' dc_smarthost 'mmgateway.byu.edu' dc_use_split_config 'true' dc_hide_mailname 'true' ) if [ "$GMAIL_USER" -a "$GMAIL_PASSWORD" ]; then # see https://wiki.debian.org/GmailAndExim4 opts+=( dc_eximconfig_configtype 'smarthost' dc_smarthost 'smtp.gmail.com::587' ) echo "*.gmail.com:$GMAIL_USER:$GMAIL_PASSWORD" > /etc/exim4/passwd.client else opts+=( dc_eximconfig_configtype 'internet' ) fi /etc/exim4/set-exim4-update-conf "${opts[@]}" if [ "$(id -u)" = '0' ]; then mkdir -p /var/spool/exim4 /var/log/exim4 || : chown -R Debian-exim:Debian-exim /var/spool/exim4 /var/log/exim4 || : fi exec "$@"
#. "$DOTFILES_DIR/lib/init_env.sh" libfunc() { # Short description here # Globals: # None # Arguments: # None # Outputs: # None # Returns: # None : }
// NOTE(review): std::process::Command is imported but unused.
use std::process::Command;
use std::path::PathBuf;
use clap::{App, Arg};

/// CLI entry point: defines an `open` subcommand that opens each given path
/// or URL with the platform's default handler (via the `open` crate).
fn main() {
    let matches = App::new("my_program")
        .subcommand(
            App::new("open")
                .about("Open a path/URL with its default handler")
                .arg(
                    Arg::new("paths")
                        .about("Paths or URLs to open")
                        .multiple_values(true)
                        .required(true),
                ),
        )
        .get_matches();

    if let Some(open_matches) = matches.subcommand_matches("open") {
        if let Some(paths) = open_matches.values_of("paths") {
            for path in paths {
                open_path_or_url(path);
            }
        }
    }
}

// Dispatch on an explicit http(s) scheme prefix.
// NOTE(review): `open::that` accepts both URLs and filesystem paths, so the
// two branches below perform the same operation; the split only affects the
// error message printed on failure.
fn open_path_or_url(path: &str) {
    if path.starts_with("http://") || path.starts_with("https://") {
        open_url(path);
    } else {
        open_path(path);
    }
}

// Open a URL with the default browser; failures are logged to stderr.
fn open_url(url: &str) {
    if let Err(e) = open::that(url) {
        eprintln!("Failed to open URL: {}", e);
    }
}

// Open a filesystem path with its default application; failures are logged
// to stderr.
fn open_path(path: &str) {
    let path_buf = PathBuf::from(path);
    if let Err(e) = open::that(&path_buf) {
        eprintln!("Failed to open path: {}", e);
    }
}
/*
 * Copyright © 2021 Apple Inc. and the ServiceTalk project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.servicetalk.encoding.api;

/**
 * Extension of {@link NoopContentCodec} with custom implementations for {@link #equals(Object)} and {@link #hashCode()}
 * based on {@link #name()}.
 * This class can be used as is or can be extended to create a {@link ContentCodec} with custom behaviour.
 */
class CustomIdentityContentCodec extends NoopContentCodec {

    /**
     * Creates a pass-through codec whose {@link #name()} is the standard
     * {@code identity} coding name.
     */
    CustomIdentityContentCodec() {
        super(Identity.identity().name());
    }
}
package predictor

import java.io.File

import git.PullRequest
import utils.Extensions._

/**
 * CSV serialization helpers for [[git.PullRequest]] feature rows.
 *
 * NOTE(review): `read` splits naively on "," and only trims surrounding
 * quotes, so it will NOT round-trip fields containing embedded commas or
 * quotes produced by `write`'s escaping — confirm the training data never
 * contains such fields.
 */
object Csv {
  // Characters that must be escaped when a string field is quoted on write.
  private val escapeChars = Map("\"" -> "\"\"", "\r" -> "\\r", "\n" -> "\\n")
  // Locale-stable number formatter: max 6 fraction digits, no grouping separators.
  private val nf = java.text.NumberFormat.getInstance(java.util.Locale.ROOT)
  nf.setMaximumFractionDigits(6)
  nf.setGroupingUsed(false)

  def write(file: String, data: List[PullRequest]): Unit =
    write(new File(file), data)

  /**
   * Writes a header row followed by one feature row per pull request.
   * Missing optional fields default to false/0; the trailing "important"
   * label column is always written as false.
   */
  def write(file: File, data: List[PullRequest]): Unit = {
    val header = List(
      "age",
      "title",
      "target",
      "author",
      "coreMember",
      "intraBranch",
      "containsFix",
      "commitRatio",
      "pullRequestRatio",
      "comments",
      "reviewComments",
      "lastCommentMention",
      "additions",
      "deletions",
      "commits",
      "files",
      "hasTestCode",
      "important")

    val rows = for {
      pr <- data
    } yield List(
        pr.age,
        pr.title.getOrElse(""),
        pr.target,
        pr.author,
        pr.coreMember.getOrElse(false),
        pr.intraBranch.getOrElse(false),
        pr.containsFix.getOrElse(false),
        pr.contributedCommitRatio.getOrElse(0D),
        pr.pullRequestAcceptRatio.getOrElse(0D),
        pr.comments.getOrElse(0L),
        pr.reviewComments.getOrElse(0L),
        pr.lastCommentMention.getOrElse(false),
        pr.linesAdded.getOrElse(0L),
        pr.linesDeleted.getOrElse(0L),
        pr.commits.getOrElse(0L),
        pr.filesChanged.getOrElse(0L),
        pr.hasTestCode.getOrElse(false),
        false)

    val contents = header :: rows
    writeData(file, contents)
  }

  // Reads a CSV where every field is a boolean literal ("true"/"false").
  def readAsBoolean(file: File): List[List[Boolean]] =
    read(file).map(r => r.map(f => f.toBoolean))

  // Reads a CSV where every field parses as a Double.
  def readAsDouble(file: File): List[List[Double]] =
    read(file).map(r => r.map(f => f.toDouble))

  def read(file: String): List[List[String]] =
    read(new File(file))

  /**
   * Reads the whole file into memory and splits rows on "\n" and fields on
   * ",", trimming whitespace and surrounding double quotes from each field.
   */
  def read(file: File): List[List[String]] = {
    val data = scala.io.Source.fromFile(file).mkString
    val rows = data.split("\n").map(r => r.trim).toList
    rows.map(r => r.split(",").map(f => f.trim.trim(List('"'))).toList)
  }

  // Formats each value and joins rows with "," / "\n", ending with a newline.
  def writeData(file: File, data: List[List[Any]]): Unit = {
    val contents = data.map(row => row.map(v => format(v)).mkString(",")).mkString("\n") + "\n"
    writeToFile(file, contents)
  }

  // Creates parent directories as needed and writes the string, always closing the writer.
  private def writeToFile(file: File, contents: String): Unit = {
    val dir: File = file.getParentFile
    dir.mkdirs()
    val writer = new java.io.PrintWriter(file)
    try writer.write(contents)
    finally writer.close()
  }

  /**
   * Per-type CSV cell encoding: strings are quoted+escaped, booleans become
   * 1/0, numbers go through the locale-stable formatter, NaN/Infinity are
   * coerced to "0", and anything else falls back to quoted toString.
   */
  private def format(value: Any): String = value match {
    case s: String => s""""${escape(s)}""""
    case true => "1"
    case false => "0"
    case u: Unit => ""
    case b: Byte => nf.format(b)
    case c: Char => nf.format(c)
    case s: Short => nf.format(s)
    case i: Int => nf.format(i)
    case l: Long => nf.format(l)
    case f: Float => if (f.isNaN || f.isInfinity) "0" else nf.format(f)
    case d: Double => if (d.isNaN || d.isInfinity) "0" else nf.format(d)
    case _ => s""""$value""""
  }

  // Applies every escape rule in sequence (quote doubling, \r, \n).
  private def escape(value: String): String = {
    escapeChars.foldLeft(value)((s, c) => s.replace(c._1, c._2))
  }
}
#!/bin/bash
# ---------------------------------------------------------------------------
#
# Package       : commons-math3
# Version       : 3.6.1
# Source repo   : https://github.com/apache/commons-math.git
# Tested on     : UBI: 8.4
# Script License: Apache License 2.0
# Maintainer's  : Sapana Khemkar <Sapana.Khemkar@ibm.com>
# Language      : Java
#
#
# Disclaimer: This script has been tested in root mode on given
# ==========  platform using the mentioned version of the package.
#             It may not work as expected with newer versions of the
#             package and/or distribution. In such case, please
#             contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
# Fixed: the shebang previously appeared AFTER the comment header, where the
# kernel ignores it (a shebang is only honoured on the very first line).

set -e

# Variables
PACKAGE_URL=https://github.com/apache/commons-math.git
PACKAGE_VERSION=MATH_3_6_1
PACKAGE_NAME=commons-math

# install tools and dependent packages
yum update -y
yum install -y git wget tar

# install java
yum -y install java-1.8.0-openjdk-devel

# install maven
cd /opt/
rm -rf apache-maven*
rm -rf maven
wget https://www-eu.apache.org/dist/maven/maven-3/3.8.4/binaries/apache-maven-3.8.4-bin.tar.gz
tar xzf apache-maven-3.8.4-bin.tar.gz
ln -s apache-maven-3.8.4 maven
export MVN_HOME=/opt/maven
export PATH=${MVN_HOME}/bin:${PATH}
mvn -version

# Cloning the repository from remote to local
cd /home
rm -rf $PACKAGE_NAME
git clone $PACKAGE_URL
cd $PACKAGE_NAME
git checkout $PACKAGE_VERSION

# Build and test package
mvn package
mvn test

exit 0
# Imports
import numpy as np
import tensorflow as tf
from tensorflow import keras
import matplotlib.pyplot as plt

# Load the data
# TODO

# Preprocess the data
# TODO
# NOTE(review): X_train/X_test/y_train/y_test must be produced by the TODO
# sections above; the script will not run until they are defined.

# Create the model: a small fully-connected binary classifier with dropout.
model = keras.Sequential()
model.add(keras.layers.Dense(64, activation='relu', input_shape=(X_train.shape[1],)))
model.add(keras.layers.Dropout(0.2))
model.add(keras.layers.Dense(64, activation='relu'))
model.add(keras.layers.Dropout(0.2))
model.add(keras.layers.Dense(1, activation='sigmoid'))

# Compile the model
model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['accuracy'])

# Define callbacks
callbacks_list = [
    keras.callbacks.EarlyStopping(
        # Fixed: with metrics=['accuracy'], tf.keras logs the validation metric
        # as 'val_accuracy'. Monitoring the nonexistent 'val_acc' meant early
        # stopping silently never triggered (only a warning was emitted).
        monitor='val_accuracy',
        patience=1,
    ),
    keras.callbacks.ModelCheckpoint(
        filepath='best_model.h5',
        monitor='val_loss',
        save_best_only=True,   # keep only the checkpoint with the lowest val_loss
    )
]

# Train the model
model.fit(X_train, y_train,
          epochs=5,
          callbacks=callbacks_list,
          validation_data=(X_test, y_test))

# Test the model
test_loss, test_acc = model.evaluate(X_test, y_test)
print('Test accuracy:', test_acc)
import styled from 'styled-components'

import Branding from './Branding'

// Full-screen, click-through loading overlay: fades in over 250ms and shows a
// pulsing Branding logo centered on a dimmed background.
// NOTE(review): the CSS custom property is spelled `--overlay-backgroud-color`
// (sic) — it must match the variable's definition site exactly, so verify the
// same spelling is used where it is declared before "fixing" it.
const Overlay = styled.div`
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  z-index: 100;
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  touch-action: none;
  background: var(--overlay-backgroud-color);
  pointer-events: none;
  animation: fade;
  animation-duration: 250ms;
  animation-fill-mode: forwards;

  ${Branding} {
    width: 70px;
    height: 70px;
    animation-duration: 2s;
  }

  @keyframes fade {
    from {
      opacity: 0;
    }
    to {
      opacity: 1;
    }
  }
`

export default Overlay
package com.udacity.jwdnd.course1.cloudstorage.services;

import com.udacity.jwdnd.course1.cloudstorage.model.Credential;

import java.util.List;

/**
 * CRUD service contract for stored credentials.
 */
public interface CredentialsService {

    /**
     * Returns all credentials belonging to the given user.
     * (Fixed: parameter renamed from {@code UserId} to {@code userId} to follow
     * Java camelCase conventions — parameter names do not affect callers or
     * implementers.)
     *
     * @param userId id of the owning user
     * @return the user's credentials, possibly empty
     */
    List<Credential> findAllCredentials(Integer userId);

    /** Deletes the credential with the given id. */
    void deleteCredential(Integer credentialId);

    /** Updates an existing credential. */
    void updateCredential(Credential credential);

    /** Persists a new credential. */
    void insertCredential(Credential credential);
}
/**
 * Immutable-ish value object describing a single parsed argument:
 * where it occurred (offset), its raw text (value), and the value after
 * transformation (transformedValue).
 */
class Argument {
  constructor(offset, value, transformedValue) {
    // Assign all three fields in one shot; key order matches the parameters.
    Object.assign(this, { offset, value, transformedValue })
  }
}

module.exports = Argument
#!/usr/bin/env bash
# Fixed: the shebang was "#/bin/$SHELL" — missing the "!" and using a variable,
# neither of which the kernel interprets; the script would run under whatever
# shell the caller happened to invoke it with.

set -e

# Map the cluster-type flag to the docker-compose file suffix.
case $1 in
  -a|--all)
    cluster="complete"
    ;;
  -d|--dask)
    cluster="dask"
    ;;
  -s|--spark)
    cluster="spark"
    ;;
  *)
    echo "Cluster type must be passed via flag --all/--dask/--spark"
    echo "Usage: ./bin/restart-cluster-service.sh [-a/-d/-s]"
    exit 1
    ;;
esac
shift

compose_yaml="docker-compose-${cluster}.yml"

echo "Clean-deploying: ${compose_yaml}"

# Prune dangling docker state, rebuild the base image, then bring the stack up.
docker system prune -f
docker build . --force-rm -t cluster-base
docker-compose -f $compose_yaml up --build
#!/usr/bin/env bash
# Checks that the long description is PyPI compatible.
# Fixed: under plain sh, a failure of `python setup.py` was masked by the
# pipeline (only rst2html's exit status counted), so the check could pass
# spuriously. bash's `pipefail` makes the pipeline fail if either side fails.
set -euo pipefail

python setup.py --long-description | rst2html.py --strict > /dev/null
import React from "react"; import { useDispatch, useSelector } from "react-redux"; import clsx from "clsx"; import * as Yup from "yup"; import PropTypes from "prop-types"; import { Formik } from "formik"; import { Box, Button, Checkbox, FormHelperText, TextField, Typography, makeStyles } from "@material-ui/core"; import { register } from "../../../Redux/account"; import { Alert } from "@material-ui/lab"; // import { register } from "src/actions/accountActions"; const useStyles = makeStyles(() => ({ root: {} })); function RegisterForm({ className, onSubmitSuccess, ...rest }) { const classes = useStyles(); const dispatch = useDispatch(); const error = useSelector((state) => state.account.error); return ( <Formik initialValues={{ firstName: "", lastName: "", email: "", password: "", policy: false }} validationSchema={Yup.object().shape({ firstName: Yup.string().max(255).required("First name is required"), // lastName: Yup.string().max(255).required("Last name is required"), email: Yup.string() .email("Must be a valid email") .max(255) .required("Email is required"), password: Yup.string().min(7).max(255).required("Password is required"), policy: Yup.boolean().oneOf([true], "This field must be checked") })} onSubmit={async (values, { setErrors, setStatus, setSubmitting }) => { try { const user = await dispatch( register( values.firstName, values.lastName, values.email, values.password ) ); if (user) { onSubmitSuccess(user); setStatus({ success: false }); setSubmitting(false); } } catch (error) { setStatus({ success: false }); setErrors({ submit: error.message }); setSubmitting(false); } }} > {({ errors, handleBlur, handleChange, handleSubmit, isSubmitting, touched, values }) => ( <form className={clsx(classes.root, className)} onSubmit={handleSubmit} {...rest} > {error && ( <Box mt={2} mb={2}> <Alert severity="error">{error}</Alert> </Box> )} <TextField error={Boolean(touched.firstName && errors.firstName)} fullWidth helperText={touched.firstName && errors.firstName} 
label="<NAME>" margin="normal" name="firstName" onBlur={handleBlur} onChange={handleChange} type="firstName" value={values.firstName} variant="outlined" required /> <TextField error={Boolean(touched.lastName && errors.lastName)} fullWidth helperText={touched.lastName && errors.lastName} label="<NAME>" margin="normal" name="lastName" onBlur={handleBlur} onChange={handleChange} type="lastName" value={values.lastName} variant="outlined" /> <TextField error={Boolean(touched.email && errors.email)} fullWidth helperText={touched.email && errors.email} label="Email Address" margin="normal" name="email" onBlur={handleBlur} onChange={handleChange} type="email" value={values.email} variant="outlined" required /> <TextField error={Boolean(touched.password && errors.password)} fullWidth helperText={touched.password && errors.password} label="Password" margin="normal" name="password" onBlur={handleBlur} onChange={handleChange} type="password" value={values.password} variant="outlined" required /> <Box alignItems="center" display="flex" mt={2} ml={-1}> <Checkbox checked={values.policy} name="policy" onChange={handleChange} color="primary" /> <Typography variant="body2" color="textSecondary"> I have read the{" "} {/* <Link component="a" href="#" color="secondary"> Terms and Conditions </Link> */} <a href="https://veertly.com/terms-of-service/" target="_blank" rel="noopener noreferrer" > Terms of Service </a>{" "} &amp;{" "} <a href="https://veertly.com/privacy-policy" target="_blank" rel="noopener noreferrer" > Privacy Policy </a> </Typography> </Box> {Boolean(touched.policy && errors.policy) && ( <FormHelperText error>{errors.policy}</FormHelperText> )} <Box mt={2}> <Button color="primary" disabled={isSubmitting} fullWidth size="large" type="submit" variant="contained" > Create account </Button> </Box> </form> )} </Formik> ); } RegisterForm.propTypes = { className: PropTypes.string, onSubmitSuccess: PropTypes.func }; RegisterForm.default = { onSubmitSuccess: () => {} }; export 
default RegisterForm;
#!/usr/bin/env bash
# Fixed: `set -o pipefail` is not POSIX and aborts under dash/ash `/bin/sh`,
# so the shebang now requests bash explicitly.
set -eo pipefail

# Fixed: the script directory was derived from `dirname "$1"` (the first CLI
# argument, usually unset) instead of `dirname "$0"` (the script itself).
SCRIPTDIR="$(cd "$(dirname "$0")"; pwd -P)"
PORT=8888

echo "Running from $SCRIPTDIR"

# Build the image, then run it with ./notebooks bind-mounted into the
# container and the Jupyter port published on loopback only.
docker build -t julia-binder .
docker run --mount type=bind,src="$SCRIPTDIR/notebooks",dst=/home/jovyan/notebooks -p 127.0.0.1:$PORT:$PORT julia-binder
import json

# Make an API call.
# Fixed: `response_data = # api call` was a syntax error. A placeholder JSON
# payload keeps the snippet runnable; replace with a real call, e.g.
# `response_data = requests.get(url).text`.
response_data = '{"key": "value"}'

# Parse the JSON response into a dict.
data = json.loads(response_data)

# Access a specific key in the response.
value = data['key']
print(value)
#!/usr/bin/env bash

## Command Line Arguments
NUM_BILLION_BALLS_PER_EXP=${1:-100}  # $1 — billions of balls thrown per experiment (default 100)
NUM_EXP=${2:-1}                      # $2 — number of independent experiments per configuration (default 1)

NUM_BILLIONS=$(($NUM_BILLION_BALLS_PER_EXP * $NUM_EXP))

echo "Running Buckets & Balls Simulation for WaysPerSkew=8"
echo "(Throwing ${NUM_BILLIONS} Billion Balls)"

# Fresh output directory for this run.
mkdir -p raw_results; rm -rf raw_results/*;

## Run Empirical Results for Extra-Ways = 1,2,3,4,5,6
for extra_ways_per_skew in 1 2 3 4 5 6
do
    ## Run $NUM_EXP experiments
    # All experiments are launched in parallel (backgrounded); `stdbuf -oL`
    # keeps the per-experiment log line-buffered so partial progress is visible.
    for (( i=0; i<$NUM_EXP; i++ ))
    do
        stdbuf -oL ../../bin/mirage8WPS_kExtraWays_NBn.o $extra_ways_per_skew $NUM_BILLION_BALLS_PER_EXP $i \
            > raw_results/Base8.ExtraWays${extra_ways_per_skew}.Exp${i}.out &
    done
done

## Wait for completion of all experiments.
# Polls the process table every 30s instead of `wait`, so it also works if the
# script is re-entered while experiments from a previous invocation still run.
exp_count=`ps aux | grep -i "mirage8WPS_kExtraWays_NBn.o" | grep -v "grep" | wc -l`
while [ $exp_count -gt 0 ]
do
    sleep 30
    exp_count=`ps aux | grep -i "mirage8WPS_kExtraWays_NBn.o" | grep -v "grep" | wc -l`
done

## Generate the Spills Result (Fig-7)
./get_spills.sh

## Generate the Bucket Probabilities Result (Fig-9, Fig-10)
./get_bucket_prob.sh
<reponame>connectedvehiclessitdat/fedgov-cv-webfragment-websocket package gov.usdot.cv.websocket.jms.format; import gov.usdot.asn1.generated.j2735.semi.IntersectionSituationData; import gov.usdot.asn1.generated.j2735.semi.SemiDialogID; import gov.usdot.asn1.generated.j2735.semi.VehSitDataMessage; import gov.usdot.cv.websocket.jms.format.IntersectionFormatter; import gov.usdot.cv.websocket.jms.format.VehicleFormatter; import gov.usdot.cv.websocket.jms.router.RoutableMessage; import net.sf.json.JSONObject; import org.apache.commons.codec.binary.Base64; import org.apache.commons.codec.binary.Hex; /** * Parsers VehSitDataMessages and extracts key values for mapping UI. */ public class MessageFormatter { public static String formatMessage(RoutableMessage message, String resultEncoding) { byte[] bytes = message.getRawMessage(); if (resultEncoding.equalsIgnoreCase("hex")) { return Hex.encodeHexString(bytes); } else if (resultEncoding.equalsIgnoreCase("base64")) { return Base64.encodeBase64String(bytes); } else if (resultEncoding.equalsIgnoreCase("full")) { if (message.getDialogId() == SemiDialogID.vehSitData.longValue()) { JSONObject json = VehicleFormatter.parseMessage((VehSitDataMessage)message.getMessage()); return json != null ? json.toString(): null; } else if (message.getDialogId() == SemiDialogID.intersectionSitDataDep.longValue()) { JSONObject json = IntersectionFormatter.formatMessage((IntersectionSituationData)message.getMessage()); return json != null ? json.toString(): null; } else { return null; } } else { return null; } } }
package de.unibi.agbi.biodwh2.reactome.entities;

import org.neo4j.ogm.annotation.Relationship;

import java.util.HashSet;
import java.util.Set;

/**
 * Created by manuel on 11.12.19.
 *
 * Base class for Reactome control references that carry literature citations.
 */
public abstract class ControlReference extends DatabaseObject {
    // NOTE(review): the relationship type "Publication" reads like a node label
    // rather than an edge type — confirm against the Reactome graph schema.
    @Relationship(type = "Publication")
    public Set<Publication> literatureReference = new HashSet<>();

    public ControlReference() {
    }

    public Set<Publication> getLiteratureReference() {
        return literatureReference;
    }
}
#!/usr/bin/env bash
#  vim:ts=4:sts=4:sw=4:et
#
#  Author: Hari Sekhon
#  Date: 2020-03-06 12:03:19 +0000 (Fri, 06 Mar 2020)
#
#  https://github.com/harisekhon/bash-tools
#
#  License: see accompanying Hari Sekhon LICENSE file
#
#  If you're using my code you're welcome to connect with me on LinkedIn and optionally send me feedback to help steer this or other code I publish
#
#  https://www.linkedin.com/in/harisekhon
#

# Cloudera Navigator fails to download some logs but silently fails without an error or outputting anything, not even the headers
# so this script loop retries the adjacent script cloudera_navigator_audit_logs_download.sh and checks for zero byte CSVs audit logs and retries
# until they're all downloaded. In practice, some logs repeatedly get zero bytes so this isn't entirely effective have to cut your losses on the
# logs that refused to extract from Navigator. Ironically older logs output the headers but not logs, at least indicating that there are no logs
# rather than just giving blank output which is almost certainly another Cloudera bug
#
# Tested on Cloudera Enterprise 5.10

set -euo pipefail
[ -n "${DEBUG:-}" ] && set -x
srcdir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# shellcheck disable=SC1090
. "$srcdir/lib/utils.sh"

# Seconds to back off between retry rounds.
sleep_secs=60

# stat's size flag differs between BSD (macOS) and GNU coreutils.
if is_mac; then
    stat_bytes="stat -f %z"
else
    stat_bytes="stat -c %s"
fi

time while true; do
    time "$srcdir/cloudera_navigator_audit_logs_download.sh" -k
    # want splitting of args
    # shellcheck disable=SC2086
    # grep -c stops the pipe terminating early causing:
    # find: `stat' terminated by signal 13
    num_zero_files="$(find . -maxdepth 1 -name 'navigator_audit_*.csv' -exec $stat_bytes {} \; | grep -c '^0$')"
    # Any zero-byte CSV means Navigator silently failed for that log: retry the
    # whole download round after a pause.
    if [ "$num_zero_files" != 0 ]; then
        echo "$num_zero_files files detected that have silently errored resulting in zero byte files, sleeping for $sleep_secs before retrying downloads..."
        sleep $sleep_secs
        continue
    fi
    echo FINISHED
    break
done
import type { SidebarConfig } from '@vuepress/theme-default'

// VuePress sidebar configuration, keyed by top-level path prefix.
// NOTE(review): this is exported as `en` but the group titles are Chinese
// ('介绍', '基础', …) — confirm whether the export name or the labels are wrong.
export const en: SidebarConfig = {
  '/guide/': [
    {
      isGroup: true,
      text: '介绍',
      children: [
        '/guide/README.md',
        '/guide/getting-started.md',
      ],
    },
    {
      isGroup: true,
      text: '基础',
      children: [
        '/guide/directory-structure.md',
        '/guide/config.md',
        '/guide/runtime-config.md',
        '/guide/route.md',
        '/guide/plugin.md',
        '/guide/template.md',
        '/guide/mock.md',
        '/guide/env.md',
        '/guide/css.md',
      ]
    },
    {
      isGroup: true,
      text: '进阶',
      children: [
      ]
    },
    "/guide/contributing.md",
    "/guide/faq.md"
  ],
  '/reference/config/': [
    '/reference/config/README.md'
  ],
  '/reference/api/': [
    '/reference/api/README.md'
  ],
  '/reference/plugin/': [
    {
      isGroup: true,
      text: 'Presets',
      children: [
      ],
    },
    {
      isGroup: true,
      text: 'Plugins',
      children: [
        '/reference/plugin/plugins/access.md',
        '/reference/plugin/plugins/enums.md',
        '/reference/plugin/plugins/icon.md',
        '/reference/plugin/plugins/jest.md',
        '/reference/plugin/plugins/layout.md',
        '/reference/plugin/plugins/locale.md',
        '/reference/plugin/plugins/model.md',
        '/reference/plugin/plugins/request.md',
        '/reference/plugin/plugins/vuex.md',
        '/reference/plugin/plugins/qiankun.md',
      ],
    },
    {
      isGroup: true,
      text: '插件开发',
      children: [
        '/reference/plugin/dev/README.md',
        '/reference/plugin/dev/api.md'
      ],
    },
  ],
  '/reference/cli/': [
    '/reference/cli/README.md',
  ],
}
package fracker_test

import (
	"github.com/onsi/ginkgo"
	"github.com/onsi/gomega"
	f "github.com/shopkeep/fracker"
	"log"
	"os"
	"testing"
)

// TestClient is a stub fracker client whose Get behaviour is injected per test
// via StubGet.
type TestClient struct {
	StubGet func(string) (f.Node, error)
}

// Get delegates to the injected stub.
func (self *TestClient) Get(key string) (f.Node, error) {
	return self.StubGet(key)
}

// TestFracker wires the Ginkgo/Gomega BDD suite into the standard `go test` runner.
func TestFracker(t *testing.T) {
	gomega.RegisterFailHandler(ginkgo.Fail)
	ginkgo.RunSpecs(t, "Fracker")
}

// init silences the standard logger for the whole test binary by redirecting
// it to /dev/null; the *os.File is deliberately kept open for process lifetime.
func init() {
	var err error
	var null *os.File
	if null, err = os.OpenFile(os.DevNull, os.O_WRONLY, 0666); err != nil {
		panic(err)
	}
	log.SetOutput(null)
}
"""
Create a search engine that takes in a query and a large source code place
and return the best match code.
"""
import re


def search_query(query, source_code):
    """Return the line of ``source_code`` that best matches ``query``.

    A line matches if it contains any whitespace-separated word of the query;
    matches are ranked by how many query words they contain.

    Returns ``None`` when the query is empty or nothing matches (previously
    an empty result raised ``IndexError`` on ``matches[0]``).
    """
    query_words = query.split()
    if not query_words:
        return None
    # re.escape prevents regex metacharacters in query words (e.g. "a+b",
    # "f(") from being misinterpreted as patterns or raising re.error.
    pattern = '|'.join(re.escape(word) for word in query_words)
    matches = [line for line in source_code if re.search(pattern, line)]
    if not matches:
        return None
    # Sort the matches based on the number of query words each line contains.
    matches.sort(key=lambda x: len([w for w in query_words if w in x]),
                 reverse=True)
    # Return the best match
    return matches[0]


if __name__ == '__main__':
    query = "Find the best way to compare two strings"
    source_code = ["def compare_strings(str1, str2):",
                   "    return str1 == str2",
                   ""]
    print(search_query(query, source_code))
    # def compare_strings(str1, str2):
Create an agent in Dialogflow. Design the conversational flow using intents, entities, and contexts. Create a welcome intent that greets customers, and provide basic shopping advice through additional intents and entities. Finally, connect the agent to a platform such as the web or Facebook Messenger so that users can interact with it.
#!/usr/bin/env bash
# Enter a chroot tree (given as $1) with proc/sys/dev and kernel modules
# mounted, drop into an interactive bash, then unmount everything on exit.
set -eu -o pipefail

# Print every active mount point, one per line.
function all_mount_points() { mount | sed 's/^.* on //' | sed 's/ type .*$//' ; }

# True iff the given path (resolved to an absolute path) is currently mounted.
function is_mount_point() {
    local path="${1}"
    local fullpath=$( cd "${path}" && pwd )
    all_mount_points | grep --quiet "^${fullpath}$"
}

cd "${1}"

# Give the chroot working DNS resolution.
cp /etc/resolv.conf etc/

# Mount the pseudo-filesystems only if they are not already mounted
# (makes the script safe to re-run).
if ! is_mount_point proc ; then mount -t proc none proc ; fi
if ! is_mount_point sys ; then mount -t sysfs /sys sys ; fi
if ! is_mount_point dev ; then mount -o bind /dev dev ; fi

mkdir -p lib/modules
# NOTE(review): "-o bind,readonly" does not actually make a bind mount
# read-only on older kernels without a separate remount — confirm on the
# target system if read-only protection matters.
if ! is_mount_point lib/modules ; then mount -o bind,readonly /lib/modules lib/modules ; fi

# "|| true" so a non-zero shell exit status still lets the unmounts run.
chroot . /bin/bash || true

umount lib/modules
umount dev
umount sys
umount proc

cd - >/dev/null
package strategies

import (
	"context"
	"reflect"
	"regexp"
	"testing"

	"github.com/1pkg/gopium/gopium"
)

// TestFilter is a table-driven test for the filter strategy: fields whose
// name (nregex) or type (tregex) matches the pattern are removed from the
// struct; a canceled context must still return the filtered/identity result
// together with context.Canceled.
func TestFilter(t *testing.T) {
	// prepare: a context that is already canceled for the cancellation case.
	cctx, cancel := context.WithCancel(context.Background())
	cancel()
	table := map[string]struct {
		filter filter          // strategy under test
		ctx    context.Context // context passed to Apply
		o      gopium.Struct   // input struct
		r      gopium.Struct   // expected result
		err    error           // expected error
	}{
		"empty struct should be applied to empty struct with empty filter": {
			filter: filter{},
			ctx:    context.Background(),
		},
		"non empty struct should be applied to itself with empty filter": {
			filter: filter{},
			ctx:    context.Background(),
			o: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test",
						Type: "test",
					},
				},
			},
			r: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test",
						Type: "test",
					},
				},
			},
		},
		"non empty struct should be applied to itself on canceled context with empty filter": {
			filter: filter{},
			ctx:    cctx,
			o: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test",
						Type: "test",
					},
				},
			},
			r: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test",
						Type: "test",
					},
				},
			},
			err: context.Canceled,
		},
		"non empty struct should be applied accordingly to filter name": {
			filter: filter{nregex: regexp.MustCompile(`^test-2$`)},
			ctx:    context.Background(),
			o: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name:     "test-1",
						Type:     "test-1",
						Embedded: true,
						Exported: true,
					},
					{
						Name:     "test-2",
						Type:     "test-2",
						Exported: true,
					},
					{
						Name:     "test-3",
						Type:     "test-3",
						Embedded: true,
					},
				},
			},
			r: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name:     "test-1",
						Type:     "test-1",
						Embedded: true,
						Exported: true,
					},
					{
						Name:     "test-3",
						Type:     "test-3",
						Embedded: true,
					},
				},
			},
		},
		"non empty struct should be applied accordingly to filter type": {
			filter: filter{tregex: regexp.MustCompile(`^test-2$`)},
			ctx:    context.Background(),
			o: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name:     "test-1",
						Type:     "test-1",
						Embedded: true,
						Exported: true,
					},
					{
						Name:     "test-2",
						Type:     "test-2",
						Exported: true,
					},
					{
						Name:     "test-3",
						Type:     "test-3",
						Embedded: true,
					},
				},
			},
			r: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name:     "test-1",
						Type:     "test-1",
						Embedded: true,
						Exported: true,
					},
					{
						Name:     "test-3",
						Type:     "test-3",
						Embedded: true,
					},
				},
			},
		},
	}
	for name, tcase := range table {
		t.Run(name, func(t *testing.T) {
			// exec
			r, err := tcase.filter.Apply(tcase.ctx, tcase.o)
			// check
			if !reflect.DeepEqual(r, tcase.r) {
				t.Errorf("actual %v doesn't equal to expected %v", r, tcase.r)
			}
			if !reflect.DeepEqual(err, tcase.err) {
				t.Errorf("actual %v doesn't equal to expected %v", err, tcase.err)
			}
		})
	}
}
<filename>src/boson/test/netpoller.cc #include <fcntl.h> #include <sys/socket.h> #include <unistd.h> #include <cstdio> #include <thread> #include <cstring> #include "boson/exception.h" #include "boson/system.h" #include "boson/internal/netpoller.h" #include "catch.hpp" using namespace boson; namespace { struct handler01 : public internal::net_event_handler<int> { int last_read_fd{-1}; int last_write_fd{-1}; event_status last_status {}; void read(int fd, int data, event_status status) override { last_read_fd = data; last_status = status; } void write(int fd, int data, event_status status) override { last_write_fd = data; last_status = status; } void callback() override {}; }; } TEST_CASE("Netpoller - FD Read/Write", "[netpoller][read/write]") { handler01 handler_instance; int pipe_fds[2]; ::pipe(pipe_fds); ::fcntl(pipe_fds[0], F_SETFL, ::fcntl(pipe_fds[0], F_GETFD) | O_NONBLOCK); ::fcntl(pipe_fds[1], F_SETFL, ::fcntl(pipe_fds[1], F_GETFD) | O_NONBLOCK); boson::internal::netpoller<int> loop(handler_instance); // Test 1 loop.signal_new_fd(pipe_fds[0]); loop.signal_new_fd(pipe_fds[1]); loop.register_read(pipe_fds[0], 1); loop.register_write(pipe_fds[1], 2); std::thread t1{[&loop]() { loop.wait(); }}; t1.join(); CHECK(handler_instance.last_write_fd == 2); CHECK(handler_instance.last_status == 0); // Test 2 std::thread t2{[&loop]() { loop.wait(); }}; size_t data{1}; ::write(pipe_fds[1], &data, sizeof(size_t)); t2.join(); CHECK(handler_instance.last_read_fd == 1); } TEST_CASE("Netpoller - FD Read/Write same FD", "[netpoller][read/write]") { #ifdef WINDOWS #else int sv[2] = {}; int rc = ::socketpair(AF_UNIX, SOCK_STREAM, 0, sv); REQUIRE(rc == 0); handler01 handler_instance; boson::internal::netpoller<int> loop(handler_instance); loop.signal_new_fd(sv[0]); loop.register_read(sv[0], 1); loop.register_write(sv[0], 2); loop.wait(0); CHECK(handler_instance.last_read_fd == -1); CHECK(handler_instance.last_write_fd == 2); CHECK(handler_instance.last_status == 0); // Write at the 
other end, it should work even though we suppressed the other event size_t data{1}; ::send(sv[1],&data, sizeof(size_t),0); handler_instance.last_write_fd = -1; loop.unregister_write(sv[0]); loop.wait(); CHECK(handler_instance.last_read_fd == 1); CHECK(handler_instance.last_write_fd == -1); // This is unexpected but a write event happens here CHECK(handler_instance.last_status == 0); handler_instance.last_read_fd = -1; handler_instance.last_write_fd = -1; loop.wait(0); CHECK(handler_instance.last_read_fd == -1); CHECK(handler_instance.last_write_fd == -1); CHECK(handler_instance.last_status == 0); loop.register_read(sv[0], 1); loop.register_write(sv[0], 2); loop.signal_fd_closed(sv[0]); ::shutdown(sv[0], SHUT_WR); ::shutdown(sv[1], SHUT_WR); ::close(sv[0]); ::close(sv[1]); loop.wait(0); CHECK(handler_instance.last_read_fd == 1); CHECK(handler_instance.last_write_fd == 2); CHECK(handler_instance.last_status == -EBADF); #endif }
// AMD mixin providing CSS-class-based animations for page views.
// (Fixed: removed a dataset artifact line before `define` that made the file
// invalid JavaScript.)
define(["css!bin/res/css/pageViewAnimation"], function() {

    // Names of the CSS animations shipped in pageViewAnimation.css.
    var animations = [
        'slideInLeft',
        'slideInRight',
        'slideOutLeft',
        'slideOutRight',
        'fadeIn',
        'fadeOut'
    ];

    var Class = {};

    /**
     * Run the named CSS animation on this view's root element, removing the
     * classes and invoking onFinish (if given) once the animation ends.
     * Listens for all vendor-prefixed animationend events via .one(), so the
     * handler fires at most once.
     */
    Class.animate = function(name, onFinish) {
        var self = this;
        var css = 'animated '+name;
        this.$().addClass(css);
        this.$().one('webkitAnimationEnd mozAnimationEnd MSAnimationEnd oanimationend animationend', function() {
            self.$().removeClass(css);
            if(onFinish) {
                onFinish();
            }
        });
    }

    var funName = null;
    // Factory so each generated method closes over its own `name`
    // (avoids the classic var-in-loop closure pitfall).
    var genAnimateFunc = function(name) {
        return function(onFinish) {
            this.animate(name, onFinish);
        }
    }

    // Generate Class.animateSlideInLeft, Class.animateFadeOut, etc.
    for(var i=0,i_sz=animations.length; i<i_sz; ++i) {
        var aniName = animations[i];
        funName = 'animate' + aniName.charAt(0).toUpperCase() + aniName.substring(1);
        Class[funName] = genAnimateFunc(aniName);
    }

    return Class;
});
/*
 * List.java
 *
 * Created on May 30, 2007, 9:36 PM
 *
 * From "Multiprocessor Synchronization and Concurrent Data Structures",
 * by <NAME> and <NAME>.
 *
 * This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.
 * http://i.creativecommons.org/l/by-sa/3.0/us/88x31.png
 */

package tamp.ch18.TinyTM.list.ofree;

import tamp.ch18.TinyTM.list.INode;

import java.util.Iterator;

/**
 * Sorted singly-linked list keyed by {@code hashCode()}, bracketed by
 * {@code Integer.MIN_VALUE} / {@code Integer.MAX_VALUE} sentinel nodes so
 * traversal never falls off either end.
 *
 * @param <T> type
 * @author <NAME>
 */
public class List<T> implements Iterable<T> {
    protected INode<T> first;

    public List() {
        first = new TNode<T>(Integer.MIN_VALUE, null);
        INode<T> next = new TNode<T>(Integer.MAX_VALUE, null);
        first.setNext(next);
    }

    /**
     * Add an element to the list, if it is not already there.
     *
     * @param item object to be added
     * @return true iff list was modified
     */
    public boolean add(T item) {
        int key = item.hashCode();
        INode<T> newNode = new TNode<T>(item);
        INode<T> prevNode = this.first;
        INode<T> currNode = prevNode.getNext();
        // Walk to the first node whose key is >= the new key.
        while (currNode.getKey() < key) {
            prevNode = currNode;
            currNode = prevNode.getNext();
        }
        if (currNode.getKey() == key) {
            return false;
        } else {
            newNode.setNext(prevNode.getNext());
            prevNode.setNext(newNode);
            return true;
        }
    }

    /**
     * Tests whether a value is in the list.
     *
     * @param item sought item
     * @return true iff presence was confirmed.
     */
    public boolean contains(T item) {
        int key = item.hashCode();
        // Fixed: use the parameterized INode<T> instead of the raw type INode.
        INode<T> prevNode = this.first;
        INode<T> currNode = prevNode.getNext();
        while (currNode.getKey() < key) {
            prevNode = currNode;
            currNode = prevNode.getNext();
        }
        // Simplified from if/else returning true/false.
        return currNode.getKey() == key;
    }

    /**
     * Removes an element from the list, if it is there.
     *
     * @param v the integer value to delete from the set
     * @return true iff v was removed
     * @throws java.lang.InstantiationException
     * @throws java.lang.IllegalAccessException
     */
    public boolean remove(int v)
            throws InstantiationException, IllegalAccessException {
        INode<T> prevNode = this.first;
        INode<T> currNode = prevNode.getNext();
        while (currNode.getKey() < v) {
            prevNode = currNode;
            currNode = prevNode.getNext();
        }
        if (currNode.getKey() == v) {
            prevNode.setNext(currNode.getNext());
            return true;
        } else {
            return false;
        }
    }

    /** Iterates the payload items, stopping at the MAX_VALUE sentinel. */
    public Iterator<T> iterator() {
        return new Iterator<T>() {
            INode<T> cursor;

            {
                cursor = first.getNext();
            }

            public boolean hasNext() {
                return cursor.getKey() != Integer.MAX_VALUE;
            }

            public T next() {
                INode<T> node = cursor;
                cursor = cursor.getNext();
                return node.getItem();
            }

            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }
}
from django.apps import AppConfig


class VersiondashboardConfig(AppConfig):
    """Django application configuration for the ``versionDashboard`` app."""

    # App label/dotted path Django uses to register the application.
    name = 'versionDashboard'
<reponame>zrwusa/expo-bunny<gh_stars>1-10 import React, {Component} from 'react'; import {StyleProp, StyleSheet, Text, View, ViewStyle} from 'react-native'; // TODO: support web import {IMessage} from './types'; import {Video, VideoProps} from '../../../packages/expo-av/src'; import {SizeLabor, ThemeLabor} from '../../types'; import {withBunnyKit, WithBunnyKit} from '../../hooks/bunny-kit'; const makeStyles = (sizeLabor: SizeLabor, themeLabor: ThemeLabor) => { const {wp} = sizeLabor.designsBasedOn.iphoneX; return StyleSheet.create({ container: {}, video: { width: wp(240), height: wp(160), borderRadius: wp(13), margin: wp(3), resizeMode: 'cover', } }); }; export interface MessageVideoProps<TMessage extends IMessage> { currentMessage?: TMessage; videoContainerStyle?: StyleProp<ViewStyle>; videoStyle?: StyleProp<ViewStyle>; videoProps?: VideoProps; isDebug?: boolean; onMessageLoad?(currentMessage: TMessage): void; onMessageLoadStart?(currentMessage: TMessage): void; onMessageLoadEnd?(currentMessage: TMessage): void; onMessageReadyForDisplay?(currentMessage: TMessage): void; onMessageLoadError?(e: Error, currentMessage: TMessage): void; } class MessageVideo<TMessage extends IMessage> extends Component<MessageVideoProps<TMessage> & WithBunnyKit> { static defaultProps = { currentMessage: undefined, videoContainerStyle: {}, videoStyle: {}, videoProps: {}, onMessageLoad: undefined, onMessageLoadStart: undefined, onMessageLoadEnd: undefined, onMessageReadyForDisplay: undefined, onMessageLoadError: undefined, isDebug: false, }; render() { const { videoContainerStyle, videoProps, videoStyle, currentMessage, isDebug, } = this.props; isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', '[level4]MessageVideo props', this.props); const {bunnyKit: {sizeLabor, themeLabor}} = this.props; const styles = makeStyles(sizeLabor, themeLabor); return ( <View style={[styles.container, videoContainerStyle]}> { currentMessage ? currentMessage.video ? 
<Video style={[styles.video, videoStyle]} useNativeControls resizeMode="contain" source={{uri: currentMessage.video}} onLoad={() => { isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', 'MessageVideo onLoad'); this.props.onMessageLoad?.(currentMessage); }} onLoadStart={() => { isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', 'MessageVideo onLoadStart'); this.props.onMessageLoadStart?.(currentMessage); }} onReadyForDisplay={() => { isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', 'MessageVideo onReadyForDisplay'); this.props.onMessageLoadEnd?.(currentMessage); isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', 'MessageVideo onMessageReadyForDisplay'); this.props.onMessageReadyForDisplay?.(currentMessage); }} onError={(e) => { isDebug && console.log('%c[ chat ]', 'background: #555; color: #bada55', 'MessageVideo onError', e); this.props.onMessageLoadError?.(new Error(e), currentMessage); }} {...videoProps} /> : <Text>{'currentMessage.video is undefined'}</Text> : <Text>{'currentMessage is undefined'}</Text> } </View> ); } } export default withBunnyKit(MessageVideo);
/*
 * RateLimiter.h
 *
 * Created on: Sep 27, 2013
 *     Author: <NAME> <<EMAIL>>
 *
 * (Fixed: removed a dataset artifact line preceding this header comment that
 * made the file invalid C++.)
 */

#ifndef RATELIMITER_H_
#define RATELIMITER_H_

#include <thread>
#include <chrono>

/**
 * Throttles a loop to a fixed frequency and can optionally measure how much
 * of each period the caller's critical section consumes.
 */
class RateLimiter
{
private:
    std::chrono::milliseconds _msToWait;
    std::chrono::time_point<std::chrono::high_resolution_clock> _nextTime;
    bool _checkload;
    float _msPerLoop;

public:
    /**
     * Provides a limiting mechanism to functions
     *
     * @param hz - the number of hertz to run this function.
     * @param loadcheck - whether or not to record the load of the RateLimiter between
     * wait() and finishedCriticalSection()
     */
    RateLimiter(int hz, bool loadcheck=false);
    virtual ~RateLimiter();

    /**
     * This function should be called within loops to slow them down if necessary
     * it will return when it is time to "wake up"
     *
     * if load checking is enabled, returns the proportion of time used to time
     * allotted for each loop, a number > 1 means that the time taken is
     * exceeding the time allotted.
     */
    float wait();

    /**
     * This function is called when the loop is finished with one iteration,
     * it gives the OS an opportunity to do some cleanup and go about doing other
     * things.
     */
    void finishedCriticalSection();
};

#endif /* RATELIMITER_H_ */
/**
 * Resolve after `millis` milliseconds.
 *
 * The original wrapped the promise in an extra `async`/`await` layer, which
 * adds a needless microtask hop; returning the timer promise directly is
 * observably equivalent (both resolve to `undefined` after the delay).
 *
 * @param {number} millis - delay in milliseconds
 * @returns {Promise<undefined>} promise that resolves after the delay
 */
const sleep = (millis) => new Promise((resolve) => setTimeout(resolve, millis));

module.exports = { sleep };
#!/bin/bash

# Unless explicitly stated otherwise all files in this repository are licensed
# under the Apache License Version 2.0.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

# Run unit tests in Docker, once per supported Ruby version.
# set -e: abort the loop on the first failing build/test run.
set -e

RUBY_VERSIONS=("2.5" "2.7")

for ruby_version in "${RUBY_VERSIONS[@]}"
do
    echo "Running tests against ruby${ruby_version}"
    # Build a throwaway test image based on the matching upstream ruby image.
    # FIX: quote expansions — the original used bare $ruby_version and `pwd`,
    # which word-split if the checkout path contains spaces.
    docker build -t "datadog-lambda-layer-ruby-test:$ruby_version" \
        -f scripts/Dockerfile_test . \
        --quiet \
        --build-arg "image=ruby:$ruby_version"
    # Mount the working tree read-write and run the suite inside the container.
    docker run --rm -v "$(pwd)":/datadog-lambda-layer-ruby \
        -w /datadog-lambda-layer-ruby \
        "datadog-lambda-layer-ruby-test:$ruby_version" \
        bundle exec rake test
done
#!/bin/sh
# Delete m4a/mp3s of which I have flacs.
# Strategy: for every *.flac known to mpd, look up sibling files whose name
# starts with the same stem followed by ".m" (matches .m4a/.mp3) and remove them.

# update the database so the search results reflect the filesystem
mpc -q update --wait

# FIX: guard the cd — the original ran `cd $MUSIC` unquoted and unchecked, so
# with $MUSIC unset (or containing spaces) the pipeline below would run
# `xargs rm` from whatever directory we happened to be in.
cd "$MUSIC" || exit 1

# read -r: do not let backslashes in filenames be mangled.
# xargs -r: GNU no-run-if-empty — skip rm entirely when nothing matched
# (the original would invoke rm with no operands and print an error).
mpc search filename flac | sed 's/\.flac$//' |
while read -r i; do
    mpc search filename "$i.m"
done | xargs -d '\n' -r rm

# update again so mpd forgets the deleted files
mpc -q update --wait
import {jpBasePath} from "./_base";

// Edit-screen route under the base path; the `:personId` segment is a path
// parameter the caller substitutes (typed via the appendPathSegment generic).
export const jpPathEdit = jpBasePath.appendPathSegment<{ personId: string }>("/edit/:personId");
<gh_stars>1-10
package gv
package jleon4

import language.{ implicitConversions }

// Cake-pattern slice providing the StorageFactory type class and its ops.
// The self-type below declares every sibling package trait this slice needs
// (Config, TypeClass, StorageMap, Storage, ...) — it must be mixed into a
// single object together with all of them.
trait StorageFactoryPackage {
  // format: OFF
  this: Any
    with ConfigPackage
    with Util
    with TypeClassPackage
    with StorageMapPackage
    with StorageMapInterpretationsPackage
    with StoragePackage
    with StorageInterpretationsPackage ⇒
  // format: ON

  // Type class: evidence that a T can be viewed through StorageFactory.Ops.
  trait StorageFactory[-T] extends Any with TypeClass.WithTypeParams[T, StorageFactory.Ops]

  //noinspection TypeAnnotation
  object StorageFactory extends TypeClassCompanion[StorageFactory] {
    trait Ops extends Any {
      // Concrete Config flavour supplied by the implementing instance.
      type Config <: StorageFactoryPackage.this.Config.Ops
      def config: Config

      // Builds a StorageMap from the configured "basePath" URI.
      // NOTE(review): `couldBe`/HNil come from the mixed-in packages above —
      // presumably a capability-construction helper; confirm against TypeClassPackage.
      final def storageMap = couldBe[StorageMap.Ops] { (config getUri "basePath") :: HNil }

      // Builds a Storage on top of the storageMap built above.
      final def storage = couldBe[Storage.Ops] { storageMap :: HNil }
    }
    object Ops
  }

  // Implicit enrichment: any T with StorageFactory evidence gets the Ops API.
  implicit def storageFactoryOps[T: StorageFactory](self: T): StorageFactory.Ops = StorageFactory[T](self)
}
import styled from 'styled-components';

// Interpolation helper: emit the uppercase transform only when the
// `uppercase` prop is truthy (an empty value drops the declaration).
const headingTransform = ({ uppercase }) => (uppercase ? 'uppercase' : '');

// Section heading with bottom spacing and optional uppercasing.
export default styled.h2`
  margin-bottom: 1.2em;
  text-transform: ${headingTransform};
`;
<reponame>shindakun/go-lytics package lytics import ( "github.com/bmizerany/assert" "testing" ) func TestParseLyticsURL(t *testing.T) { str := "this/:id/a/:value" params := map[string]string{ "id": "is", "value": "test", } res := parseLyticsURL(str, params) assert.Equal(t, res, "this/is/a/test") }
-- Dispatch the bonus calculation by role: employees with at least one
-- assigned customer are treated as sales reps, everyone else gets the
-- non-rep calculation.
IF customers_for_rep(in_employee_id) > 0 THEN
    CALL calc_sales_rep_bonus(in_employee_id);
ELSE
    CALL calc_nonrep_bonus(in_employee_id);
END IF;
#!/usr/bin/env bash
#
# This file invokes cmake and generates the build system for Clang.
#

if [ $# -lt 3 -o $# -gt 5 ]
then
  echo "Usage..."
  echo "gen-buildsys-clang.sh <path to top level CMakeLists.txt> <ClangMajorVersion> <ClangMinorVersion> <Architecture> [build flavor]"
  echo "Specify the path to the top level CMake file - <corert>/src/Native"
  echo "Specify the clang version to use, split into major and minor version"
  echo "Specify the target architecture."
  echo "Optionally specify the build configuration (flavor.) Defaults to DEBUG."
  exit 1
fi

# Set up the environment to be used for building with clang.
# Prefer a versioned binary (clang-3.9 / clang39), then fall back to plain clang.
if command -v "clang-$2.$3" > /dev/null 2>&1
    then
        export CC="$(command -v clang-$2.$3)"
        export CXX="$(command -v clang++-$2.$3)"
elif command -v "clang$2$3" > /dev/null 2>&1
    then
        export CC="$(command -v clang$2$3)"
        export CXX="$(command -v clang++$2$3)"
elif command -v clang > /dev/null 2>&1
    then
        export CC="$(command -v clang)"
        export CXX="$(command -v clang++)"
else
    echo "Unable to find Clang Compiler"
    exit 1
fi

build_arch="$4"
if [ -z "$5" ]; then
    echo "Defaulting to DEBUG build."
    build_type="DEBUG"
else
    # Possible build types are DEBUG, RELEASE
    build_type="$(echo $5 | awk '{print toupper($0)}')"
    if [ "$build_type" != "DEBUG" ] && [ "$build_type" != "RELEASE" ]; then
        echo "Invalid Build type, only debug or release is accepted."
        exit 1
    fi
fi

OS=`uname`

# Locate llvm
# This can be a little complicated, because the common use-case of Ubuntu with
# llvm-3.5 installed uses a rather unusual llvm installation with the version
# number postfixed (i.e. llvm-ar-3.5), so we check for that first.
# On FreeBSD the version number is appended without point and dash (i.e.
# llvm-ar35). Additionally, OSX doesn't use the llvm- prefix.
if [ $OS = "Linux" -o $OS = "FreeBSD" -o $OS = "OpenBSD" -o $OS = "NetBSD" ]; then
    llvm_prefix="llvm-"
elif [ $OS = "Darwin" ]; then
    llvm_prefix=""
else
    echo "Unable to determine build platform"
    exit 1
fi

desired_llvm_major_version=$2
desired_llvm_minor_version=$3
if [ $OS = "FreeBSD" ]; then
    desired_llvm_version="$desired_llvm_major_version$desired_llvm_minor_version"
elif [ $OS = "OpenBSD" ]; then
    desired_llvm_version=""
elif [ $OS = "NetBSD" ]; then
    desired_llvm_version=""
else
    desired_llvm_version="-$desired_llvm_major_version.$desired_llvm_minor_version"
fi

# Echo the full path of the requested llvm tool, trying the versioned name
# first and the unversioned one second; exits 1 if neither exists.
locate_llvm_exec() {
    if command -v "$llvm_prefix$1$desired_llvm_version" > /dev/null 2>&1
    then
        echo "$(command -v $llvm_prefix$1$desired_llvm_version)"
    elif command -v "$llvm_prefix$1" > /dev/null 2>&1
    then
        echo "$(command -v $llvm_prefix$1)"
    else
        exit 1
    fi
}

llvm_ar="$(locate_llvm_exec ar)"
[[ $? -eq 0 ]] || { echo "Unable to locate llvm-ar"; exit 1; }
llvm_link="$(locate_llvm_exec link)"
[[ $? -eq 0 ]] || { echo "Unable to locate llvm-link"; exit 1; }
llvm_nm="$(locate_llvm_exec nm)"
[[ $? -eq 0 ]] || { echo "Unable to locate llvm-nm"; exit 1; }
# FIX: llvm_ranlib is passed to cmake below (-DCMAKE_RANLIB=...) but was never
# located, so CMAKE_RANLIB was always set to an empty string.
llvm_ranlib="$(locate_llvm_exec ranlib)"
[[ $? -eq 0 ]] || { echo "Unable to locate llvm-ranlib"; exit 1; }
if [ $OS = "Linux" -o $OS = "FreeBSD" -o $OS = "OpenBSD" -o $OS = "NetBSD" ]; then
    llvm_objdump="$(locate_llvm_exec objdump)"
    [[ $? -eq 0 ]] || { echo "Unable to locate llvm-objdump"; exit 1; }
fi

cmake_extra_defines=
if [[ -n "$LLDB_LIB_DIR" ]]; then
    cmake_extra_defines="$cmake_extra_defines -DWITH_LLDB_LIBS=$LLDB_LIB_DIR"
fi
if [[ -n "$LLDB_INCLUDE_DIR" ]]; then
    cmake_extra_defines="$cmake_extra_defines -DWITH_LLDB_INCLUDES=$LLDB_INCLUDE_DIR"
fi
if [[ -n "$CROSSCOMPILE" ]]; then
    if ! [[ -n "$ROOTFS_DIR" ]]; then
        echo "ROOTFS_DIR not set for crosscompile"
        exit 1
    fi
    if [[ -z $CONFIG_DIR ]]; then
        CONFIG_DIR="$1/cross"
    fi
    export TARGET_BUILD_ARCH=$build_arch
    cmake_extra_defines="$cmake_extra_defines -C $CONFIG_DIR/tryrun.cmake"
    cmake_extra_defines="$cmake_extra_defines -DCMAKE_TOOLCHAIN_FILE=$CONFIG_DIR/toolchain.cmake"
fi

# wasm builds go through emscripten's cmake wrapper; native builds call cmake
# directly with the llvm binutils located above.
if [ $build_arch == "wasm" ]; then
    emcmake cmake \
        "-DEMSCRIPTEN_GENERATE_BITCODE_STATIC_LIBRARIES=1" \
        "-DCMAKE_TOOLCHAIN_FILE=$EMSCRIPTEN/cmake/Modules/Platform/Emscripten.cmake" \
        "-DCLR_CMAKE_TARGET_ARCH=$build_arch" \
        "-DCMAKE_BUILD_TYPE=$build_type" \
        "$1/src/Native"
else
    cmake \
        "-DCMAKE_AR=$llvm_ar" \
        "-DCMAKE_LINKER=$llvm_link" \
        "-DCMAKE_NM=$llvm_nm" \
        "-DCMAKE_OBJDUMP=$llvm_objdump" \
        "-DCMAKE_RANLIB=$llvm_ranlib" \
        "-DCMAKE_BUILD_TYPE=$build_type" \
        "-DCLR_CMAKE_TARGET_ARCH=$build_arch" \
        "-DOBJWRITER_BUILD=${__ObjWriterBuild}" \
        "-DCROSS_BUILD=${__CrossBuild}" \
        $cmake_extra_defines \
        "$1/src/Native"
fi
# Append $1/lib/haxe to HAXELIB_PATH — but skip inputs that carry a
# lib/haxe/std directory (presumably the Haxe compiler's own tree; confirm).
addHaxeLibPath() {
    if [ -d "$1/lib/haxe/std" ]; then
        return
    fi
    addToSearchPath HAXELIB_PATH "$1/lib/haxe"
}

# Register the hook so it runs for every build input.
envHooks+=(addHaxeLibPath)
// Month names indexed by (month number - 1); hoisted so the array is not
// rebuilt on every call.
const MONTH_NAMES = ['January', 'February', 'March', 'April', 'May', 'June',
  'July', 'August', 'September', 'October', 'November', 'December'];

/**
 * Format a US-style "M/D/YYYY" date string as "MonthName D, YYYY".
 *
 * The day and year parts are passed through verbatim (a leading zero on the
 * day is kept, matching the original behavior). The month is converted with
 * an explicit Number() instead of relying on implicit string coercion.
 *
 * @param {string} dateString - date in "month/day/year" form
 * @returns {string} e.g. "March 14, 2016"
 */
function formatDate(dateString) {
  const [month, day, year] = dateString.split('/');
  return `${MONTH_NAMES[Number(month) - 1]} ${day}, ${year}`;
}
#include <fstream> #include <string_view> #include "gtest/gtest.h" #include "program.h" #include "types.h" #include "utils.h" #include "int.h" TEST(Utilities, ByteToBytes) { auto bytes = chia::utils::ByteToBytes('\1'); EXPECT_EQ(bytes[0], '\1'); } TEST(Utilities, Bytes) { EXPECT_EQ(chia::utils::Byte4bToHexChar(10), 'a'); EXPECT_EQ(chia::utils::HexCharToByte4b('b'), 11); chia::Bytes bytes(2); bytes[0] = 0xab; bytes[1] = 0xef; EXPECT_EQ(chia::utils::BytesToHex(bytes), "abef"); EXPECT_EQ(chia::utils::BytesFromHex("abef"), bytes); chia::Bytes empty; EXPECT_TRUE(chia::utils::ConnectBuffers(empty, empty).empty()); EXPECT_EQ(chia::utils::ConnectBuffers(bytes, bytes), chia::utils::BytesFromHex("abefabef")); EXPECT_EQ(chia::utils::ConnectBuffers(empty, bytes), chia::utils::BytesFromHex("abef")); EXPECT_EQ(chia::utils::ConnectBuffers(bytes, empty), chia::utils::BytesFromHex("abef")); } TEST(Utilities, IntBigEndianConvertion) { EXPECT_EQ(chia::Int(chia::utils::SerializeBytes(0x01, 0x02)).ToInt(), 0x0102); } std::string_view const s0 = "../clvm/calculate_synthetic_public_key.clvm.hex"; std::string_view const s0_treehash = "../clvm/calculate_synthetic_public_key.clvm.hex.sha256tree"; std::string_view const s1 = "../clvm/p2_delegated_puzzle_or_hidden_puzzle.clvm.hex"; std::string_view const s1_treehash = "../clvm/p2_delegated_puzzle_or_hidden_puzzle.clvm.hex.sha256tree"; TEST(CLVM_SHA256_treehash, LoadAndVerify_s0) { auto prog = chia::Program::LoadFromFile(s0); auto treehash_bytes = chia::utils::BytesFromHex(chia::utils::LoadHexFromFile(s0_treehash)); EXPECT_EQ(chia::utils::bytes_cast<32>(prog.GetTreeHash()), treehash_bytes); } TEST(CLVM_SHA256_treehash, LoadAndVerify_s1) { auto prog = chia::Program::LoadFromFile(s1); auto treehash_bytes = chia::utils::BytesFromHex(chia::utils::LoadHexFromFile(s1_treehash)); EXPECT_EQ(chia::utils::bytes_cast<32>(prog.GetTreeHash()), treehash_bytes); }
import React from 'react'; import styled from 'styled-components'; interface IProps {} const Styled = styled.button` min-width: 18px; height: 18px; `; const WarningVector = React.memo((props: any) => { return ( <svg width="1em" height="1em" viewBox="0 0 16 15" {...props}> <path d="M13.788 14.633c1.098 0 1.787-.79 1.787-1.787 0-.3-.074-.6-.235-.879L9.576 1.677a1.763 1.763 0 00-3.083 0L.736 11.975a1.798 1.798 0 00-.242.871c0 .996.689 1.787 1.787 1.787h11.507zm-5.75-4.892c-.38 0-.586-.22-.593-.608l-.103-3.985c-.007-.388.279-.666.689-.666.403 0 .703.286.696.674l-.103 3.977c-.007.395-.22.608-.586.608zm0 2.453c-.432 0-.813-.351-.813-.783 0-.44.374-.791.813-.791.447 0 .82.344.82.79 0 .44-.38.784-.82.784z" fill="#000" fillRule="nonzero" /> </svg> ); }); const Warning = (props: IProps & React.ButtonHTMLAttributes<HTMLButtonElement>) => { return ( <Styled className="icon waring-icon" {...props}> <WarningVector /> </Styled> ); }; export default React.memo(Warning);
package com.javatar.androidfirabasedatabase.model;

/**
 * Plain data holder for a user record with a display name and email.
 *
 * Created by gokhan on 12-Mar-17.
 */
public class User {

    // Public fields rather than getters/setters — presumably read/written by
    // a serialization framework via reflection; confirm against the callers.
    public String name;
    public String email;

    // No-argument constructor — commonly required by reflective
    // deserialization; do not remove without checking usages.
    public User() {
    }

    public User(String name, String email) {
        this.name = name;
        this.email = email;
    }
}
<filename>tapestry-core/src/main/java/org/apache/tapestry5/internal/util/NotificationEventCallback.java // Copyright 2006, 2008 The Apache Software Foundation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.apache.tapestry5.internal.util; import org.apache.tapestry5.ComponentEventCallback; /** * A {@link org.apache.tapestry5.ComponentEventCallback} used for notification events. Event handler methods may return * true (to abort the event) or false (to allow the event to continue bubbling up), but all other values are forbidden. */ public class NotificationEventCallback implements ComponentEventCallback { private final String eventType; private final String completeId; public NotificationEventCallback(String eventType, String completeId) { this.eventType = eventType; this.completeId = completeId; } public boolean handleResult(Object result) { if (result instanceof Boolean) return ((Boolean) result); throw new IllegalArgumentException( String.format("Event '%s' from %s received an event handler method return value of %s. This type of event does not support return values from event handler methods.", eventType, completeId, String .valueOf(result))); } }
<gh_stars>10-100
// deno-lint-ignore-file no-explicit-any
import {
  Body,
  Controller,
  Get,
  Headers,
  Post,
  Query,
  Res,
  Response,
  UseInterceptors,
} from "../../../mod.ts";
import type { Context } from "../../../mod.ts";
import { RoleService } from "../services/role.service.ts";
import { Add } from "../../decorators/add.ts";
import { RoleInfoDto } from "../dtos/role.dto.ts";
import { AsyncService } from "../../asyncModule/async.service.ts";
// import { LoggingInterceptor } from "../../interceptor/log.interceptor.ts";
import { TransformInterceptor } from "../../interceptor/transform.interceptor.ts";
import { ErrorsInterceptor } from "../../interceptor/errors.interceptor.ts";
import { CacheInterceptor } from "../../../modules/cache/mod.ts";
import { LoggerService } from "../services/logger.service.ts";

/**
 * Demo controller mounted at /role. All routes pass through CacheInterceptor
 * (class-level decorator); individual routes stack further interceptors.
 */
@Controller("/role")
@UseInterceptors(CacheInterceptor)
export class RoleController {
  // Services are injected by the framework's DI container via the constructor.
  constructor(
    private readonly roleService: RoleService,
    private readonly asyncService: AsyncService,
    private readonly loggerService: LoggerService,
  ) {
    this.loggerService.info("role");
  }

  // GET /role/info/:id — writes directly to the response body.
  // `name` is supplied by the custom @Add() decorator (semantics defined in
  // add.ts — presumably a computed/injected value; confirm there).
  @Get("/info/:id")
  test(
    context: Context,
    @Add() name: string,
    @Query() params: any,
    @Query("age") age: string,
  ) {
    this.loggerService.info("info test", name, params, age);
    console.log(params, age);
    context.response.body = "role info " + name + " - " +
      JSON.stringify(params);
  }

  // GET /role/info — combines query params with output from two services.
  @Get("/info")
  async getInfo(@Res() res: Response, @Query() params: any) {
    console.log("params is ", params);
    res.body = "role get info " + JSON.stringify(params) + " - " +
      await this.roleService.info() + "-\n" + this.asyncService.info();
  }

  // GET /role/testInterceptor — return value is post-processed by
  // TransformInterceptor (stacked on top of the class-level cache).
  @Get("/testInterceptor")
  @UseInterceptors(TransformInterceptor)
  testInterceptor(@Query() params: any) {
    console.log("params is ", params);
    return "role testInterceptor " + JSON.stringify(params);
  }

  // GET /role/testErrorInterceptor — throws on purpose so ErrorsInterceptor
  // can be observed handling the failure.
  @Get("/testErrorInterceptor")
  @UseInterceptors(ErrorsInterceptor)
  testErrorInterceptor() {
    throw new Error("testErrorInterceptor");
  }

  // GET /role/delay — resolves after 1s; useful for exercising the cache.
  @Get("/delay")
  delay(@Query("id") id: string) {
    return new Promise((resolve) => {
      setTimeout(() => {
        resolve("delay " + id);
      }, 1000);
    });
  }

  // POST /role/info — body is bound (and presumably validated) as RoleInfoDto.
  @Post("/info")
  info(
    @Add() name: string,
    @Body() params: RoleInfoDto,
    @Headers() headers: any,
    @Headers("host") host: any,
  ) {
    console.log("ctx", name, params, headers, host);
    // res.body = "role info old " + name;
    return "role info " + name;
  }
}