text
stringlengths
1
1.05M
import React from 'react' import { bindActionCreators } from 'redux' import { connect } from 'react-redux' import * as uiActions from '../actions/uiActions' import Card from './Card' const Home = props => { const handleSubmit = () => { let i = document.getElementsByName('search_input')[0] props.uiActions.searchMeme(i.value) } return( <div> <section className="hero is-info"> <div className="hero-body"> <h1 className="title is-1">MemeHunter</h1> <div className="field has-addons"> <div className="control"> <input name="search_input" className="input" type="text" placeholder="Find a meme"/> </div> <div className="control"> <a className="button is-warning" onClick={handleSubmit}> Search </a> </div> </div> </div> </section> <section className="section"> <div className="columns is-multiline"> { props.ui.memeList.map(m=>( <div key={m.id} className="column is-4"> <Card title={m.title} url={m.url}/> </div> )) } </div> </section> </div> ) } const mapStateToProps = state => ( { ui: state.ui } ); const mapDispatchToProps = dispatch => ( { uiActions: bindActionCreators(uiActions, dispatch) } ); export default connect( mapStateToProps, mapDispatchToProps)(Home)
/*
  Copyright (C) 2006 <NAME>

  This software is provided 'as-is', without any express or implied
  warranty.  In no event will the authors be held liable for any damages
  arising from the use of this software.

  Permission is granted to anyone to use this software for any purpose,
  including commercial applications, and to alter it and redistribute it
  freely, subject to the following restrictions:

  1. The origin of this software must not be misrepresented; you must not
     claim that you wrote the original software. If you use this software
     in a product, an acknowledgment in the product documentation would be
     appreciated but is not required.
  2. Altered source versions must be plainly marked as such, and must not be
     misrepresented as being the original software.
  3. This notice may not be removed or altered from any source distribution.

  <NAME> <EMAIL>
*/
package ikvm.internal;

import cli.System.IFormatProvider;
import cli.System.IFormattable;
import ikvm.lang.CIL;
import ikvm.lang.Internal;

/**
 * Formats boxed Java primitive wrappers through the .NET formatting
 * machinery: each overload unboxes the Java wrapper, re-boxes the value as
 * its CLI counterpart via {@code ikvm.lang.CIL}, and delegates to the CLI
 * type's {@code ToString(format, provider)}.
 */
@Internal
public final class Formatter
{
    // Static utility class; never instantiated.
    private Formatter() {}

    /** Format a Byte via the CLI sbyte's ToString(format, provider). */
    public static String ToString(Byte b, String format, IFormatProvider provider)
    {
        return CIL.box_sbyte(b.byteValue()).ToString(format, provider);
    }

    /** Format a Short via the CLI short's ToString(format, provider). */
    public static String ToString(Short s, String format, IFormatProvider provider)
    {
        return CIL.box_short(s.shortValue()).ToString(format, provider);
    }

    /** Format an Integer via the CLI int's ToString(format, provider). */
    public static String ToString(Integer i, String format, IFormatProvider provider)
    {
        return CIL.box_int(i.intValue()).ToString(format, provider);
    }

    /** Format a Long via the CLI long's ToString(format, provider). */
    public static String ToString(Long l, String format, IFormatProvider provider)
    {
        return CIL.box_long(l.longValue()).ToString(format, provider);
    }

    /** Format a Float via the CLI float's ToString(format, provider). */
    public static String ToString(Float f, String format, IFormatProvider provider)
    {
        return CIL.box_float(f.floatValue()).ToString(format, provider);
    }

    /** Format a Double via the CLI double's ToString(format, provider). */
    public static String ToString(Double d, String format, IFormatProvider provider)
    {
        return CIL.box_double(d.doubleValue()).ToString(format, provider);
    }
}
#!/bin/sh
# Query crt.sh (certificate transparency logs) for certificates issued to a
# given organisation and print every DNS entry found in those certificates.
#
# Usage: ./script.sh ORGANISATION NAME...
#
# Requires: curl, and a grep with PCRE support (-P, i.e. GNU grep).
cert() {
    # Join all arguments into one organisation name, then URL-encode spaces
    # as '+' for the crt.sh query string.
    org="$*"
    org=$(echo "$org" | sed 's/ /+/g')

    # Scrape the certificate ids from the organisation search results.
    ids=$(curl -ks "https://crt.sh/?O=$org" | grep -Po "(?<=\?id=).*(?=\")")

    # For each certificate, extract the DNS: subject-alternative-name entries
    # from the detail page.
    for id in $ids; do
        curl -ks "https://crt.sh/?id=$id" | grep -Po "(?<=DNS:).*?(?=<BR)"
    done
}

# Quote "$@" so multi-word organisation names survive word-splitting and
# globbing intact (the original unquoted $@ mangled names containing * or
# multiple spaces).
cert "$@"
/**
 * Front-end shopping cart for BPT ticket purchases.
 *
 * Talks to WordPress admin-ajax endpoints (bpt_get_cart_contents,
 * bpt_add_prices, bpt_remove_price) via the `bptPurchaseTickets` global
 * (ajaxurl / nonce / templateUrl localized by the plugin) and renders the
 * cart with a Ractive view bound to #bpt-shopping-cart.
 */
(function($) {
    'use strict';

    /**
     * Cart controller: wraps the AJAX endpoints and pushes results into the
     * Ractive instance supplied as options.shoppingCart.
     */
    var ManageCart = function(options) {
        var ajaxOptions = {
                url: bptPurchaseTickets.ajaxurl,
                type: 'POST',
                dataType: 'json'
            },
            shoppingCart = options.shoppingCart,
            // NOTE(review): addTickets, getCartContents, addShippingInfo and
            // addBillingInfo are declared here but never assigned below.
            addTickets,
            getCartContents,
            parseTicketForm,
            addShippingInfo,
            addBillingInfo,
            removeTicket,
            updateCart;

        if (!options.stage) {
            options.stage = 'getCartInfo';
        }

        if (!options.form) {
            options.form = null;
        }

        // Fetch the server-side cart and mirror it into the Ractive view,
        // dropping zero-quantity entries first.
        this.getCartContents = function() {
            ajaxOptions.type = 'POST';
            ajaxOptions.data = {
                action: 'bpt_get_cart_contents',
                nonce: bptPurchaseTickets.nonce
            };

            $.ajax(ajaxOptions)
                // NOTE(review): .fail() is called with no handler — request
                // failures here are silently ignored.
                .fail()
                .done(function(response) {
                    for (var i in response.prices) {
                        if (response.prices[i].quantity === '0') {
                            delete response.prices[i];
                        }
                    }
                    shoppingCart.set({
                        prices: response.prices,
                        cartValue: response.cartValue
                    });
                })
                .always(function() {
                    shoppingCart.set('loading', false);
                });
        };

        // Submit the selected prices to the server: either re-send the cart's
        // current prices (no form given) or parse a ticket form.
        this.addPrices = function(form) {
            var prices;
            if (!form) {
                prices = shoppingCart.get('prices');
            } else {
                prices = parseTicketForm(form);
            }
            // NOTE(review): parseTicketForm returns a plain object keyed by
            // price id, which has no numeric .length — this filter loop only
            // runs when `prices` came from the cart as an array-like value.
            for (var i = 0; i < prices.length; i++) {
                if (prices[i].quantity === '0') {
                    delete prices[i];
                }
            }
            ajaxOptions.type = 'POST';
            ajaxOptions.data = {
                action: 'bpt_add_prices',
                prices: prices,
                nonce: bptPurchaseTickets.nonce,
            };
            $.ajax(ajaxOptions)
                .done(function(data) {
                    options.shoppingCart.set({
                        message: data.message,
                        prices: data.prices,
                        cartValue: data.cartValue
                    });
                    // Flash the server message for 5 seconds.
                    if (data.message) {
                        var messageDiv = $('#bpt-shopping-cart-message');
                        messageDiv.fadeIn();
                        window.setTimeout(function() {
                            messageDiv.fadeOut();
                        }, 5000);
                    }
                })
                .always(function(data) {
                    shoppingCart.set('loading', false);
                })
                .fail(function(xhr) {
                    shoppingCart.set({
                        error: xhr.responseText
                    });
                });
        };

        // Re-sync the cart by re-sending its current prices.
        this.updateCart = updateCart = function(prices) {
            this.addPrices();
        };

        // Remove one ticket (or, presumably, several — only the non-array
        // path is implemented) from the server-side cart.
        this.removeTicket = removeTicket = function(tickets) {
            var ticketsToRemove = {};
            if (!Array.isArray(tickets)) {
                var priceID = tickets.priceID.toString();
                ticketsToRemove[priceID] = tickets;
            }
            ajaxOptions.data = {
                action: 'bpt_remove_price',
                tickets: ticketsToRemove,
                nonce: bptPurchaseTickets.nonce,
                stage: 'removeTickets'
            };
            shoppingCart.set('loading', true);
            $.ajax(ajaxOptions)
                .done(function(data) {
                    options.shoppingCart.set({
                        message: data.message,
                        prices: data.prices,
                        cartValue: data.cartValue
                    });
                    if (data.message) {
                        var messageDiv = $('#bpt-shopping-cart-message');
                        messageDiv.fadeIn();
                        window.setTimeout(function() {
                            messageDiv.fadeOut();
                        }, 5000);
                    }
                })
                .always(function(data) {
                    shoppingCart.set('loading', false);
                })
                .fail(function(xhr) {
                    shoppingCart.set({
                        error: xhr.responseText
                    });
                });
        };

        /**
         * Parse the tickets selected in the given form.
         * @param object form The form object. It must be a jquery object.
         * @return array An array of selected tickets, each containing
         *               a priceId, shippingMethod, quantity, value and
         *               name parameter.
         */
        parseTicketForm = function(form) {
            var eventId = form.data('event-id'),
                prices = form.find('select.bpt-price-qty'),
                shippingMethod = form.find('select.bpt-shipping-method').val(),
                parsedPrices = {};

            prices.each(function(i, price) {
                price = $(price);
                // Sibling <td>s of the quantity cell carry the price metadata
                // as data- attributes.
                var priceTd = price.parent(),
                    priceValue = priceTd.siblings('td.bpt-price-value').data('price-value'),
                    priceName = priceTd.siblings('td.bpt-price-name').data('price-name'),
                    eventTitle = priceTd.siblings('td.bpt-price-name').data('event-title');
                if (price.val() !== '0') {
                    parsedPrices[price.data('price-id')] = {
                        priceId: price.data('price-id'),
                        shippingMethod: shippingMethod,
                        quantity: price.val(),
                        value: priceValue,
                        name: priceName,
                        eventTitle: eventTitle,
                        eventId: eventId
                    };
                }
            });
            return parsedPrices;
        };
    };

    // Bootstrap: once the event list is rendered, inject the cart container,
    // fetch the Ractive template, build the view, and wire the ticket forms.
    $(document).on('bptEventListLoaded', function(event) {
        var eventForms = $('.add-to-cart'),
            template,
            shoppingCart,
            manageCart;

        $('div.bpt-event-list').prepend('<div id="bpt-shopping-cart"></div>');

        $.ajax({
            url: bptPurchaseTickets.templateUrl
        })
            .fail(function(xhr) {
                template = 'Sorry, the shopping cart could not be loaded.';
            })
            .done(function(data) {
                template = $(data).html();
            })
            .always(function() {
                shoppingCart = new Ractive({
                    el: '#bpt-shopping-cart',
                    template: template,
                    data: {
                        prices: [],
                        shippingInfo: {
                            firstName: '',
                            lastName: '',
                            email: '',
                            phone: '',
                            address: '',
                            address2: '',
                            city: '',
                            state: '',
                            country: ''
                        },
                        billingInfo: {
                            firstName: '',
                            lastName: '',
                            email: '',
                            phone: '',
                            address: '',
                            address2: '',
                            city: '',
                            state: '',
                            country: ''
                        },
                        // Country options for the shipping/billing selects.
                        countries: [
                            'Afghanistan', 'Aland Islands', 'Albania', 'Algeria', 'American Samoa',
                            'Andorra', 'Angola', 'Anguilla', 'Antarctica', 'Antigua And Barbuda',
                            'Argentina', 'Armenia', 'Aruba', 'Australia', 'Austria', 'Azerbaijan',
                            'Azores', 'Bahamas', 'Bahrain', 'Bangladesh', 'Barbados', 'Belarus',
                            'Belgium', 'Belize', 'Benin', 'Bermuda', 'Bhutan', 'Bolivia',
                            'Bosnia And Herzegovina', 'Botswana', 'Bouvet Island', 'Brazil',
                            'British Indian Ocean Territory', 'Brunei Darussalam', 'Bulgaria',
                            'Burkina Faso', 'Burundi', 'Cambodia', 'Cameroon', 'Canada',
                            'Cape Verde', 'Cayman Islands', 'Central African Republic', 'Chad',
                            'Chile', 'China', 'Christmas Island', 'Cocos (keeling) Islands',
                            'Colombia', 'Comoros', 'Congo', 'Congo, The Democratic Republic Of The',
                            'Cook Islands', 'Costa Rica', 'Cote Divoire', 'Croatia', 'Cyprus',
                            'Czech Republic', 'Denmark', 'Djibouti', 'Dominica', 'Dominican Republic',
                            'Ecuador', 'Egypt', 'El Salvador', 'Equatorial Guinea', 'Eritrea',
                            'Estonia', 'Ethiopia', 'Falkland Islands', 'Faroe Islands', 'Fiji',
                            'Finland', 'France', 'French Guiana', 'French Polynesia',
                            'French Southern Territories', 'Gabon', 'Gambia', 'Georgia', 'Germany',
                            'Ghana', 'Gibraltar', 'Greece', 'Greenland', 'Grenada', 'Guadeloupe',
                            'Guam', 'Guatemala', 'Guernsey', 'Guinea', 'Guinea-Bissau', 'Guyana',
                            'Haiti', 'Heard Island And Mcdonald Islands', 'Holy See', 'Honduras',
                            'Hong Kong', 'Hungary', 'Iceland', 'India', 'Indonesia', 'Iraq',
                            'Ireland', 'Isle Of Man', 'Israel', 'Italy', 'Jamaica', 'Japan',
                            'Jersey', 'Jordan', 'Kazakhstan', 'Kenya', 'Kiribati',
                            'Korea, Republic Of', 'Kosovo', 'Kyrgyzstan', 'Latvia', 'Lebanon',
                            'Lesotho', 'Liberia', 'Libyan Arab Jamahiriya', 'Liechtenstein',
                            'Lithuania', 'Luxembourg', 'Macao',
                            'Macedonia, The Former Yugoslav Republic Of', 'Madagascar', 'Madeira',
                            'Malawi', 'Malaysia', 'Maldives', 'Mali', 'Malta', 'Marshall Islands',
                            'Martinique', 'Mauritania', 'Mauritius', 'Mayotte', 'Mexico',
                            'Micronesia, Federated States Of', 'Moldova', 'Monaco', 'Mongolia',
                            'Montenegro', 'Montserrat', 'Morocco', 'Mozambique', 'Myanmar',
                            'Namibia', 'Nauru', 'Nepal', 'Netherlands', 'Netherlands Antilles',
                            'New Caledonia', 'New Zealand', 'Nicaragua', 'Niger', 'Nigeria',
                            'Niue', 'Norfolk Island', 'Northern Mariana Islands', 'Norway', 'Oman',
                            'Pakistan', 'Palau', 'Palestinian Territory, Occupied', 'Panama',
                            'Papua New Guinea', 'Paraguay', 'Peru', 'Philippines', 'Pitcairn',
                            'Poland', 'Portugal', 'Puerto Rico', 'Qatar', 'Réunion', 'Romania',
                            'Russian Federation', 'Rwanda', 'Saint Barthélemy', 'S<NAME>',
                            'Saint Kitts And Nevis', 'S<NAME>', 'S<NAME>',
                            'Saint Pierre And Miquelon', 'Saint Vincent And The Grenadines',
                            'Samoa', 'San Marino', 'Sao Tome And Principe', 'Saudi Arabia',
                            'Senegal', 'Serbia', 'Seychelles', 'Sierra Leone', 'Singapore',
                            'Slovakia', 'Slovenia', 'Solomon Islands', 'Somalia', 'South Africa',
                            'South Georgia And The South Sandwich Islands', 'Spain', 'Sri Lanka',
                            'Suriname', 'Svalbard And Jan Mayen', 'Swaziland', 'Sweden',
                            'Switzerland', 'Taiwan', 'Tajikistan', 'Tanzania, United Republic Of',
                            'Thailand', 'Timor-Leste', 'Togo', 'Tokelau', 'Tonga',
                            'Trinidad And Tobago', 'Tunisia', 'Turkey', 'Turkmenistan',
                            'Turks And Caicos Islands', 'Tuvalu', 'Uganda', 'Ukraine',
                            'United Arab Emirates', 'United Kingdom', 'United States',
                            'United States Minor Outlying Islands', 'Uruguay', 'Uzbekistan',
                            'Vanuatu', 'Venezuela', 'Vietnam', 'Virgin Islands, British',
                            'Virgin Islands, US', 'Wallis And Futuna', 'Western Sahara', 'Yemen',
                            'Zambia', 'Zimbabwe'
                        ],
                        showShipping: true,
                        billingIsShipping: true,
                        // Template helper: map a currency code to its symbol.
                        currency: function(currency) {
                            if (currency === 'USD') {
                                return '$';
                            }
                            if (currency === 'CAD') {
                                return 'CAD$';
                            }
                            if (currency === 'EUR') {
                                return '€';
                            }
                            if (currency === 'GBP') {
                                return '£';
                            }
                            return currency;
                        }
                    }
                });

                // UI events proxied from the Ractive template.
                shoppingCart.on({
                    removeTicket: function(element) {
                        // Zero the quantity, then push the whole cart back.
                        var price = shoppingCart.get(element.keypath);
                        price.quantity = '0';
                        manageCart.updateCart();
                    },
                    updateCart: function(element) {
                        manageCart.addPrices();
                    },
                    checkout: function(element) {
                        manageCart.addPrices();
                        shoppingCart.set('showShipping', true);
                    },
                    billingIsShipping: function(element) {
                        if (element.node.checked) {
                            shoppingCart.set('billingIsShipping', true);
                            shoppingCart.set('billingInfo', shoppingCart.get('shippingInfo'));
                            return;
                        }
                        shoppingCart.set('billingIsShipping', false);
                    }
                });

                // Keep billing in lock-step with shipping while the
                // "billing is shipping" box is checked.
                shoppingCart.observe('shippingInfo', function(newValue, oldValue, keypath) {
                    if (shoppingCart.get('billingIsShipping')) {
                        shoppingCart.set('billingInfo', shoppingCart.get('shippingInfo'));
                    }
                });

                var options = {
                    shoppingCart: shoppingCart
                };

                manageCart = new ManageCart(options);
                manageCart.getCartContents();
            });

        // Hook every add-to-cart form's submit button into the cart.
        eventForms.each(function(i, form) {
            form = $(form);
            var submitButton = form.find('.bpt-submit'),
                postID = form.parent().parent().data('post-id');
            submitButton.click(function(event) {
                event.preventDefault();
                manageCart.addPrices(form);
            });
        });
    });
})(jQuery);
// Preload every drum-machine sample through ion.sound.
ion.sound({
    sounds: [
        {name: "animeShing"}, {name: "BASSDRUM"}, {name: "clapSnare"},
        {name: "C_39_Mon"}, {name: "Chant_Hey_002"}, {name: "Chant_Noise"},
        {name: "Chant_Who_001"}, {name: "clap3"}, {name: "clap5"},
        {name: "fx3"}, {name: "fx4"}, {name: "kick"}, {name: "kick2"},
        {name: "kick3"}, {name: "Skrillex"}, {name: "snare3"},
        {name: "snareDry"}, {name: "sword-hit"}, {name: "snareLight"}
    ],

    // main config
    path: "sounds/",
    preload: true,
    multiplay: true,
    volume: 0.9
});

// Play a sample (and toggle its visual class on every <div>) per key press.
$(document).ready(function() {
    // Key code -> [css class to toggle, sample name to play].
    var keyBindings = {
        // kick drums (arrow keys):
        37: ["do",   "kick"],       // 'left' arrow key
        38: ["dob",  "kick2"],      // 'up' arrow key
        39: ["doc",  "BASSDRUM"],   // 'right' arrow key
        40: ["dod",  "kick3"],      // 'down' arrow key

        // snares & claps (number row):
        49: ["do3",  "snareLight"], // '1' key
        50: ["do3a", "clapSnare"],  // '2' key
        51: ["do3b", "clap3"],      // '3' key
        52: ["do3",  "clap5"],      // '4' key
        53: ["do3a", "snare3"],     // '5' key
        54: ["do3b", "snareDry"],   // '6' key

        // effects & fx (navigation cluster):
        35: ["do2",  "animeShing"],  // 'end' key
        46: ["do2a", "sword-hit"],   // 'delete' key
        34: ["do2b", "Chant_Noise"], // 'pagedown' key
        33: ["do2c", "fx3"],         // 'pageup' key
        36: ["do2d", "fx4"],         // 'home' key

        // vocals (modifier keys):
        13: ["do1",  "C_39_Mon"],      // 'return/enter' key
        16: ["do1a", "Chant_Hey_002"], // 'shift' key
        17: ["do1b", "Chant_Who_001"], // 'ctrl' key
        18: ["do1c", "Skrillex"]       // 'alt' key
    };

    $(document).keydown(function(key) {
        var binding = keyBindings[parseInt(key.which, 10)];
        // Unbound keys are ignored, exactly like the old switch default.
        if (binding) {
            $('div').toggleClass(binding[0]);
            ion.sound.play(binding[1]);
        }
    });
})
#!/bin/bash
#########################################################
# Script Name: couchbase_ansible.sh
# Author: Gonzalo Ruiz
# Version: 0.2
# Date Created: 01st March 2015
# Last Modified: 29th November 2015
# Last Modified By: Gonzalo Ruiz
# Description:
#  This script automates the installation of a multi VM Couchbase cluster using Ansible. It will
#  Configure this VM as an Ansible Controller
#  Configure SSH keys
#  Configure Storage on all the VMs using an Ansible Playbook
#  Download Couchbase Ansible roles from Ansible Galaxy
#  Install Couchbase using the couchbase.couchbase-server Ansible role
#
# Parameters :
#  1 - i: IP Pattern
#  2 - n: Number of nodes
#  3 - r: Configure RAID
#  4 - f: filesystem : ext4 or xfs
#  5 - u: Couchbase user
#  6 - p: Couchbase password
# Note :
# This script has only been tested on CentOS 6.5 and Ubuntu 12.04 LTS
#########################################################

#---BEGIN VARIABLES---
IP_ADDRESS_SPACE=''
NUMBER_OF_NODES=''
NODE_LIST_IPS=()
CONFIGURE_RAID=''
FILE_SYSTEM=''
USER_NAME=''
USER_PASSWORD=''
TEMPLATE_ROLE='couchbase'
START_IP_INDEX=0
CB_USER=''
CB_PWD=''
CB_WEB_FQDN=''
CB_WEB_PORT=''
MOUNTPOINT='/datadrive'
SSH_AZ_ACCOUNT_NAME=''
SSH_AZ_ACCOUNT_KEY=''

# Print usage information for every supported option.
function usage() {
    echo "INFO:"
    echo "Usage: configure-ansible.sh [-i IP_ADDRESS_SPACE ] [-n NUMBER_OF_NODES ] [-r CONFIGURE_RAID ] [-f FILE_SYSTEM] [-u CB_USER] [-p CB_PWD] [-m] [-q] [-o] [-a] [-k]"
    echo "The -i (ipAddressSpace) parameters specifies the starting IP space for the vms.For instance if you specify 10.0.2.2, and 3 nodes, the script will find for the VMS 10.0.2.20, 10.0.2.21,10.0.2.22.Plase note that Azure reserves the first 4 IPs, so you will have to specify an IP space in which IP x.x.x0 is available"
    echo "The -n (numberOfNodes) parameter specifies the number of VMs"
    echo "The -r (configureRAID) parameter specifies whether you want to create a RAID with all the available data disks.Allowed values : true or false"
    echo "The -f (fileSystem) parameter specifies the file system you want to use.Allowed values : ext4 or xfs"
    echo "The -u (couchbaseUser) parameter specifies the Couchbase Admin user"
    echo "The -p (couchbasePassword) parameter specifies the Couchbase Password "
    echo "The -m (couchbaseAllocatedMemory) parameter specifies the percentage of memory allocated to Couchbase "
    echo "The -q (couchbaseFQDN) parameter specifies the fully qualified named assigned to the Azure public IP"
    echo "The -o (couchbaseAdminPort) parameter specifies the public Admin Port for the Couchbase Web Console"
    echo "The -a (azureStorageAccountName) parameter specifies the name of the storage account that contains the private keys"
    echo "The -k (azureStorageAccountKey) parameter specifies the key of the private storage account that contains the private keys"
}

# Log a message; optionally also ship it to loggly (disabled by default).
function log() {
    # If you want to enable this logging add a un-comment the line below and add your account id
    #curl -X POST -H "content-type:text/plain" --data-binary "${HOSTNAME} - $1" https://logs-01.loggly.com/inputs/<key>/tag/es-extension,${HOSTNAME}
    echo "$1"
}

#---PARSE AND VALIDATE PARAMETERS---
# All 11 options are mandatory: 11 flags + 11 values = 22 arguments.
if [ $# -ne 22 ]; then
    log "ERROR:Wrong number of arguments specified. Parameters received $#. Terminating the script."
    usage
    exit 1
fi

while getopts :i:n:r:f:u:p:m:q:o:a:k: optname; do
    log "INFO:Option $optname set with value ${OPTARG}"
    case $optname in
        i) # IP address space
            IP_ADDRESS_SPACE=${OPTARG}
            ;;
        n) # Number of VMS
            NUMBER_OF_NODES=${OPTARG}
            # Pre-compute each node's IP by appending the index to the space.
            IDX=${START_IP_INDEX}
            while [ "${IDX}" -lt "${NUMBER_OF_NODES}" ]; do
                NODE_LIST_IPS[$IDX]="${IP_ADDRESS_SPACE}${IDX}"
                IDX=$((${IDX} + 1))
            done
            ;;
        r) # Configure RAID
            CONFIGURE_RAID=${OPTARG}
            if [[ "${CONFIGURE_RAID}" != "true" && "${CONFIGURE_RAID}" != "false" ]] ; then
                log "ERROR:Configure RAID (-r) value ${CONFIGURE_RAID} not allowed"
                usage
                exit 1
            fi
            ;;
        f) # File system : ext4 or xfs
            FILE_SYSTEM=${OPTARG}
            if [[ "${FILE_SYSTEM}" != "ext4" && "${FILE_SYSTEM}" != "xfs" ]] ; then
                log "ERROR:File system (-f) ${FILE_SYSTEM} not allowed"
                usage
                exit 1
            fi
            ;;
        u) # COUCHBASE ADMIN USER
            CB_USER=${OPTARG}
            ;;
        p) # COUCHBASE ADMIN PASSWORD
            CB_PWD=${OPTARG}
            ;;
        m) # RAM Allocation Percentage
            MEMORY_ALLOCATION_PERCENTAGE=${OPTARG}
            ;;
        q) # FQDN -REMOVE LAST POINT
            CB_WEB_FQDN=${OPTARG}
            CB_WEB_FQDN=$(echo ${CB_WEB_FQDN} | sed s'/[.]$//' )
            ;;
        o) # Couchbase Web Console Port
            CB_WEB_PORT=${OPTARG}
            ;;
        a) # Azure Private Storage Account Name- SSH Keys
            SSH_AZ_ACCOUNT_NAME=${OPTARG}
            ;;
        k) # Azure Private Storage Account Key - SSH Keys
            SSH_AZ_ACCOUNT_KEY=${OPTARG}
            ;;
        \?) #Invalid option - show help
            log "ERROR:Option -${BOLD}$OPTARG${NORM} not allowed."
            usage
            exit 1
            ;;
    esac
done

# Detect OS family/distribution/version and freeze the results in readonly vars.
function check_OS() {
    OS=`uname`
    KERNEL=`uname -r`
    MACH=`uname -m`

    if [ -f /etc/redhat-release ] ; then
        DistroBasedOn='RedHat'
        DIST=`cat /etc/redhat-release |sed s/\ release.*//`
        PSUEDONAME=`cat /etc/redhat-release | sed s/.*\(// | sed s/\)//`
        REV=`cat /etc/redhat-release | sed s/.*release\ // | sed s/\ .*//`
    elif [ -f /etc/SuSE-release ] ; then
        DistroBasedOn='SuSe'
        PSUEDONAME=`cat /etc/SuSE-release | tr "\n" ' '| sed s/VERSION.*//`
        REV=`cat /etc/SuSE-release | tr "\n" ' ' | sed s/.*=\ //`
    elif [ -f /etc/debian_version ] ; then
        DistroBasedOn='Debian'
        if [ -f /etc/lsb-release ] ; then
            DIST=`cat /etc/lsb-release | grep '^DISTRIB_ID' | awk -F= '{ print $2 }'`
            PSUEDONAME=`cat /etc/lsb-release | grep '^DISTRIB_CODENAME' | awk -F= '{ print $2 }'`
            REV=`cat /etc/lsb-release | grep '^DISTRIB_RELEASE' | awk -F= '{ print $2 }'`
        fi
    fi

    OS=$OS
    DistroBasedOn=$DistroBasedOn
    readonly OS
    readonly DIST
    readonly DistroBasedOn
    readonly PSUEDONAME
    readonly REV
    readonly KERNEL
    readonly MACH

    log "INFO: Detected OS : ${OS}  Distribution: ${DIST}-${DistroBasedOn}-${PSUEDONAME} Revision: ${REV} Kernel: ${KERNEL}-${MACH}"
}

# Install ansible, git and nginx on Ubuntu via the ansible PPA.
function install_packages_ubuntu() {
    apt-get --yes --force-yes install software-properties-common
    apt-add-repository ppa:ansible/ansible
    apt-get --yes --force-yes update
    apt-get --yes --force-yes install ansible

    # install Git
    apt-get --yes --force-yes install git

    # install nginx - Reverse proxy for the Couchbase admin console
    apt-get --yes --force-yes install nginx
}

# Install ansible, git and nginx on CentOS via EPEL.
function install_packages_centos() {
    # install EPEL Packages - sshdpass
    #wget http://download.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
    #rpm -ivh epel-release-6-8.noarch.rpm
    yum -y install epel-release

    # install ansible
    yum -y install ansible
    yum -y install libselinux-python

    # install Git
    yum -y install git

    # install nginx - Reverse proxy for the Couchbase admin console
    yum -y install nginx
}

# Fetch the cluster SSH key pair from the private Azure storage account.
function get_sshkeys() {
    apt-get -y update
    apt-get -y install python-pip
    pip install azure-storage
    apt-get -y update

    # Download both Private and Public Key
    python GetSSHFromPrivateStorageAccount.py ${SSH_AZ_ACCOUNT_NAME} ${SSH_AZ_ACCOUNT_KEY} id_rsa
    python GetSSHFromPrivateStorageAccount.py ${SSH_AZ_ACCOUNT_NAME} ${SSH_AZ_ACCOUNT_KEY} id_rsa.pub
}

# Install the downloaded key pair for root and restart sshd.
function configure_ssh() {
    # copy ssh private key
    mkdir -p ~/.ssh
    mv id_rsa ~/.ssh

    # set permissions
    chmod 700 ~/.ssh
    chmod 600 ~/.ssh/id_rsa

    # copy root ssh key
    cat id_rsa.pub >> ~/.ssh/authorized_keys
    rm id_rsa.pub

    # set permissions
    chmod 600 ~/.ssh/authorized_keys

    if [[ "${DIST}" == "Ubuntu" ]]; then
        #restart sshd service - Ubuntu
        service ssh restart
    elif [[ "${DIST}" == "CentOS" ]] ; then
        # configure SELinux
        restorecon -Rv ~/.ssh
        #restart sshd service - CentOS
        service sshd restart
    fi
}

# Generate /etc/ansible/{hosts,ansible.cfg} for the cluster and smoke-test
# connectivity with `ansible -m ping`.
function configure_ansible() {
    # Copy ansible hosts file
    ANSIBLE_HOST_FILE=/etc/ansible/hosts
    ANSIBLE_CONFIG_FILE=/etc/ansible/ansible.cfg
    mv ${ANSIBLE_HOST_FILE} ${ANSIBLE_HOST_FILE}.backup
    mv ${ANSIBLE_CONFIG_FILE} ${ANSIBLE_CONFIG_FILE}.backup

    # Accept ssh keys by default
    printf "[defaults]\nhost_key_checking = False\n\n" >> "${ANSIBLE_CONFIG_FILE}"
    # Shorten the ControlPath to avoid errors with long host names , long user names or deeply nested home directories
    echo $'[ssh_connection]\ncontrol_path = ~/.ssh/ansible-%%h-%%r' >> "${ANSIBLE_CONFIG_FILE}"
    # Fixed: `echo "\nscp_if_ssh=True"` wrote a literal backslash-n into the
    # config; printf actually emits the newline so scp_if_ssh lands on its
    # own line under [ssh_connection].
    printf "\nscp_if_ssh=True\n" >> "${ANSIBLE_CONFIG_FILE}"

    # Generate a new ansible host file
    printf "[${TEMPLATE_ROLE}]\n" >> "${ANSIBLE_HOST_FILE}"
    printf "${IP_ADDRESS_SPACE}0 node_role=primary\n" >> "${ANSIBLE_HOST_FILE}"
    printf "${IP_ADDRESS_SPACE}[1:$(($NUMBER_OF_NODES - 1))] node_role=additional\n" >> "${ANSIBLE_HOST_FILE}"

    # Validate ansible configuration
    ansible ${TEMPLATE_ROLE} -m ping -v
}

# Create the RAID array and filesystem on every node (destructive).
function configure_storage() {
    log "INFO: Configuring Storage "
    log "WARNING: This process is not incremental, don't use it if you don't want to lose your existing storage configuration"

    # Run ansible template to configure Storage : Create RAID and Configure Filesystem
    ansible-playbook InitStorage_RAID.yml --extra-vars "target=${TEMPLATE_ROLE} file_system=${FILE_SYSTEM}" -v
}

# Install the Couchbase Galaxy role and run the setup playbook cluster-wide.
function install_couchbase() {
    # Calculate Memory assigned to Couchbase
    # COUCHBASE_MEMORY=$(($(free|awk '/^Mem:/{print $2}')/1024*80/100))

    # Role copied in /etc/ansible/roles/couchbase.couchbase-server/
    ansible-galaxy install couchbaselabs.couchbase-server -p .

    log "INFO: ******** Installing Couchbase "
    # Run ansible template to Install and Initialise Couchbase
    ansible-playbook couchbase_setup.yml --extra-vars "target=${TEMPLATE_ROLE} file_system=${FILE_SYSTEM} couchbase_server_admin=${CB_USER} couchbase_server_password=${CB_PWD} mount_point=${MOUNTPOINT} memory_allocation_percentage=${MEMORY_ALLOCATION_PERCENTAGE}" -v
}

# Configure nginx as an https reverse proxy in front of the Couchbase console,
# with a self-signed certificate for CB_WEB_FQDN.
function configure_nginx() {
    # Create nginx folders
    mkdir -p /etc/nginx/ssl
    mkdir -p /etc/nginx/sites-enabled/

    # Generate Self-signed certificate for the web console
    openssl req -x509 -nodes -days 1095 -newkey rsa:2048 -keyout /etc/nginx/ssl/nginx.key -out /etc/nginx/ssl/nginx.crt -subj "/C=US/ST=WA/L=Redmond/O=IT/CN=${CB_WEB_FQDN}"

    # CentOS - Configure SELinux & Update /etc/nginx/nginx.conf
    if [[ "${DIST}" == "CentOS" ]]; then
        yum -y install policycoreutils-python
        semanage port -a -t http_port_t -p tcp 16195
        setsebool -P allow_ypbind 1
        cp /etc/nginx/nginx.conf /etc/nginx/nginx.conf.back
        sed -i '/http {/a \    include /etc/nginx/sites-enabled/*;' /etc/nginx/nginx.conf
    fi

    # Generate the nginx Config file
    cat nginx | sed "s/{PORT}/${CB_WEB_PORT}/" | sed "s/{FQDN}/${CB_WEB_FQDN}/" | sed "s/{CB_SRV1}/${NODE_LIST_IPS[0]}/" >> /etc/nginx/sites-enabled/couchbaseconsole

    # Start nginx service
    service nginx start
    service nginx restart
}

# Entry point: detect the OS, install prerequisites, then configure the cluster.
InitializeVMs() {
    check_OS
    get_sshkeys
    configure_ssh

    if [[ "${DIST}" == "Ubuntu" ]]; then
        log "INFO:Installing Ansible for Ubuntu"
        install_packages_ubuntu
    elif [[ "${DIST}" == "CentOS" ]] ; then
        log "INFO:Installing Ansible for CentOS"
        install_packages_centos
    else
        # Fixed: "${ DIST}" (stray space) was a bash "bad substitution" error
        # that aborted the script instead of logging the unsupported distro.
        log "ERROR:Unsupported OS ${DIST}"
        exit 2
    fi

    configure_ansible
    configure_storage
    install_couchbase

    # nginx will be a reverse proxy for the Couchbase admin console
    # It will use a self-signed certificate to expose the Web Admin console over https
    configure_nginx
}

InitializeVMs
import heapq
import collections  # kept: may be used elsewhere in this module


class ListNode:
    """Singly linked list node with a value and a `next` pointer."""

    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next


def merge_k_lists(lists):
    """Merge k sorted linked lists into one sorted linked list.

    Args:
        lists: iterable of ListNode heads; entries may be None (empty lists).

    Returns:
        The head of the merged list, or None when every input is empty.

    Uses a min-heap holding at most one node per input list, giving
    O(N log k) time for N total nodes.  The enumerate index is included in
    each heap entry as a tie-breaker so nodes with equal values are never
    compared directly (ListNode itself is not orderable).
    """
    sentinel = ListNode(0)
    tail = sentinel

    # Seed the heap with the head of every non-empty list.
    heap = []
    for i, node in enumerate(lists):
        if node:
            heapq.heappush(heap, (node.val, i, node))

    # Repeatedly take the smallest available node and advance its list.
    while heap:
        val, i, node = heapq.heappop(heap)
        tail.next = ListNode(val)
        tail = tail.next
        if node.next:
            heapq.heappush(heap, (node.next.val, i, node.next))

    return sentinel.next
#!/usr/bin/python

import re
import sys
import os
import tempfile
import urllib2
import urlparse
import subprocess
import tarfile
import signal
import threading
import traceback
import shutil
import errno
from contextlib import closing

# To ensure it exists on the system
import gzip
import zipfile

#
# Useful script for installing multiple versions of MongoDB on a machine
# Only really tested/works on Linux.
#
# NOTE(review): this is a Python 2 script (print statements, urllib2,
# urlparse, iteritems) — it will not run under Python 3 unmodified.
#

def dump_stacks(signal, frame):
    """Signal handler: print the stack of every live thread (SIGUSR1)."""
    print "======================================"
    print "DUMPING STACKS due to SIGUSR1 signal"
    print "======================================"
    threads = threading.enumerate();
    print "Total Threads: " + str(len(threads))

    for id, stack in sys._current_frames().items():
        print "Thread %d" % (id)
        print "".join(traceback.format_stack(stack))
    print "======================================"

def version_tuple(version):
    """Returns a version tuple that can be used for numeric sorting
    of version strings such as '2.6.0-rc1' and '2.4.0'"""
    # `version` is a (version_string, url) pair — version[0] is the string.
    RC_OFFSET = -100
    version_parts = re.split(r'\.|-', version[0])

    if version_parts[-1].startswith("rc"):
        rc_part = version_parts.pop()
        rc_part = rc_part.split('rc')[1]

        # RC versions are weighted down to allow future RCs and general
        # releases to be sorted in ascending order (e.g., 2.6.0-rc1,
        # 2.6.0-rc2, 2.6.0).
        version_parts.append(int(rc_part) + RC_OFFSET)
    else:
        # Non-RC releases have an extra 0 appended so version tuples like
        # (2, 6, 0, -100) and (2, 6, 0, 0) sort in ascending order.
        version_parts.append(0)

    return tuple(map(int, version_parts))

class MultiVersionDownloader :
    """Downloads MongoDB community builds into install_dir and symlinks the
    binaries (renamed with their version) into link_dir."""

    def __init__(self, install_dir, link_dir, platform):
        self.install_dir = install_dir
        self.link_dir = link_dir
        # platform looks like "Linux/x86_64" — split into platform and arch.
        match = re.compile("(.*)\/(.*)").match(platform)
        self.platform = match.group(1)
        self.arch = match.group(2)
        self._links = None

    @property
    def links(self):
        # Lazily fetch and cache the version -> download-url map.
        if self._links is None:
            self._links = self.download_links()
        return self._links

    def download_links(self):
        """Scrape dl.mongodb.org for this platform/arch and return a dict
        mapping version string to download url.  Retries up to 5 times."""
        # This href is for community builds; enterprise builds are not browseable.
        href = "http://dl.mongodb.org/dl/%s/%s" \
               % (self.platform.lower(), self.arch)

        attempts_remaining = 5
        timeout_seconds = 10
        while True:
            try:
                html = urllib2.urlopen(href, timeout = timeout_seconds).read()
                break
            except Exception as e:
                print "fetching links failed (%s), retrying..." % e
                attempts_remaining -= 1
                if attempts_remaining == 0 :
                    raise Exception("Failed to get links after multiple retries")

        links = {}
        for line in html.split():
            # Match .tgz (Linux) and .zip (Windows) archive links.
            match = re.compile("http:.*/%s/mongodb-%s-%s-([^\"]*)\.(tgz|zip)" \
                % (self.platform.lower(), self.platform.lower(), self.arch)).search(line)
            if match == None: continue
            link = match.group(0)
            version = match.group(1)
            links[version] = link

        return links

    def download_version(self, version):
        """Download and extract the newest build matching `version` (exact
        match when the version contains '-', e.g. an rc), then symlink its
        binaries.  Skips the download when the target dir already exists."""
        try:
            os.makedirs(self.install_dir)
        except OSError as exc:
            # Already-existing install dir is fine; anything else is fatal.
            if exc.errno == errno.EEXIST and os.path.isdir(self.install_dir):
                pass
            else: raise

        urls = []
        for link_version, link_url in self.links.iteritems():
            if link_version.startswith(version):
                # If we have a "-" in our version, exact match only
                if version.find("-") >= 0:
                    if link_version != version: continue
                elif link_version.find("-") >= 0:
                    continue
                urls.append((link_version, link_url))

        if len(urls) == 0:
            raise Exception("Cannot find a link for version %s, versions %s found." \
                % (version, self.links))

        # Highest matching version wins (version_tuple handles rc weighting).
        urls.sort(key=version_tuple)
        full_version = urls[-1][0]
        url = urls[-1][1]
        # Archive name minus the 4-char extension (".tgz"/".zip").
        extract_dir = url.split("/")[-1][:-4]
        file_suffix = os.path.splitext(urlparse.urlparse(url).path)[1]

        # only download if we don't already have the directory
        already_downloaded = os.path.isdir(os.path.join(self.install_dir, extract_dir))
        if already_downloaded:
            print "Skipping download for version %s (%s) since the dest already exists '%s'" \
                % (version, full_version, extract_dir)
        else:
            temp_dir = tempfile.mkdtemp()
            temp_file = tempfile.mktemp(suffix=file_suffix)

            data = urllib2.urlopen(url)

            print "Downloading data for version %s (%s)..." % (version, full_version)
            print "Download url is %s" % url

            with open(temp_file, 'wb') as f:
                f.write(data.read())

            print "Uncompressing data for version %s (%s)..." % (version, full_version)

            if file_suffix == ".zip":
                # Support .zip downloads, used for Windows binaries.
                with zipfile.ZipFile(temp_file) as zf:
                    zf.extractall(temp_dir)
            elif file_suffix == ".tgz":
                # Support .tgz downloads, used for Linux binaries.
                with closing(tarfile.open(temp_file, 'r:gz')) as tf:
                    tf.extractall(path=temp_dir)
            else:
                raise Exception("Unsupported file extension %s" % file_suffix)

            temp_install_dir = os.path.join(temp_dir, extract_dir)

            shutil.move(temp_install_dir, self.install_dir)

            shutil.rmtree(temp_dir)
            os.remove(temp_file)

        self.symlink_version(version, os.path.abspath(os.path.join(self.install_dir, extract_dir)))

    def symlink_version(self, version, installed_dir):
        """Symlink every binary in installed_dir/bin into link_dir as
        <name>-<version><ext>; existing links are left untouched."""
        try:
            os.makedirs(self.link_dir)
        except OSError as exc:
            if exc.errno == errno.EEXIST and os.path.isdir(self.link_dir):
                pass
            else: raise

        for executable in os.listdir(os.path.join(installed_dir, "bin")):

            executable_name, executable_extension = os.path.splitext(executable)
            link_name = "%s-%s%s" % (executable_name, version, executable_extension)

            try:
                executable = os.path.join(installed_dir, "bin", executable)
                executable_link = os.path.join(self.link_dir, link_name)
                if os.name == "nt":
                    # os.symlink is not supported on Windows, use a direct method instead.
                    def symlink_ms(source, link_name):
                        import ctypes
                        csl = ctypes.windll.kernel32.CreateSymbolicLinkW
                        csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
                        csl.restype = ctypes.c_ubyte
                        flags = 1 if os.path.isdir(source) else 0
                        if csl(link_name, source.replace('/', '\\'), flags) == 0:
                            raise ctypes.WinError()
                    os.symlink = symlink_ms
                os.symlink(executable, executable_link)
            except OSError as exc:
                # Pre-existing link: keep it; anything else is fatal.
                if exc.errno == errno.EEXIST:
                    pass
                else: raise

CL_HELP_MESSAGE = \
"""
Downloads and installs particular mongodb versions (each binary is renamed to include its version)
into an install directory and symlinks the binaries with versions to another directory. This script
only supports community builds, not enterprise builds.

Usage: setup_multiversion_mongodb.py INSTALL_DIR LINK_DIR PLATFORM_AND_ARCH VERSION1 [VERSION2 VERSION3 ...]

Ex: setup_multiversion_mongodb.py ./install ./link "Linux/x86_64" "2.0.6" "2.0.3-rc0" "2.0" "2.2" "2.3"
Ex: setup_multiversion_mongodb.py ./install ./link "OSX/x86_64" "2.4" "2.2"

After running the script you will have a directory structure like this:
./install/[mongodb-osx-x86_64-2.4.9, mongodb-osx-x86_64-2.2.7]
./link/[mongod-2.4.9, mongod-2.2.7, mongo-2.4.9...]

You should then add ./link/ to your path so multi-version tests will work.

Note: If "rc" is included in the version name, we'll use the exact rc, otherwise we'll pull the highest
non-rc version compatible with the version specified.
"""

def parse_cl_args(args):
    """Parse positional CLI args into (MultiVersionDownloader, versions);
    print the help text and raise on any missing/invalid argument."""

    def raise_exception(msg):
        print CL_HELP_MESSAGE
        raise Exception(msg)

    if len(args) == 0: raise_exception("Missing INSTALL_DIR")

    install_dir = args[0]
    args = args[1:]

    if len(args) == 0: raise_exception("Missing LINK_DIR")

    link_dir = args[0]
    args = args[1:]

    if len(args) == 0: raise_exception("Missing PLATFORM_AND_ARCH")

    platform = args[0]
    args = args[1:]

    if re.compile(".*\/.*").match(platform) == None:
        raise_exception("PLATFORM_AND_ARCH isn't of the correct format")

    if len(args) == 0: raise_exception("Missing VERSION1")

    versions = args

    return (MultiVersionDownloader(install_dir, link_dir, platform), versions)

def main():
    # Listen for SIGUSR1 and dump stack if received.
    try:
        signal.signal(signal.SIGUSR1, dump_stacks)
    except AttributeError:
        # SIGUSR1 does not exist on Windows.
        print "Cannot catch signals on Windows"

    downloader, versions = parse_cl_args(sys.argv[1:])

    for version in versions:
        downloader.download_version(version)

if __name__ == '__main__':
    main()
#!/usr/bin/env python3

import sys
import os
import subprocess
import shlex

def cmd_run_echoed(cmd, **kwargs):
    """Run `cmd`, echoing it (shell-quoted) to stdout first.

    On a non-zero exit code the captured stdout/stderr are dumped and the
    whole process exits with that code.  Returns the CompletedProcess.
    """
    print("[CMD] %s" % " ".join(map(shlex.quote, cmd)))
    cmd = subprocess.run(cmd, **kwargs)
    if cmd.returncode != 0:
        # NOTE(review): assumes capture_output=True was passed (stdout/stderr
        # would be None otherwise) — true for every current call site.
        print(cmd.stdout.decode('utf-8'), file=sys.stdout)
        print(cmd.stderr.decode('utf-8'), file=sys.stderr)
        exit(cmd.returncode)
    return cmd

def test(folder):
    """Run every *.porth file in `folder` through both the simulator and the
    compiler, comparing each against its recorded *.txt expected output.
    Exits non-zero if any case failed."""
    sim_failed = 0
    com_failed = 0
    for entry in os.scandir(folder):
        porth_ext = '.porth'
        if entry.is_file() and entry.path.endswith(porth_ext):
            print('[INFO] Testing %s' % entry.path)

            # Expected output lives next to the test, with a .txt extension.
            txt_path = entry.path[:-len(porth_ext)] + ".txt"
            expected_output = None
            with open(txt_path, "rb") as f:
                expected_output = f.read()

            # Simulation pass.
            sim_output = cmd_run_echoed(["./porth.py", "sim", entry.path], capture_output=True).stdout
            if sim_output != expected_output:
                sim_failed += 1
                print("[ERROR] Unexpected simulation output")
                print("  Expected:")
                print("    %s" % expected_output)
                print("  Actual:")
                print("    %s" % sim_output)
                # exit(1)

            # Compilation pass (-r runs the binary, -s silences the build).
            com_output = cmd_run_echoed(["./porth.py", "com", "-r", "-s", entry.path], capture_output=True).stdout
            if com_output != expected_output:
                com_failed += 1
                print("[ERROR] Unexpected compilation output")
                print("  Expected:")
                print("    %s" % expected_output)
                print("  Actual:")
                print("    %s" % com_output)
                # exit(1)

    print()
    print("Simulation failed: %d, Compilation failed: %d" % (sim_failed, com_failed))
    if sim_failed != 0 or com_failed != 0:
        exit(1)

def record(folder, mode='sim'):
    """Regenerate the expected-output *.txt files for every test in `folder`
    using either the simulator (mode='sim') or the compiler (mode='com')."""
    for entry in os.scandir(folder):
        porth_ext = '.porth'
        if entry.is_file() and entry.path.endswith(porth_ext):
            output = ""
            if mode == 'sim':
                output = cmd_run_echoed(["./porth.py", "sim", entry.path], capture_output=True).stdout
            elif mode == 'com':
                output = cmd_run_echoed(["./porth.py", "com", "-r", "-s", entry.path], capture_output=True).stdout
            else:
                print("[ERROR] Unknown record mode `%s`" % mode)
                exit(1)
            txt_path = entry.path[:-len(porth_ext)] + ".txt"
            print("[INFO] Saving output to %s" % txt_path)
            with open(txt_path, "wb") as txt_file:
                txt_file.write(output)

def usage(exe_name):
    """Print command-line usage to stdout."""
    print("Usage: ./test.py [OPTIONS] [SUBCOMMAND]")
    print("OPTIONS:")
    print("    -f <folder>   Folder with the tests. (Default: ./tests/)")
    print("SUBCOMMANDS:")
    print("    test          Run the tests. (Default when no subcommand is provided)")
    print("    record [-com] Record expected output of the tests.")
    print("    help          Print this message to stdout and exit with 0 code.")

# TODO: test compiler errors
#
# It would be better if we had a different format for expected
# outcomes of the test cases instead of just plan text files with
# stdout.
#
# Something like a custom file format that contains:
#
# 1. Expected returncode
# 2. Expected stdout
# 3. Expected stderr
#
# This will simplify recording and replaying test cases and reduce the
# amount of required flags.
#
# We could use something like JSON, but in a long term I plan to
# rewrite test.py in Porth too, so it has to be something that is easy
# to parse even in such a spartan language as Porth.
#
# I'm thinking about a simple binary format:
#
# ```
# |1 byte -- expected return code|
# |8 bytes -- length of stdout|
# |len(stdout) bytes -- the expected stdout encoded as UTF-8|
# |8 bytes -- length of stderr|
# |len(stderr) bytes -- the expected stderr encoded as UTF-8|
# ```
#
# Such format is easy to produce/parse in both Porth and Python (using
# the bytes).
#
# Using binary format will also enable us to assert binary outputs of
# the test programs. For instances, PPM pictures.

if __name__ == '__main__':
    exe_name, *argv = sys.argv

    folder = "./tests/"
    subcmd = "test"

    # Consume options until the first non-option token, which becomes the
    # subcommand; everything after it is left for the subcommand to parse.
    while len(argv) > 0:
        arg, *argv = argv
        if arg == '-f':
            if len(argv) == 0:
                print("[ERROR] no <folder> is provided for option `-f`")
                exit(1)
            folder, *argv = argv
        else:
            subcmd = arg
            break

    if subcmd == 'record':
        mode = 'sim'
        while len(argv) > 0:
            arg, *argv = argv
            if arg == '-com':
                mode = 'com'
            else:
                print("[ERROR] unknown flag `%s`" % arg)
                exit(1)
        record(folder, mode)
    elif subcmd == 'test':
        test(folder)
    elif subcmd == 'help':
        usage(exe_name)
    else:
        usage(exe_name)
        print("[ERROR] unknown subcommand `%s`" % subcmd, file=sys.stderr)
        exit(1);
#!/bin/bash
# Generate RDF (Turtle) for the Spanish NUTS regions from a JSON input file
# using SPARQL-Generate, then post-process the emitted WKT geometries.
mkdir data_nuts_es
cp $1 data_nuts_es/
./sparql-generate.sh -q ../sparql-generate/nuts/nutsES.rqg -i "data_nuts_es/*.json" -v
# The generator emits one parenthesis level too many around geometries:
# collapse "(((" -> "((" and ")))" -> "))".
sed -i 's/(((/((/g' data_nuts_es/*.ttl
sed -i 's/)))/))/g' data_nuts_es/*.ttl
# MULTIPOLYGON needs an extra ring-group level: re-wrap the coordinate list
# so "MULTIPOLYGON((...)))" becomes "MULTIPOLYGON(((...)))".
sed -i 's/\(MULTIPOLYGON((\)\([^(].*\)))/\1(\2)))/g' data_nuts_es/*.ttl
egrep "(\*.crit|mail\.[^n][^/]*)" /etc/syslog.conf | sed 's/^[^/]*//' | xargs setfacl --remove-all
/*!40101 SET NAMES utf8 */;

# Module list: one row per deployable module, with its git repo, target
# server credentials, and deployment bookkeeping.
CREATE TABLE IF NOT EXISTS `module_list` (
  `m_id` int(11) DEFAULT NULL comment '模块id',
  `m_name` varchar(255) DEFAULT NULL comment '模块名称',
  `git_address` varchar(255) DEFAULT NULL comment 'git仓库地址',
  `server_address` varchar(255) DEFAULT NULL comment '服务器地址',
  `server_user` varchar(255) DEFAULT NULL comment '服务器账户',
  `server_password` varchar(255) DEFAULT NULL comment '服务器密码',
  `directory` varchar(255) DEFAULT NULL comment '映射目录',
  `u_id` int(11) DEFAULT NULL comment '管理员id',
  `allot_u_id` int(11) DEFAULT NULL comment '分配人员id',
  `allot_level` int(11) DEFAULT NULL comment '分配权限', # 1: deploy directly  2: requires review before going live
  `is_deploy` int(11) DEFAULT NULL comment '是否已经部署', # 0: not deployed  1: deployed
  `create_time` varchar(20) DEFAULT NULL comment '创建时间'
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
// NOTE(review): this file looks like graphql-codegen output for a
// Svelte/Apollo client.  `import client from 'undefined'` is a broken
// artifact of a misconfigured codegen plugin (the client-path option is
// unset) and will fail to resolve at build time — fix the codegen config,
// not this file.  `client`, `readable`, `Readable` and `gql` are unused in
// this chunk; presumably they are referenced by generated store code later
// in the file — confirm before removing.
import client from 'undefined';
import type {} from '@apollo/client';
import { readable } from 'svelte/store';
import type { Readable } from 'svelte/store';
import gql from 'graphql-tag';

// Generic helpers used throughout the generated types.
export type Maybe<T> = T | null;
export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] };
export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> };
export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> };
/** All built-in and custom scalars, mapped to their actual values */
export type Scalars = {
  ID: string;
  String: string;
  Boolean: boolean;
  Int: number;
  Float: number;
  /** The `BigDecimal` scalar type represents signed fractional values with arbitrary precision. */
  BigDecimal: any;
  /** The `Date` scalar type represents a date value as specified by [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601). */
  Date: any;
  /** The `DateTime` scalar type represents a date/time value as specified by [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601). */
  DateTime: any;
};

/** Base quote path - calculation path from base currency to quote currency */
export type BaseQuotePathType = {
  __typename?: 'BaseQuotePathType';
  /** Calculated quote of this path from base currency to quote currency */
  pathRate: Scalars['BigDecimal'];
  /** Ordered list of rates used for this path, with source details */
  sourceRates: Array<SourceRateType>;
  /** Weight of this path in the calculation of the final rate */
  weight: Scalars['Float'];
};

/** Converted `amount` from `baseCurrency` to a `quoteCurrency` on a specific `date`. */
export type ConvertedAmount = {
  __typename?: 'ConvertedAmount';
  baseAmount: Scalars['BigDecimal'];
  baseCurrency: Scalars['String'];
  date: Scalars['Date'];
  quoteAmount: Scalars['BigDecimal'];
  quoteCurrency: Scalars['String'];
};

/** Currency meta data - ISO 4217 */
export type CurrencyType = {
  __typename?: 'CurrencyType';
  active: Scalars['Boolean'];
  code: Scalars['String'];
  decimalDigits: Scalars['Int'];
  name: Scalars['String'];
  numericCode?: Maybe<Scalars['String']>;
};

/** Quote for a specific date (optionally with meta) */
export type DateQuote = {
  __typename?: 'DateQuote';
  date: Scalars['Date'];
  meta?: Maybe<RateMetaType>;
  quote: Scalars['BigDecimal'];
};

/** Rate fluctuation of a currency pair (`baseCurrency`/`quoteCurrency`) between two given dates */
export type Fluctuation = {
  __typename?: 'Fluctuation';
  baseCurrency: Scalars['String'];
  dateFrom: Scalars['Date'];
  dateTo: Scalars['Date'];
  fluctuation: Scalars['BigDecimal'];
  fluctuationPercent: Scalars['BigDecimal'];
  quoteCurrency: Scalars['String'];
  rateFrom: Rate;
  rateTo: Rate;
};

export type Query = {
  __typename?: 'Query';
  /** Returns a converted amount for the given baseCurrency/quoteCurrency on a date */
  convert: Array<ConvertedAmount>;
  /** Returns meta data for currencies */
  currencies: Array<CurrencyType>;
  /** Returns the rate fluctuation for the given date range */
  fluctuation: Array<Fluctuation>;
  /** Returns the rates for a specific date */
  historical: Array<Rate>;
  /** Returns the latest rates */
  latest: Array<Rate>;
  /** Returns the time series of rates for the given date range */
  timeSeries: Array<TimeSeries>;
};

export type QueryConvertArgs = {
  amount: Scalars['BigDecimal'];
  baseCurrency: Scalars['String'];
  date?: Maybe<Scalars['Date']>;
  quoteCurrencies?: Maybe<Array<Scalars['String']>>;
};

export type QueryCurrenciesArgs = {
  currencyCodes?: Maybe<Array<Scalars['String']>>;
  includeHistorical?: Maybe<Scalars['Boolean']>;
};

export type QueryFluctuationArgs = {
  baseCurrency?: Maybe<Scalars['String']>;
  dateFrom: Scalars['Date'];
  dateTo: Scalars['Date'];
  quoteCurrencies?: Maybe<Array<Scalars['String']>>;
};

export type QueryHistoricalArgs = {
  baseCurrency?: Maybe<Scalars['String']>;
  date: Scalars['Date'];
  quoteCurrencies?: Maybe<Array<Scalars['String']>>;
};

export type QueryLatestArgs = {
  baseCurrency?: Maybe<Scalars['String']>;
  quoteCurrencies?: Maybe<Array<Scalars['String']>>;
};

export type QueryTimeSeriesArgs = {
  baseCurrency?: Maybe<Scalars['String']>;
  dateFrom: Scalars['Date'];
  dateTo: Scalars['Date'];
  quoteCurrencies?: Maybe<Array<Scalars['String']>>;
};

/** Exchange rate (`quote`) from `baseCurrency` to a `quoteCurrency` on a specific `date` (1 `baseCurrency` = `quote` `quoteCurrency`) */
export type Rate = {
  __typename?: 'Rate';
  baseCurrency: Scalars['String'];
  date: Scalars['Date'];
  meta?: Maybe<RateMetaType>;
  quote: Scalars['BigDecimal'];
  quoteCurrency: Scalars['String'];
};

/** Rate meta data - sources and calculation for rate */
export type RateMetaType = {
  __typename?: 'RateMetaType';
  calculated: Scalars['Boolean'];
  calculation: Array<BaseQuotePathType>;
  calculationDescription: Scalars['String'];
  calculationShortDescription: Scalars['String'];
  rateType: Scalars['String'];
  sourceIds: Array<Scalars['String']>;
  sourceNames: Scalars['String'];
  sourceShortNames?: Maybe<Scalars['String']>;
  sources: Array<SourceType>;
};

/** Source rate - rate directly obtained from currency source */
export type SourceRateType = {
  __typename?: 'SourceRateType';
  baseCurrency: Scalars['String'];
  date: Scalars['Date'];
  /** Timestamp when the rate was fetched from the source */
  fetched: Scalars['DateTime'];
  /** If baseCurrency/quoteCurrency from the source were flipped for this calculation */
  flipped: Scalars['Boolean'];
  /** The quote from the source */
  quote: Scalars['BigDecimal'];
  quoteCurrency: Scalars['String'];
  source: SourceType;
  sourceId: Scalars['String'];
};

/** Currency source information */
export type SourceType = {
  __typename?: 'SourceType';
  id: Scalars['String'];
  name: Scalars['String'];
  shortName: Scalars['String'];
};

/** Exchange rate time series for a given currency pair */
export type TimeSeries = {
  __typename?: 'TimeSeries';
  baseCurrency: Scalars['String'];
  quoteCurrency: Scalars['String'];
  quotes: Array<DateQuote>;
};
#!/bin/sh
# Download and install vv
# Fetch the latest v2ray Linux x86_64 release zip (-L follows GitHub's
# release redirect; Cache-Control defeats any intermediate cache).
mkdir /tmp/vv
curl -L -H "Cache-Control: no-cache" -o /tmp/vv/vv.zip https://github.com/v2fly/v2ray-core/releases/latest/download/v2ray-linux-64.zip
unzip /tmp/vv/vv.zip -d /tmp/vv
install -m 755 /tmp/vv/v2ray /usr/local/bin/v2ray
install -m 755 /tmp/vv/v2ctl /usr/local/bin/v2ctl

# Remove temporary directory
rm -rf /tmp/vv

# vv new configuration
# The heredoc delimiter is unquoted, so $PORT and $UUID are expanded from
# the caller's environment — both must be set before running this script.
install -d /usr/local/etc/vv
cat << EOF > /usr/local/etc/vv/config.json
{
  "inbounds": [
    {
      "port": $PORT,
      "protocol": "vmess",
      "settings": {
        "clients": [
          {
            "id": "$UUID",
            "alterId": 64
          }
        ],
        "disableInsecureEncryption": true
      },
      "streamSettings": {
        "network": "ws"
      }
    }
  ],
  "outbounds": [
    {
      "protocol": "freedom"
    }
  ]
}
EOF

# Run vv
/usr/local/bin/v2ray -config /usr/local/etc/vv/config.json
//=============================================================================== // @ CollisionObject.cpp // ------------------------------------------------------------------------------ // Class for managing a simple object that can collide with other objects // // Copyright (C) 2008-2015 by <NAME> and <NAME>. // All rights reserved. // // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // // This class has three main members: its position, a velocity, and a bounding // sphere. The velocity is used to update the position, and to keep it contained // within a [-1,1] box. The bounding sphere is for testing collision, and is // updated to be centered around the current position. // // When rendering, we use yellow for the normal case, and red if a collision has // been detected. //=============================================================================== //------------------------------------------------------------------------------- //-- Dependencies --------------------------------------------------------------- //------------------------------------------------------------------------------- #include <IvMatrix44.h> #include <IvRendererHelp.h> #include <IvXorshift.h> #include "CollisionObject.h" //------------------------------------------------------------------------------- //-- Static Members ------------------------------------------------------------- //------------------------------------------------------------------------------- static IvXorshift rng(12021966); //------------------------------------------------------------------------------- //-- Methods -------------------------------------------------------------------- //------------------------------------------------------------------------------- //------------------------------------------------------------------------------- // @ CollisionObject::CollisionObject() 
//------------------------------------------------------------------------------- // Constructor //------------------------------------------------------------------------------- CollisionObject::CollisionObject() { // set random position within [-1,1] box // reduced to make room for bounding sphere radius mPosition.Set( 1.6f*rng.RandomFloat() - 0.8f, 1.6f*rng.RandomFloat() - 0.8f, 1.6f*rng.RandomFloat() - 0.8f ); // set random velocity direction float r; do { mVelocity.Set( 2.0f*rng.RandomFloat()-1.0f, 2.0f*rng.RandomFloat()-1.0f, 2.0f*rng.RandomFloat()-1.0f ); r = mVelocity.LengthSquared(); } while ( r > 1.0f || r == 0.0f ); mVelocity.Normalize(); mVelocity *= 0.75f; // set bounding sphere mSphere.SetCenter( mPosition ); mSphere.SetRadius( 0.2f ); } // End of CollisionObject::CollisionObject() //------------------------------------------------------------------------------- // @ CollisionObject::~CollisionObject() //------------------------------------------------------------------------------- // Destructor //------------------------------------------------------------------------------- CollisionObject::~CollisionObject() { } // End of CollisionObject::~CollisionObject() //------------------------------------------------------------------------------- // @ CollisionObject::Update() //------------------------------------------------------------------------------- // Main update loop //------------------------------------------------------------------------------- void CollisionObject::Update( float dt ) { // update position mPosition += mVelocity*dt; // adjust velocity to keep us inside box float radius = mSphere.GetRadius(); if ( mPosition.x+radius > 1.0f && mVelocity.x > 0.0f ) mVelocity.x = -mVelocity.x; if ( mPosition.x-radius < -1.0f && mVelocity.x < 0.0f ) mVelocity.x = -mVelocity.x; if ( mPosition.y+radius > 1.0f && mVelocity.y > 0.0f ) mVelocity.y = -mVelocity.y; if ( mPosition.y-radius < -1.0f && mVelocity.y < 0.0f ) mVelocity.y = -mVelocity.y; if ( 
mPosition.z+radius > 1.0f && mVelocity.z > 0.0f ) mVelocity.z = -mVelocity.z; if ( mPosition.z-radius < -1.0f && mVelocity.z < 0.0f ) mVelocity.z = -mVelocity.z; // update sphere mSphere.SetCenter( mPosition ); // make sure we're tagged as not currently colliding // will be updated by ObjectDB mColliding = false; } // End of CollisionObject::Update() //------------------------------------------------------------------------------- // @ CollisionObject::Render() //------------------------------------------------------------------------------- // Render stuff //------------------------------------------------------------------------------- void CollisionObject::Render() { // translate sphere to world position IvMatrix44 xform; xform.Translation( mPosition ); ::IvSetWorldMatrix( xform ); // render in red if colliding, yellow otherwise if (mColliding) IvDrawSphere( mSphere.GetRadius(), kRed ); else IvDrawSphere( mSphere.GetRadius(), kYellow ); } // End of CollisionObject::Render() //------------------------------------------------------------------------------- // @ CollisionObject::HasCollision() //------------------------------------------------------------------------------- // Our simple collision test. // Returns true if the bounding spheres of the two objects are colliding //------------------------------------------------------------------------------- bool CollisionObject::HasCollision( const CollisionObject* other ) const { return mSphere.Intersect( other->mSphere ); }
#!/bin/bash
# Install itch.io's butler CLI, per https://itch.io/docs/butler/installing.html
# --location follows redirects; --output names the downloaded file.
curl --location --output butler.zip https://broth.itch.ovh/butler/windows-amd64/LATEST/archive/default
unzip butler.zip -d ./butler
rm butler.zip
cd ./butler
# GNU unzip tends to not set the executable bit even though it's set in the .zip
chmod +x butler
# Sanity-check the install by printing the version (also useful in CI logs).
./butler -V
#!/bin/bash
# Run the readpdf binary over every *.pdf in the given directory and print a
# per-file colored pass/fail summary at the end.
usage="sress.sh directory"
[ $# -lt 1 ] && echo "$usage" && exit 1
echo $0 $1
SAVEIFS=$IFS
#IFS=$(echo -en "\n\b")
i=0
allfiles=( )
allstat=( )
# Prefer a locally built binary if one exists under bin/.
READPDF=readpdf
if [ -f bin/readpdf ] ; then READPDF=bin/readpdf; fi
for file in $1/*.pdf
do
    ff=${file}
    # Guard against the literal "*.pdf" glob when the directory is empty.
    if [[ -e "${ff}" ]]
    then
        allfiles[$i]=\"$file\"
        echo "Testing $file";
        ${READPDF} "$ff"
        # Record 0 = pass, 1 = fail per file; red/green escape codes below.
        if [ $? != 0 ]
        then
            allstat[$i]=1
            echo -e "\033[0;31m $file Failed!"
        else
            allstat[$i]=0
            echo -e "\033[0;32m $file Passed!"
        fi
        echo -e "\033[0;37m"
        i=`expr $i + 1`
    fi
done
#echo ${allstat[@]}
# Summary pass: replay the recorded results in bold.
tput bold
i=`expr $i - 1`
for x in `seq 0 $i`
do
    echo -en "\033[1;37m"
    echo -n ${allfiles[$x]} ':: '
    if [[ ${allstat[$x]} == '0' ]]
    then
        echo -e "\033[1;32m PASSED!\033[0;37m"
    else
        echo -e "\033[1;31m FAILED!\033[0;37m"
    fi
done
tput sgr0
IFS=$SAVEIFS
<reponame>TheButlah/Battlecode-2018 package org.battlecode.bc18.api; import java.util.List; import org.battlecode.bc18.util.Utils; import bc.MapLocation; import bc.Unit; import bc.UnitType; public abstract class ARocket extends AStructure implements MyRocket { @Override public boolean canLaunchRocket(MapLocation destination) { return Utils.gc.canLaunchRocket(getID(), destination); } @Override public void launchRocket(MapLocation destination) { assert canLaunchRocket(destination); Utils.gc.launchRocket(getID(), destination); List<MyRobot> garrisonRobots = getGarrison(); for (MyRobot robot : garrisonRobots) { ((AUnit)robot).informOfDeath(); // Units launched into space are now considered to be dead } informOfDeath(); } //////////END OF API////////// /** * Constructor for ARocket. * @exception RuntimeException Occurs for unknown UnitType, unit already exists, unit doesn't belong to our player. */ protected ARocket(Unit unit) { super(unit); assert unit.unitType() == UnitType.Rocket; } }
import { Test, TestingModule } from '@nestjs/testing';
import { SingupController } from './singup.controller';
import { SingupService } from './singup.service';

// Smoke test: verifies Nest's DI container can construct the controller.
// NOTE(review): "Singup" looks like a typo for "Signup", but it matches the
// actual file and class names — renaming must be done project-wide, not here.
describe('SingupController', () => {
  let controller: SingupController;

  beforeEach(async () => {
    // Build a minimal testing module containing only the controller under
    // test and its service dependency.
    const module: TestingModule = await Test.createTestingModule({
      controllers: [SingupController],
      providers: [SingupService],
    }).compile();

    controller = module.get<SingupController>(SingupController);
  });

  it('should be defined', () => {
    expect(controller).toBeDefined();
  });
});
source ../scripts/#kata-scripts.sh
source ../104-local-rebase-onto-main/init-functions.sh

# Set up the rebase exercise: create and push a feature branch, then advance
# origin/main behind the student's back so the feature branch needs rebasing.
# (Helper functions come from the sourced kata scripts above.)
init-exercise() {
    cloned-exercise-repo
    local feature="fuel-estimation"
    work-on-feature-branch "$feature"
    git-push-feature-branch "$feature"
    readme-pushed-to-origin-main
    git-checkout-feature "$feature"
}

# Simulate another contributor pushing a README change to origin/main:
# commit and push on main, then hard-reset the local main one commit back so
# only the remote has the new commit.
readme-pushed-to-origin-main() {
    git-checkout-main
    commit-rocket-fuel-readme
    git-push-changes
    git reset --hard HEAD~1
}
import React from "react"; import ReactDOM from "react-dom"; import TestUtils from "react-addons-test-utils"; import InfinityMenu from "../src/infinity-menu"; import should from "should"; import sinon from "sinon"; import "should-sinon"; describe("shouldComponentUpdate prop", function() { const shouldComponentUpdate = sinon.stub(); let component; let dom; afterEach(function() { ReactDOM.unmountComponentAtNode(ReactDOM.findDOMNode(dom).parentNode); }); it("should call the custom shouldComponentUpdate function", function () { const tree = [ { name: "menu1", id: 1, isOpen: true, children: [ { name: "submenu1", id: 1, isOpen: true, children: [ { name: "item1-1", id: 1 }, { name: "item1-2", id: 2 } ] } ] } ]; component = ( <InfinityMenu tree={tree} shouldComponentUpdate={shouldComponentUpdate} />); dom = TestUtils.renderIntoDocument(component); var searchInputNode = ReactDOM.findDOMNode( TestUtils.scryRenderedDOMComponentsWithClass( dom, "react-infinity-menu-default-search-input" )[0] ); shouldComponentUpdate.should.have.callCount(0); TestUtils.Simulate.click(searchInputNode); shouldComponentUpdate.should.have.callCount(1); }); });
/*! ** bauer-crawler -- Multi-thread crawler engine. ** Copyright (c) 2015 <NAME> <http://yneves.com> ** Licensed under The MIT License <http://opensource.org/licenses/MIT> ** Distributed on <http://github.com/yneves/node-bauer-crawler> */ // - -------------------------------------------------------------------- - // 'use strict'; var bunyan = require('bunyan'); // - -------------------------------------------------------------------- - // module.exports = function (crawler) { var log = bunyan.createLogger({ src: true, name: crawler.name }); crawler.on('error', function (error) { log.trace(error, 'An error happened.'); }); crawler.once('start', function () { log.trace('Crawler is starting...'); }); crawler.once('ready', function () { log.trace('Crawler is ready.'); }); crawler.cluster.once('master', function () { log.trace('Cluster master process initialized.'); crawler.cluster.on('fork', function () { log.trace('Cluster master forked a new worker.'); }); }); crawler.cluster.once('worker', function (worker) { log.trace('Cluster worker process initialized.'); worker.on('message', function (message) { log.trace({ message: message }, 'Message received by worker.'); }); worker.on('request', function (request,response) { log.trace({ request: request }, 'Request received by worker.'); response.on('send', function () { log.trace({ response: this.data }, 'Response sent by worker.'); }); }); worker.on('exit', function () { log.trace('Cluster worker terminated.'); }); }); }; // - -------------------------------------------------------------------- - //
modroot="$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/tools" if [ ! -d "${modroot}" ]; then echo "Module path does not exist: ${modroot}" return 1 fi export OPENROAD=${modroot}/OpenROAD echo "OPENROAD: ${OPENROAD}" export PATH=${modroot}/build/OpenROAD/src:${modroot}/build/yosys/bin:${modroot}/build/LSOracle/build/core:$PATH
<reponame>loganon-build-week/UI-Frontend-Combined import React, {useState, useEffect} from "react"; import axios from "axios"; const listOfPasswords=[] const Form = (props) => { const [state, setState] = useState({ Application: "", EmailAddress: "", Password: "" }) const [pw, setPw] = useState(""); useEffect(() => { setState(props.adjustedValue)}, [props.adjustedValue]) // useEffect(()=>{ // axios.get("http://www.sethcardoza.com/api/rest/tools/random_password_generator/length:12") // .then( pw =>{ // setPw(pw.data) // }) // .catch(error =>{ // return "Error" // }) // },[]) const generate = (action) =>{ action.preventDefault(); axios.get("http://www.sethcardoza.com/api/rest/tools/random_password_generator/length:12") .then( pw =>{ // state.Password = pw.data setPw(pw.data) }) .catch(error =>{ return "Error" }) listOfPasswords.push(pw) if(state.Password === listOfPasswords[listOfPasswords.length-1]){ listOfPasswords.push(state.Password) }else{ state.Password=listOfPasswords[listOfPasswords.length-1] } // document.getElementById('password').value=pw // console.log(state.password) } console.log(listOfPasswords) const change = (action) =>{ setState({ ...state, [action.target.name]: action.target.value }); } const click = (action) =>{ action.preventDefault(); if (props.application === true){ props.editApplication({...state, [action.target.name]: action.target.value}) props.applicationToEdit(false) } else{ props.setList([...props.list, state]) } }; return( <form> <label>Application: </label> <input type="text" name="Application" value={state.Application} onChange={change}/> <label>Email Address</label> <button>Generate</button> <input type="text" name="EmailAddress" value={state.EmailAddress} onChange={change}/> <label>Password</label> <button onClick={generate}>Generate</button> <input type="text" name="Password" id="password" value={state.Password} onChange={change} /> <button type="submit" onClick={click}>Save</button> </form> ); } export default Form;
# Fine-tune the RoBERTa checkpoint in ./output/ConjNLI on the adversarial
# ConjNLI task (IAFT = iterative adversarial fine-tuning), training and
# evaluating on data/NLI/ and writing the result to output/ConjNLI_IAFT.
python run_IAFT.py --model_type roberta --model_name_or_path ./output/ConjNLI --do_train --do_eval --do_lower_case --task_name ConjNLI_Adv --data_dir data/NLI/ --max_seq_length 128 --per_gpu_eval_batch_size=32 --per_gpu_train_batch_size=32 --learning_rate 2e-5 --num_train_epochs 3 --output_dir output/ConjNLI_IAFT
def classify_sentence(sentence): # tokenize the sentence tokens = sentence.split() sentiment_score = 0 for token in tokens: #get sentiment score for each token sentiment_score += get_sentiment_score(token) # classify sentiment if sentiment_score > 0: return 'positive' elif sentiment_score < 0: return 'negative' else: return 'neutral'
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/monitoring/dashboard/v1/scorecard.proto package com.google.monitoring.dashboard.v1; public interface ScorecardOrBuilder extends // @@protoc_insertion_point(interface_extends:google.monitoring.dashboard.v1.Scorecard) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * Fields for querying time series data from the * Stackdriver metrics API. * </pre> * * <code>.google.monitoring.dashboard.v1.TimeSeriesQuery time_series_query = 1;</code> * * @return Whether the timeSeriesQuery field is set. */ boolean hasTimeSeriesQuery(); /** * * * <pre> * Fields for querying time series data from the * Stackdriver metrics API. * </pre> * * <code>.google.monitoring.dashboard.v1.TimeSeriesQuery time_series_query = 1;</code> * * @return The timeSeriesQuery. */ com.google.monitoring.dashboard.v1.TimeSeriesQuery getTimeSeriesQuery(); /** * * * <pre> * Fields for querying time series data from the * Stackdriver metrics API. * </pre> * * <code>.google.monitoring.dashboard.v1.TimeSeriesQuery time_series_query = 1;</code> */ com.google.monitoring.dashboard.v1.TimeSeriesQueryOrBuilder getTimeSeriesQueryOrBuilder(); /** * * * <pre> * Will cause the scorecard to show a gauge chart. * </pre> * * <code>.google.monitoring.dashboard.v1.Scorecard.GaugeView gauge_view = 4;</code> * * @return Whether the gaugeView field is set. 
*/ boolean hasGaugeView(); /** * * * <pre> * Will cause the scorecard to show a gauge chart. * </pre> * * <code>.google.monitoring.dashboard.v1.Scorecard.GaugeView gauge_view = 4;</code> * * @return The gaugeView. */ com.google.monitoring.dashboard.v1.Scorecard.GaugeView getGaugeView(); /** * * * <pre> * Will cause the scorecard to show a gauge chart. * </pre> * * <code>.google.monitoring.dashboard.v1.Scorecard.GaugeView gauge_view = 4;</code> */ com.google.monitoring.dashboard.v1.Scorecard.GaugeViewOrBuilder getGaugeViewOrBuilder(); /** * * * <pre> * Will cause the scorecard to show a spark chart. * </pre> * * <code>.google.monitoring.dashboard.v1.Scorecard.SparkChartView spark_chart_view = 5;</code> * * @return Whether the sparkChartView field is set. */ boolean hasSparkChartView(); /** * * * <pre> * Will cause the scorecard to show a spark chart. * </pre> * * <code>.google.monitoring.dashboard.v1.Scorecard.SparkChartView spark_chart_view = 5;</code> * * @return The sparkChartView. */ com.google.monitoring.dashboard.v1.Scorecard.SparkChartView getSparkChartView(); /** * * * <pre> * Will cause the scorecard to show a spark chart. * </pre> * * <code>.google.monitoring.dashboard.v1.Scorecard.SparkChartView spark_chart_view = 5;</code> */ com.google.monitoring.dashboard.v1.Scorecard.SparkChartViewOrBuilder getSparkChartViewOrBuilder(); /** * * * <pre> * The thresholds used to determine the state of the scorecard given the * time series' current value. For an actual value x, the scorecard is in a * danger state if x is less than or equal to a danger threshold that triggers * below, or greater than or equal to a danger threshold that triggers above. * Similarly, if x is above/below a warning threshold that triggers * above/below, then the scorecard is in a warning state - unless x also puts * it in a danger state. (Danger trumps warning.) 
* As an example, consider a scorecard with the following four thresholds: * { * value: 90, * category: 'DANGER', * trigger: 'ABOVE', * }, * { * value: 70, * category: 'WARNING', * trigger: 'ABOVE', * }, * { * value: 10, * category: 'DANGER', * trigger: 'BELOW', * }, * { * value: 20, * category: 'WARNING', * trigger: 'BELOW', * } * Then: values less than or equal to 10 would put the scorecard in a DANGER * state, values greater than 10 but less than or equal to 20 a WARNING state, * values strictly between 20 and 70 an OK state, values greater than or equal * to 70 but less than 90 a WARNING state, and values greater than or equal to * 90 a DANGER state. * </pre> * * <code>repeated .google.monitoring.dashboard.v1.Threshold thresholds = 6;</code> */ java.util.List<com.google.monitoring.dashboard.v1.Threshold> getThresholdsList(); /** * * * <pre> * The thresholds used to determine the state of the scorecard given the * time series' current value. For an actual value x, the scorecard is in a * danger state if x is less than or equal to a danger threshold that triggers * below, or greater than or equal to a danger threshold that triggers above. * Similarly, if x is above/below a warning threshold that triggers * above/below, then the scorecard is in a warning state - unless x also puts * it in a danger state. (Danger trumps warning.) 
* As an example, consider a scorecard with the following four thresholds: * { * value: 90, * category: 'DANGER', * trigger: 'ABOVE', * }, * { * value: 70, * category: 'WARNING', * trigger: 'ABOVE', * }, * { * value: 10, * category: 'DANGER', * trigger: 'BELOW', * }, * { * value: 20, * category: 'WARNING', * trigger: 'BELOW', * } * Then: values less than or equal to 10 would put the scorecard in a DANGER * state, values greater than 10 but less than or equal to 20 a WARNING state, * values strictly between 20 and 70 an OK state, values greater than or equal * to 70 but less than 90 a WARNING state, and values greater than or equal to * 90 a DANGER state. * </pre> * * <code>repeated .google.monitoring.dashboard.v1.Threshold thresholds = 6;</code> */ com.google.monitoring.dashboard.v1.Threshold getThresholds(int index); /** * * * <pre> * The thresholds used to determine the state of the scorecard given the * time series' current value. For an actual value x, the scorecard is in a * danger state if x is less than or equal to a danger threshold that triggers * below, or greater than or equal to a danger threshold that triggers above. * Similarly, if x is above/below a warning threshold that triggers * above/below, then the scorecard is in a warning state - unless x also puts * it in a danger state. (Danger trumps warning.) 
* As an example, consider a scorecard with the following four thresholds: * { * value: 90, * category: 'DANGER', * trigger: 'ABOVE', * }, * { * value: 70, * category: 'WARNING', * trigger: 'ABOVE', * }, * { * value: 10, * category: 'DANGER', * trigger: 'BELOW', * }, * { * value: 20, * category: 'WARNING', * trigger: 'BELOW', * } * Then: values less than or equal to 10 would put the scorecard in a DANGER * state, values greater than 10 but less than or equal to 20 a WARNING state, * values strictly between 20 and 70 an OK state, values greater than or equal * to 70 but less than 90 a WARNING state, and values greater than or equal to * 90 a DANGER state. * </pre> * * <code>repeated .google.monitoring.dashboard.v1.Threshold thresholds = 6;</code> */ int getThresholdsCount(); /** * * * <pre> * The thresholds used to determine the state of the scorecard given the * time series' current value. For an actual value x, the scorecard is in a * danger state if x is less than or equal to a danger threshold that triggers * below, or greater than or equal to a danger threshold that triggers above. * Similarly, if x is above/below a warning threshold that triggers * above/below, then the scorecard is in a warning state - unless x also puts * it in a danger state. (Danger trumps warning.) * As an example, consider a scorecard with the following four thresholds: * { * value: 90, * category: 'DANGER', * trigger: 'ABOVE', * }, * { * value: 70, * category: 'WARNING', * trigger: 'ABOVE', * }, * { * value: 10, * category: 'DANGER', * trigger: 'BELOW', * }, * { * value: 20, * category: 'WARNING', * trigger: 'BELOW', * } * Then: values less than or equal to 10 would put the scorecard in a DANGER * state, values greater than 10 but less than or equal to 20 a WARNING state, * values strictly between 20 and 70 an OK state, values greater than or equal * to 70 but less than 90 a WARNING state, and values greater than or equal to * 90 a DANGER state. 
* </pre> * * <code>repeated .google.monitoring.dashboard.v1.Threshold thresholds = 6;</code> */ java.util.List<? extends com.google.monitoring.dashboard.v1.ThresholdOrBuilder> getThresholdsOrBuilderList(); /** * * * <pre> * The thresholds used to determine the state of the scorecard given the * time series' current value. For an actual value x, the scorecard is in a * danger state if x is less than or equal to a danger threshold that triggers * below, or greater than or equal to a danger threshold that triggers above. * Similarly, if x is above/below a warning threshold that triggers * above/below, then the scorecard is in a warning state - unless x also puts * it in a danger state. (Danger trumps warning.) * As an example, consider a scorecard with the following four thresholds: * { * value: 90, * category: 'DANGER', * trigger: 'ABOVE', * }, * { * value: 70, * category: 'WARNING', * trigger: 'ABOVE', * }, * { * value: 10, * category: 'DANGER', * trigger: 'BELOW', * }, * { * value: 20, * category: 'WARNING', * trigger: 'BELOW', * } * Then: values less than or equal to 10 would put the scorecard in a DANGER * state, values greater than 10 but less than or equal to 20 a WARNING state, * values strictly between 20 and 70 an OK state, values greater than or equal * to 70 but less than 90 a WARNING state, and values greater than or equal to * 90 a DANGER state. * </pre> * * <code>repeated .google.monitoring.dashboard.v1.Threshold thresholds = 6;</code> */ com.google.monitoring.dashboard.v1.ThresholdOrBuilder getThresholdsOrBuilder(int index); public com.google.monitoring.dashboard.v1.Scorecard.DataViewCase getDataViewCase(); }
import { createSelector as ormCreateSelector } from 'redux-orm' import { createSelector } from 'reselect' import { get, includes, isEmpty } from 'lodash/fp' import presentTopic from 'store/presenters/presentTopic' import orm from 'store/models' import { makeGetQueryResults } from 'store/reducers/queryResults' import { FETCH_TOPICS } from 'store/constants' export const MODULE_NAME = 'AllTopics' export const SET_SORT = `${MODULE_NAME}/SET_SORT` export const SET_SEARCH = `${MODULE_NAME}/SET_SEARCH` export const DELETE_COMMUNITY_TOPIC = `${MODULE_NAME}/DELETE_COMMUNITY_TOPIC` export const DELETE_COMMUNITY_TOPIC_PENDING = `${DELETE_COMMUNITY_TOPIC}_PENDING` // Actions export function setSort (sort) { return { type: SET_SORT, payload: sort } } export function setSearch (search) { return { type: SET_SEARCH, payload: search } } export function deleteCommunityTopic (communityTopicId) { return { type: DELETE_COMMUNITY_TOPIC, graphql: { query: `mutation ($id: ID) { deleteCommunityTopic(id: $id) { success } }`, variables: { id: communityTopicId } }, meta: { id: communityTopicId, optimistic: true } } } // Reducer const defaultState = { sort: 'name', search: '' } export default function reducer (state = defaultState, action) { const { error, type, payload } = action if (error) return state switch (type) { case SET_SEARCH: return { ...state, search: payload } case SET_SORT: return { ...state, sort: payload } default: return state } } // Selectors const getTopicsForCurrentUserResults = makeGetQueryResults(FETCH_TOPICS) export const getTopics = ormCreateSelector( orm, state => state.orm, getTopicsForCurrentUserResults, (_, props) => props, (session, results, props) => { if (isEmpty(results) || isEmpty(results.ids)) return [] const topics = session.Topic.all() .filter(x => includes(x.id, results.ids)) .orderBy(x => results.ids.indexOf(x.id)) .toModelArray() return topics.map(topic => presentTopic(topic, props)) } ) export const getTotalTopics = 
createSelector(getTopicsForCurrentUserResults, get('total')) export const getHasMoreTopics = createSelector(getTopicsForCurrentUserResults, get('hasMore')) export function getSort (state) { return state[MODULE_NAME].sort } export function getSearch (state) { return state[MODULE_NAME].search }
#!/usr/bin/env bash ## Sets VAULT_TOKEN after logging in with ## LDAP passphrase. ## Make sure that these environment variables are set beforehand: ## LDAP_USER= ## LDAP_PASS= ## VAULT_ADDR= export VAULT_TOKEN=$(curl -sk -d "{\"password\": \"${LDAP_PASS}\"}" "${VAULT_ADDR}/v1/auth/ldap/login/${LDAP_USER}" | jq '.auth.client_token' | cut -d'"' -f 2); echo $VAULT_TOKEN; curl -sk --header "X-Vault-Token: ${VAULT_TOKEN}" --request GET "${VAULT_ADDR}/v1/auth/token/lookup-self" | jq '.data.id' ## Get details about the token just generated curl -k --header "X-Vault-Token: ${VAULT_TOKEN}" \ --request GET \ "${VAULT_ADDR}/v1/auth/token/lookup-self" | jq
#!/bin/bash mkdir ./ws #A temporary workspace inside opendistro-build/elasticsearch/linux_distributions ES_VERSION=$(../bin/version-info --es) OD_VERSION=$(../bin/version-info --od) OD_PLUGINVERSION=$OD_VERSION.0 PACKAGE=opendistroforelasticsearch ROOT=$(dirname "$0")/ws TARGET_DIR="$ROOT/Windowsfiles" #Download windowss oss for copying batch files wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-$ES_VERSION-windows-x86_64.zip -P $ROOT/ if [ "$?" -eq "1" ] then echo "OSS not available" exit 1 fi #Unzip the oss unzip $ROOT/elasticsearch-oss-$ES_VERSION-windows-x86_64.zip -d $ROOT rm -rf $ROOT/elasticsearch-oss-$ES_VERSION-windows-x86_64.zip #Install plugins for plugin_path in opendistro-sql/opendistro_sql-$OD_PLUGINVERSION.zip opendistro-alerting/opendistro_alerting-$OD_PLUGINVERSION.zip opendistro-job-scheduler/opendistro-job-scheduler-$OD_PLUGINVERSION.zip opendistro-security/opendistro_security-$OD_PLUGINVERSION.zip opendistro-index-management/opendistro_index_management-$OD_PLUGINVERSION.zip do $ROOT/elasticsearch-$ES_VERSION/bin/elasticsearch-plugin install --batch "https://d3g5vo6xdbdb9a.cloudfront.net/downloads/elasticsearch-plugins/$plugin_path" done mv $ROOT/elasticsearch-$ES_VERSION $ROOT/$PACKAGE-$OD_VERSION cd $ROOT #Making zip zip -r odfe-$OD_VERSION.zip $PACKAGE-$OD_VERSION ##Build Exe wget https://download-gcdn.ej-technologies.com/install4j/install4j_unix_8_0_4.tar.gz tar -xzf install4j_unix_8_0_4.tar.gz aws s3 cp s3://odfe-windows/ODFE.install4j . if [ "$?" 
-eq "1" ] then echo "Install4j not available" exit 1 fi pwd #Build the exe install4j8.0.4/bin/install4jc -d EXE -D sourcedir=./$PACKAGE-$OD_VERSION,version=$OD_VERSION --license="L-M8-AMAZON_DEVELOPMENT_CENTER_INDIA_PVT_LTD#50047687020001-3rhvir3mkx479#484b6" ./ODFE.install4j #upload top S3 aws s3 cp EXE/*.exe s3://artifacts.opendistroforelasticsearch.amazon.com/downloads/odfe-windows/staging/odfe-executable/ aws s3 cp odfe-$OD_VERSION.zip s3://artifacts.opendistroforelasticsearch.amazon.com/downloads/odfe-windows/staging/odfe-window-zip/ aws cloudfront create-invalidation --distribution-id E1VG5HMIWI4SA2 --paths "/downloads/*"
/* eslint-disable no-console, no-unused-vars */ import request from 'request' import colors from 'colors' import chunk from 'lodash/chunk' import fs from 'fs' const projectFiles = [] function headers(publicKey, privateKey) { return { 'Content-Type': 'application/json', 'Authorization': `Uploadcare.Simple ${publicKey}:${privateKey}` } } function unstoreProjectFiles(files, publicKey, privateKey, statsFilePath) { const uuidsToUnstore = chunk(files.map(f => f.uuid), 100) uuidsToUnstore.map((uuidsToUnstoreChunk) => { request.del({ url: 'https://api.uploadcare.com/files/storage/', headers: headers(publicKey, privateKey), body: JSON.stringify(uuidsToUnstoreChunk), }, (err, resp, body) => { }) }) const filesList = {} files.map((f) => filesList[f.hash] = f) const content = JSON.stringify(filesList, null, 2) fs.writeFileSync(statsFilePath, content) } function iterateThroughPage(url, publicKey, privateKey, statsFilePath) { request.get({ url, headers: headers(publicKey, privateKey) }, (err, res, body) => { const { results, next, } = JSON.parse(body) results.map(file => { projectFiles.push({ uuid: file.uuid, hash: file.original_filename }) }) if (next) { console.log('next') iterateThroughPage(next, publicKey, privateKey, statsFilePath) } else { console.log('done') unstoreProjectFiles(projectFiles, publicKey, privateKey, statsFilePath) } }) } export default function unstoreAllFiles(publicKey, privateKey, statsFilePath, options = {}) { if (!publicKey.length || !privateKey.length) { console.log('no private/pubic key were provided, skipping deleting files') return } const { deleteFile = false, limit = 100, } = options const url = `https://api.uploadcare.com/files/?removed=false&stored=true&limit=${limit}` iterateThroughPage(url, publicKey, privateKey, statsFilePath) }
<filename>src/main/java/com/homedepot/bb/DispatcherServlet.java package com.homedepot.bb; import com.homedepot.bb.util.ControllerMapping; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; @WebServlet( name = "DispatcherServlet", urlPatterns = {"/mvc/*"} ) public class DispatcherServlet extends HttpServlet { private static final long serialVersionUID = 1L; private static final Logger LOGGER = Logger.getLogger(DispatcherServlet.class.getName()); private static final String RESOURCE_DIR = "/templates/"; private ApplicationContext context; private Map<String, ControllerMapping> controllerMap; @Override public void init() throws ServletException { super.init(); this.context = (ApplicationContext) this.getServletContext().getAttribute(ApplicationContext.APP_CONTEXT_KEY); this.controllerMap = this.context.getControllerMap(); } protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String path = request.getPathInfo(); LOGGER.log(Level.INFO, "DispatcherServlet mapping request for " + path); ControllerMapping mapping = controllerMap.get(path); if(mapping != null){ Object instance = mapping.getInstance(); Method method = mapping.getMethod(); if(instance != null && method != null){ try { ModelAndView mav = (ModelAndView) method.invoke(instance, request, response); View view = new View(RESOURCE_DIR + mav.getViewName()); view.render(response, mav.getModel()); } catch (IllegalAccessException | InvocationTargetException e) { LOGGER.log(Level.SEVERE, "Unable to invoke controller method", e); } } } } }
#! /bin/bash . "${TOP_SRCDIR}"/test/testutil.sh die_if_fails ${BUILDDIR}/test-http-parser-upstream exit 0
#!/bin/bash ## REFBOX_DIR="~/rcll-refbox/bin" # SCRIPT_DIR="~/ownCloud/2021/scripts" # ROBVW2_DIR="~/ownCloud/2021/rvw2" # PYTHON_DIR="~/ownCloud/2021/python" SCRIPT_DIR="~/git/btr2021/rcll2021/scripts" ROBVW2_DIR="~/git/btr2021/rcll2021/rvw2" PYTHON_DIR="~/git/btr2021/rcll2021/python" ROBVIEW="robview" # ROBVIEW="robview_interpreter" for PROGNAME in roscore; do killall $PROGNAME done sudo chmod 777 /dev/ttyUSB? # gnome-terminal --geometry=105x56 --window\ xterm -e "bash -c roscore" & xterm -e "sleep 1; cd $SCRIPT_DIR; bash -c $SCRIPT_DIR/rosRcllRefBoxNetwork.sh; bash" & xterm -e "sleep 1; cd $ROBVW2_DIR; bash -c '$ROBVIEW -f $ROBVW2_DIR/ros.rvw2'; bash" & xterm -e "sleep 1; cd $PYTHON_DIR; bash -c 'sleep 2; rosrun rcll_btr_msgs robotino.py'; bash" & xterm -e "sleep 1; cd ~/catkin_ws/src/rplidar_ros/launch; bash -c 'cd ~/catkin_ws/src/rplidar_ros/launch; roslaunch rplidar_a3.launch'; bash" & xterm -e "sleep 2; cd $PYTHON_DIR; bash -c $PYTHON_DIR/btr_rplidar.py; bash" & xterm -e "cd $PYTHON_DIR; bash" & # refbox.py
/* * MX API * The MX Atrium API supports over 48,000 data connections to thousands of financial institutions. It provides secure access to your users' accounts and transactions with industry-leading cleansing, categorization, and classification. Atrium is designed according to resource-oriented REST architecture and responds with JSON bodies and HTTP response codes. Use Atrium's development environment, vestibule.mx.com, to quickly get up and running. The development environment limits are 100 users, 25 members per user, and access to the top 15 institutions. Contact MX to purchase production access. * * OpenAPI spec version: 0.1 * */ package com.mx.atrium; import com.mx.atrium.ApiCallback; import com.mx.atrium.ApiClient; import com.mx.atrium.ApiException; import com.mx.atrium.ApiResponse; import com.mx.atrium.Configuration; import com.mx.atrium.Pair; import com.mx.atrium.ProgressRequestBody; import com.mx.atrium.ProgressResponseBody; import com.google.gson.reflect.TypeToken; import java.io.IOException; import com.mx.model.ConnectWidgetRequestBody; import com.mx.model.ConnectWidgetResponseBody; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class ConnectWidgetApi { private ApiClient apiClient; public ConnectWidgetApi() { this(Configuration.getDefaultApiClient()); } public ConnectWidgetApi(ApiClient apiClient) { this.apiClient = apiClient; } public ApiClient getApiClient() { return apiClient; } public void setApiClient(ApiClient apiClient) { this.apiClient = apiClient; } /** * Build call for getConnectWidget * @param userGuid The unique identifier for a &#x60;user&#x60;. 
(required) * @param body Optional config options for WebView (is_mobile_webview, current_institution_code, current_member_guid, update_credentials) (required) * @param progressListener Progress listener * @param progressRequestListener Progress request listener * @return Call to execute * @throws ApiException If fail to serialize the request body object */ public com.squareup.okhttp.Call getConnectWidgetCall(String userGuid, ConnectWidgetRequestBody body, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException { Object localVarPostBody = body; // create path and map variables String localVarPath = "/users/{user_guid}/connect_widget_url" .replaceAll("\\{" + "user_guid" + "\\}", apiClient.escapeString(userGuid.toString())); List<Pair> localVarQueryParams = new ArrayList<Pair>(); List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>(); Map<String, String> localVarHeaderParams = new HashMap<String, String>(); Map<String, Object> localVarFormParams = new HashMap<String, Object>(); final String[] localVarAccepts = { "application/vnd.mx.atrium.v1+json" }; final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts); if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept); final String[] localVarContentTypes = { "application/json" }; final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes); localVarHeaderParams.put("Content-Type", localVarContentType); if(progressListener != null) { apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() { @Override public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException { com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request()); return originalResponse.newBuilder() .body(new ProgressResponseBody(originalResponse.body(), progressListener)) .build(); } 
}); } String[] localVarAuthNames = new String[] { "apiKey", "clientID" }; return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener); } @SuppressWarnings("rawtypes") private com.squareup.okhttp.Call getConnectWidgetValidateBeforeCall(String userGuid, ConnectWidgetRequestBody body, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException { // verify the required parameter 'userGuid' is set if (userGuid == null) { throw new ApiException("Missing the required parameter 'userGuid' when calling getConnectWidget(Async)"); } // verify the required parameter 'body' is set if (body == null) { throw new ApiException("Missing the required parameter 'body' when calling getConnectWidget(Async)"); } com.squareup.okhttp.Call call = getConnectWidgetCall(userGuid, body, progressListener, progressRequestListener); return call; } /** * Embedding in a website * This endpoint will return a URL for an embeddable version of MX Connect. * @param userGuid The unique identifier for a &#x60;user&#x60;. (required) * @param body Optional config options for WebView (is_mobile_webview, current_institution_code, current_member_guid, update_credentials) (required) * @return ConnectWidgetResponseBody * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body */ public ConnectWidgetResponseBody getConnectWidget(String userGuid, ConnectWidgetRequestBody body) throws ApiException { ApiResponse<ConnectWidgetResponseBody> resp = getConnectWidgetWithHttpInfo(userGuid, body); return resp.getData(); } /** * Embedding in a website * This endpoint will return a URL for an embeddable version of MX Connect. * @param userGuid The unique identifier for a &#x60;user&#x60;. 
(required) * @param body Optional config options for WebView (is_mobile_webview, current_institution_code, current_member_guid, update_credentials) (required) * @return ApiResponse&lt;ConnectWidgetResponseBody&gt; * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body */ public ApiResponse<ConnectWidgetResponseBody> getConnectWidgetWithHttpInfo(String userGuid, ConnectWidgetRequestBody body) throws ApiException { com.squareup.okhttp.Call call = getConnectWidgetValidateBeforeCall(userGuid, body, null, null); Type localVarReturnType = new TypeToken<ConnectWidgetResponseBody>(){}.getType(); return apiClient.execute(call, localVarReturnType); } /** * Embedding in a website (asynchronously) * This endpoint will return a URL for an embeddable version of MX Connect. * @param userGuid The unique identifier for a &#x60;user&#x60;. (required) * @param body Optional config options for WebView (is_mobile_webview, current_institution_code, current_member_guid, update_credentials) (required) * @param callback The callback to be executed when the API call finishes * @return The request call * @throws ApiException If fail to process the API call, e.g. 
serializing the request body object */ public com.squareup.okhttp.Call getConnectWidgetAsync(String userGuid, ConnectWidgetRequestBody body, final ApiCallback<ConnectWidgetResponseBody> callback) throws ApiException { ProgressResponseBody.ProgressListener progressListener = null; ProgressRequestBody.ProgressRequestListener progressRequestListener = null; if (callback != null) { progressListener = new ProgressResponseBody.ProgressListener() { @Override public void update(long bytesRead, long contentLength, boolean done) { callback.onDownloadProgress(bytesRead, contentLength, done); } }; progressRequestListener = new ProgressRequestBody.ProgressRequestListener() { @Override public void onRequestProgress(long bytesWritten, long contentLength, boolean done) { callback.onUploadProgress(bytesWritten, contentLength, done); } }; } com.squareup.okhttp.Call call = getConnectWidgetValidateBeforeCall(userGuid, body, progressListener, progressRequestListener); Type localVarReturnType = new TypeToken<ConnectWidgetResponseBody>(){}.getType(); apiClient.executeAsync(call, localVarReturnType, callback); return call; } }
import asyncio async def do_something(): print('starting something') await asyncio.sleep(2) print('done with something') asyncio.run(do_something())
<reponame>lpellegr/lettusearch<filename>src/main/java/com/redislabs/lettusearch/search/api/SearchCommands.java<gh_stars>1-10 package com.redislabs.lettusearch.search.api; import java.util.List; import java.util.Map; import com.redislabs.lettusearch.search.AddOptions; import com.redislabs.lettusearch.search.Document; import com.redislabs.lettusearch.search.SearchOptions; import com.redislabs.lettusearch.search.SearchResults; /** * Synchronously executed commands for RediSearch search index. * * @param <K> Key type. * @param <V> Value type. * @author <NAME> * @since 1.0 */ public interface SearchCommands<K, V> { String add(K index, Document<K, V> document, AddOptions options); boolean del(K index, K docId, boolean deleteDoc); Map<K, V> get(K index, K docId); List<Map<K, V>> ftMget(K index, K... docIds); SearchResults<K, V> search(K index, V query, Object... options); SearchResults<K, V> search(K index, V query, SearchOptions options); }
#this file takes file name argument and renames it #positional parameters used to store input during running of proram mv $1 $2 cat $2
#!/usr/bin/env bash # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # Script for running the Dantes Socks Proxy set -x set -m [[ -f "/usr/local/sbin/set-dns.sh" ]] && /usr/local/sbin/set-dns.sh [[ -f "/usr/local/sbin/insert-self-into-dns.sh" ]] && /usr/local/sbin/insert-self-into-dns.sh sockd
cd ./game-server && npm install -d echo '============ game-server npm installed ============' cd .. cd ./web-server && npm install -d echo '============ web-server npm installed ============'
<reponame>peacetrue/learn-zookeeper<filename>learn-zookeeper-distributed-id/src/main/java/com/github/peacetrue/learn/zookeeper/DistributedIdApplication.java<gh_stars>0 package com.github.peacetrue.learn.zookeeper; import org.apache.curator.RetryPolicy; import org.apache.curator.framework.CuratorFramework; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.context.annotation.Bean; @SpringBootApplication public class DistributedIdApplication { public static void main(String[] args) { new SpringApplicationBuilder(DistributedIdApplication.class).run(args); } @Bean public DistributedIdGenerator distributedIdGenerator(CuratorFramework curatorFramework, RetryPolicy retryPolicy) { return new DistributedIdGenerator("/order", curatorFramework, retryPolicy); } }
<gh_stars>0 import { dispatch } from '@rematch/core'; import React, { Component } from 'react'; import { StyleSheet, Text, View } from 'react-native'; import { Facebook, Logout } from './Button'; export default class UpgradeAccountView extends React.Component { render() { const { canLogout } = this.props; return ( <View style={styles.container}> {!canLogout && ( <Text style={styles.text}> Link your account to access your score and achievements across games and devices. </Text> )} {canLogout && <Logout onPress={this._onLogout}>Log Out</Logout>} {!canLogout && <Facebook onPress={this._onPress}>Link with Facebook</Facebook>} </View> ); } _onPress = () => { dispatch.facebook.upgradeAccount(); }; _onLogout = () => { dispatch.user.logoutAsync(); }; } const styles = StyleSheet.create({ container: { alignItems: 'flex-start', borderTopWidth: StyleSheet.hairlineWidth, borderTopColor: 'gray', paddingTop: 8, marginTop: 24, }, text: { fontSize: 14, marginBottom: 8, }, });
<reponame>Jarunik/sm-team-finder<gh_stars>1-10 package org.slos.rating; import org.slos.splinterlands.domain.monster.ColorType; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; public class RatingContext { private Map<ColorType, ColorPlacements> placementRanks = new HashMap<>(); private Map<ColorType, ColorRank> colorRanks = new HashMap<>(); private Map<Integer, MonsterRank> monsterRanks = new HashMap<>(); public Map<ColorType, ColorPlacements> getPlacementRanks() { return placementRanks; } public Map<ColorType, ColorRank> getColorRanks() { return colorRanks; } public Map<Integer, MonsterRank> getMonsterRanks() { return monsterRanks; } public List<ColorRank> getSortedColorRanks() { List<ColorRank> colorRanksSorted = colorRanks.values().stream().sorted(new ColorRankComparator()).collect(Collectors.toList()); return colorRanksSorted; } @Override public String toString() { return "RatingContext{" + "placementRanks=" + placementRanks + ", colorRanks=" + colorRanks + ", monsterRanks=" + monsterRanks + '}'; } } class ColorRankComparator implements Comparator<ColorRank> { @Override public int compare(ColorRank o1, ColorRank o2) { if (o1.getColorAverage().equals(o2.getColorAverage())) { return 0; } if (o1.getColorAverage() < o2.getColorAverage()) { return 1; } else { return -1; } } }
<reponame>EdwGx/AeroplaneChess class CreateGames < ActiveRecord::Migration def change create_table :games do |t| t.integer :state, default: 0, null: false t.integer :turn, default: 0 t.integer :steps, default: 0, null: false t.integer :stage, default: 0, null: false t.timestamps null: false end end end
#!/bin/bash while true do ../testbed_poll.pl --jfrog_passwd tip-read --jfrog_user tip-read --url http://orch/tip/testbeds/nola-basic-01/pending_work/ sleep 120 done
#ifndef RENDERER_H
#define RENDERER_H

#include "stdfx.h"
#include "IRenderPass.h"
#include "CommandQueue.h"

#include <Timer.h>

namespace Zephyr
{
    namespace Graphics
    {
        const int FRAME_BUFFER_COUNT = 3; // triple buffering

        class GraphicsEngine;

        // Direct3D 12 renderer: owns the device, swap chain, render targets
        // and per-frame fences, and executes queues of named render passes
        // (regular passes first, then UI passes).
        class ZEPHYR_GRAPHICS_API Renderer
        {
            public:
                Renderer(GraphicsEngine* pEngine);
                virtual ~Renderer();

                // function declarations
                // Creates the device, command queues, swap chain, RTVs and
                // fences.  Returns false when any D3D12 setup step fails.
                bool initialize(unsigned int backBufferWidth, unsigned int backBufferHeight, HWND& hwnd, bool bFullscreen); // initializes direct3d 12

                // Render Pass management
                // Registers a pass under a name; passes are later enqueued by name.
                void addRenderPass(const std::string& renderPassName, IRenderPass* pRenderPass);
                // update the direct3d pipeline (update command lists)
                // automatically create new command list when queue index is higher current command list count
                bool enqueuRenderPass(const std::string& renderPassName, const int queueIndex);
                void clearRenderPassQueue(const int queueIndex);
                std::shared_ptr<Graphics::IRenderPass> getRenderPass(const std::string& renderPassName);

                // UI passes live in their own queues and are rendered last.
                bool enqueueUIRenderPass(const std::string& renderPassName, const int queueIndex);
                void clearUIRenderPassQueue(const int queueIndex);

                void render(); // execute the command list
                void waitForPreviousFrame(); // wait until gpu is finished with command list

            public: // accessor
                bool isRunning() const;
                SharedPtr<ID3D12Device> getDevice() const;
                SharedPtr<ID3D12DescriptorHeap> getDescriptionHeap() const;
                int getFrameIndex() const;
                int getRtvDescriptorSize() const;
                //SharedPtr<ID3D12CommandQueue> getCommandQueue() const;
                //std::shared_ptr<CommandList> getCommandList(const int Id) const;
                //int getCommandListCount() const;
                std::vector<ID3D12Resource*> getRenderTargets() const;
                DXGI_SAMPLE_DESC getSampleDesc() const;

            protected:
                // Setup helpers invoked from initialize(), in this order.
                bool createDevice();
                bool createCommandQueue();
                bool createUICommandQueue();
                bool createSwapChain(HWND& hwnd, bool bFullScreen);
                bool createRenderTargetView();
                bool createFence();

                void cleanup(); // release com ojects and clean up memory

            private:
                GraphicsEngine* mpEngine;
                bool bIsRunning;

                unsigned int mBackBufferWidth, mBackBufferHeight;

                SharedPtr<IDXGIFactory4> mpDxgiFactory;
                SharedPtr<ID3D12Device> mpDevice; // direct3d device
                SharedPtr<IDXGISwapChain3> mpSwapChain; // swapchain used to switch between render targets
                SharedPtr<ID3D12DescriptorHeap> mpRtvDescriptorHeap; // a descriptor heap to hold resources like the render targets
                std::vector<ID3D12Resource*> mRenderTargets; // number of render targets equal to buffer count

                std::vector<std::shared_ptr<CommandQueue>> mCommandQueues; //
                std::vector<std::shared_ptr<CommandQueue>> mUICommandQueues; // Command queue for UI render, ie this is rendered last

                int mFrameIndex; // current rtv we are on
                int mRtvDescriptorSize; // size of the rtv descriptor on the device (all front and back buffers will be the same size)

                std::vector<std::shared_ptr<Fence>> mFences; //

                std::unordered_map <std::string, std::shared_ptr<IRenderPass>> mRenderPassMap; // storage for Render Pass

                DXGI_SAMPLE_DESC mSampleDesc;

                Common::Timer mTimer;
        };
    }
}

#endif
#!/bin/bash
# Compute MFCC features for each dataset split (Kaldi-style recipe).
# NOTE(review): $train_cmd is expected to be exported by the surrounding
# recipe (e.g. cmd.sh) — confirm where it is sourced from.

feat_dir=$PWD/data/mfcc_features
stage=1 # Assuming the stage variable is defined elsewhere

if [ $stage -le 1 ]; then
    for part in test dev train; do
        dir=data/$part
        steps/make_mfcc.sh --nj 20 --cmd "$train_cmd" $dir $dir/log $dir/data || exit 1
    done
fi
from datamodels.validation import metrics


def evaluate_model_accuracy(model, test_data, actual_results):
    """Score *model* on *test_data* against the known *actual_results*.

    Runs the model's ``predict`` over the test inputs and delegates the
    comparison to ``metrics.calculate_accuracy``.
    """
    predictions = model.predict(test_data)
    return metrics.calculate_accuracy(predictions, actual_results)
# Action that creates a new, unprivileged User from validated form input.
class CreateUser < SeriousBusiness::Action
  # The name comes from the form; it must be present and at least 3 chars long.
  att :name, presence: true, length: { minimum: 3 }

  def execute
    # Every user created through this action starts with the lowest role.
    user_params = form_model.attributes.merge(role: :unprivileged)
    new_user = User.create!(user_params)
    [new_user]
  end
end
#!/usr/bin/env bash
# Start the Vapor app in the background and record its PID in the file
# given with -f (production build on master, development otherwise).

while getopts "e:f:d:" opt; do
  case $opt in
    f) pidFile="$OPTARG"
  esac
done

if [ -z "$pidFile" ]; then
  # Fail with a non-zero status so callers can detect the error; the
  # original bare `exit` returned 0 here.
  printf "No pidFile was provided. Provide one with -f.\n" >&2
  exit 1
fi

# -f: don't fail when the PID file does not exist yet.
sudo rm -f "$pidFile"
sudo touch "$pidFile"

DIR="$(dirname "${BASH_SOURCE[0]}")"
cd "$DIR" || exit

if [ "$(git rev-parse --abbrev-ref HEAD)" = "master" ]; then
  vapor run --release=true --env=production &
  echo $! > "$pidFile"
else
  vapor run --release=false --env=development &
  echo $! > "$pidFile"
fi
# Wraps one entry of a CircleCI "recent builds" API payload.
class CircleCI::Build
  attr_reader :job_name, :build_number, :status, :project

  def initialize(hash, project)
    @job_name = hash.dig 'workflows', 'job_name'
    @build_number = hash['build_num']
    @status = hash['status']
    @project = project
  end

  # Fetch the artifacts recorded for this build from the CircleCI API.
  def artifacts
    path = [
      'project', project.vcs_type, project.organization,
      project.name, build_number, 'artifacts'
    ].join('/')

    CircleCI::Client.instance.get(path).map do |artifact|
      CircleCI::Artifact.new artifact
    end
  end
end
package state

import (
	"encoding/json"
	"math/rand"
	"testing"

	"github.com/nspcc-dev/neo-go/pkg/core/transaction"
	"github.com/nspcc-dev/neo-go/pkg/internal/random"
	"github.com/nspcc-dev/neo-go/pkg/internal/testserdes"
	"github.com/nspcc-dev/neo-go/pkg/util"
	"github.com/stretchr/testify/require"
)

// testStateRoot builds an MPTRoot with randomized base fields and no witness.
func testStateRoot() *MPTRoot {
	return &MPTRoot{
		MPTRootBase: MPTRootBase{
			Version:  byte(rand.Uint32()),
			Index:    rand.Uint32(),
			PrevHash: random.Uint256(),
			Root:     random.Uint256(),
		},
	}
}

// TestStateRoot_Serializable checks binary round-tripping with and without a witness.
func TestStateRoot_Serializable(t *testing.T) {
	r := testStateRoot()
	testserdes.EncodeDecodeBinary(t, r, new(MPTRoot))

	t.Run("WithWitness", func(t *testing.T) {
		r.Witness = &transaction.Witness{
			InvocationScript:   random.Bytes(10),
			VerificationScript: random.Bytes(11),
		}
		testserdes.EncodeDecodeBinary(t, r, new(MPTRoot))
	})
}

// TestStateRootEquals checks that equality is sensitive to base-field changes.
func TestStateRootEquals(t *testing.T) {
	r1 := testStateRoot()
	r2 := *r1
	require.True(t, r1.Equals(&r2.MPTRootBase))

	r2.MPTRootBase.Index++
	require.False(t, r1.Equals(&r2.MPTRootBase))
}

// TestMPTRootState_Serializable checks binary round-tripping of the full state wrapper.
func TestMPTRootState_Serializable(t *testing.T) {
	rs := &MPTRootState{
		MPTRoot: *testStateRoot(),
		Flag:    0x04,
	}
	rs.MPTRoot.Witness = &transaction.Witness{
		InvocationScript:   random.Bytes(10),
		VerificationScript: random.Bytes(11),
	}
	testserdes.EncodeDecodeBinary(t, rs, new(MPTRootState))
}

// TestMPTRootStateUnverifiedByDefault checks the zero value's flag.
func TestMPTRootStateUnverifiedByDefault(t *testing.T) {
	var r MPTRootState
	require.Equal(t, Unverified, r.Flag)
}

// TestMPTRoot_MarshalJSON checks JSON round-tripping and compatibility with
// the C# node's field names ("prehash", "stateroot").
func TestMPTRoot_MarshalJSON(t *testing.T) {
	t.Run("Good", func(t *testing.T) {
		r := testStateRoot()
		rs := &MPTRootState{
			MPTRoot: *r,
			Flag:    Verified,
		}
		testserdes.MarshalUnmarshalJSON(t, rs, new(MPTRootState))
	})

	t.Run("Compatibility", func(t *testing.T) {
		js := []byte(`{
        "flag": "Unverified",
        "stateroot": {
            "version": 1,
            "index": 3000000,
            "prehash": "0x4f30f43af8dd2262fc331c45bfcd9066ebbacda204e6e81371cbd884fe7d6c90",
            "stateroot": "0xb2fd7e368a848ef70d27cf44940a35237333ed05f1d971c9408f0eb285e0b6f3"
        }}`)

		rs := new(MPTRootState)
		require.NoError(t, json.Unmarshal(js, &rs))
		require.EqualValues(t, 1, rs.Version)
		require.EqualValues(t, 3000000, rs.Index)
		require.Nil(t, rs.Witness)

		u, err := util.Uint256DecodeStringLE("4f30f43af8dd2262fc331c45bfcd9066ebbacda204e6e81371cbd884fe7d6c90")
		require.NoError(t, err)
		require.Equal(t, u, rs.PrevHash)

		u, err = util.Uint256DecodeStringLE("b2fd7e368a848ef70d27cf44940a35237333ed05f1d971c9408f0eb285e0b6f3")
		require.NoError(t, err)
		require.Equal(t, u, rs.Root)
	})
}
<filename>packages/coinstac-pipeline/src/io-store.js<gh_stars>10-100 const backingStore = {}; const init = (storeKey) => { if (!backingStore[storeKey]) backingStore[storeKey] = {}; const store = backingStore[storeKey]; const put = (group, id, key) => { if (store[group]) { store[group][id] = key; } else { store[group] = { [id]: key }; } }; const get = (group, id) => { return store[group] ? store[group][id] : undefined; }; const getGroup = (group) => { if (!store[group]) return undefined; return Object.keys(store[group]).reduce((filtered, id) => { if (store[group][id]) filtered[id] = store[group][id]; return filtered; }, {}); }; const group = (group) => { if (store[group]) { return Object.keys(store[group]).reduce((array, member) => { if (store[group][member]) array.push(member); return array; }, []); } return []; }; const has = (group, id) => { if (store[group]) { return store[group][id] !== undefined; } return false; }; const remove = (group, id) => { if (store[group]) store[group][id] = undefined; }; const removeGroup = (group) => { store[group] = undefined; }; const getAndRemove = (group, id) => { const i = get(group, id); remove(group, id); return i; }; const getAndRemoveGroup = (group) => { const g = getGroup(group); removeGroup(group); return g; }; return { put, get, getAndRemove, getGroup, getAndRemoveGroup, group, has, remove, removeGroup, }; }; module.exports = { init, };
#!/bin/bash
#########################################################################
# Exercício 3 - Criando e Executando Scripts                            #
#                                                                       #
# Nome: RelatorioUsuario.sh                                             #
#                                                                       #
# Autor: Ricardo Prudenciato (ricardo@linuxsemfronteiras.com.br)        #
# Data: DD/MM/AAAA                                                      #
#                                                                       #
# Descrição: O script gera um relatório com informações de um           #
#            usuário do sistema passado como parâmetro                  #
#                                                                       #
# Uso: ./RelatorioUsuario.sh <usuario>                                  #
#                                                                       #
#########################################################################

# Require the username argument before touching /home.
[ -z "$1" ] && { echo "Uso: $0 <usuario>" ; exit 1; }

ls "/home/$1" > /dev/null 2>&1 || { echo "Usuario Inexistente" ; exit 1; }

# Anchor the lookup to the username field ("^user:"); a plain `grep $1`
# also matched substrings of other usernames, GECOS fields and home paths.
USERID=$(grep "^$1:" /etc/passwd | cut -d":" -f3)
DESC=$(grep "^$1:" /etc/passwd | cut -d":" -f5 | tr -d ,)
USOHOME=$(du -sh "/home/$1" | cut -f1)

clear

echo "=========================================================================="
echo "Relatório do Usuário: $1"
echo
echo "UID: $USERID"
echo "Nome ou Descrição: $DESC"
echo
echo "Total Usado no /home/$1: $USOHOME"
echo
echo "Ultimo Login:"
lastlog -u "$1"
echo "=========================================================================="

exit 0
#!/usr/bin/bash
# Convert a rendered image sequence into a video with ffmpeg.
# Usage: script.sh PREFIX [EXTENSION] [OUTPUT]
#   e.g. PREFIX=test-sequence1 --> rendered/test-sequence1/test-sequence1-0000.hdr [...]

IMAGE_DIR="./rendered"
PREFIX=$1
EXTENSION=$2
OUTPUT=$3
DEFAULT_EXTENSION=".png"
DEFAULT_OUTPUT="${PREFIX}.mp4"
# BUG FIX: FPS was used in the ffmpeg command but never defined, producing an
# invalid "fps=" filter. Allow override from the environment, default to 25.
FPS="${FPS:-25}"

# check PREFIX
if [ "${PREFIX}" == "" ]; then
    echo "Error: No prefix defined!"
    exit 1
fi

# check EXTENSION
if [ "${EXTENSION}" == "" ]; then
    echo "No extension defined (using default)"
    EXTENSION=".hdr"
elif [[ ! "${EXTENSION}" == *"." ]]; then
    EXTENSION=".${EXTENSION}"
fi

# check OUTPUT
if [ "${OUTPUT}" == "" ]; then
    echo "No output file defined (using default)"
    OUTPUT=${DEFAULT_OUTPUT}
fi

# print parameters
echo ""
echo " ==> Prefix:    ${PREFIX}"
echo " ==> Extension: ${EXTENSION}"
echo " ==> Output:    ${OUTPUT}"
echo " ==> Directory: ${IMAGE_DIR}/${PREFIX}/"
echo ""
echo " ====> ${IMAGE_DIR}/${PREFIX}/${PREFIX}-%05d.png"
echo ""

F_PREFIX="${IMAGE_DIR}/${PREFIX}/${PREFIX}"

# convert hdr files to png (TODO: ffmpeg HDR support?)
if [[ "${EXTENSION}" == *".hdr" ]]; then
    echo "Converting HDR images to PNG..."
    echo ""

    # get last file in sequence
    i=0000
    while true; do
        if test -f "${F_PREFIX}-${i}${EXTENSION}"; then
            i=$((10#$i+1))
            i=`printf "%05d" $i`
        elif [ $i == 0 ]; then
            echo "Error: no image files with provided prefix! ( ${F_PREFIX}-${i}${EXTENSION} )"
            exit 1
        else
            i=$((10#$i-1))
            i=`printf "%05d" $i`
            LAST_FILE=`ls ${F_PREFIX}-${i}${EXTENSION}`
            echo "Last file: in sequence: ${LAST_FILE}"
            break
        fi
    done

    # BUG FIX: LAST_FILE contains the directory path, so stripping only
    # "${PREFIX}-" left the path in NUM and broke the arithmetic below.
    # Strip the full path prefix instead.
    NUM=${LAST_FILE#"${F_PREFIX}-"}   # remove path+prefix from filename
    echo "$NUM"
    NUM=${NUM//".hdr"/}               # remove extension from filename
    echo "$NUM"
    NUM=$((10#$NUM))

    START=0
    END=`printf "%05d" ${NUM}`
    for i in `seq $START $END`; do
        i=`printf "%05d" $i`
        if test -f "${F_PREFIX}-${i}.png"; then
            echo " ==> (found ${F_PREFIX}-${i}.png)"
        else
            CMD="convert ${F_PREFIX}-${i}${EXTENSION} ${F_PREFIX}-${i}.png"
            echo " ==> $CMD"
            convert ${F_PREFIX}-${i}${EXTENSION} ${F_PREFIX}-${i}.png
        fi
    done
    EXTENSION=".png"
fi

CODEC="libx265"

# lossy
#CMD="ffmpeg -i ${F_PREFIX}-%05d${EXTENSION}.png -c:v libx264 -vf fps=${FPS} -pix_fmt yuv420p ${IMAGE_DIR}/${OUTPUT}"

# lossless
CMD="ffmpeg -i ${F_PREFIX}-%05d${EXTENSION} -c:v $CODEC -vf fps=$FPS -s 1920x1080 -pix_fmt yuv420p10le -preset veryslow $IMAGE_DIR/$OUTPUT"

echo ""
echo "Making video..."
echo " ==> ${CMD}"
echo ""
echo ""
eval $CMD
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.math.statistics.descriptive; import org.apache.commons.lang.Validate; import com.opengamma.analytics.math.function.Function1D; /** * Calculates the population standard deviation of a series of data. The population standard deviation of a series of data is defined as the square root of * the population variance (see {@link PopulationVarianceCalculator}). */ public class PopulationStandardDeviationCalculator extends Function1D<double[], Double> { private static final Function1D<double[], Double> VARIANCE = new PopulationVarianceCalculator(); /** * @param x The array of data, not null, must contain at least two data points * @return The population standard deviation */ @Override public Double evaluate(final double[] x) { Validate.notNull(x, "x"); Validate.isTrue(x.length > 1, "Need at least two points to calculate standard deviation"); return Math.sqrt(VARIANCE.evaluate(x)); } }
<filename>examples/main.cpp #include "main.hpp" int main() { std::thread basicPhongThread(&basicPhongMain); std::thread advancedLightingThread(&advancedLightingMain); basicPhongThread.join(); advancedLightingThread.join(); return 0; }
/**
 * AMD module managing alternate-stylesheet themes: discovers the stylesheet
 * <link> elements, exposes the theme list, and switches between them by
 * toggling the links' `disabled` property.
 */
define(['utilities/dom', 'utilities/efence'], function(dom, pubsub) {
    var styleLinks = dom.qsa('link'),
        themes = [],
        themeManager = {},
        current;

    // Keep only the links that are stylesheets (preferred or alternate).
    styleLinks = Array.prototype.filter.call(styleLinks, function(link) {
        return link.rel === 'alternate stylesheet' || link.rel === 'stylesheet';
    });

    styleLinks.forEach(function(link) {
        themes.push({
            name: link.getAttribute('data-name'),
            display: link.getAttribute('data-display')
        });
    });

    // Sort themes alphabetically by display label.
    themes.sort(function(x, y) {
        return x.display < y.display ? -1 : 1;
    });

    current = themes[0].name;

    /**
     * Enable the stylesheet named `name` and disable all others.
     * @param {string} name - the theme's data-name attribute value
     */
    themeManager.changeTheme = function(name) {
        styleLinks.forEach(function(link) {
            // HTMLLinkElement#disabled is a boolean property; assigning the
            // string 'disabled' (as the old code did) only worked because any
            // non-empty string is truthy.
            link.disabled = link.getAttribute('data-name') !== name;
        });
        current = name;
    };

    /** @returns {string} the data-name of the active theme */
    themeManager.getCurrent = function() {
        return current;
    };

    themeManager.themes = themes;

    return themeManager;
});
"""Plot answer traces and experiment-1 metrics for each benchmark dataset.

The original script repeated the same five lines per dataset (and reused the
``exp2`` variable for two different experiments); the loop below removes the
duplication while producing the same plots in the same order.
"""
import diefpy.dief as diefpy

RESULTS_DIR = "/Users/juan/Projects/upc/upc-miri-tfm/connected-comp/results/diefpy"
COLORS = ["#ECC30B", "#D56062"]
DATASETS = ["email-Enron", "ca-AstroPh", "web-Google"]

for dataset in DATASETS:
    traces = diefpy.load_trace("%s/%s.csv" % (RESULTS_DIR, dataset))
    diefpy.plot_answer_trace(traces, dataset, COLORS).show()

    metrics = diefpy.load_metrics("%s/%s-metrics.csv" % (RESULTS_DIR, dataset))
    experiment = diefpy.experiment1(traces, metrics)
    diefpy.plotExperiment1Test(experiment, dataset, COLORS).show()
const pdf = require('pdf-creator-node'); // Create a document const document = { content: [ { text: 'Hello World', fontSize: 25 }, ], defaultStyle: { font: 'Times New Roman' } } // Create a PDF const pdfDoc = pdf.createPdf(document); // Save the PDF pdfDoc.write('example.pdf');
// Re-export the AlertBox component as this directory's default entry point.
export { default } from './AlertBox'
# Fetch every article with its title, body text and creation timestamp,
# newest first.
query {
  # orderBy takes a list of {field: direction} pairs; "desc" puts the most
  # recently created articles at the top.
  allArticles(orderBy: [{createdAt: "desc"}]) {
    title
    text
    createdAt
  }
}
#!/bin/bash
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
#

set -o xtrace

# Quote all expansions so paths containing spaces don't word-split.
SCRIPTDIR=$(cd "$(dirname "$0")" && pwd)
HOMEDIR="$SCRIPTDIR/../../../"

# clone OpenWhisk repo. in order to run scanCode.py
cd "$HOMEDIR"
git clone https://github.com/apache/incubator-openwhisk-utilities.git
<gh_stars>0 package com.meterware.servletunit; /******************************************************************************************************************** * $Id: RequestDispatcherImpl.java 482 2003-02-27 18:40:39Z russgold $ * * Copyright (c) 2002-2003, <NAME> * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated * documentation files (the "Software"), to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and * to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions * of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO * THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. 
* *******************************************************************************************************************/ import java.io.IOException; import java.net.URL; import javax.servlet.RequestDispatcher; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; /** * * @author <a href="mailto:<EMAIL>"><NAME></a> **/ class RequestDispatcherImpl extends RequestContext implements RequestDispatcher { private ServletMetaData _servletMetaData; RequestDispatcherImpl( WebApplication application, URL url ) throws ServletException { super( url ); _servletMetaData = application.getServletRequest( url ); } public ServletMetaData getServletMetaData() { return _servletMetaData; } public void forward( ServletRequest request, ServletResponse response ) throws ServletException, IOException { response.reset(); _servletMetaData.getServlet().service( DispatchedRequestWrapper.createForwardRequestWrapper( (HttpServletRequest) request, this ), response ); } public void include( ServletRequest request, ServletResponse response ) throws ServletException, IOException { _servletMetaData.getServlet().service( DispatchedRequestWrapper.createIncludeRequestWrapper( (HttpServletRequest) request, this ), response ); } }
#!/usr/bin/python
# coding=utf-8
# ^^ because https://www.python.org/dev/peps/pep-0263/
from __future__ import division

import codecs
import csv
import xml.etree.cElementTree as ET
import json
from nltk import sent_tokenize
from nltk import word_tokenize

# It is okay to include tokenization and symbols in the average word size count.
# Use the Thorn character (þ) to separate the fields.
# Usage: PYTHONIOENCODING=UTF-8 python process-data.py > output.txt
#
# Fixes over the original version:
#  * the XML element was created as "sentence id" (a space is illegal in an
#    XML tag name) and its text was str(iter) — the *builtin* iter, not the
#    sentence counter;
#  * blank sentences reused the previous sentence's avg_length and a sentence
#    with zero tokens raised ZeroDivisionError.

FIELD_SEP = u"\u00FE"  # Thorn character separating the CSV fields

with codecs.open("twitter.txt", encoding='utf-8') as f:
    sent_tokens = sent_tokenize(f.read())

root = ET.Element("document")
sents = ET.SubElement(root, "sentences")

sentences = []
csv_file = open('twitter.csv', 'wb')
csv_write = csv.writer(csv_file, delimiter='\xfe', quotechar='"')

count = 0
for sent in sent_tokens:
    # Skip whitespace-only "sentences" entirely instead of emitting rows
    # with a stale average from the previous iteration.
    if not sent.strip():
        continue

    word_tokens = word_tokenize(sent)
    total_chars = sum(len(word) for word in word_tokens)
    avg_length = total_chars / len(word_tokens) if word_tokens else 0

    # XML: one flat (id, text, avg) triple per sentence.
    ET.SubElement(sents, "sentence_id").text = str(count)
    ET.SubElement(sents, "text").text = sent
    ET.SubElement(sents, "avg").text = str(avg_length)

    # CSV: a single þ-delimited field, UTF-8 encoded.
    final_out = str(count) + FIELD_SEP + sent + FIELD_SEP + str(avg_length)
    csv_write.writerow([final_out.strip().encode("utf-8")])

    # JSON: collect per-sentence dicts for the final dump.
    sentences.append({"sentence id": count, "text": sent, "avg": avg_length})
    count += 1

csv_file.close()

tree = ET.ElementTree(root)
tree.write("twitter.xml", encoding="utf-8", xml_declaration=True)  # write XML file

with open('twitterjson.txt', 'w') as outfile:
    json.dump({"documents": {"sentences": sentences}}, outfile, indent=2)  # write JSON file
<filename>test/data-structures/trees/binary-search-tree.test.ts<gh_stars>100-1000 import BinarySearchTree from '../../../src/data-structures/trees/binary-search-tree' import TreeNode from '../../../src/data-structures/trees/binary-search-tree/tree-node' describe('Binary Search Tree', () => { let tree: BinarySearchTree<number> beforeEach(() => { tree = new BinarySearchTree() }) describe('Inspection', () => { it('size()', () => { expect(tree.size()).toBe(0) }) it('isEmpty()', () => { expect(tree.isEmpty()).toBe(true) tree.insert(8) expect(tree.isEmpty()).toBe(false) }) it('height()', () => { // Tree should look like: // 10 // 5 15 // 2 12 21 // 1 // No tree expect(tree.height()).toBe(0) // Layer One tree.insert(10) expect(tree.height()).toBe(1) // Layer Two tree.insert(5) expect(tree.height()).toBe(2) tree.insert(15) expect(tree.height()).toBe(2) // Layer Three tree.insert(2) expect(tree.height()).toBe(3) tree.insert(12) expect(tree.height()).toBe(3) tree.insert(21) expect(tree.height()).toBe(3) // Layer 4 tree.insert(1) expect(tree.height()).toBe(4) }) }) describe('Searching', () => { const treeB = new BinarySearchTree<number>() const a = new TreeNode(5, null) const b = new TreeNode(4, a) const c = new TreeNode(3, b) const d = new TreeNode(2, c) const e = new TreeNode(1, d) const f = new TreeNode(6, a) const g = new TreeNode(7, f) const h = new TreeNode(8, g) a.left = b b.left = c c.left = d d.left = e a.right = f f.right = g g.right = h treeB.root = a it('find()', () => { expect(treeB.find(5)).toBe(a) expect(treeB.find(4)).toBe(b) expect(treeB.find(3)).toBe(c) expect(treeB.find(2)).toBe(d) expect(treeB.find(1)).toBe(e) expect(treeB.find(6)).toBe(f) expect(treeB.find(7)).toBe(g) expect(treeB.find(8)).toBe(h) }) it('findMin()', () => { expect(treeB.findMin()).toBe(e) }) it('findMax()', () => { expect(treeB.findMax()).toBe(h) }) it('findSucessor()', () => { expect(treeB.findSucessor(a)).toBe(f) expect(treeB.findSucessor(e)).toBe(d) 
expect(treeB.findSucessor(f)).toBe(g) const treeC = new BinarySearchTree<number>() const m = new TreeNode(5, null) const n = new TreeNode(3, m) const o = new TreeNode(2, n) const p = new TreeNode(1, o) const q = new TreeNode(4, n) m.left = n n.left = o o.left = p n.right = q treeC.root = m expect(treeC.findSucessor(q)).toBe(m) }) it('findPredecessor()', () => { expect(treeB.findPredecessor(a)).toBe(b) expect(treeB.findPredecessor(e)).toBe(null) expect(treeB.findPredecessor(f)).toBe(a) }) }) describe('Insertion/Deletion', () => { it('insert()', () => { tree.insert(5) expect(tree.size()).toBe(1) tree.insert(3) expect(tree.size()).toBe(2) tree.insert(2) expect(tree.size()).toBe(3) tree.insert(6) expect(tree.size()).toBe(4) tree.insert(9) expect(tree.size()).toBe(5) tree.insert(4) expect(tree.size()).toBe(6) }) it('remove()', () => { tree.remove(tree.insert(3)) expect(tree.size()).toBe(0) expect(tree.find(3)).toBe(null) tree.insert(5) tree.insert(8) tree.remove(tree.insert(3)) expect(tree.size()).toBe(2) expect(tree.find(3)).toBe(null) }) it('remove() node with left child', () => { const a = new TreeNode(5, null) const b = new TreeNode(3, null) a.left = b tree.root = a tree.remove(a) expect(tree.find(5)).toBe(null) }) it('remove() node with two children', () => { const a = new TreeNode(5, null) const b = new TreeNode(3, null) const c = new TreeNode(8, null) a.left = b a.right = c tree.root = a tree.remove(a) expect(tree.find(5)).toBe(null) }) it('remove() node with two children and successor is not immediate right child', () => { const a = new TreeNode(5, null) const b = new TreeNode(3, null) const c = new TreeNode(8, null) const d = new TreeNode(7, null) a.left = b a.right = c c.left = d tree.root = a tree.remove(a) expect(tree.find(5)).toBe(null) }) }) describe('Traversals', () => { const treeB = new BinarySearchTree<number>() const a = new TreeNode(5, null) const b = new TreeNode(4, a) const c = new TreeNode(3, b) const d = new TreeNode(2, c) const e = new 
TreeNode(1, d) const f = new TreeNode(6, a) const g = new TreeNode(7, f) const h = new TreeNode(8, g) a.left = b b.left = c c.left = d d.left = e a.right = f f.right = g g.right = h treeB.root = a it('inorder()', () => { for (const _ of tree.inorderTraversal()) { throw new Error() } const inorderNumbers = [1, 2, 3, 4, 5, 6, 7, 8] let i = 0 for (const n of treeB.inorderTraversal()) { expect(n).toBe(inorderNumbers[i]) i += 1 } }) it('preorder()', () => { for (const _ of tree.preorderTraversal()) { throw new Error() } // Tree should look like: // 10 // 5 15 // 2 12 21 // 1 // Layer One tree.insert(10) // Layer Two tree.insert(5) tree.insert(15) // Layer Three tree.insert(2) tree.insert(12) tree.insert(21) // Layer 4 tree.insert(1) const preorderNumbers = [10, 5, 2, 1, 15, 12, 21] let i = 0 for (const n of tree.preorderTraversal()) { expect(n).toBe(preorderNumbers[i]) i += 1 } }) it('postorder()', () => { for (const _ of tree.postorderTraversal()) { throw new Error() } // Tree should look like: // 10 // 5 15 // 2 12 21 // 1 // Layer One tree.insert(10) // Layer Two tree.insert(5) tree.insert(15) // Layer Three tree.insert(2) tree.insert(12) tree.insert(21) // Layer 4 tree.insert(1) const postorderNumbers = [1, 2, 5, 12, 21, 15, 10] let i = 0 for (const n of tree.postorderTraversal()) { expect(n).toBe(postorderNumbers[i]) i += 1 } }) }) })
<reponame>zoho/Zoho-CRM-Field-Buddy package com.zoho.crm_field_buddy; import android.app.ProgressDialog; import android.os.AsyncTask; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.view.MenuItem; import android.widget.TextView; import com.zoho.crm_field_buddy.listActivity.ListViewAdapter; import com.zoho.crm.library.crud.ZCRMModule; import com.zoho.crm.library.crud.ZCRMRecord; import com.zoho.crm.sdk.android.zcrmandroid.common.SDKCommonUtil; /** * Created by sruthi-4404 on 08/09/16. */ public class JobCardsViewHandler extends AppCompatActivity { private ZCRMRecord zcrmRecord; ProgressDialog dialog; public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.survey_view); getSupportActionBar().setDisplayHomeAsUpEnabled(true); getSupportActionBar().setTitle(ListViewAdapter.title); setForm(); } public void setForm() { dialog = ProgressDialog.show(JobCardsViewHandler.this, "", "Loading. Please wait...", true); //No I18N APImodeRunner runner = new APImodeRunner(); runner.execute(); } public void loadForm() throws Exception { TextView subj = (TextView) findViewById(R.id.textView24); subj.setText(String.valueOf(zcrmRecord.getFieldValue("Name"))); //No I18N TextView contact = (TextView) findViewById(R.id.textView26); contact.setText(((ZCRMRecord)zcrmRecord.getFieldValue("Appointment")).getLookupLabel()); //No I18N TextView status = (TextView) findViewById(R.id.textView28); status.setText(SDKCommonUtil.isoStringToGMTTimestamp(String.valueOf( zcrmRecord.getFieldValue("Visit_Time"))).toString()); //No I18N TextView desc = (TextView) findViewById(R.id.textView30); desc.setText((CharSequence) zcrmRecord.getFieldValue("Details")); //No I18N } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: // API 5+ solution onBackPressed(); return true; default: return super.onOptionsItemSelected(item); } } public void onBackPressed() { 
finish(); } class APImodeRunner extends AsyncTask<String, String, String> { private String resp; @Override protected String doInBackground(String... params) { try { zcrmRecord = (ZCRMRecord) ZCRMModule.getInstance(ListViewAdapter.moduleAPIname).getRecord(ListViewAdapter.idClicked).getData(); resp = "Success"; //no I18N } catch (Exception e) { e.printStackTrace(); resp = e.getMessage(); } return resp; } @Override protected void onPostExecute(String result) { try { loadForm(); dialog.dismiss(); } catch (Exception e) { e.printStackTrace(); } } } }
def reverse(string):
    """Return *string* with its characters in reverse order."""
    return ''.join(reversed(string))


print(reverse('Hello'))  # prints "olleH"
package com.lbs.api.json.model

/**
{
  "AvailableNewPayers": [
    {
      "Id": 45185,
      "IsFeeForService": false,
      "Name": "Moja firma SP. Z O.O."
    }
  ],
  "CityId": 5,
  "Payer": {
    "Id": 45185,
    "IsFeeForService": false,
    "Name": "Moja firma SP. Z O.O."
  }
}
  */
// NOTE(review): in the sample payload above "AvailableNewPayers" is an ARRAY
// of payer objects, but `availableNewPayers` is typed as a single
// ShortPayerDetails. Presumably it should be List[ShortPayerDetails] —
// confirm against the JSON deserializer before changing the type.
case class ChangeTermDetailsResponse(availableNewPayers: ShortPayerDetails, cityId: Long, payer: ShortPayerDetails) extends SerializableJsonObject

// Minimal payer reference: id, fee-for-service flag and display name.
case class ShortPayerDetails(id: Long, isFeeForService: Boolean, name: String) extends SerializableJsonObject
package org.zalando.intellij.swagger.file;

import com.intellij.json.JsonLanguage;
import com.intellij.lang.Language;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import java.util.Optional;
import org.apache.commons.io.FilenameUtils;
import org.jetbrains.yaml.YAMLLanguage;
import org.zalando.intellij.swagger.traversal.path.PathFinder;

/**
 * Detects whether a PSI file is a main Swagger 2.0 or OpenAPI 3.x
 * specification, in JSON or YAML form, by checking the root
 * "swagger"/"openapi" key and the version value attached to it.
 */
public class FileDetector {

  private static final String SWAGGER_KEY = "swagger";
  private static final String SWAGGER_VERSION =
      "[\"'](2\\.0)[\"']"; // Must be "2.0", yaml does allow '2.0'

  private static final String OPEN_API_KEY = "openapi";
  // openapi uses semantic versioning so 3.*.* would be allowed.
  private static final String OPEN_API_VERSION = ".*(3(\\.\\d+)+)(?![\\d\\.]).*";

  public boolean isMainSwaggerJsonFile(final PsiFile psiFile) {
    return hasRootKey(
        psiFile, JsonLanguage.INSTANCE, String.format("$.%s", SWAGGER_KEY), SWAGGER_VERSION);
  }

  public boolean isMainSwaggerYamlFile(final PsiFile psiFile) {
    return hasRootKey(
        psiFile, YAMLLanguage.INSTANCE, String.format("$.%s", SWAGGER_KEY), SWAGGER_VERSION);
  }

  public boolean isMainOpenApiJsonFile(final PsiFile psiFile) {
    return hasRootKey(
        psiFile, JsonLanguage.INSTANCE, String.format("$.%s", OPEN_API_KEY), OPEN_API_VERSION);
  }

  public boolean isMainOpenApiYamlFile(final PsiFile psiFile) {
    return hasRootKey(
        psiFile, YAMLLanguage.INSTANCE, String.format("$.%s", OPEN_API_KEY), OPEN_API_VERSION);
  }

  /**
   * Shared implementation of the four public checks (previously duplicated as
   * hasJsonRootKey/hasYamlRootKey): the file must be written in
   * {@code expectedLanguage} and contain {@code lookupKey} at the document
   * root with a value matching {@code lookupVersion}.
   */
  private boolean hasRootKey(
      final PsiFile psiFile,
      final Language expectedLanguage,
      final String lookupKey,
      final String lookupVersion) {

    if (!expectedLanguage.is(psiFile.getLanguage())) {
      return false;
    }

    return new PathFinder()
        .findByPathFrom(lookupKey, psiFile)
        .filter(psiElement -> hasVersion(psiElement, lookupVersion))
        .isPresent();
  }

  /** True if the element's value (its last child) matches the version regex. */
  private boolean hasVersion(final PsiElement psiElement, final String lookupVersion) {
    return Optional.ofNullable(psiElement.getLastChild())
        .map(PsiElement::getText)
        .filter(text -> text.matches(lookupVersion))
        .isPresent();
  }

  public boolean isMainSwaggerFile(final PsiFile file) {
    return isMainSwaggerJsonFile(file) || isMainSwaggerYamlFile(file);
  }

  public boolean isMainOpenApiFile(final PsiFile file) {
    return isMainOpenApiJsonFile(file) || isMainOpenApiYamlFile(file);
  }

  /** True if the file extension is one of json/yaml/yml. */
  public boolean isSwaggerContentCompatible(VirtualFile file) {
    return FilenameUtils.isExtension(
        file.getName(),
        new String[] {
          FileConstants.JSON_FILE_EXTENSION,
          FileConstants.YAML_FILE_EXTENSION,
          FileConstants.YML_FILE_EXTENSION
        });
  }
}
#!/bin/bash
# Pick the best fine-tuned model for each pediatric subsample split (0-9).

DATA_PATH="/share/pi/nigam/projects/sepsis/extraction_201003"
CONFIG_EXPERIMENT_NAME='finetune_config'

# Run model selection for a single subsample index ($1).
my_func() {
    EXPERIMENT_NAME="finetune_subsample_ped_$1"
    python -m sepsis.get_best_model \
        --data_path=$DATA_PATH \
        --experiment_name=$EXPERIMENT_NAME \
        --config_experiment_name=$CONFIG_EXPERIMENT_NAME
}

for i in 0 1 2 3 4 5 6 7 8 9; do
    my_func $i
done
<gh_stars>10-100
/*******************************************************************************
 * This file is part of the Symfony eclipse plugin.
 *
 * (c) <NAME> <<EMAIL>>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 ******************************************************************************/
package com.dubture.symfony.core.preferences;

import org.eclipse.core.runtime.preferences.DefaultScope;

import com.dubture.symfony.core.SymfonyCorePlugin;

/**
 * The PreferenceSupport of the Symfony core plugin.
 *
 * @author <NAME> <<EMAIL>>
 */
public class CorePreferencesSupport extends PreferencesSupport {

    // Lazily created shared instance; see getInstance().
    // NOTE(review): initialization is not synchronized — fine if only ever
    // reached from a single (UI) thread; confirm before using elsewhere.
    private static CorePreferencesSupport corePreferencesSupport;

    public CorePreferencesSupport() {
        // The plugin can be null when the bundle is not started (e.g. in
        // tests); in that case no default-scope preference node is supplied.
        super(SymfonyCorePlugin.ID,
                SymfonyCorePlugin.getDefault() == null ? null
                        : DefaultScope.INSTANCE.getNode(SymfonyCorePlugin.ID));
    }

    /** Returns the shared instance, creating it on first use. */
    public static CorePreferencesSupport getInstance() {
        if (corePreferencesSupport == null) {
            corePreferencesSupport = new CorePreferencesSupport();
        }
        return corePreferencesSupport;
    }
}
<filename>sources/VS/ThirdParty/wxWidgets/src/generic/caret.cpp
///////////////////////////////////////////////////////////////////////////////
// Name:        src/generic/caret.cpp
// Purpose:     generic wxCaret class implementation
// Author:      <NAME> (original code by <NAME>)
// Modified by:
// Created:     25.05.99
// Copyright:   (c) wxWidgets team
// Licence:     wxWindows licence
///////////////////////////////////////////////////////////////////////////////

// ============================================================================
// declarations
// ============================================================================

// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------

// For compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"

#if wxUSE_CARET

#ifndef WX_PRECOMP
    #include "wx/window.h"
    #include "wx/dcclient.h"
    #include "wx/dcmemory.h"
#endif //WX_PRECOMP

#include "wx/caret.h"

// ----------------------------------------------------------------------------
// global variables for this module
// ----------------------------------------------------------------------------

// the blink time (common to all carets for MSW compatibility)
static int gs_blinkTime = 500;  // in milliseconds

// ============================================================================
// implementation
// ============================================================================

// ----------------------------------------------------------------------------
// timer stuff
// ----------------------------------------------------------------------------

// The timer simply forwards every tick to its owning caret.
wxCaretTimer::wxCaretTimer(wxCaret *caret)
{
    m_caret = caret;
}

void wxCaretTimer::Notify()
{
    m_caret->OnTimer();
}

void wxCaret::OnTimer()
{
    // don't blink the caret when we don't have the focus
    if ( m_hasFocus )
        Blink();
}

// ----------------------------------------------------------------------------
// wxCaret static functions and data // ---------------------------------------------------------------------------- int wxCaretBase::GetBlinkTime() { return gs_blinkTime; } void wxCaretBase::SetBlinkTime(int milliseconds) { gs_blinkTime = milliseconds; #ifdef _WXGTK__ GtkSettings *settings = gtk_settings_get_default(); if (millseconds == 0) { gtk_settings_set_long_property(settings, "gtk-cursor-blink", gtk_false, NULL); } else { gtk_settings_set_long_property(settings, "gtk-cursor-blink", gtk_true, NULL); gtk_settings_set_long_property(settings, "gtk-cursor-time", milliseconds, NULL); } #endif } // ---------------------------------------------------------------------------- // initialization and destruction // ---------------------------------------------------------------------------- void wxCaret::InitGeneric() { m_hasFocus = true; m_blinkedOut = true; #ifndef wxHAS_CARET_USING_OVERLAYS m_xOld = m_yOld = -1; if (m_width && m_height) m_bmpUnderCaret.Create(m_width, m_height); #endif } wxCaret::~wxCaret() { if ( IsVisible() ) { // stop blinking if ( m_timer.IsRunning() ) m_timer.Stop(); } } // ---------------------------------------------------------------------------- // showing/hiding/moving the caret (base class interface) // ---------------------------------------------------------------------------- void wxCaret::DoShow() { int blinkTime = GetBlinkTime(); if ( blinkTime ) m_timer.Start(blinkTime); if ( m_blinkedOut ) Blink(); } void wxCaret::DoHide() { m_timer.Stop(); if ( !m_blinkedOut ) { Blink(); } } void wxCaret::DoMove() { #ifdef wxHAS_CARET_USING_OVERLAYS m_overlay.Reset(); #endif if ( IsVisible() ) { if ( !m_blinkedOut ) { // hide it right now and it will be shown the next time it blinks Blink(); // but if the caret is not blinking, we should blink it back into // visibility manually if ( !m_timer.IsRunning() ) Blink(); } } //else: will be shown at the correct location when it is shown } void wxCaret::DoSize() { int countVisible = m_countVisible; if 
(countVisible > 0) { m_countVisible = 0; DoHide(); } #ifdef wxHAS_CARET_USING_OVERLAYS m_overlay.Reset(); #else // Change bitmap size if (m_width && m_height) m_bmpUnderCaret = wxBitmap(m_width, m_height); else m_bmpUnderCaret = wxBitmap(); #endif if (countVisible > 0) { m_countVisible = countVisible; DoShow(); } } // ---------------------------------------------------------------------------- // handling the focus // ---------------------------------------------------------------------------- void wxCaret::OnSetFocus() { m_hasFocus = true; if ( IsVisible() ) Refresh(); } void wxCaret::OnKillFocus() { m_hasFocus = false; if ( IsVisible() ) { // the caret must be shown - otherwise, if it is hidden now, it will // stay so until the focus doesn't return because it won't blink any // more // hide it first if it isn't hidden ... if ( !m_blinkedOut ) Blink(); // .. and show it in the new style Blink(); } } // ---------------------------------------------------------------------------- // drawing the caret // ---------------------------------------------------------------------------- void wxCaret::Blink() { m_blinkedOut = !m_blinkedOut; Refresh(); } void wxCaret::Refresh() { wxClientDC dcWin(GetWindow()); // this is the new code, switch to 0 if this gives problems #ifdef wxHAS_CARET_USING_OVERLAYS wxDCOverlay dcOverlay( m_overlay, &dcWin, m_x, m_y, m_width , m_height ); if ( m_blinkedOut ) { dcOverlay.Clear(); } else { DoDraw( &dcWin, GetWindow() ); } #else wxMemoryDC dcMem; dcMem.SelectObject(m_bmpUnderCaret); if ( m_blinkedOut ) { // restore the old image dcWin.Blit(m_xOld, m_yOld, m_width, m_height, &dcMem, 0, 0); m_xOld = m_yOld = -1; } else { if ( m_xOld == -1 && m_yOld == -1 ) { // save the part we're going to overdraw dcMem.Blit(0, 0, m_width, m_height, &dcWin, m_x, m_y); m_xOld = m_x; m_yOld = m_y; } //else: we already saved the image below the caret, don't do it any // more // and draw the caret there DoDraw(&dcWin, GetWindow()); } #endif } void 
wxCaret::DoDraw(wxDC *dc, wxWindow* win) { wxPen pen(*wxBLACK_PEN); wxBrush brush(*wxBLACK_BRUSH); if (win) { wxColour backgroundColour(win->GetBackgroundColour()); if (backgroundColour.Red() < 100 && backgroundColour.Green() < 100 && backgroundColour.Blue() < 100) { pen = *wxWHITE_PEN; brush = *wxWHITE_BRUSH; } } dc->SetPen( pen ); dc->SetBrush(m_hasFocus ? brush : *wxTRANSPARENT_BRUSH); // VZ: unfortunately, the rectangle comes out a pixel smaller when this is // done under wxGTK - no idea why //dc->SetLogicalFunction(wxINVERT); dc->DrawRectangle(m_x, m_y, m_width, m_height); } #endif // wxUSE_CARET
<filename>index.js
// Package entry point: re-export the implementation from src/katch.
module.exports = require('./src/katch');
<reponame>vipulnsward/sinatra-contrib<gh_stars>100-1000
require 'sinatra/base'
require 'sinatra/capture'

module Sinatra
  # = Sinatra::ContentFor
  #
  # <tt>Sinatra::ContentFor</tt> is a set of helpers that allows you to capture
  # blocks inside views to be rendered later during the request. The most
  # common use is to populate different parts of your layout from your view.
  #
  # The currently supported engines are: Erb, Erubis, Haml and Slim.
  #
  # == Usage
  #
  # You call +content_for+, generally from a view, to capture a block of markup
  # giving it an identifier:
  #
  #     # index.erb
  #     <% content_for :some_key do %>
  #       <chunk of="html">...</chunk>
  #     <% end %>
  #
  # Then, you call +yield_content+ with that identifier, generally from a
  # layout, to render the captured block:
  #
  #     # layout.erb
  #     <%= yield_content :some_key %>
  #
  # === Classic Application
  #
  # To use the helpers in a classic application all you need to do is require
  # them:
  #
  #     require "sinatra"
  #     require "sinatra/content_for"
  #
  #     # Your classic application code goes here...
  #
  # === Modular Application
  #
  # To use the helpers in a modular application you need to require them, and
  # then, tell the application you will use them:
  #
  #     require "sinatra/base"
  #     require "sinatra/content_for"
  #
  #     class MyApp < Sinatra::Base
  #       helpers Sinatra::ContentFor
  #
  #       # The rest of your modular application code goes here...
  #     end
  #
  # == And How Is This Useful?
  #
  # For example, some of your views might need a few javascript tags and
  # stylesheets, but you don't want to force this files in all your pages.
  # Then you can put <tt><% yield_content :scripts_and_styles %></tt> on your
  # layout, inside the <head> tag, and each view can call <tt>content_for</tt>
  # setting the appropriate set of tags that should be added to the layout.
  #
  module ContentFor
    include Capture

    # Capture a block of content to be rendered later. For example:
    #
    #     <% content_for :head do %>
    #       <script type="text/javascript" src="/foo.js"></script>
    #     <% end %>
    #
    # You can call +content_for+ multiple times with the same key
    # (in the example +:head+), and when you render the blocks for
    # that key all of them will be rendered, in the same order you
    # captured them.
    #
    # Your blocks can also receive values, which are passed to them
    # by <tt>yield_content</tt>
    def content_for(key, &block)
      # capture_later (from Sinatra::Capture) defers evaluation until the
      # block is rendered by yield_content.
      content_blocks[key.to_sym] << capture_later(&block)
    end

    # Check if a block of content with the given key was defined. For
    # example:
    #
    #     <% content_for :head do %>
    #       <script type="text/javascript" src="/foo.js"></script>
    #     <% end %>
    #
    #     <% if content_for? :head %>
    #       <span>content "head" was defined.</span>
    #     <% end %>
    def content_for?(key)
      content_blocks[key.to_sym].any?
    end

    # Render the captured blocks for a given key. For example:
    #
    #     <head>
    #       <title>Example</title>
    #       <%= yield_content :head %>
    #     </head>
    #
    # Would render everything you declared with <tt>content_for
    # :head</tt> before closing the <tt><head></tt> tag.
    #
    # You can also pass values to the content blocks by passing them
    # as arguments after the key:
    #
    #     <%= yield_content :head, 1, 2 %>
    #
    # Would pass <tt>1</tt> and <tt>2</tt> to all the blocks registered
    # for <tt>:head</tt>.
    def yield_content(key, *args)
      content_blocks[key.to_sym].map { |b| capture(*args, &b) }.join
    end

    private

    # Per-request store of captured blocks, keyed by symbol; missing keys
    # auto-initialize to an empty array so << and #any? work without checks.
    def content_blocks
      @content_blocks ||= Hash.new {|h,k| h[k] = [] }
    end
  end

  helpers ContentFor
end
import { Component, OnInit, Input, OnDestroy, OnChanges, SimpleChanges } from '@angular/core'; import { ITrip } from 'app/shared/interfaces/trip'; import { splitIntoThree } from 'app/shared/utils'; @Component({ selector: 'app-trip-list', templateUrl: './trip-list.component.html', styleUrls: ['./trip-list.component.scss'] }) export class TripListComponent implements OnInit, OnChanges { @Input() all: ITrip[]; allTrips: ITrip[] center: ITrip[] = []; left: ITrip[] = []; right: ITrip[] = []; constructor() { } ngOnChanges(changes: SimpleChanges): void { console.log(`changes:${JSON.stringify(changes)}`) this.allTrips = this.all; this.split(); } private split() { const arr = splitIntoThree(this.allTrips); this.left = arr.left; this.center = arr.center; this.right = arr.right; } ngOnInit() { this.allTrips = this.all; this.split(); } }
module.exports = function () { const path = require("path"); const fs = require("fs"); const MIME_MAP = { ".ttf": "application/x-font-ttf", ".wotf": "application/x-font-woff", ".wotf2": "application/x-font-woff2" }; const FORMAT_MAP = { ".ttf": "truetype", ".wotf": "woff", ".wotf2": "woff2" }; function filePathToURL(filePath, options) { filePath = path.resolve(filePath).replace(/\\/g, "/"); if (!filePath.match(/^\/.+$/)) { filePath = "/" + filePath; } return "file://" + encodeURI(filePath); }; function buildEmbeddedFontFaceCSS(faces, callback) { //creating combinedCSS var combinedCSS = ""; if (!faces) { callback(combinedCSS) return; } var index = -1; function next() { index ++; if (index >= faces.length) { callback(combinedCSS); return; } var installedFace = faces[index]; fs.readFile(installedFace.filePath, function (error, bytes) { var ext = path.extname(installedFace.filePath).toLowerCase(); var mime = MIME_MAP[ext]; if (!mime) { mime = "application/octet-stream"; } var format = FORMAT_MAP[ext]; if (!format) format = "truetype"; var url = "data:" + mime + ";base64," + new Buffer(bytes).toString("base64"); combinedCSS += "@font-face {\n" + " font-family: '" + installedFace.name + "';\n" + " src: url('" + url + "') format('" + format + "');\n" + " font-weight: " + installedFace.weight + ";\n" + " font-style: " + installedFace.style + ";\n" + "}\n"; next(); }); } next(); }; function buildFontFaceCSS(faces) { if (!faces) return ""; var combinedCSS = ""; for (var installedFace of faces) { var ext = path.extname(installedFace.filePath).toLowerCase(); var format = FORMAT_MAP[ext]; if (!format) format = "truetype"; var url = filePathToURL(installedFace.filePath); combinedCSS += "@font-face {\n" + " font-family: '" + installedFace.name + "';\n" + " src: url('" + url + "') format('" + format + "');\n" + " font-weight: " + installedFace.weight + ";\n" + " font-style: " + installedFace.style + ";\n" + "}\n"; } return combinedCSS; }; return { buildEmbeddedFontFaceCSS: 
buildEmbeddedFontFaceCSS, buildFontFaceCSS: buildFontFaceCSS, filePathToURL: filePathToURL } }();
import { Database } from "sqlite3";
import { open } from "sqlite";

/**
 * Open (or create) the application's SQLite database file.
 * @returns {Promise<import("sqlite").Database>} handle to ./database.sqlite
 */
export const database = () => {
  const connection = open({
    filename: "./database.sqlite",
    driver: Database,
  });
  return connection;
};
#! /bin/sh
# Build the project and run every tst_* test binary; set -e aborts on the
# first build or test failure.

set -e

qmake
make

cd tests
# Each glob match already carries the leading "./", so run it directly —
# the old "./$i" produced a redundant "././tst_foo" path.
for i in ./tst_*; do "$i"; done
package no5 // https://leetcode-cn.com/problems/longest-palindromic-substring/ // 5. 最长回文子串--正读和反读都相同的字符序列为“回文” // 给定一个字符串 s,找到 s 中最长的回文子串。你可以假设 s 的最大长度为 1000。 // 示例 1: // 输入: "babad" // 输出: "bab" // 注意: "aba" 也是一个有效答案。 // 示例 2: // 输入: "cbbd" // 输出: "bb"
package com.persado.assignment.project.controller;

import com.persado.assignment.project.dto.BookDTO;
import com.persado.assignment.project.service.BookService;
import java.util.List;
import javax.validation.Valid;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;

/**
 * MVC controller for the Book screens: add form, manage/list view, save and
 * delete actions.
 */
@Controller
@RequestMapping("/book")
public class BookController {

  // Injected once through the constructor, hence final.
  private final BookService bookService;

  @Autowired
  public BookController(BookService bookService) {
    this.bookService = bookService;
  }

  /**
   * Create an empty Book and pass it into the Model.
   *
   * @param model Model
   * @return The html page to be opened.
   */
  @GetMapping("/addBookForm")
  public String showAddBookForm(Model model) {
    BookDTO book = new BookDTO();
    model.addAttribute("book", book);
    return "addBookForm";
  }

  /**
   * Get all the Books and pass them into the Model.
   *
   * @param model Model
   * @return The html page to be opened.
   */
  @GetMapping("/manageBooksForm")
  public String showManageBooksForm(Model model) {
    // Get all the Books
    List<BookDTO> bookDTOs = bookService.findAll();

    // Add the books attribute to the Model
    model.addAttribute("books", bookDTOs);
    return "manageBooksForm";
  }

  /**
   * Save the given Book.
   *
   * @param bookDTO The Book to be saved.
   * @param bindingResult Validation outcome; on errors the form is re-shown.
   * @return The html page to be redirected.
   */
  @PostMapping("/save")
  public String save(@ModelAttribute("book") @Valid BookDTO bookDTO, BindingResult bindingResult) {
    if (bindingResult.hasErrors()) {
      return "addBookForm";
    }
    bookService.save(bookDTO);
    return "redirect:/dashboard";
  }

  /**
   * Delete the Book with the given id.
   *
   * @param id Book id
   * @return The html page to be opened.
   */
  @GetMapping("/delete")
  public String delete(@RequestParam("bookId") Long id) {
    bookService.delete(id);
    return "redirect:/book/manageBooksForm";
  }
}
class QuantifyParams:
    """Container for quantization hyper-parameters.

    All constructor arguments are stored unmodified as attributes.
    """

    def __init__(self, quantify_rate, alpha, beta=0.1, gamma=2, delta=100, th=0.03):
        # quantify_rate / alpha: required tuning values.
        # beta, gamma, delta, th: optional coefficients with defaults.
        # NOTE(review): the exact semantics of each coefficient are defined by
        # the consumer of these params — confirm against call sites.
        self.quantify_rate = quantify_rate
        self.alpha = alpha
        self.beta = beta
        self.gamma = gamma
        self.delta = delta
        self.th = th


class MatmulQuantifyParams(QuantifyParams):
    """Quantization parameters for matmul operators.

    Behaviourally identical to ``QuantifyParams``: the previous ``__init__``
    override only delegated to ``super()`` with the same arguments, so it was
    removed. The subclass is kept as a distinct type for matmul call sites.
    """
<reponame>extra2000/saferwall<gh_stars>1-10 // Copyright 2021 Saferwall. All rights reserved. // Use of this source code is governed by Apache v2 license // license that can be found in the LICENSE file. package kaspersky import ( "strings" "github.com/saferwall/saferwall/pkg/utils" ) // Our consts const ( kesl = "/opt/kaspersky/kesl/bin/kesl-control" ) // Result represents detection results type Result struct { Infected bool `json:"infected"` Output string `json:"output"` } // Version represents database components' versions. type Version struct { CurrentAVDatabasesDate string `json:"current_av_db_ate"` LastAVDatabasesUpdateDate string `json:"last_av_db_update_date"` CurrentAVDatabasesState string `json:"current_av_db_state"` CurrentAVDatabasesRecords string `json:"current_av_db_records"` } // GetProgramVersion returns Kaspersky Anti-Virus for Linux File Server version. func GetProgramVersion() (string, error) { // Run kesl to grab the version out, err := utils.ExecCommand("sudo", kesl, "-S", "--app-info") if err != nil { return "", err } version := "" lines := strings.Split(out, "\n") for _, line := range lines { if strings.Contains(line, "Version:") { version = strings.TrimSpace(strings.TrimPrefix(line, "Version:")) break } } return version, nil } // GetDatabaseVersion returns AV database update version func GetDatabaseVersion() (Version, error) { // Run kav4s to grab the database update version databaseOut, err := utils.ExecCommand("sudo", kesl, "--get-stat", "Update") ver := Version{} if err != nil { return ver, nil } lines := strings.Split(databaseOut, "\n") for _, line := range lines { if strings.Contains(line, "Current AV databases date") { ver.CurrentAVDatabasesDate = strings.TrimSpace( strings.TrimPrefix(line, "Current AV databases date:")) } else if strings.Contains(line, "Last AV databases update date") { ver.LastAVDatabasesUpdateDate = strings.TrimSpace( strings.TrimPrefix(line, "Last AV databases update date:")) } else if strings.Contains(line, 
"Current AV databases state") { ver.CurrentAVDatabasesState = strings.TrimSpace( strings.TrimPrefix(line, "Current AV databases state:")) } else if strings.Contains(line, "Current AV databases records") { ver.CurrentAVDatabasesRecords = strings.TrimSpace( strings.TrimPrefix(line, "Current AV databases records:")) } } return ver, nil } // ScanFile a file with Kaspersky scanner func ScanFile(filePath string) (Result, error) { // Return codes // 0 – command / task completed successfully. // 1 – general error in command arguments. // 2 – error in passed application settings. // 64 – Kaspersky Endpoint Security is not running. // 65 - Protection is disabled. // 66 – anti-virus databases have not been downloaded (used only for the command kesl-control --app-info). // 67 – activation 2.0 ended with an error due to network problems. // 68 – the command cannot be executed because the application is running under a policy. // 128 – unknown error. // 65 – all other errors. // Run now out, err := utils.ExecCommand("sudo", kesl, "--scan-file", filePath, "--action", "Skip") // root@404e0cc38216:/# /opt/kaspersky/kesl/bin/kesl-control --scan-file eicar.com.txt --action Skip // Scanned objects : 1 // Total detected objects : 1 // Infected objects and other objects : 1 // Disinfected objects : 0 // Moved to Storage : 0 // Removed objects : 0 // Not disinfected objects : 1 // Scan errors : 0 // Password-protected objects : 0 // Skipped : 0 if err != nil { return Result{}, err } // Check if infected if !strings.Contains(out, "Total detected objects : 1") { return Result{}, nil } // Grab detection name with a separate cmd // sudo /opt/kaspersky/kesl/bin/kesl-control -E --query "EventType=='ThreatDetected'" out, err = utils.ExecCommand("sudo", kesl, "-E", "--query", "EventType=='ThreatDetected'") // EventType=ThreatDetected // EventId=2544 // Date=2019-06-11 22:12:16 // DangerLevel=Critical // FileName=/eicar // ObjectName=File // TaskName=Scan_File_ca3f0bc2-ce71-4d4a-bdc1-c8ae502566d0 
// RuntimeTaskId=4 // TaskId=100 // DetectName=EICAR-Test-File // TaskType=ODS // FileOwner=root // FileOwnerId=0 // DetectCertainty=Sure // DetectType=Virware // DetectSource=Local // ObjectId=1 // AccessUser=root // AccessUserId=0 if err != nil { return Result{}, err } // so hackish, there is no easy way to grab detection name // no way to clean all these events as it was in previous version // so pretty hardcoded for now res := Result{} lines := strings.Split(out, "\n\n") if len(lines) > 0 { index := len(lines) - 1 lines = strings.Split(lines[index], "\n") if len(lines) > 8 { res.Output = strings.TrimSpace(strings.Split(lines[9], "=")[1]) res.Infected = true } } return res, nil } // GetLicenseInfos queries license infos func GetLicenseInfos() (string, error) { out, err := utils.ExecCommand("sudo", kesl, "-L", "--query") // Active key information: // Expiration date : 2019-07-13 // Days remaining until expiration : 0 // Protection : No protection // Updates : No updates // Key status : Expired // License type : XYZ // Usage restriction : 1 // Application name : Kaspersky Endpoint Security 10 SP1 MR1 for Linux // Active key : XYZ // Activation date : 2019-06-12 if err != nil { return "", err } return out, err }
#!/bin/bash

# Process alignment files: index BAMs, build an IGV coverage track, compute
# alignment statistics and collect log files for one sample.
# Written by David Coffey dcoffey@fhcrc.org
# Updated February 8, 2016

## Prerequisites (see Software_installation.sh)
# Download and install picard tools
# Download and install samtools
# Download and install IGVtools

## Variables
# export GENOME="..."
# export SAMPLE="..."
# export IGV_GENOME=".../$GENOME.genome"
# export ALIGNMENT_DIRECTORY=""
# export PICARD=".../picard.jar"
# export SAMTOOLS=".../samtools/1.0/bin/samtools"
# export IGVTOOLS=".../IGVTools/2.3.26/igvtools"
# export LAST_SAMPLE="..."
# export EMAIL="..."

START=`date +%s`
echo Begin Process_alignment.sh for sample $SAMPLE on `date +"%B %d, %Y at %r"`

# Index Aligned.bam (paths are quoted so directories/samples with spaces work)
"$SAMTOOLS" index "$ALIGNMENT_DIRECTORY/$SAMPLE.Aligned.bam"
mv -f "$ALIGNMENT_DIRECTORY/$SAMPLE.Aligned.bam.bai" "$ALIGNMENT_DIRECTORY/$SAMPLE.Aligned.bai"

# Index Chimeric.out.bam file
"$SAMTOOLS" index "$ALIGNMENT_DIRECTORY/$SAMPLE.Chimeric.out.bam"
mv -f "$ALIGNMENT_DIRECTORY/$SAMPLE.Chimeric.out.bam.bai" "$ALIGNMENT_DIRECTORY/$SAMPLE.Chimeric.out.bai"

# Compute the average number of reads over a 25bp window across the genome for use with IGV
"$IGVTOOLS" count "$ALIGNMENT_DIRECTORY/$SAMPLE.Aligned.bam" \
"$ALIGNMENT_DIRECTORY/$SAMPLE.Aligned.tdf" \
"$IGV_GENOME"
# igvtools drops igv.log in the CWD; -f so a missing file is not an error
rm -f igv.log

# Calculate alignment statistics
java -jar "$PICARD" BamIndexStats \
INPUT="$ALIGNMENT_DIRECTORY/$SAMPLE.Aligned.bam" > \
"$ALIGNMENT_DIRECTORY/$SAMPLE.alignment.stats.txt"

# Move log files (-p keeps reruns from failing if the directory exists)
mkdir -p "$ALIGNMENT_DIRECTORY/$SAMPLE.logs"
mv "$ALIGNMENT_DIRECTORY"/*Log* "$ALIGNMENT_DIRECTORY/$SAMPLE.logs"

END=`date +%s`
MINUTES=$(((END-START)/60))
echo End Process_alignment.sh for sample $SAMPLE. The run time was $MINUTES minutes.

if [[ "$SAMPLE" = "$LAST_SAMPLE" ]]
then
echo "The runtime was $MINUTES minutes" | mail -s "Finished Process_alignments.sh for sample $LAST_SAMPLE" "$EMAIL"
fi
<reponame>maypok86/finance-bot
package service

import (
	"context"
	"testing"

	"github.com/golang/mock/gomock"
	"github.com/maypok86/finance-bot/internal/model"
	mock_repository "github.com/maypok86/finance-bot/internal/repository/mocks"
	"github.com/maypok86/finance-bot/pkg/random"
	"github.com/pkg/errors"
	"github.com/stretchr/testify/require"
)

// TestGetBaseDailyLimit checks that the budget service returns the base
// budget's daily limit on repository success, and the zero value when the
// repository lookup fails.
func TestGetBaseDailyLimit(t *testing.T) {
	input := &model.Budget{
		Codename:   "base",
		DailyLimit: 500,
	}
	tests := []struct {
		name         string
		expectations func(budgetRepo *mock_repository.MockBudget)
		input        *model.Budget
	}{
		{
			name: "valid and found",
			expectations: func(budgetRepo *mock_repository.MockBudget) {
				budgetRepo.EXPECT().GetBaseBudget(context.Background()).Return(input, nil)
			},
			input: input,
		},
		{
			name: "not found",
			expectations: func(budgetRepo *mock_repository.MockBudget) {
				budgetRepo.EXPECT().GetBaseBudget(context.Background()).Return(nil, errors.New("error"))
			},
			// new(model.Budget) has DailyLimit == 0: the expected fallback.
			input: new(model.Budget),
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			c := gomock.NewController(t)
			defer c.Finish()

			repo := mock_repository.NewMockBudget(c)
			test.expectations(repo)
			service := NewBudget(context.Background(), repo)

			limit := service.GetBaseDailyLimit()
			require.Equal(t, test.input.DailyLimit, limit)
		})
	}
}

// TestGetDailyLimitByName mirrors TestGetBaseDailyLimit but looks the budget
// up by codename, using randomized fixture data.
func TestGetDailyLimitByName(t *testing.T) {
	input := &model.Budget{
		Codename:   random.Name(),
		DailyLimit: random.Int(),
	}
	tests := []struct {
		name         string
		expectations func(budgetRepo *mock_repository.MockBudget, budget *model.Budget)
		input        *model.Budget
	}{
		{
			name: "valid and found",
			expectations: func(budgetRepo *mock_repository.MockBudget, budget *model.Budget) {
				budgetRepo.EXPECT().GetBudgetByCodename(context.Background(), budget.Codename).Return(budget, nil)
			},
			input: input,
		},
		{
			name: "not found",
			expectations: func(budgetRepo *mock_repository.MockBudget, budget *model.Budget) {
				budgetRepo.EXPECT().GetBudgetByCodename(context.Background(), budget.Codename).Return(nil, errors.New("error"))
			},
			// Zero-value budget: the service should fall back to DailyLimit 0.
			input: new(model.Budget),
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			c := gomock.NewController(t)
			defer c.Finish()

			repo := mock_repository.NewMockBudget(c)
			test.expectations(repo, test.input)
			service := NewBudget(context.Background(), repo)

			limit := service.GetDailyLimitByName(test.input.Codename)
			require.Equal(t, test.input.DailyLimit, limit)
		})
	}
}
#!/usr/bin/env bash
# Watch Ruby sources and restart the dev server on any change.
set -euo pipefail

# BUG FIX: parentheses are required around the -name tests. Without them,
# find's operator precedence parsed the expression as
# "(-type f -a -name '*.rb') -o -name '*.ru'", so the -type f filter did
# not apply to *.ru entries.
find . -type f \( -name '*.rb' -o -name '*.ru' \) | entr -r ./scripts/server.sh
#include <iostream> #include <string> #include <nlohmann/json.hpp> #include <cassert> using json = nlohmann::json; class JSONParser { private: json parsedJson; public: JSONParser(const std::string& jsonString) : parsedJson(json::parse(jsonString)) {} bool getBoolean(const std::string& key) { return parsedJson[key]; } double getFloat(const std::string& key) { return parsedJson[key]; } }; int main() { std::string jsonString = R"( { "happy": true, "pi": 3.141 } )"; JSONParser parser(jsonString); assert(parser.getBoolean("happy") == true); assert(parser.getFloat("pi") > 3.14); std::cout << "All assertions passed successfully." << std::endl; return 0; }
<gh_stars>0 import React, { Component } from 'react'; import { Image, ScrollView, StyleSheet, Text, View } from 'react-native'; import Biometrics from 'react-native-biometrics'; import { Divider } from 'react-native-elements'; import { Credentials } from '../credentials.js'; import SyncStorage from 'sync-storage'; import { ShoppingCart } from '../shopping-cart.js'; import I18n from '../config/I18n'; import { testProperties } from '../config/TestProperties'; import { MUSEO_SANS_BOLD, MUSEO_SANS_NORMAL, SCREENS } from '../config/Constants'; import { ParseText } from '../utils/parseText'; import { colors } from '../utils/colors'; import { STATUS_BAR_HEIGHT } from '../components/StatusBar'; import ActionButton from '../components/ActionButton'; import InputError from '../components/InputError'; import ErrorMessageContainer from '../components/ErrorMessageContainer'; import BiometryButton from '../components/BiometryButton'; import { handleQuickActionsNavigation } from '../config/QuickActionsNavigation'; export default class Login extends Component { static navigationOptions = { header: null, }; constructor(props) { super(props); this.state = { username: '', usernameError: false, password: '', passwordError: false, error: '', biometryType: null, }; this.handlePassChange = this.handlePassChange.bind(this); this.handleUserChange = this.handleUserChange.bind(this); this.handleSubmit = this.handleSubmit.bind(this); this.renderBiometryIcon = this.renderBiometryIcon.bind(this); this.handleBiometryLogin = this.handleBiometryLogin.bind(this); this.successfulLogin = this.successfulLogin.bind(this); } async componentDidMount() { // This is the first page loaded, so init our storage here await SyncStorage.init(); // Set the type of biometry so it can be used later, catch the error // if it can't be determined on a specific phone try { this.setState({ biometryType: (await Biometrics.isSensorAvailable()).available, }); } catch (e) { // Do nothing } 
handleQuickActionsNavigation(this.props.navigation); } resetState() { this.setState({ error: '', passwordError: false, usernameError: false, }); } handleUserChange(text) { this.setState({ username: text, }); } handlePassChange(text) { this.setState({ password: text, }); } handleSubmit() { // First, clear any errors this.resetState(); if (!this.state.username) { return this.setState({ error: I18n.t('login.errors.username'), usernameError: true, }); } if (!this.state.password) { return this.setState({ error: I18n.t('login.errors.password'), passwordError: true, }); } if (Credentials.verifyCredentials(this.state.username, this.state.password)) { // Catch our locked-out user and bail out const isLockedOutUser = Credentials.isLockedOutUser(); if (isLockedOutUser) { return this.setState({ error: I18n.t('login.errors.lockedOut') }); } return this.successfulLogin(); } return this.setState({ error: I18n.t('login.errors.noMatch'), passwordError: true, usernameError: true, }); } async handleBiometryLogin() { // Using object destructuring here will automatically call the `handleBiometryLogin` const loginResult = await Biometrics.simplePrompt({ promptMessage: 'Please sign in', cancelButtonText: 'Cancel', }); return loginResult.success ? this.successfulLogin() : this.handleBiometryLogin(); } successfulLogin() { // First, clear any errors this.resetState(); this.handleUserChange(''); this.handlePassChange(''); // and redirect after we wipe out any previous shopping cart contents ShoppingCart.resetCart(); this.props.navigation.navigate(SCREENS.INVENTORY_LIST); } /** * Parse a string that holds a `__text__` markdown and transform it into a * string with bolds or normal text components * * @param {string} string * * @return {*[]} */ parseNormalBoldText(string) { return (ParseText(string).map(text => ( <Text style={ [ text.bold ? 
styles.text_bold : {} ] } key={ text.id }> { text.value } </Text> ))); } /** * Render the biometry icon if it is there * * @return {null} */ renderBiometryIcon() { return this.state.biometryType ? <BiometryButton onPress={ this.handleBiometryLogin } type={ this.state.biometryType }/> : null; } render() { return ( <ScrollView contentContainerStyle={ styles.scroll_container } keyboardShouldPersistTaps="handled" { ...testProperties(I18n.t('login.screen')) } > <View style={ styles.wrapper }> <View style={ styles.login_container }> <Image resizeMode="contain" source={ require('../../img/swag-labs-logo.png') } style={ styles.swag_logo_image } /> <InputError placeholder={ 'login.username' } value={ this.state.username } onChangeText={ this.handleUserChange } error={ this.state.usernameError } /> <Divider style={ styles.bottomMargin20 }/> <InputError placeholder={ '<PASSWORD>' } value={ this.state.password } onChangeText={ this.handlePassChange } error={ this.state.passwordError } secureTextEntry={ true } /> <ErrorMessageContainer testID={ I18n.t('login.errors.container') } message={ this.state.error } /> { this.renderBiometryIcon() } <ActionButton onPress={ this.handleSubmit } title={ I18n.t('login.loginButton') } /> <Image source={ require('../../img/login-bot.png') } style={ styles.login_bot_image } resizeMode="contain" /> </View> <View style={ styles.login_info_container }> <Text style={ styles.login_info }> { this.parseNormalBoldText(I18n.t('login.loginText.usernames')) } </Text> <Divider style={ styles.divider }/> <Text style={ styles.login_info }> { this.parseNormalBoldText(I18n.t('login.loginText.password')) } </Text> </View> </View> </ScrollView> ); } } const styles = StyleSheet.create({ scroll_container: { backgroundColor: colors.white, paddingTop: STATUS_BAR_HEIGHT, }, wrapper: { flex: 1, }, login_container: { alignItems: 'center', paddingTop: 20, paddingRight: 40, paddingLeft: 40, }, swag_logo_image: { marginBottom: 30, width: '100%', }, bottomMargin20: { 
marginBottom: 20, }, message_container: { width: '100%', height: 55, paddingLeft: 10, paddingRight: 10, marginBottom: 2, marginTop: 2, flex: 1, justifyContent: 'center', alignItems: 'center', }, error_message_container: { backgroundColor: colors.slRed, }, error_message: { color: colors.white, fontSize: 14, fontFamily: MUSEO_SANS_NORMAL, textAlign: 'center', }, login_bot_image: { flex: 1, height: 320, width: '100%', }, login_info_container: { backgroundColor: colors.superLightGray, paddingBottom: STATUS_BAR_HEIGHT, paddingLeft: 40, paddingRight: 40, paddingTop: 20, }, login_info: { color: colors.gray, fontSize: 18, fontFamily: MUSEO_SANS_NORMAL, }, divider: { borderColor: colors.lightGray, borderBottomWidth: 3, marginBottom: 20, marginTop: 20, }, text_bold: { fontFamily: MUSEO_SANS_BOLD, }, });
<filename>imports/parsers/getSongInfoNct.js /* © 2017 NauStud.io * @author <NAME> * * NCT URL parser module */ import { xml2js } from 'meteor/vjau:xml2js'; import getGzipURL from './getGzipURL'; import { SongOrigin, defaultThumbnailUrl } from '../constants.js'; // Utility / Private functions const xmlURLReg = /https?:\/\/(?:www)?.nhaccuatui.com\/flash\/xml\?.*?key1=(\w+)/; const lyricReg = /<p id="divLyric"[\s\S]+ <\/p>/; // sample xml url: "http://www.nhaccuatui.com/flash/xml?key1=99decd7306277634419b987bed859265" /** * Get NCT stream URL and other info * * @param {[type]} songurl [description] * @return {[type]} [description] */ const getSongInfoNct = songurl => { let linkRes; let xmlURL; let lyric; // First Step: parse the HTML page to get the XML data URL for the flash-based player try { linkRes = getGzipURL(songurl); } catch (err) { console.error('Get NCT MP3 URL Error', err); } linkRes = linkRes && linkRes.content ? linkRes.content : ''; // console.log('linkRes:', linkRes); // run the html against regexp to get XML URL const xmlURLResults = xmlURLReg.exec(linkRes); const lyricResults = lyricReg.exec(linkRes); if (xmlURLResults) { xmlURL = xmlURLResults[0]; console.log('xmlURLResults:', xmlURLResults[0]); } else { console.log('xmlURL parse failed'); return null; } if (lyricResults) { // eslint-disable-next-line no-script-url if (lyricResults[0].includes('javascript:;')) { lyric = null; } else { lyric = lyricResults[0]; } console.log('lyricResult: ', lyricResults[0]); } else { console.log('lyric get failed'); } // Second Step: get the XML data file for the sone let xmlRes; let json; // Note: Manually install the node package in server folder const parser = new xml2js.Parser({ trim: true, }); // console.log('XML2JS:', XML2JS); try { xmlRes = getGzipURL(xmlURL); xmlRes = xmlRes.content; // console.log('Response:', xmlRes); // Third Step: parse and convert the XML string to JSON object parser.parseString(xmlRes, (error, result) => { json = result; }); 
console.log(`==> ${JSON.stringify(json)}`); // see sample JSON below } catch (err) { console.error('Get NCT stream Error', err); } // Fourth Step: normalize the JSON object to a song record if (json && json.tracklist && json.tracklist.track[0]) { console.log('Checking the XML data'); const track = json.tracklist.track[0]; //TODO: need to check if we ever got error with copyright checker like Zing if (track.location[0] /*&& String(track.errorcode[0]) === '0'*/) { console.log('URL is valid. Adding new song.'); return { timeAdded: Date.now(), originalURL: songurl, origin: SongOrigin.NHACCUATUI, name: track.title[0], artist: track.creator[0], streamURL: track.location[0], thumbURL: track.avatar[0] || defaultThumbnailUrl, lyric, play: 0, }; } else if (track.errormessage[0]) { console.log(`Error received: ${track.errormessage[0]}`); return { error: track.errormessage[0], }; } console.log('Unknown errors'); return { error: 'Errors unknown.', }; } console.log("Can't parse link"); return { error: "Can't parse and get song info from link", }; }; export default getSongInfoNct; //sample json: // { // "tracklist": { // "type": [ // "song" // ], // "track": [ // { // "title": [ // "Tâm Sự Với Người Lạ" // ], // "creator": [ // "Tiên Cookie" // ], // "location": [ // "http://s82.stream.nixcdn.com/bd786719943728e32606ccc9f113864b/56dd7572/NhacCuaTui913/TamSuVoiNguoiLa-TienCookie-4282715.mp3" // ], // "info": [ // "http://www.nhaccuatui.com/bai-hat/tam-su-voi-nguoi-la-tien-cookie.H8GqrTEErwJR.html" // ], // "image": [ // "http://avatar.nct.nixcdn.com/singer/avatar/2016/01/25/4/1/1/7/1453716830438.jpg" // ], // "thumb": [ // "" // ], // "bgimage": [ // "http://avatar.nct.nixcdn.com/singer/avatar/2016/01/25/4/1/1/7/1453716830438.jpg" // ], // "avatar": [ // "http://avatar.nct.nixcdn.com/singer/avatar/2016/01/25/4/1/1/7/1453716830438.jpg" // ], // "lyric": [ // "http://lrc.nct.nixcdn.com/2016/02/26/4/7/a/1/1456452594231.lrc" // ], // "newtab": [ // 
"http://www.nhaccuatui.com/nghe-si-tien-cookie.html" // ], // "kbit": [ // "320" // ], // "key": [ // "<KEY>" // ] // } // ] // } // }
import psycopg2  # Assuming PostgreSQL database


def retrieve_and_process_data(connection, noc_id):
    """Run a SELECT over the 'noc' table for the given noc_id.

    Args:
        connection: an open psycopg2 connection.
        noc_id: value matched against noc.noc_id.
    """
    try:
        # Create a cursor object using the provided database connection
        cursor = connection.cursor()

        # SECURITY FIX: the previous version interpolated noc_id straight
        # into the SQL string with an f-string, which permitted SQL
        # injection. A %s placeholder lets the driver quote/escape the
        # value safely.
        query = '''
            SELECT *
            FROM noc
            WHERE noc.noc_id = %s
            ORDER BY noc_id;
        '''
        cursor.execute(query, (noc_id,))
    except psycopg2.Error as e:
        # Catch the exception and print a custom error message
        print(f"An error occurred: {e}")


# Example usage (illustrative only).
# BUGFIX: the call below used to execute at import time with the undefined
# name `connection`, raising NameError; it is now commented out.
# retrieve_and_process_data(connection, 'sample_noc_id')
package com.rong.kemonofixer.mixins.entity.ai;

import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;

import com.rong.kemonofixer.Configuration;

import erx.kemonocraft.entity.ai.EntityAIFriendsMine;
import net.minecraft.entity.ai.EntityAIBase;

/**
 * Mixin into Kemonocraft's {@code EntityAIFriendsMine} AI task that allows
 * the mining behaviour to be disabled via this mod's configuration.
 */
@Mixin(EntityAIFriendsMine.class)
public abstract class MixinEntityAIFriendsMine extends EntityAIBase {

	//EntityAIBase#shouldExecute()
	// Injected at HEAD of the obfuscated shouldExecute (func_75250_a): when
	// the AI_CAN_MINE config flag is off, force the task to report that it
	// should not run. cancellable=true lets setReturnValue short-circuit the
	// original method.
	@Inject(method = "func_75250_a", at = @At("HEAD"), cancellable = true)
	private void onShouldExecute(CallbackInfoReturnable<Boolean> info) {
		if (!Configuration.AI.AI_CAN_MINE) {
			info.setReturnValue(false);
		}
	}
}
/**
 * Orders Model
 * @module ordersModel
 */
const mongoose = require('mongoose');
const { nanoid } = require('nanoid');

/**
 * Mongoose schema for a customer order.
 *
 * (Doc corrections vs. the old header: `userId` is a Mongo ObjectId, not a
 * String; `orderDate` is stored as a String, not a Date; `userName` was
 * previously undocumented.)
 *
 * @property {String} _id - unique order id (defaults to an 8-char nanoid)
 * @property {Array} items - list of items
 * @property {Number} subTotal - sub total price of the items
 * @property {Number} deliveryCharge - delivery charge
 * @property {Number} total - total price after adding delivery charge
 * @property {mongoose.Types.ObjectId} userId - id of the user
 * @property {String} userName - name of the user
 * @property {String} address - address of the delivery
 * @property {String} phoneNumber - phone number of the user
 * @property {String} email - email of the user
 * @property {Object} deliveryTime - contains the delivery time and date (both strings)
 * @property {Object} payment - contains payment method and paid status (paid defaults to false)
 * @property {String} status - order process status (defaults to 'Pending')
 * @property {String} orderDate - order placed time
 */
const orderSchema = new mongoose.Schema({
  _id: {
    type: String,
    default: () => nanoid(8),
  },
  items: { type: Array, required: true },
  subTotal: { type: Number, required: true },
  deliveryCharge: { type: Number, required: true },
  total: { type: Number, required: true },
  userId: { type: mongoose.Types.ObjectId, required: true },
  userName: { type: String, required: true },
  address: { type: String, required: true },
  phoneNumber: { type: String, required: true },
  email: { type: String, required: true },
  deliveryTime: {
    time: { type: String, required: true },
    date: { type: String, required: true },
  },
  payment: {
    paymentMethod: { type: String, required: true },
    paid: { type: Boolean, default: false },
  },
  status: { type: String, default: 'Pending' },
  orderDate: { type: String, required: true },
});

// export
module.exports = mongoose.model('Order', orderSchema);
#!/bin/bash --posix # # @superglue # @version 0.1.0-beta.1 # # A bash superset that cleans the environment, defines helper references, # sources helper functions, and sources a user-defined SCRIPT. # # @dest $BIN/superglue # @dest $BIN/sglue # @dest $BIN/sgl # @mode 0755 # # @author Adam Smith <imagineadamsmith@gmail.com> (https://github.com/imaginate) # @copyright 2016-2022 Adam A Smith <imagineadamsmith@gmail.com> # # @use sgl|sglue|superglue [...OPTION] FUNC [...FUNC_ARG] # @use sgl|sglue|superglue [...OPTION] SCRIPT [...SCRIPT_ARG] # @opt -a|--alias Enable function names without `sgl_' prefixes # for each sourced FUNC. # @opt -C|--no-color Disable ANSI output coloring for terminals. # @opt -c|--color Enable ANSI output coloring for non-terminals. # @opt -D|--silent-child Disable `stderr' and `stdout' for child processes. # @opt -d|--quiet-child Disable `stdout' output for child processes. # @opt -h|-?|--help[=FUNC] Print help info and exit. # @opt -P|--silent-parent Disable `stderr' and `stdout' for parent process. # @opt -p|--quiet-parent Disable `stdout' output for parent process. # @opt -Q|--silent Disable `stderr' and `stdout' outputs. # @opt -q|--quiet Disable `stdout' output. # @opt -S|--source-all Source every FUNC. # @opt -s|--source=FUNCS Source each FUNC in FUNCS. # @opt -V|--verbose Appends line number and context to errors. # @opt -v|--version Print version info and exit. # @opt -x|--xtrace Enables bash `xtrace' option. # @opt -|-- End the options. # @val FUNC Must be a valid `superglue' function. The `sgl_' prefix # is optional. # @val FUNCS Must be a list of 1 or more FUNC using `,', `|', or ` ' # to separate each. # @val SCRIPT Must be a valid file path to a `superglue' script. # @exit # 0 PASS A successful exit. # 1 ERR An unknown error. # 2 OPT An invalid option. # 3 VAL An invalid or missing value. # 4 AUTH A permissions error. # 5 DPND A dependency error. # 6 CHLD A child process exited unsuccessfully. # 7 SGL A `superglue' script error. 
############################################################################## readonly SGL_VERSION='0.1.0-beta.1' readonly SGL='superglue' ############################################################################## ## DEFINE LIB DIRS ############################################################################## readonly SGL_LIB='/usr/lib/superglue' readonly SGL_HELP='/usr/share/superglue/help' ############################################################################## ## DEFINE NULL & TMP REFS ############################################################################## readonly NIL='/dev/null' readonly TMP='/tmp' ############################################################################## ## DEFINE PRIVATE FUNCS ############################################################################## # @include ./include/_sgl_chk_cmd.sh # @include ./include/_sgl_chk_core.sh # @include ./include/_sgl_chk_exit.sh # @include ./include/_sgl_clean_builtin.sh # @include ./include/_sgl_cmd_to_str.sh # @include ./include/_sgl_err.sh # @include ./include/_sgl_fail.sh # @include ./include/_sgl_get_alias.sh # @include ./include/_sgl_get_quiet.sh # @include ./include/_sgl_get_silent.sh # @include ./include/_sgl_get_verbose.sh # @include ./include/_sgl_help.sh # @include ./include/_sgl_is_cmd.sh # @include ./include/_sgl_is_dir.sh # @include ./include/_sgl_is_false.sh # @include ./include/_sgl_is_file.sh # @include ./include/_sgl_is_flat.sh # @include ./include/_sgl_is_func.sh # @include ./include/_sgl_is_name.sh # @include ./include/_sgl_is_path.sh # @include ./include/_sgl_is_read.sh # @include ./include/_sgl_is_set.sh # @include ./include/_sgl_is_true.sh # @include ./include/_sgl_match_func.sh # @include ./include/_sgl_parse_args.sh # @include ./include/_sgl_parse_init.sh # @include ./include/_sgl_prefix.sh # @include ./include/_sgl_source.sh # @include ./include/_sgl_unalias.sh # @include ./include/_sgl_unalias_each.sh # @include ./include/_sgl_unset_func.sh # @include 
./include/_sgl_unset_funcs.sh # @include ./include/_sgl_version.sh # @include ./include/_sgl_which.sh ############################################################################## ## CLEAN BUILTINS ############################################################################## _sgl_clean_builtin ############################################################################## ## CHECK BASH VERSION ############################################################################## if [[ -z "${BASH_VERSINFO}" ]] || [[ "${BASH_VERSINFO[0]}" != '4' ]]; then _sgl_err DPND "bash version 4 required" fi ############################################################################## ## DEFINE SGL FUNCS ############################################################################## declare -ar SGL_FUNCS=( \ sgl_chk_cmd \ sgl_chk_dir \ sgl_chk_exit \ sgl_chk_file \ sgl_chk_uid \ sgl_color \ sgl_cp \ sgl_err \ sgl_mk_dest \ sgl_parse_args \ sgl_print \ sgl_rm_dest \ sgl_set_color \ sgl_source ) ############################################################################## ## CHECK CORE PATHS ############################################################################## _sgl_chk_core "${SGL_LIB}" "${SGL_FUNCS[@]}" _sgl_chk_core "${SGL_HELP}" 'superglue' "${SGL_FUNCS[@]}" if ! 
_sgl_is_dir "${TMP}"; then _sgl_err DPND \ "missing core directory \`${TMP}' - your system may not be compatible" fi ############################################################################## ## DEFINE COMMANDS ############################################################################## readonly bash='/bin/bash' readonly cat="$(_sgl_which cat)" readonly chgrp="$(_sgl_which chgrp)" readonly chmod="$(_sgl_which chmod)" readonly chown="$(_sgl_which chown)" readonly cp="$(_sgl_which cp)" readonly grep="$(_sgl_which grep)" readonly ln="$(_sgl_which ln)" readonly ls="$(_sgl_which ls)" readonly mkdir="$(_sgl_which mkdir)" readonly mv="$(_sgl_which mv)" readonly rm="$(_sgl_which rm)" readonly sed="$(_sgl_which sed)" readonly sort="$(_sgl_which sort)" ############################################################################## ## CHECK COMMANDS ############################################################################## _sgl_chk_cmd ${bash} ${cat} ${chgrp} ${chmod} ${chown} ${cp} ${grep} ${ln} \ ${ls} ${mkdir} ${mv} ${rm} ${sed} ${sort} ############################################################################## ## DEFINE COLORS ############################################################################## readonly _SGL_UNCOLOR="$(printf '%b' '\033[0;0m')" readonly _SGL_BLACK="$(printf '%b' '\033[0;30m')" readonly _SGL_RED="$(printf '%b' '\033[0;91m')" readonly _SGL_GREEN="$(printf '%b' '\033[0;32m')" readonly _SGL_YELLOW="$(printf '%b' '\033[0;33m')" readonly _SGL_BLUE="$(printf '%b' '\033[0;94m')" readonly _SGL_PURPLE="$(printf '%b' '\033[0;35m')" readonly _SGL_CYAN="$(printf '%b' '\033[0;36m')" readonly _SGL_WHITE="$(printf '%b' '\033[0;97m')" SGL_UNCOLOR="${_SGL_UNCOLOR}" SGL_BLACK="${_SGL_BLACK}" SGL_RED="${_SGL_RED}" SGL_GREEN="${_SGL_GREEN}" SGL_YELLOW="${_SGL_YELLOW}" SGL_BLUE="${_SGL_BLUE}" SGL_PURPLE="${_SGL_PURPLE}" SGL_CYAN="${_SGL_CYAN}" SGL_WHITE="${_SGL_WHITE}" SGL_COLOR_OFF=0 SGL_COLOR_ON=0 
############################################################################## ## DECLARE DEFS ARRAY ############################################################################## declare -A _SGL_DEFS ############################################################################## ## DEFINE NEWLINE REF ############################################################################## readonly NEWLINE="$(printf '\n')" ############################################################################## ## PARSE ARGS ############################################################################## _sgl_parse_init \ '-a|--alias' 0 \ '-C|--no-color' 0 \ '-c|--color' 0 \ '-D|--silent-child' 0 \ '-d|--quiet-child' 0 \ '-h|-?|--help' 2 \ '-P|--silent-parent' 0 \ '-p|--quiet-parent' 0 \ '-Q|--silent' 0 \ '-q|--quiet' 0 \ '-S|--source-all' 0 \ '-s|--source' 1 \ '-V|--verbose' 0 \ '-v|--version' 0 \ '-x|--xtrace' 0 \ -- "${@}" ############################################################################## ## LOAD SOURCE FUNCTION ############################################################################## . "${SGL_LIB}/sgl_source" ############################################################################## ## PARSE OPTS ############################################################################## SGL_ALIAS=0 SGL_SILENT_CHILD=0 SGL_QUIET_CHILD=0 SGL_SILENT_PARENT=0 SGL_QUIET_PARENT=0 SGL_SILENT=0 SGL_QUIET=0 SGL_VERBOSE=0 if [[ ${#__SGL_OPTS[@]} -gt 0 ]]; then declare -i __I=0 declare __OPT declare __VAL for __OPT in "${__SGL_OPTS[@]}"; do case "${__OPT}" in -a|--alias) SGL_ALIAS=1 ;; -C|--no-color) SGL_COLOR_OFF=1 SGL_COLOR_ON=0 ;; -c|--color) SGL_COLOR_OFF=0 SGL_COLOR_ON=1 ;; -D|--silent-child) SGL_SILENT_CHILD=1 ;; -d|--quiet-child) SGL_QUIET_CHILD=1 ;; -h|-\?|--help) if [[ ${__SGL_OPT_BOOL[${__I}]} -eq 1 ]]; then __VAL="$(_sgl_prefix "${__SGL_OPT_VALS[${__I}]}")" if ! 
_sgl_is_func "${__VAL}"; then _sgl_err VAL "invalid \`${SGL}' FUNC \`${__VAL}'" fi _sgl_help "${__VAL}" else _sgl_help superglue fi ;; -P|--silent-parent) SGL_SILENT_PARENT=1 ;; -p|--quiet-parent) SGL_QUIET_PARENT=1 ;; -Q|--silent) SGL_SILENT=1 ;; -q|--quiet) SGL_QUIET=1 ;; -S|--source-all) sgl_source '*' ;; -s|--source) declare __RE='^[a-z*]+[a-z_ ,|*]*$' __VAL="${__SGL_OPT_VALS[${__I}]}" if [[ ! "${__VAL}" =~ ${__RE} ]]; then _sgl_err VAL "invalid \`${SGL}' FUNCS \`${__VAL}'" fi unset -v __RE declare -a __FUNCS=() declare __FUNC __VAL="$(printf '%s' "${__VAL}" | ${sed} -e 's/[,|]/ /g')" __VAL="${__VAL% }" while IFS= read -r -d ' ' __FUNC; do __FUNC="$(_sgl_prefix "${__FUNC}")" if ! _sgl_match_func "${__FUNC}"; then __FUNC="FUNC \`${__FUNC}'" __VAL="FUNCS \`${__VAL}'" _sgl_err VAL "invalid \`${SGL}' ${__FUNC} in ${__VAL}" fi __FUNCS[${#__FUNCS[@]}]="${__FUNC}" done <<< "${__VAL} " sgl_source "${__FUNCS[@]}" unset -v __FUNC unset -v __FUNCS ;; -V|--verbose) SGL_VERBOSE=1 ;; -v|--version) _sgl_version ;; -x|--xtrace) set -x ;; *) _sgl_err SGL "invalid parsed \`${SGL}' OPTION \`${__OPT}'" ;; esac : $(( ++__I )) done unset -v __I unset -v __OPT unset -v __VAL fi ############################################################################## ## PARSE ARGS ############################################################################## if [[ ${#__SGL_VALS[@]} -eq 0 ]]; then _sgl_err VAL "missing \`${SGL}' FUNC|SCRIPT" fi declare -a SGL_ARGS=() if [[ ${#__SGL_VALS[@]} -gt 1 ]]; then declare __ARG for __ARG in "${__SGL_VALS[@]:1}"; do SGL_ARGS[${#SGL_ARGS[@]}]="${__ARG}" done unset -v __ARG fi readonly -a SGL_ARGS ############################################################################## ## PARSE FUNC ############################################################################## SGL_FUNC="$(_sgl_prefix "${__SGL_VALS[0]}")" if ! 
_sgl_is_func "${SGL_FUNC}"; then SGL_FUNC='' fi readonly SGL_FUNC if [[ -n "${SGL_FUNC}" ]]; then sgl_source ${SGL_FUNC} ${SGL_FUNC} "${SGL_ARGS[@]}" exit ${?} fi ############################################################################## ## PARSE SCRIPT ############################################################################## readonly SGL_SCRIPT="${__SGL_VALS[0]}" if ! _sgl_is_read "${SGL_SCRIPT}"; then _sgl_err VAL "invalid \`${SGL}' file path SCRIPT \`${SGL_SCRIPT}'" fi . "${SGL_SCRIPT}" "${SGL_ARGS[@]}" exit ${?}
/*******************************************************************************
 * Copyright (c) 2015, 2016 <NAME>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
'use strict';

// Jasmine spec for initializationService: a flag service recording whether
// application initialization has completed.
describe('Service: initializationService', function () {

  // load the service's module
  beforeEach(module('greensopinionfinanceApp'));

  // instantiate service
  var initializationService;
  beforeEach(inject(function (_initializationService_) {
    initializationService = _initializationService_;
  }));

  // The flag starts out false and tracks the last value passed to
  // initialized().
  it('should expose isInitialized()', function () {
    expect(initializationService.isInitialized()).toBe(false);
    initializationService.initialized(true);
    expect(initializationService.isInitialized()).toBe(true);
    initializationService.initialized(false);
    expect(initializationService.isInitialized()).toBe(false);
  });
});
import numpy as np


def cksaap(sequence, k):
    """Composition of k-spaced amino-acid pairs.

    Builds a 20x20 count matrix in which entry (a, b) is the number of
    times residue a occurs exactly k positions before residue b in
    ``sequence``. Characters outside the 20 standard amino acids are
    ignored.

    Raises:
        ValueError: if ``sequence`` is empty or ``k`` is not positive.
    """
    if not sequence or k <= 0:
        raise ValueError("Invalid input: sequence cannot be empty and k must be a positive integer")

    alphabet = "ACDEFGHIKLMNPQRSTVWY"  # the 20 standard amino acids
    index_of = {acid: idx for idx, acid in enumerate(alphabet)}

    counts = np.zeros((len(alphabet), len(alphabet)))
    # zip pairs each residue with the one k positions later; when
    # k >= len(sequence) the zip is empty and an all-zero matrix is returned.
    for first, second in zip(sequence, sequence[k:]):
        row = index_of.get(first)
        col = index_of.get(second)
        if row is not None and col is not None:
            counts[row, col] += 1

    return counts
from bs4 import BeautifulSoup


def calculate_average_margin_top(html_file):
    """Average the inline ``margin-top`` values (in px) of all styled elements.

    Args:
        html_file: path to an HTML file on disk.

    Returns:
        A human-readable message: either the average margin-top or a notice
        that no element declared one.
    """
    with open(html_file, 'r') as file:
        html_content = file.read()

    soup = BeautifulSoup(html_content, 'html.parser')
    elements = soup.find_all(style=True)

    total_margin_top = 0
    count = 0
    for element in elements:
        # BUGFIX: the old split(':')-and-unpack crashed on a trailing ';'
        # (which yields an empty declaration) and on values containing ':'
        # (e.g. url(...)). partition() is tolerant of both.
        for declaration in element['style'].split(';'):
            prop_name, sep, prop_value = declaration.partition(':')
            if not sep or prop_name.strip() != 'margin-top':
                continue
            value = prop_value.strip()
            # Strip the unit only when it really is 'px' (the old blind
            # [:-2] slice mangled bare numbers and other units).
            if value.endswith('px'):
                value = value[:-2].strip()
            try:
                total_margin_top += int(value)
            except ValueError:
                continue  # ignore non-integer margins such as '1em' or 'auto'
            count += 1

    if count == 0:
        return "No elements with margin-top found in the HTML file."

    average_margin_top = total_margin_top / count
    return f"The average margin-top value is {average_margin_top}px."


# Example usage, guarded so importing this module no longer tries to read
# 'example.html' as a side effect.
if __name__ == '__main__':
    html_file = 'example.html'
    result = calculate_average_margin_top(html_file)
    print(result)
#!/bin/sh # Based on https://github.com/eldarlabs/ghpages-deploy-script/blob/master/scripts/deploy-ghpages.sh # abort the script if there is a non-zero error #set -e remote=$(git config remote.origin.url) # now lets setup a new repo so we can update the branch git config --global user.email "$GH_EMAIL" > /dev/null 2>&1 git config --global user.name "$GH_NAME" > /dev/null 2>&1 if ! git diff-index --quiet HEAD --; then # stage any changes and new files git add -A # now commit git commit -m "auto-lint" # and push, but send any output to /dev/null to hide anything sensitive git push --force --quiet origin $CIRCLE_BRANCH > /dev/null 2>&1 fi # cd ~/repo/library/artifacts # git pull --rebase --progress "origin" +refs/heads/artifacts # cd ~/repo # if ! git diff-index --quiet HEAD --; then # # stage any changes and new files # git add -A # # now commit # git commit -m "artifacts-update" # # and push, but send any output to /dev/null to hide anything sensitive # git push --force --quiet origin $CIRCLE_BRANCH > /dev/null 2>&1 # fi
// Returns a new array with every element of `vetor` multiplied by `num`.
// The input array is not modified.
function vetor1(vetor, num) {
  return vetor.map((valor) => valor * num);
}

// Returns a new array containing only the elements of `vetor` greater than 5,
// each multiplied by `num`. The input array is not modified.
function vetor2(vetor, num) {
  return vetor.filter((valor) => valor > 5).map((valor) => valor * num);
}

const vetor = [1, 2, 4, 5, 6, 7, 8, 9];
console.log(vetor1(vetor, 2));
console.log(vetor2(vetor, 2));
#!/bin/bash
#summary: meta reporting on scripts
#tags: meta, fluentBash

#load loader first.
[ -z ${BASH_DIR+x} ] && BASH_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $BASH_DIR/../core/core.sh #first thing we load is the script loader

#load dependencies.
loadScript piping/piping.sh
loadScript piping/strings.sh
loadScript piping/lists.sh
loadScript piping/conditionals.sh

#we need a way to query for functionality in our libraries, and to know how to use it.
#to facilitate this we add meta-information comments to scripts following the format below:
#scripts.sh
#summary: a summary of the script
#tags: a, b, c
#....
#description: appears in the lines directly above function definitions
#usage: appears in the lines directly above function definitions
#myFunction

#description: finds all scripts (name ends in .sh, has a shebang)
#usage: getAllScripts
getAllScripts()
{
	#get all a) files with b) a name ending in ".sh" that have c) a shebang of #!/bin
	LIST=$(find "$BASH_DIR/.." -type f -name "*.sh" -exec grep -l '#!/bin' {} \; )
	#filter out scripts that don't have meta
	echo "$LIST"
}
readonly -f getAllScripts

#description: does the file have fluentBash meta information
#usage: echo myfile | hasScriptMeta
#note: stdin carries a file PATH, not file contents; on success the path is
#echoed back so this composes as a pass-through filter.
hasScriptMeta()
{
	local STDIN=$(getStdIn)
	debecho hasScriptMeta stdin "$STDIN"

	#return false if file does not have:
	#line 2 starts with "#summary:"
	#line 3 starts with "#tags:"
	local FILEDATA=$(cat "$STDIN" | head -n 3)
	debecho hasScriptMeta filedata "$FILEDATA"

	local FILTER=$(echo "getLine 2 | ifStartsWith #summary:" | appendLine "getLine 3 | ifStartsWith #tags:" )
	local RESULT
	local RV
	#NOTE(review): this passes the literal name "FILTER", not "$FILTER" --
	#presumably `filter` dereferences the variable by name; confirm against
	#the implementation in piping/conditionals.sh.
	RESULT=$(echo "$FILEDATA" | filter FILTER)
	RV=$?
	if [[ "$RV" == 0 ]]; then
		echo "$STDIN"
		return 0
	fi
	return 1
}
readonly -f hasScriptMeta
#debugFlagOn hasScriptMeta

#description: finds all scripts that have fluentBash meta information
#usage: getAllScriptsWithMeta
getAllScriptsWithMeta()
{
	local LIST=$(getAllScripts )
	#hasScriptMeta echoes the path through only when the meta header is present
	echo "$LIST" | doEachLine hasScriptMeta
}
readonly -f getAllScriptsWithMeta
#debugFlagOn getAllScriptsWithMeta

#description: gets a list of all files and their meta tags
#usage: getAllScriptsAndTags
getAllScriptsAndTags()
{
	local LIST=$(getAllScriptsWithMeta)
	#now grep all the tag lines in this list
	#getSubstring 6 strips the leading "#tags:" prefix from each matched line
	local TAGLIST=$(echo "$LIST" | doEachLine dump | grep '^#tags' | doEachLine getSubstring 6)
	#we probably want some join and split going on here
	echo "$LIST" | sideJoinLists TAGLIST " ===> "
}
readonly -f getAllScriptsAndTags
#debugFlagOn getAllScriptsAndTags

#description: returns which files have the provided tag
#usage: echo myTag | whereIsTag
whereIsTag()
{
	local STDIN=$(getStdIn)
	getAllScriptsAndTags | grep "$STDIN"
}
readonly -f whereIsTag
#debugFlagOn whereIsTag

#description: returns all of the functions every script has
#usage: getAllScriptFunctions
getAllScriptFunctions()
{
	#find files that match the script name
	local MATCHES=$(getAllScriptsWithMeta)
	IFS=$'\n' read -d '' -r -a SCRIPTS <<< "$MATCHES"

	#now iterate thru the matched files
	for EACH in "${SCRIPTS[@]}"
	do
		TEXT=$(cat "$EACH")
		LEN=$(echo "$TEXT" | getLineCount)

		#split into array
		IFS=$'\n' read -d '' -r -a ARR <<< "$TEXT"

		#iterate over each line
		for ((i = 0 ; i < "$LEN" ; i++)); do
			LINE="${ARR[$i]}"

			#if the line starts with "#description:" we start our grab
			HASMATCH=false
			echo "$LINE" | ifStartsWith "#description" && HASMATCH=true

			#consume the #usage lines that follow; the first non-#usage line
			#(i.e. the function definition itself) is what gets reported
			while [[ "$HASMATCH" == true ]]; do
				i=$((i + 1))
				LINE="${ARR[$i]}"
				echo "$LINE" | ifStartsWith "#usage" || HASMATCH=false

				#if we've switched HASMATCH off the first time we echo it
				if [[ "$HASMATCH" == false ]]; then
					echo "$EACH"" ===> ""$LINE"
					echo
				fi
			done
		done
	done
}
readonly -f getAllScriptFunctions
#debugFlagOn getAllScriptFunctions

#description: looks for all functions that have "description" and "usage" header comments
#usage: echo scriptPartialName | getScriptFunctions
getScriptFunctions()
{
	local STDIN=$(getStdIn)

	#find files that match the script name
	local MATCHES=$(getAllScriptsWithMeta | grep "$STDIN")

	#now iterate thru the matched files
	#NOTE(review): MATCHES is a plain string, not an array, so
	#"${MATCHES[@]}" expands to a single element containing the whole
	#newline-joined list -- unlike getAllScriptFunctions above, which splits
	#into the SCRIPTS array first. This looks like it only works when grep
	#matches exactly one file; verify and consider the array-split approach.
	for EACH in "${MATCHES[@]}"
	do
		TEXT=$(cat "$EACH")
		LEN=$(echo "$TEXT" | getLineCount)

		#split into array
		IFS=$'\n' read -d '' -r -a ARR <<< "$TEXT"

		#iterate over each line
		for ((i = 0 ; i < "$LEN" ; i++)); do
			LINE="${ARR[$i]}"

			#if the line starts with "#description:" we start our grab
			HASMATCH=false
			echo "$LINE" | ifStartsWith "#description" && HASMATCH=true

			while [[ "$HASMATCH" == true ]]; do
				i=$((i + 1))
				LINE="${ARR[$i]}"
				echo "$LINE" | ifStartsWith "#usage" || HASMATCH=false

				#if we've switched HASMATCH off the first time we echo it
				if [[ "$HASMATCH" == false ]]; then
					echo "$EACH"" ===> ""$LINE"
					echo
				fi
			done
		done
	done
}
readonly -f getScriptFunctions
#debugFlagOn getScriptFunctions

#description: returns which file defines the function
#usage: echo myFunction | whereIsFunction
whereIsFunction()
{
	local STDIN=$(getStdIn)
	getAllScriptFunctions | grep "$STDIN"
}
readonly -f whereIsFunction
#debugFlagOn whereIsFunction
/**
 * Abstract colleague in a mediator-pattern chat: concrete users exchange
 * messages only through the shared {@link MessageMediator}, never directly
 * with each other.
 */
public abstract class User {
    /** Name identifying this user. */
    protected String name;
    /** Mediator that routes messages between users. */
    protected MessageMediator mediator;

    /**
     * @param name     name identifying this user
     * @param mediator mediator this user sends and receives through
     */
    public User(String name, MessageMediator mediator) {
        this.name = name;
        this.mediator = mediator;
    }

    /** Sends a message via the mediator. */
    public abstract void send(String message);

    /** Handles a message delivered by the mediator. */
    public abstract void receive(String message);
}