text
stringlengths
1
1.05M
#!/bin/sh
# Build a tarball of detached code signatures for a signed macOS app bundle.
#
# Runs `codesign` over the bundle with the caller-supplied arguments, then:
#   1. for every signed Mach-O file, extracts the embedded signature blob
#      (located via `pagestuff` offset/size) into a .sign file, and
#   2. copies every _CodeSignature/CodeResources file verbatim.
# The result is packed into ${OUT} so the signatures can be re-attached to an
# unsigned build elsewhere.
set -e

ROOTDIR=dist
BUNDLE="${ROOTDIR}/HLMN-Qt.app"
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST="${TEMPDIR}/signatures.txt"
OUT=signature.tar.gz

# Require at least one codesign argument (e.g. the signing identity).
if [ -z "$1" ]; then
  echo "usage: $0 <codesign args>"
  echo "example: $0 -s MyIdentity"
  exit 1
fi

rm -rf "${TEMPDIR}" "${TEMPLIST}"
mkdir -p "${TEMPDIR}"

# --file-list records every file codesign touched; "$@" forwards the
# caller's codesign arguments unchanged.
${CODESIGN} -f --file-list "${TEMPLIST}" "$@" "${BUNDLE}"

# Extract the embedded signature from each signed Mach-O binary.
# NOTE(review): the file list is assumed to contain no whitespace in paths;
# the unquoted `for` word-split relies on that.
for i in $(grep -v CodeResources "${TEMPLIST}"); do
  TARGETFILE="${BUNDLE}/$(echo "${i}" | sed "s|.*${BUNDLE}/||")"
  # pagestuff -p prints the load segments; the last two lines carry the
  # code-signature blob's size and file offset.
  SIZE=$(pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g')
  OFFSET=$(pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g')
  SIGNFILE="${TEMPDIR}/${TARGETFILE}.sign"
  DIRNAME="$(dirname "${SIGNFILE}")"
  mkdir -p "${DIRNAME}"
  echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
  # bs=1 so skip/count are byte-exact.
  dd if="$i" of="${SIGNFILE}" bs=1 skip="${OFFSET}" count="${SIZE}" 2>/dev/null
done

# Copy the CodeResources property lists verbatim, preserving bundle layout.
for i in $(grep CodeResources "${TEMPLIST}"); do
  TARGETFILE="${BUNDLE}/$(echo "${i}" | sed "s|.*${BUNDLE}/||")"
  RESOURCE="${TEMPDIR}/${TARGETFILE}"
  DIRNAME="$(dirname "${RESOURCE}")"
  mkdir -p "${DIRNAME}"
  echo "Adding resource for: ${TARGETFILE}"
  cp "${i}" "${RESOURCE}"
done

rm "${TEMPLIST}"
tar -C "${TEMPDIR}" -czf "${OUT}" .
rm -rf "${TEMPDIR}"
echo "Created ${OUT}"
<reponame>dominicbarnes/virtual-element-assertions
// Mocha test suite for the virtual-element-assertions package.
// Each `fail(fn)` wrapper (defined at the bottom) returns a test body that
// asserts `fn` throws an Error, so "should throw" cases read declaratively.
var assert = require('assert');
var element = require('virtual-element');
var assertions = require('..');

describe('node', function () {
  it('should be an object', function () {
    assert(assertions);
    assert.strictEqual(typeof assertions, 'object');
  });

  describe('.isNode(node, type)', function () {
    it('should throw when missing the node', fail(function () {
      assertions.isNode();
    }));
    it('should throw for objects that are not virtual nodes', fail(function () {
      assertions.isNode({});
    }));
    it('should not throw for plain elements', function () {
      assertions.isNode(element('div'));
    });
    it('should match the type if given', function () {
      assertions.isNode(element('div'), 'div');
    });
    it('should match strictly on component types', function () {
      var Component = { render: function () {} };
      assertions.isNode(element(Component), Component);
    });
  });

  describe('.hasAttribute(node, attr, [value])', function () {
    it('should throw when missing the node', fail(function () {
      assertions.hasAttribute();
    }));
    it('should throw for objects that are not virtual nodes', fail(function () {
      assertions.hasAttribute({});
    }));
    it('should throw when missing the attr name', fail(function () {
      assertions.hasAttribute(element('div'));
    }));
    it('should throw when the attribute name is not found', fail(function () {
      assertions.hasAttribute(element('div'), 'id');
    }));
    it('should not throw when the attribute name is found', function () {
      assertions.hasAttribute(element('div', { id: 'a' }), 'id');
    });
    it('should throw when the attribute does not match', fail(function () {
      assertions.hasAttribute(element('div', { id: 'a' }), 'id', 'b');
    }));
    it('should not throw when the attribute does match', function () {
      assertions.hasAttribute(element('div', { id: 'a' }), 'id', 'a');
    });
    it('should strictly match attribute values', function () {
      assertions.hasAttribute(element('a', { onClick: noop }), 'onClick', noop);
    });
    // Falsy attribute values (e.g. disabled: false) still count as present.
    it('should not treat falsy values as a missing attribute', function () {
      assertions.hasAttribute(element('input', { disabled: false }), 'disabled');
    });
    it('should fail when the attribute is falsy and does not match', fail(function() {
      assertions.hasAttribute(element('input', { disabled: true }), 'disabled', false);
    }));
  });

  describe('.notHasAttribute(node, attr)', function () {
    it('should throw when missing the node', fail(function () {
      assertions.notHasAttribute();
    }));
    it('should throw for objects that are not virtual nodes', fail(function () {
      assertions.notHasAttribute({});
    }));
    it('should throw when missing the attr name', fail(function () {
      assertions.notHasAttribute(element('div'));
    }));
    it('should not throw when the attribute is not present', function () {
      assertions.notHasAttribute(element('div'), 'id');
    });
    it('should throw when the attribute is present, but falsy', fail(function () {
      assertions.notHasAttribute(element('input', { disabled: false }), 'disabled');
    }));
  });

  describe('.hasClass(node, name)', function () {
    it('should throw when missing the node', fail(function () {
      assertions.hasClass();
    }));
    it('should throw for objects that are not virtual nodes', fail(function () {
      assertions.hasClass({});
    }));
    it('should throw when missing the attr name', fail(function () {
      assertions.hasClass(element('div'));
    }));
    it('should not throw when the class name is present', function () {
      assertions.hasClass(element('div', { class: 'a' }), 'a');
      assertions.hasClass(element('div', { class: 'a b' }), 'b');
      assertions.hasClass(element('div', { class: 'a b c' }), 'b');
      assertions.hasClass(element('div', { class: 'a b c' }), 'c');
    });
    it('should throw when the class name is missing', fail(function () {
      assertions.hasClass(element('div', { class: 'a' }), 'b');
    }));
    it('should throw when the node has no class names', fail(function () {
      assertions.hasClass(element('div'), 'b');
    }));
    // Only string class attributes are supported; object form must throw.
    it('should throw when the class is not a string', fail(function () {
      assertions.hasClass(element('div', { class: { a: true, b: false } }), 'a');
    }));
  });

  describe('.notHasClass(node, name)', function () {
    it('should throw when missing the node', fail(function () {
      assertions.notHasClass();
    }));
    it('should throw for objects that are not virtual nodes', fail(function () {
      assertions.notHasClass({});
    }));
    it('should throw when missing the class name', fail(function () {
      assertions.notHasClass(element('div'));
    }));
    it('should throw when the class name is present', fail(function () {
      assertions.notHasClass(element('div', { class: 'a' }), 'a');
    }));
    it('should not throw when the class name is missing', function () {
      assertions.notHasClass(element('div', { class: 'a' }), 'b');
    });
    it('should not throw when the node has no class names', function () {
      assertions.notHasClass(element('div'), 'b');
    });
  });

  describe('.hasChildren(node, children)', function () {
    it('should throw when missing the node', fail(function () {
      assertions.hasChildren();
    }));
    it('should throw for objects that are not virtual nodes', fail(function () {
      assertions.hasChildren({});
    }));
    it('should throw when there are no children', fail(function () {
      assertions.hasChildren(element('div'));
    }));
    it('should not throw when there are children', function () {
      assertions.hasChildren(element('div', null, 'hello world'));
    });
    // Number argument asserts child count.
    it('should not throw when the number of children matches', function () {
      assertions.hasChildren(element('div', null, 'a'), 1);
      assertions.hasChildren(element('div', null, 'a', 'b'), 2);
    });
    it('should throw when the number of children does not match', fail(function () {
      assertions.hasChildren(element('div', null, 'a'), 0);
      assertions.hasChildren(element('div', null, 'a', 'b'), 1);
    }));
    // Array argument asserts deep equality of the children list.
    it('should not throw when the array of children matches', function () {
      assertions.hasChildren(element('div', null, 'a', 'b'), [ 'a', 'b' ]);
    });
    it('should throw when the array of children does not match', fail(function () {
      assertions.hasChildren(element('div', null, 'a', 'b'), [ 'a' ]);
    }));
    it('should treat a string argument as a single child array', function () {
      assertions.hasChildren(element('div', null, 'a'), 'a');
    });
    it('should throw if the children does not match the single argument', fail(function () {
      assertions.hasChildren(element('div', null, 'a'), 'b');
    }));
    it('should throw if there are multiple children and a single argument is passed', fail(function () {
      assertions.hasChildren(element('div', null, 'a', 'b'), 'a');
    }));
    // Function argument is invoked per child; any throw fails the assertion.
    it('should not throw when the fn does not throw for any node', function () {
      assertions.hasChildren(element('div', null, 'a'), test);
      function test(child) {}
    });
    it('should throw when the fn throws for any node', fail(function () {
      assertions.hasChildren(element('div', null, 'a', 'b'), test);
      function test(child) { throw new Error('fail'); }
    }));
  });

  describe('.notHasChildren(node)', function () {
    it('should throw when missing the node', fail(function () {
      assertions.notHasChildren();
    }));
    it('should throw for objects that are not virtual nodes', fail(function () {
      assertions.notHasChildren({});
    }));
    it('should not throw when there are no children', function () {
      assertions.notHasChildren(element('div'));
    });
    it('should not throw when there are children', fail(function () {
      assertions.notHasChildren(element('div', null, 'hello world'));
    }));
  });

  describe('.hasChild(node, index, [fn])', function () {
    it('should throw when missing the node', fail(function () {
      assertions.hasChild();
    }));
    it('should throw for objects that are not virtual nodes', fail(function () {
      assertions.hasChild({});
    }));
    it('should throw when there are no children', fail(function () {
      assertions.hasChild(element('div'));
    }));
    it('should throw when an index is not provided', fail(function () {
      assertions.hasChild(element('div', null, 'hello world'));
    }));
    it('should throw when a negative index is provided', fail(function () {
      assertions.hasChild(element('div', null, 'hello world'), -1);
    }));
    it('should throw when a child at the given index does not exist', fail(function () {
      assertions.hasChild(element('div', null, 'a'), 1);
    }));
    it('should not throw when there are children and an index is provided', function () {
      assertions.hasChild(element('div', null, 'hello world'), 0);
    });
    // An array index drills down one level per element.
    context('with array for index', function () {
      var root = element('ul', null, [
        element('li', null, element('a', { href: 'http://example.com/' })),
        element('li', null, element('a', { href: 'http://example.org/' }))
      ]);
      it('should not throw when it finds the right child', function () {
        assertions.hasChild(root, [ 0, 0 ]);
        assertions.hasChild(root, [ 1, 0 ]);
      });
      it('should throw when it cannot find a child', fail(function () {
        assertions.hasChild(root, [ 2, 0 ]);
      }));
    });
  });

  describe('.hasChild(node, index, criteria)', function () {
    describe('criteria is not a function', function() {
      it('should not throw when the deep comparison succeeds', function() {
        assertions.hasChild(element('div', null, 'a', 'b'), 0, 'a');
        assertions.hasChild(element('div', null, 'a', 'b'), 1, 'b');
      });
      it('should throw when the deep comparison fails', fail(function() {
        assertions.hasChild(element('div', null, 'a', 'b'), 0, 'b');
      }));
      it('should throw when the criteria is falsy and does not match', fail(function () {
        assertions.hasChild(element('div', null, 'a', 'b'), 0, null);
      }));
      context('with array for index', function () {
        var root = element('ul', null, [
          element('li', null, element('b', null, 'Hello')),
          element('li', null, element('span', null, 'World'))
        ]);
        it('should not throw when it finds the right child', function () {
          assertions.hasChild(root, [ 0, 0, 0 ], 'Hello');
          assertions.hasChild(root, [ 1, 0, 0 ], 'World');
        });
        it('should throw when it cannot find a child', fail(function () {
          assertions.hasChild(root, [ 2, 0, 0 ], 'Hello');
        }));
        it('should throw when deep comparison fails', fail(function () {
          assertions.hasChild(root, [ 1, 0, 0 ], 'Hello');
        }));
      });
    });
    describe('criteria is a function', function() {
      it('should throw when `criteria` throws', fail(function () {
        assertions.hasChild(element('div', null, 'a', 'b'), 0, test);
        function test(child) { throw new Error('fail'); }
      }));
      it('should not throw when `criteria` does not throw', function () {
        assertions.hasChild(element('div', null, 'a'), 0, test);
        function test(child) {}
      });
      context('with array for index', function () {
        var root = element('ul', null, [
          element('li', null, element('b', null, 'Hello')),
          element('li', null, element('span', null, 'World'))
        ]);
        it('should not throw when `criteria` does not throw', function () {
          assertions.hasChild(root, [ 0, 0, 0 ], test);
          function test(child) {}
        });
        it('should throw when `criteria throws`', fail(function () {
          assertions.hasChild(root, [ 2, 0, 0 ], test);
          function test(child) { throw new Error('fail'); }
        }));
      });
    });
  });
});

// No-op placeholder used where an attribute value must be a function.
function noop() {}

// Wrap `fn` so the returned test body asserts that calling `fn` throws an Error.
function fail(fn) {
  return function () {
    assert.throws(fn, Error);
  };
}
#!/bin/bash
# Usage: deinterleave_fastq.sh < interleaved.fastq f.fastq r.fastq [compress]
#
# Deinterleaves a FASTQ file of paired reads into two FASTQ
# files specified on the command line. Optionally GZip compresses the output
# FASTQ files using pigz if the 3rd command line argument is the word "compress"
#
# Latest code: https://gist.github.com/3521724
# Also see my interleaving script: https://gist.github.com/4544979
#
# Inspired by Torsten Seemann's blog post:
# http://thegenomefactory.blogspot.com.au/2012/05/cool-use-of-unix-paste-with-ngs.html

# Both output file names are required; the interleaved FASTQ arrives on stdin.
# Exit nonzero on usage error so callers/pipelines can detect it.
if [[ -z "$2" ]]; then
    echo "Usage: deinterleave_fastq.sh < interleaved.fastq f.fastq r.fastq [compress]"
    exit 1
fi

# Set up some defaults
GZIP_OUTPUT=0
PIGZ_COMPRESSION_THREADS=10

# If the third argument is the word "compress" then we'll compress the output using pigz
if [[ $3 == "compress" ]]; then
    # NOTE(review): site-specific environment setup (presumably makes pigz
    # available) — confirm this path exists on the target host.
    source ~/conda_source
    GZIP_OUTPUT=1
fi

# paste collapses each 8-line record pair (4 lines forward + 4 lines reverse)
# onto one tab-separated row; tee forks the stream so fields 1-4 (forward read)
# and fields 5-8 (reverse read) are written out concurrently.
if [[ ${GZIP_OUTPUT} == 0 ]]; then
    paste - - - - - - - - \
        | tee >(cut -f 1-4 | tr "\t" "\n" > "$1") \
        | cut -f 5-8 | tr "\t" "\n" > "$2"
else
    paste - - - - - - - - \
        | tee >(cut -f 1-4 | tr "\t" "\n" | pigz --best --processes ${PIGZ_COMPRESSION_THREADS} > "$1") \
        | cut -f 5-8 | tr "\t" "\n" | pigz --best --processes ${PIGZ_COMPRESSION_THREADS} > "$2"
fi
#!/usr/bin/env bash # ARG_OPTIONAL_BOOLEAN([ci],[],[Enable CI mode. Do not use tmux, but report exit code.]) # ARG_POSITIONAL_DOUBLEDASH([]) # ARG_LEFTOVERS([command]) # ARG_DEFAULTS_POS([]) # ARGBASH_GO() # needed because of Argbash --> m4_ignore([ ### START OF CODE GENERATED BY Argbash v2.10.0 one line above ### # Argbash is a bash code generator used to get arguments parsing right. # Argbash is FREE SOFTWARE, see https://argbash.io for more info die() { local _ret="${2:-1}" test "${_PRINT_HELP:-no}" = yes && print_help >&2 echo "$1" >&2 exit "${_ret}" } begins_with_short_option() { local first_option all_short_options='' first_option="${1:0:1}" test "$all_short_options" = "${all_short_options/$first_option/}" && return 1 || return 0 } # THE DEFAULTS INITIALIZATION - POSITIONALS _positionals=() _arg_leftovers=() # THE DEFAULTS INITIALIZATION - OPTIONALS _arg_ci="off" print_help() { printf 'Usage: %s [--(no-)ci] [--] ... \n' "$0" printf '\t%s\n' "... : command" printf '\t%s\n' "--ci, --no-ci: Enable CI mode. Do not use tmux, but report exit code. (off by default)" } parse_commandline() { _positionals_count=0 while test $# -gt 0 do _key="$1" if test "$_key" = '--' then shift test $# -gt 0 || break _positionals+=("$@") _positionals_count=$((_positionals_count + $#)) shift $(($# - 1)) _last_positional="$1" break fi case "$_key" in --no-ci|--ci) _arg_ci="on" test "${1:0:5}" = "--no-" && _arg_ci="off" ;; *) _last_positional="$1" _positionals+=("$_last_positional") _positionals_count=$((_positionals_count + 1)) ;; esac shift done } assign_positional_args() { local _positional_name _shift_for=$1 _positional_names="" _our_args=$((${#_positionals[@]} - 0)) for ((ii = 0; ii < _our_args; ii++)) do _positional_names="$_positional_names _arg_leftovers[$((ii + 0))]" done shift "$_shift_for" for _positional_name in ${_positional_names} do test $# -gt 0 || break eval "$_positional_name=\${1}" || die "Error during argument parsing, possibly an Argbash bug." 
1 shift done } parse_commandline "$@" assign_positional_args 1 "${_positionals[@]}" # OTHER STUFF GENERATED BY Argbash ### END OF CODE GENERATED BY Argbash (sortof) ### ])
#include <iostream>
#include <vector>
using namespace std;

// Check whether one element of arr1 plus one element of arr2 equals target.
// Precondition: both arrays are sorted in ascending order (as in the driver).
//
// Two-pointer scan in O(n + m): i walks arr1 upward from its smallest
// element, j walks arr2 downward from its largest. When the sum is too big
// we need a smaller arr2 element (j--); too small, a larger arr1 element (i++).
//
// BUG FIX: the original advanced j *forward* on sum > target, which only
// makes the sum larger for an ascending arr2 and misses valid pairs
// (e.g. target 8 with {1,3,5}/{2,6,7}: 1+7 == 8 was never found).
// Vectors are now passed by const reference to avoid copying.
bool isPossible(const vector<int>& arr1, const vector<int>& arr2, int target)
{
    int i = 0;
    int j = static_cast<int>(arr2.size()) - 1;

    while (i < static_cast<int>(arr1.size()) && j >= 0) {
        const int sum = arr1[i] + arr2[j];
        if (sum == target)
            return true;
        if (sum > target)
            j--; // sum too large: try a smaller arr2 element
        else
            i++; // sum too small: try a larger arr1 element
    }
    return false;
}

// Driver code
int main()
{
    vector<int> arr1 = {1, 3, 5};
    vector<int> arr2 = {2, 6, 7};
    int target = 4;

    // No pair sums to 4, so this prints "False" (unchanged behavior).
    if (isPossible(arr1, arr2, target))
        cout << "True";
    else
        cout << "False";

    return 0;
}
from imports import Resources, request
from __main__ import app, db

# resources: Flask CRUD routes delegating to the project's Resources service #


@app.route('/resources', methods=['POST'])
def resources_post():
    # Create a resource from the JSON request body.
    return Resources(db).post(request.json)


@app.route('/resources', methods=['GET'])
def resources_get():
    # List all resources.
    return Resources(db).get_all()


@app.route('/resources/<int:topic_id>', methods= ['GET'])
def resource_get(topic_id):
    # Fetch a single resource by id.
    return Resources(db).get_one(topic_id)


@app.route('/resources/<int:topic_id>', methods=['PUT'])
def resource_put(topic_id):
    # Update a resource.
    # NOTE(review): unlike POST, the request body is not forwarded here —
    # confirm Resources.put reads `request` itself.
    return Resources(db).put(topic_id)


@app.route('/resources/<int:topic_id>', methods=['DELETE'])
def resource_delete(topic_id):
    # Delete a resource by id.
    return Resources(db).delete(topic_id)
# GraphQL object type describing a playlist and its associated tracks.
PlaylistType = GraphQL::ObjectType.define do
  name "Playlist"

  field :id, types.ID, "Playlist id"
  field :name, types.String, "Playlist name"
  field :image, types.String, "Playlist cover image"
  field :plays, types.Int, "Times the playlist has been played"
  field :year, types.Int, "Year the playlist was created"

  field :tracks, TrackType.to_list_type do
    description "Tracks related to the playlist"
    # Resolve through AssociationLoader so track fetches are batched
    # (presumably to avoid N+1 queries — verify against the loader).
    resolve -> (obj, args, ctx) {
      AssociationLoader.for(Track, :tracks).load(obj)
    }
  end
end
#include "AgentServiceImp.h" #include "AgentServer.h" #include "PlayerBase.h" #include "WorkerOperateHelper.h" CAgentServiceImp::CAgentServiceImp( const std::string& strServerBind, const std::string& servantAddress, const std::string& serverName, uint16_t serverId, CreatePlayerMethod createPlayerMethod /*= NULL*/, ListInterestsMethod listProtoMethod /*= NULL*/, ListInterestsMethod listNotifMethod /*= NULL*/) :CWorkerServiceImp(strServerBind, servantAddress, serverName, serverId, createPlayerMethod, listProtoMethod, listNotifMethod) { } CAgentServiceImp::~CAgentServiceImp(void) { } void CAgentServiceImp::SendToClient(const ::node::DataPacket& request, ::rpcz::reply< ::node::DataPacket> response) { CAgentServer::PTR_T pAgentServer(CAgentServer::Pointer()); bool rt = pAgentServer->SendToClient(request.route(), request); ::node::DataPacket dataPacket; dataPacket.set_cmd(request.cmd()); dataPacket.set_result(rt); response.send(dataPacket); } void CAgentServiceImp::CloseClient(const ::node::DataPacket& request, ::rpcz::reply< ::node::DataPacket> response) { CAgentServer::PTR_T pAgentServer(CAgentServer::Pointer()); pAgentServer->CloseClient(request.route()); ::node::DataPacket dataPacket; dataPacket.set_cmd(request.cmd()); dataPacket.set_result(TRUE); response.send(dataPacket); } void CAgentServiceImp::SendToWorker(const ::node::DataPacket& request, ::rpcz::reply< ::node::DataPacket> response) { ::node::DataPacket dspResponse; CChannelManager::PTR_T pChlMgr(CChannelManager::Pointer()); util::CAutoPointer<CPlayerBase> pPlayer(pChlMgr->GetPlayer(request.route())); if(pPlayer.IsInvalid()) { SendWorkerNotification(request, dspResponse, pPlayer); } else { CScopedPlayerMutex scopedPlayerMutex(pPlayer); SendWorkerNotification(request, dspResponse, pPlayer); } dspResponse.set_cmd(request.cmd()); response.send(dspResponse); } void CAgentServiceImp::KickLogged(const ::node::DataPacket& request, ::rpcz::reply< ::node::DataPacket> response) { ::node::DataPacket dataPacket; 
dataPacket.set_cmd(request.cmd()); CAgentServer::PTR_T pAgentServer(CAgentServer::Pointer()); if(pAgentServer->KickLogged(request.route())) { dataPacket.set_result(TRUE); } else { dataPacket.set_result(FALSE); } response.send(dataPacket); }
package vehicle

import (
	"fmt"
	"strings"
	"time"

	"github.com/evcc-io/evcc/api"
	"github.com/evcc-io/evcc/util"
	"github.com/evcc-io/evcc/util/request"
	"github.com/evcc-io/evcc/vehicle/id"
)

// https://github.com/TA2k/ioBroker.vw-connect

// ID is an api.Vehicle implementation for ID cars
type ID struct {
	*embed
	*id.Provider // provides the api implementations
}

func init() {
	registry.Add("id", NewIDFromConfig)
}

// NewIDFromConfig creates a new vehicle
func NewIDFromConfig(other map[string]interface{}) (api.Vehicle, error) {
	// Decode the generic config map; Cache/Timeout fall back to package
	// defaults (interval, request.Timeout) when not configured.
	cc := struct {
		embed               `mapstructure:",squash"`
		User, Password, VIN string
		Cache               time.Duration
		Timeout             time.Duration
	}{
		Cache:   interval,
		Timeout: request.Timeout,
	}

	if err := util.DecodeOther(other, &cc); err != nil {
		return nil, err
	}

	v := &ID{
		embed: &cc.embed,
	}

	// Redact credentials and VIN from log output.
	log := util.NewLogger("id").Redact(cc.User, cc.Password, cc.VIN)

	ts := id.NewIdentity(log, cc.User, cc.Password)

	err := ts.Login()
	if err != nil {
		return v, fmt.Errorf("login failed: %w", err)
	}

	api := id.NewAPI(log, ts)
	api.Client.Timeout = cc.Timeout

	// No VIN configured: auto-detect it from the account's vehicle list.
	// On failure, err is carried through to the final return.
	if cc.VIN == "" {
		cc.VIN, err = findVehicle(api.Vehicles())
		if err == nil {
			log.DEBUG.Printf("found vehicle: %v", cc.VIN)
		}
	}

	v.Provider = id.NewProvider(api, strings.ToUpper(cc.VIN), cc.Cache)

	return v, err
}
<reponame>khaled-11/Botai // Function to handle the Postbacks // const CryptoJS = require("crypto-js"), callSendAPI = require("../messenger/callSendAPI"), rp = require('request-promise'), getPages = require('../database/get_page'), witResolve = require("../wit/resolve"), updateSent = require("../database/update_sent_events"), updateComments = require('../database/update_comments'); module.exports = async (id, changes) => { pageData = await getPages(id); if (pageData.Item){ if (pageData.Item.bot_type.S === "api"){ if (changes && changes[0] && changes[0].value){ var bytes = CryptoJS.AES.decrypt(pageData.Item.page_access_token.S, process.env.KEY); var token = bytes.toString(CryptoJS.enc.Utf8); if (changes[0].value.verb === "add"){ if (changes[0].value.item === "comment"){ if (changes[0].value.message){ var nlpData = await witResolve(changes[0].value.message, pageData.Item.wit_key.S); if (nlpData){ var myNLP = JSON.stringify(nlpData); } else { var myNLP = JSON.stringify({}) } } else { var myNLP = JSON.stringify({}) } if (changes[0].value.from.id !== id){ var state; try { body = {"new_comment":"Your page received the following event.","pageID": `${id}`, "senderID": `${changes[0].value.from.id}`, "userName":`${changes[0].value.from.name}`, "timestamp": `${Math.floor(Date.now() / 1000)}`, "eventType": `comment`, "value": `${changes[0].value.message}`, "nlp": `${myNLP}`} var options = { method: 'post', uri: `${pageData.Item.post_link.S}`, body: JSON.stringify(body), headers: { 'Content-Type': 'application/json' }, }; state = await rp(options); if (state.includes("{{user_first_name}}")){ state = state.replace("{{user_first_name}}", `${changes[0].value.from.name}`) } state = JSON.parse(state); if (state && state.token === pageData.Item.post_secret.S){ response = state.response.response; if (state.response.persona_id !== "none"){ persona_id = state.response.persona_id } else { persona_id = null; } st = await callSendAPI(null, response, null, null, token, persona_id, 
changes[0].value.comment_id); if (!st.recipient_id && state.secondaryResponse){ st2 = await callSendAPI(null, state.secondaryResponse, null, null, token, null, changes[0].value.comment_id); if (!st2.recipient_id){ resStat = "failed" data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"none"} newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); } else { resStat = "replaced" data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"replied"} newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); var myArray = []; if (state.secondaryResponse.text && state.secondaryResponse.quick_replies){ my_response_type = `Quick replies(${response.quick_replies.length}).` } else if (state.secondaryResponse.text){ my_response_type = "Text." 
} myArray[0] = [`${my_response_type}`, `Default`, `${resStat}`, `Secondary Response`] var sentEventData = [`${pageData.Item.pageID.S}`, `${pageData.Item.sent_list.L.length}`,`${changes[0].value.from.id}`, `${changes[0].value.created_time}`, "Comment", myArray] await updateSent(sentEventData); } } else if (!st.recipient_id && !state.secondaryResponse){ resStat = "failed" data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"none"} newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); } else { resStat = "success" data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"replied"} newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); if (response.text && response.quick_replies){ my_response_type = `Quick replies(${response.quick_replies.length})` } else if (response.text){ my_response_type = "Text" } else if (response.attachment && response.attachment.type ==="audio"){ my_response_type = "Audio attachment" } else if (response.attachment && response.attachment.type ==="video"){ my_response_type = "Video attachment" } else if (response.attachment && response.attachment.type ==="image"){ my_response_type = "Image attachment" } else if (response.attachment && response.attachment.type ==="file"){ my_response_type = "File attachment" } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "generic" ){ my_response_type = "Generic template" } else if (response.attachment && response.attachment.type 
==="template" && response.attachment.payload.template_type === "button" ){ my_response_type = "Button template" } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "media" ){ my_response_type = "Media template" } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "airline_boardingpass" ){ my_response_type = "Airline boardingpass template" } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "airline_checkin"){ my_response_type = "Airline check-in template" } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "airline_itinerary"){ my_response_type = "Airline itinerary template" } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "airline_update"){ my_response_type = "Airline update template" } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "receipt" ){ my_response_type = "Receipt template" } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "product" ){ my_response_type = "Product template" } var myArray = []; myArray[0] = [`${my_response_type}`, `${persona_id}`, `${resStat}`, `Custom Response`] var sentEventData = [`${pageData.Item.pageID.S}`, `${pageData.Item.sent_list.L.length}`,`${changes[0].value.from.id}`, `${changes[0].value.created_time}`, "Comment", myArray] await updateSent(sentEventData); } } else { data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"none"} newIndex = 
pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); } } catch (e){ data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"none"} newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); } } else { data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"none"} newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); } } } else if (changes[0].value.verb === "remove"){ data = {'comment_id':changes[0].value.comment_id} if (pageData.Item.comments.M[`${changes[0].value.comment_id}`]){ newIndex = pageData.Item.comments_count.N newIndex-- } else { newIndex = pageData.Item.comments_count.N newIndex++ newIndex-- } await updateComments(id,"remove",data, newIndex) } } } else if (pageData.Item.bot_type.S === "hosted"){ var bytes = CryptoJS.AES.decrypt(pageData.Item.page_access_token.S, process.env.KEY); var token = bytes.toString(CryptoJS.enc.Utf8); if (changes && changes[0] && changes[0].value){ if (changes[0].value.verb === "add"){ if (changes[0].value.item === "comment"){ var reply_name; if (pageData.Item.auto_reply.S === "false"){ if (changes[0].value.message){ data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"none"} } else { data = {'comment_id':changes[0].value.comment_id ,'message': "NO TEXT FOUND", 'from_id': 
changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"none"} } newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); } else if (pageData.Item.auto_reply.S === "auto_default"){ reply_name = pageData.Item.responses.M[`default_reply`].L[1].S if (pageData.Item.secondary_responses.M[`default_reply`]){ secondaryResponse = pageData.Item.secondary_responses.M[`default_reply`].L[0].S; if (secondaryResponse.includes("{{user_first_name}}")){ secondaryResponse = secondaryResponse.replace("{{user_first_name}}", `${changes[0].value.from.name}`) } } responses = pageData.Item.responses.M[`default_reply`].L[0].S; if (responses.includes("{{user_first_name}}")){ responses = responses.replace("{{user_first_name}}", `${changes[0].value.from.name}`) } if (changes[0].value.message){ data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"replied_+_Default_+_Default"} } else { data = {'comment_id':changes[0].value.comment_id ,'message': "NO TEXT FOUND", 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':"replied_+_Default_+_Default"} } newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); } else { if (changes[0].value.message){ var myNLP = await witResolve(changes[0].value.message, pageData.Item.wit_key.S); if (myNLP && myNLP.intents[0] && pageData.Item.responses.M[`${myNLP.intents[0].name}`]){ reply_name = pageData.Item.responses.M[`${myNLP.intents[0].name}`].L[1].S var responses; var secondaryResponse; if 
(pageData.Item.secondary_responses.M[`${myNLP.intents[0].name}`]){ secondaryResponse = pageData.Item.secondary_responses.M[`${myNLP.intents[0].name}`].L[0].S; if (secondaryResponse.includes("{{user_first_name}}")){ secondaryResponse = secondaryResponse.replace("{{user_first_name}}", `${changes[0].value.from.name}`) } } responses = pageData.Item.responses.M[`${myNLP.intents[0].name}`].L[0].S if (responses.includes("{{user_first_name}}")){ responses = responses.replace("{{user_first_name}}", `${changes[0].value.from.name}`) } data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':`replied_+_${myNLP.intents[0].name}_+_${pageData.Item.responses.M[`${myNLP.intents[0].name}`].L[1].S}`} newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); } else { reply_name = pageData.Item.responses.M[`default_reply`].L[1].S if (pageData.Item.secondary_responses.M[`default_reply`]){ secondaryResponse = pageData.Item.secondary_responses.M[`default_reply`].L[0].S; if (secondaryResponse.includes("{{user_first_name}}")){ secondaryResponse = secondaryResponse.replace("{{user_first_name}}", `${changes[0].value.from.name}`) } } responses = pageData.Item.responses.M[`default_reply`].L[0].S; if (responses.includes("{{user_first_name}}")){ responses = responses.replace("{{user_first_name}}", `${changes[0].value.from.name}`) } data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':`replied_+_Default_+_Default`} newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); } } else { reply_name = 
pageData.Item.responses.M[`default_reply`].L[1].S if (pageData.Item.secondary_responses.M[`default_reply`]){ secondaryResponse = pageData.Item.secondary_responses.M[`default_reply`].L[0].S; if (secondaryResponse.includes("{{user_first_name}}")){ secondaryResponse = secondaryResponse.replace("{{user_first_name}}", `${changes[0].value.from.name}`) } } responses = pageData.Item.responses.M[`default_reply`].L[0].S; if (responses.includes("{{user_first_name}}")){ responses = responses.replace("{{user_first_name}}", `${changes[0].value.from.name}`) } data = {'comment_id':changes[0].value.comment_id ,'message': changes[0].value.message, 'from_id': changes[0].value.from.id, 'from_name':changes[0].value.from.name, 'post_id':changes[0].value.post.id,'post_link':changes[0].value.post.permalink_url, 'reply_state':`replied_+_Default_+_Default`} newIndex = pageData.Item.comments_count.N; newIndex++; await updateComments(id,"add",data, newIndex); } } if (pageData.Item.auto_reply.S !== "false" && changes[0].value.from.id !== pageData.Item.pageID.S){ var secondaryFinalData; if (secondaryResponse){ secondaryFinalData = JSON.parse(secondaryResponse) } finalData = JSON.parse(responses) if (finalData.persona_id){ persona_id = finalData.persona_id; } else { persona_id = null; } var my_response_type = "" var resStat // Loop to send the main response for ( i = 0 ; i < finalData.response.length ; i++){ response = finalData.response[i].response; if (response.text && response.quick_replies){ my_response_type += `Quick replies(${response.quick_replies.length}), ` } else if (response.text){ my_response_type += "Text, " } else if (response.attachment && response.attachment.type ==="audio"){ my_response_type += "Audio attachment, " } else if (response.attachment && response.attachment.type ==="video"){ my_response_type += "Video attachment, " } else if (response.attachment && response.attachment.type ==="image"){ my_response_type += "Image attachment, " } else if (response.attachment && 
response.attachment.type ==="file"){ my_response_type += "File attachment, " } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "generic" ){ my_response_type += "Generic template, " } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "button" ){ my_response_type += "Button template, " } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "media" ){ my_response_type += "Media template, " } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "airline_boardingpass" ){ my_response_type += "Airline boardingpass template, " } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "airline_checkin"){ my_response_type += "Airline check-in template, " } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "airline_itinerary"){ my_response_type += "Airline itinerary template, " } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "airline_update"){ my_response_type += "Airline update template, " } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "receipt" ){ my_response_type += "Receipt template, " } else if (response.attachment && response.attachment.type ==="template" && response.attachment.payload.template_type === "product" ){ my_response_type += "Product template, " } st = await callSendAPI(null, response, null, null, token, persona_id, changes[0].value.comment_id); resStat = "success" if (!st.recipient_id && secondaryFinalData){ resStat = "replaced"; response = secondaryFinalData.response[i].response; st2 = await callSendAPI(null, 
response, null, null, token, null, changes[0].value.comment_id); if (!st2.recipient_id){ resStat = "failed" } } } var myArray = []; myArray[0] = [`${my_response_type.substring(0,my_response_type.length-2)}`, `${persona_id}`, `${resStat}`, `${reply_name}`] var sentRventData = [`${pageData.Item.pageID.S}`, `${pageData.Item.sent_list.L.length}`,`${changes[0].value.from.id}`, `${changes[0].value.created_time}`, "Comment", myArray] await updateSent(sentRventData); } } } else if (changes[0].value.verb === "remove"){ data = {'comment_id':changes[0].value.comment_id} if (pageData.Item.comments.M[`${changes[0].value.comment_id}`]){ newIndex = pageData.Item.comments_count.N newIndex-- } else { newIndex = pageData.Item.comments_count.N newIndex++ newIndex-- } await updateComments(id,"remove",data, newIndex) } } } } }
//
//  Pod.h
//  Pod
//
//  Created by 张星宇 on 2017/1/8.
//  Copyright © 2017 bestswifter. All rights reserved.
//

#import <Foundation/Foundation.h>

// Placeholder class for the Pod module; it declares no API of its own.
// NOTE(review): intentionally empty — presumably kept so the pod exposes at
// least one public header/symbol; confirm before removing.
@interface Pod : NSObject

@end
package weixin.popular.bean.card.update;

import java.util.HashMap;
import java.util.Map;

import com.alibaba.fastjson.annotation.JSONField;

/**
 * Member-card information update (WeChat card API request payload).
 * Field names are bound to the wire format via fastjson {@code @JSONField}
 * annotations. All setters return {@code this} so calls can be chained.
 *
 * @author zhongmin
 */
public class UpdateMember extends AbstractUpdate {

	// Card code identifying the member card to update.
	@JSONField(name = "code")
	private String code;

	@JSONField(name = "background_pic_url")
	private String backgroundPicUrl;

	// Free-text note describing the bonus (points) change.
	@JSONField(name = "record_bonus")
	private String recordBonus;

	// Absolute bonus value; addBonus below is a delta instead.
	@JSONField(name = "bonus")
	private Integer bonus;

	@JSONField(name = "add_bonus")
	private Integer addBonus;

	// Absolute balance value; addBalance below is a delta instead.
	@JSONField(name = "balance")
	private Integer balance;

	@JSONField(name = "add_balance")
	private Integer addBalance;

	// Free-text note describing the balance change.
	@JSONField(name = "record_balance")
	private String recordBalance;

	@JSONField(name = "custom_field_value1")
	private String customFieldValue1;

	@JSONField(name = "custom_field_value2")
	private String customFieldValue2;

	// Optional notification switches serialized under "notify_optional";
	// populated through setIsNotifyBonus / setIsNotifyBalance below.
	@JSONField(name = "notify_optional")
	private Map<String,Object> notifyOptional = new HashMap<String,Object>();

	public String getCode() {
		return code;
	}

	public UpdateMember setCode(String code) {
		this.code = code;
		return this;
	}

	public String getBackgroundPicUrl() {
		return backgroundPicUrl;
	}

	public UpdateMember setBackgroundPicUrl(String backgroundPicUrl) {
		this.backgroundPicUrl = backgroundPicUrl;
		return this;
	}

	public String getRecordBonus() {
		return recordBonus;
	}

	public UpdateMember setRecordBonus(String recordBonus) {
		this.recordBonus = recordBonus;
		return this;
	}

	public Integer getBonus() {
		return bonus;
	}

	public UpdateMember setBonus(Integer bonus) {
		this.bonus = bonus;
		return this;
	}

	public Integer getAddBonus() {
		return addBonus;
	}

	public UpdateMember setAddBonus(Integer addBonus) {
		this.addBonus = addBonus;
		return this;
	}

	public Integer getBalance() {
		return balance;
	}

	public UpdateMember setBalance(Integer balance) {
		this.balance = balance;
		return this;
	}

	public Integer getAddBalance() {
		return addBalance;
	}

	public UpdateMember setAddBalance(Integer addBalance) {
		this.addBalance = addBalance;
		return this;
	}

	public String getRecordBalance() {
		return recordBalance;
	}

	public UpdateMember setRecordBalance(String recordBalance) {
		this.recordBalance = recordBalance;
		return this;
	}

	public String getCustomFieldValue1() {
		return customFieldValue1;
	}

	public UpdateMember setCustomFieldValue1(String customFieldValue1) {
		this.customFieldValue1 = customFieldValue1;
		return this;
	}

	public String getCustomFieldValue2() {
		return customFieldValue2;
	}

	public UpdateMember setCustomFieldValue2(String customFieldValue2) {
		this.customFieldValue2 = customFieldValue2;
		return this;
	}

	public Map<String, Object> getNotifyOptional() {
		return notifyOptional;
	}

	public UpdateMember setNotifyOptional(Map<String, Object> notifyOptional) {
		this.notifyOptional = notifyOptional;
		return this;
	}

	// Convenience switch: whether the member is notified of bonus changes.
	public UpdateMember setIsNotifyBonus(boolean isNotifyBonus) {
		this.notifyOptional.put("is_notify_bonus", isNotifyBonus);
		return this;
	}

	// Convenience switch: whether the member is notified of balance changes.
	public UpdateMember setIsNotifyBalance(boolean isNotifyBalance) {
		this.notifyOptional.put("is_notify_balance", isNotifyBalance);
		return this;
	}

}
package builder

import (
	"context"
	"fmt"
	"io/ioutil"
	"os"
	"os/exec"
	"path/filepath"

	"github.com/hashicorp/packer/helper/multistep"
	"github.com/hashicorp/packer/packer"
	"github.com/mholt/archiver"
)

// StepExtractAndCopyImage creates filesystem on already partitioned image
type StepExtractAndCopyImage struct {
	// FromKey is the state-bag key under which the archive path is stored.
	FromKey string
}

// Run copies the downloaded archive into a scratch directory, unpacks it
// (unless it is already a raw img/iso), and moves the single resulting file
// to config.ImageConfig.ImagePath. It halts the build on any error.
func (s *StepExtractAndCopyImage) Run(ctx context.Context, state multistep.StateBag) multistep.StepAction {
	ui := state.Get("ui").(packer.Ui)
	config := state.Get("config").(*Config)
	archivePath := state.Get(s.FromKey).(string)

	var err error
	var out []byte

	// step 1: create temporary dir
	dir, err := ioutil.TempDir("", "image")
	if err != nil {
		ui.Error(fmt.Sprintf("error while creating temporary directory %v", err))
		return multistep.ActionHalt
	}
	defer os.RemoveAll(dir)

	// step 2: copy downloaded archive to temporary dir
	dst := filepath.Join(dir, filepath.Base(archivePath))
	out, err = exec.Command("cp", "-rf", archivePath, dst).CombinedOutput()
	if err != nil {
		ui.Error(fmt.Sprintf("error while copying file %v: %s", err, out))
		return multistep.ActionHalt
	}

	// skip unarchive logic if provided raw image (steps: 3&4)
	if config.RemoteFileConfig.TargetExtension == "img" || config.RemoteFileConfig.TargetExtension == "iso" {
		ui.Message("using raw image")
	} else {
		// step 3: unarchive file within temporary dir
		ui.Message(fmt.Sprintf("unpacking %s to %s", archivePath, config.ImageConfig.ImagePath))
		if len(config.RemoteFileConfig.FileUnarchiveCmd) != 0 {
			// Substitute the $ARCHIVE_PATH / $TMP_DIR placeholders in the
			// user-provided unarchive command, leaving other args untouched.
			cmd := make([]string, len(config.RemoteFileConfig.FileUnarchiveCmd))
			vars := map[string]string{
				"$ARCHIVE_PATH": dst,
				"$TMP_DIR":      dir,
			}
			for i, elem := range config.RemoteFileConfig.FileUnarchiveCmd {
				if _, ok := vars[elem]; ok {
					cmd[i] = vars[elem]
				} else {
					cmd[i] = elem
				}
			}
			// Fixed typo in the user-visible message ("comand" -> "command").
			ui.Message(fmt.Sprintf("unpacking with custom command: %s", cmd))
			out, err = exec.Command(cmd[0], cmd[1:]...).CombinedOutput()
		} else {
			out, err = []byte("N/A"), archiver.Unarchive(archivePath, dir)
		}
		if err != nil {
			ui.Error(fmt.Sprintf("error while unpacking %v: %s", err, out))
			return multistep.ActionHalt
		}

		// step 4: if previously copied archive still exists, lets remove it
		if _, err := os.Stat(dst); err == nil {
			os.RemoveAll(dst)
		}
	}

	// step 5: we expect only one file in the directory
	files, err := ioutil.ReadDir(dir)
	if err != nil {
		ui.Error(fmt.Sprintf("error while reading temporary directory %v", err))
		return multistep.ActionHalt
	}
	if len(files) != 1 {
		ui.Error(fmt.Sprintf("only one file is expected to be present after unarchiving, found: %d", len(files)))
		return multistep.ActionHalt
	}

	// step 6: move single file to destination (as image)
	out, err = exec.Command("mv", filepath.Join(dir, files[0].Name()), config.ImageConfig.ImagePath).CombinedOutput()
	if err != nil {
		ui.Error(fmt.Sprintf("error while copying file %v: %s", err, out))
		return multistep.ActionHalt
	}

	return multistep.ActionContinue
}

// Cleanup after step execution; nothing to clean because the temporary
// directory is removed by the deferred os.RemoveAll in Run.
func (s *StepExtractAndCopyImage) Cleanup(state multistep.StateBag) {}
package algorithm_400

import "strconv"

// compress run-length-encodes chars in place (LeetCode 443 semantics): each
// run of identical bytes is rewritten as the byte followed by its decimal
// run length whenever the run is longer than one. It returns the length of
// the encoded prefix; bytes beyond that index are left unspecified.
func compress(chars []byte) int {
	if len(chars) < 2 {
		return len(chars)
	}
	write := 0
	for start := 0; start < len(chars); {
		// Find the end of the current run of chars[start].
		end := start
		for end < len(chars) && chars[end] == chars[start] {
			end++
		}
		// Emit the run's byte, then its length digits if the run repeats.
		chars[write] = chars[start]
		write++
		if runLen := end - start; runLen > 1 {
			for _, digit := range strconv.Itoa(runLen) {
				chars[write] = byte(digit)
				write++
			}
		}
		start = end
	}
	return write
}
import React, { Component } from 'react'; import { Button, Modal, ModalHeader, ModalBody, ModalFooter } from 'reactstrap' import './ROM.css'; export class ROMCore extends Component { constructor(props) { super(props); this.handlesetupsubmit = this. handlesetupsubmit.bind(this); this.state = { setupdrop: false, } } static defaultProps = { name: "ROM", ClassName: "ROM", size: [3, 3, 3, 3] } handleClicksetupdrop = () => { this.setState({ setupdrop: !this.state.setupdrop, }) } handlesetupsubmit = (event) => { let reader = new FileReader(); reader.readAsText(event.target.files[0]); reader.onload = () => { let origin_data = reader.result; let result_data = handletxt(origin_data); this.props.setcuroutput(result_data); } } display = (result_data) => { let len_space = " "; return result_data.slice(0,4) + len_space + result_data.slice(4,8) + len_space + result_data.slice(8,12) + len_space + result_data.slice(12,16); } render() { return ( <div> <Button id="setupbutton" onClick={this.handleClicksetupdrop}>setup</Button> <Modal isOpen={this.state.setupdrop}> <ModalHeader>setup</ModalHeader> <ModalBody> <form> <input type="file" accept="text/plain" onChange={this.handlesetupsubmit} /> <br /><br /> <Button disabled id="display_output">{this.display(this.props.curoutput.slice(0,16))}</Button> <br /><br /> <Button disabled id="display_output">{this.display(this.props.curoutput.slice(16,32))}</Button> </form> </ModalBody> <ModalFooter> <Button color="info" onClick={this.handleClicksetupdrop}>确认</Button> </ModalFooter> </Modal> </div> ); } } function handletxt(origin_data) { let temp_data = origin_data.replace(/\s/g, ""); if (temp_data[0] === "'") { if (!((temp_data[1] === "b") || (temp_data[1] === "d") || (temp_data[1] === "h"))) { return "请输入正确的格式:第二位用 b/d/h 表示进制"; } else { let temp_num_data = temp_data.slice(2); if (!(/^\d+$/.test(temp_num_data))) { return "请输入正确的格式:除前两位外只允许输入数字"; } else { switch (temp_data[1]) { case "b": { temp_num_data = temp_num_data; break; } case "d": { 
temp_num_data = parseInt(temp_num_data).toString(2); break; } case "h": { temp_num_data = hex_to_bin(temp_num_data); break; } } let data_length = temp_num_data.length; if (data_length > 32) { return "请输入正确的格式:数据不超过32位2进制数"; } else { return "0".repeat(32 - data_length) + temp_num_data; } } } } else { return "请输入正确的格式:以 ' 开头"; } } function hex_to_bin(str) { let hex_array = [{ key: 0, val: "0000" }, { key: 1, val: "0001" }, { key: 2, val: "0010" }, { key: 3, val: "0011" }, { key: 4, val: "0100" }, { key: 5, val: "0101" }, { key: 6, val: "0110" }, { key: 7, val: "0111" }, { key: 8, val: "1000" }, { key: 9, val: "1001" }, { key: 'a', val: "1010" }, { key: 'b', val: "1011" }, { key: 'c', val: "1100" }, { key: 'd', val: "1101" }, { key: 'e', val: "1110" }, { key: 'f', val: "1111" }] let value = "" for (let i = 0; i < str.length; i++) { for (let j = 0; j < hex_array.length; j++) { if (str.charAt(i).toLowerCase() == hex_array[j].key) { value = value.concat(hex_array[j].val) break } } } console.log(value) return value }
package com.piercelbrooks.mlkit.common;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera.CameraInfo;
import androidx.annotation.Nullable;
import android.util.Log;

import com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata;

import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;

/** Utils functions for bitmap conversions. */
public class BitmapUtils {

  /**
   * Converts a packed 4-bytes-per-pixel buffer into an opaque ARGB_8888
   * bitmap, treating bytes i, i+1, i+2 as R, G, B and skipping the 4th byte.
   * Returns null if the buffer holds less than one pixel.
   *
   * NOTE(review): the original comment claimed NV21 input, but the code reads
   * the buffer as 4-byte packed pixels, not YUV — confirm the producer's
   * actual format.
   */
  @Nullable
  public static Bitmap getBitmap(ByteBuffer data, FrameMetadata metadata) {
    byte[] bytes = data.array();
    if (bytes.length < 4) {
      return null;
    }
    int alpha = 255;
    int[] colors = new int[bytes.length / 4];
    for (int i = 0; i < bytes.length - 3; i += 4) {
      // Mask each byte to 0..255 before shifting: Java bytes are signed, so
      // the original unmasked shifts sign-extended any value >= 0x80 and the
      // OR smeared 1-bits across the alpha and higher channels.
      colors[i / 4] =
          (alpha << 24)
              | ((bytes[i] & 0xFF) << 16)
              | ((bytes[i + 1] & 0xFF) << 8)
              | (bytes[i + 2] & 0xFF);
    }
    return Bitmap.createBitmap(colors, metadata.getWidth(), metadata.getHeight(), Bitmap.Config.ARGB_8888);
  }

  /**
   * Rotates a bitmap converted from a byte buffer back to upright, mirroring
   * along the X axis for front-facing cameras so the preview is not flipped.
   *
   * @param bitmap   source bitmap
   * @param rotation one of the FirebaseVisionImageMetadata.ROTATION_* values
   * @param facing   CameraInfo.CAMERA_FACING_BACK or _FRONT
   */
  private static Bitmap rotateBitmap(Bitmap bitmap, int rotation, int facing) {
    Matrix matrix = new Matrix();
    int rotationDegree = 0;
    switch (rotation) {
      case FirebaseVisionImageMetadata.ROTATION_90:
        rotationDegree = 90;
        break;
      case FirebaseVisionImageMetadata.ROTATION_180:
        rotationDegree = 180;
        break;
      case FirebaseVisionImageMetadata.ROTATION_270:
        rotationDegree = 270;
        break;
      default:
        break;
    }

    // Rotate the image back to straight.
    matrix.postRotate(rotationDegree);
    if (facing == CameraInfo.CAMERA_FACING_BACK) {
      return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
    } else {
      // Mirror the image along X axis for front-facing camera image.
      matrix.postScale(-1.0f, 1.0f);
      return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
    }
  }
}
// Copyright 2016 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package concurrency

import (
	"context"
	"math"

	v3 "github.com/nxgtw/dp-etcd/clientv3"
)

// STM is an interface for software transactional memory.
type STM interface {
	// Get returns the value for a key and inserts the key in the txn's read set.
	// If Get fails, it aborts the transaction with an error, never returning.
	Get(key ...string) string
	// Put adds a value for a key to the write set.
	Put(key, val string, opts ...v3.OpOption)
	// Rev returns the revision of a key in the read set.
	Rev(key string) int64
	// Del deletes a key.
	Del(key string)

	// commit attempts to apply the txn's changes to the server.
	commit() *v3.TxnResponse
	reset()
}

// Isolation is an enumeration of transactional isolation levels which
// describes how transactions should interfere and conflict.
type Isolation int

const (
	// SerializableSnapshot provides serializable isolation and also checks
	// for write conflicts.
	SerializableSnapshot Isolation = iota
	// Serializable reads within the same transaction attempt return data
	// from the at the revision of the first read.
	Serializable
	// RepeatableReads reads within the same transaction attempt always
	// return the same data.
	RepeatableReads
	// ReadCommitted reads keys from any committed revision.
	ReadCommitted
)

// stmError safely passes STM errors through panic to the STM error channel.
type stmError struct{ err error }

// stmOptions collects the knobs set by the stmOption functions below.
type stmOptions struct {
	iso Isolation       // isolation level for the transaction
	ctx context.Context // context used to abort in-flight operations
	prefetch []string   // keys to fetch eagerly before apply runs
}

type stmOption func(*stmOptions)

// WithIsolation specifies the transaction isolation level.
func WithIsolation(lvl Isolation) stmOption {
	return func(so *stmOptions) { so.iso = lvl }
}

// WithAbortContext specifies the context for permanently aborting the transaction.
func WithAbortContext(ctx context.Context) stmOption {
	return func(so *stmOptions) { so.ctx = ctx }
}

// WithPrefetch is a hint to prefetch a list of keys before trying to apply.
// If an STM transaction will unconditionally fetch a set of keys, prefetching
// those keys will save the round-trip cost from requesting each key one by one
// with Get().
func WithPrefetch(keys ...string) stmOption {
	return func(so *stmOptions) { so.prefetch = append(so.prefetch, keys...) }
}

// NewSTM initiates a new STM instance, using serializable snapshot isolation by default.
func NewSTM(c *v3.Client, apply func(STM) error, so ...stmOption) (*v3.TxnResponse, error) {
	opts := &stmOptions{ctx: c.Ctx()}
	for _, f := range so {
		f(opts)
	}
	if len(opts.prefetch) != 0 {
		// Wrap apply so the prefetch keys are loaded into the read set
		// before the user's function runs.
		f := apply
		apply = func(s STM) error {
			s.Get(opts.prefetch...)
			return f(s)
		}
	}
	return runSTM(mkSTM(c, opts), apply)
}

// mkSTM builds the STM implementation matching the requested isolation
// level; the conflicts closure defines what the commit txn must re-check.
func mkSTM(c *v3.Client, opts *stmOptions) STM {
	switch opts.iso {
	case SerializableSnapshot:
		s := &stmSerializable{
			stm:      stm{client: c, ctx: opts.ctx},
			prefetch: make(map[string]*v3.GetResponse),
		}
		// Conflict if any read key changed, or any written key was modified
		// at or after the snapshot revision (first read + 1).
		s.conflicts = func() []v3.Cmp {
			return append(s.rset.cmps(), s.wset.cmps(s.rset.first()+1)...)
		}
		return s
	case Serializable:
		s := &stmSerializable{
			stm:      stm{client: c, ctx: opts.ctx},
			prefetch: make(map[string]*v3.GetResponse),
		}
		s.conflicts = func() []v3.Cmp { return s.rset.cmps() }
		return s
	case RepeatableReads:
		s := &stm{client: c, ctx: opts.ctx, getOpts: []v3.OpOption{v3.WithSerializable()}}
		s.conflicts = func() []v3.Cmp { return s.rset.cmps() }
		return s
	case ReadCommitted:
		s := &stm{client: c, ctx: opts.ctx, getOpts: []v3.OpOption{v3.WithSerializable()}}
		// Read-committed never conflicts; the txn always applies.
		s.conflicts = func() []v3.Cmp { return nil }
		return s
	default:
		panic("unsupported stm")
	}
}

type stmResponse struct {
	resp *v3.TxnResponse
	err  error
}

// runSTM drives the retry loop: reset, run apply, attempt commit; repeat on
// conflict. Aborts (stmError) raised via panic inside apply/commit are
// recovered and surfaced on the response channel.
func runSTM(s STM, apply func(STM) error) (*v3.TxnResponse, error) {
	outc := make(chan stmResponse, 1)
	go func() {
		defer func() {
			if r := recover(); r != nil {
				e, ok := r.(stmError)
				if !ok {
					// client apply panicked
					panic(r)
				}
				outc <- stmResponse{nil, e.err}
			}
		}()
		var out stmResponse
		for {
			s.reset()
			if out.err = apply(s); out.err != nil {
				break
			}
			if out.resp = s.commit(); out.resp != nil {
				break
			}
		}
		outc <- out
	}()
	r := <-outc
	return r.resp, r.err
}

// stm implements repeatable-read software transactional memory over etcd
type stm struct {
	client *v3.Client
	ctx    context.Context
	// rset holds read key values and revisions
	rset readSet
	// wset holds overwritten keys and their values
	wset writeSet
	// getOpts are the opts used for gets
	getOpts []v3.OpOption
	// conflicts computes the current conflicts on the txn
	conflicts func() []v3.Cmp
}

// stmPut pairs a written value with the op that will apply it on commit.
type stmPut struct {
	val string
	op  v3.Op
}

type readSet map[string]*v3.GetResponse

// add records one GetResponse per requested key from a batched txn fetch.
func (rs readSet) add(keys []string, txnresp *v3.TxnResponse) {
	for i, resp := range txnresp.Responses {
		rs[keys[i]] = (*v3.GetResponse)(resp.GetResponseRange())
	}
}

// first returns the store revision from the first fetch
func (rs readSet) first() int64 {
	ret := int64(math.MaxInt64 - 1)
	for _, resp := range rs {
		if rev := resp.Header.Revision; rev < ret {
			ret = rev
		}
	}
	return ret
}

// cmps guards the txn from updates to read set
func (rs readSet) cmps() []v3.Cmp {
	cmps := make([]v3.Cmp, 0, len(rs))
	for k, rk := range rs {
		cmps = append(cmps, isKeyCurrent(k, rk))
	}
	return cmps
}

type writeSet map[string]stmPut

// get returns the pending write for the first of keys present in the set,
// or nil when none of them has been written in this txn.
func (ws writeSet) get(keys ...string) *stmPut {
	for _, key := range keys {
		if wv, ok := ws[key]; ok {
			return &wv
		}
	}
	return nil
}

// cmps returns a cmp list testing no writes have happened past rev
func (ws writeSet) cmps(rev int64) []v3.Cmp {
	cmps := make([]v3.Cmp, 0, len(ws))
	for key := range ws {
		cmps = append(cmps, v3.Compare(v3.ModRevision(key), "<", rev))
	}
	return cmps
}

// puts is the list of ops for all pending writes
func (ws writeSet) puts() []v3.Op {
	puts := make([]v3.Op, 0, len(ws))
	for _, v := range ws {
		puts = append(puts, v.op)
	}
	return puts
}

// Get returns the pending write for a key if any, otherwise fetches it
// (or serves it from the read set cache).
func (s *stm) Get(keys ...string) string {
	if wv := s.wset.get(keys...); wv != nil {
		return wv.val
	}
	return respToValue(s.fetch(keys...))
}

func (s *stm) Put(key, val string, opts ...v3.OpOption) {
	s.wset[key] = stmPut{val, v3.OpPut(key, val, opts...)}
}

func (s *stm) Del(key string) { s.wset[key] = stmPut{"", v3.OpDelete(key)} }

func (s *stm) Rev(key string) int64 {
	if resp := s.fetch(key); resp != nil && len(resp.Kvs) != 0 {
		return resp.Kvs[0].ModRevision
	}
	return 0
}

// commit submits the writes guarded by the conflict comparisons; a nil
// return means the txn lost a conflict and the caller should retry.
func (s *stm) commit() *v3.TxnResponse {
	txnresp, err := s.client.Txn(s.ctx).If(s.conflicts()...).Then(s.wset.puts()...).Commit()
	if err != nil {
		panic(stmError{err})
	}
	if txnresp.Succeeded {
		return txnresp
	}
	return nil
}

// fetch batch-reads keys into the read set. If any requested key is already
// cached, its cached response is returned immediately without issuing a txn.
func (s *stm) fetch(keys ...string) *v3.GetResponse {
	if len(keys) == 0 {
		return nil
	}
	ops := make([]v3.Op, len(keys))
	for i, key := range keys {
		if resp, ok := s.rset[key]; ok {
			return resp
		}
		ops[i] = v3.OpGet(key, s.getOpts...)
	}
	txnresp, err := s.client.Txn(s.ctx).Then(ops...).Commit()
	if err != nil {
		panic(stmError{err})
	}
	s.rset.add(keys, txnresp)
	return (*v3.GetResponse)(txnresp.Responses[0].GetResponseRange())
}

func (s *stm) reset() {
	s.rset = make(map[string]*v3.GetResponse)
	s.wset = make(map[string]stmPut)
}

// stmSerializable pins all reads to the revision of the first read; the
// prefetch map caches responses loaded by a failed commit's Else branch.
type stmSerializable struct {
	stm
	prefetch map[string]*v3.GetResponse
}

func (s *stmSerializable) Get(keys ...string) string {
	if wv := s.wset.get(keys...); wv != nil {
		return wv.val
	}
	firstRead := len(s.rset) == 0
	for _, key := range keys {
		if resp, ok := s.prefetch[key]; ok {
			delete(s.prefetch, key)
			s.rset[key] = resp
		}
	}
	resp := s.stm.fetch(keys...)
	if firstRead {
		// txn's base revision is defined by the first read
		s.getOpts = []v3.OpOption{
			v3.WithRev(resp.Header.Revision),
			v3.WithSerializable(),
		}
	}
	return respToValue(resp)
}

// Rev goes through Get so the key lands in the read set before the
// revision is reported.
func (s *stmSerializable) Rev(key string) int64 {
	s.Get(key)
	return s.stm.Rev(key)
}

// gets enumerates the read set as parallel key and OpGet slices for use in
// the commit txn's Else branch.
func (s *stmSerializable) gets() ([]string, []v3.Op) {
	keys := make([]string, 0, len(s.rset))
	ops := make([]v3.Op, 0, len(s.rset))
	for k := range s.rset {
		keys = append(keys, k)
		ops = append(ops, v3.OpGet(k))
	}
	return keys, ops
}

func (s *stmSerializable) commit() *v3.TxnResponse {
	keys, getops := s.gets()
	txn := s.client.Txn(s.ctx).If(s.conflicts()...).Then(s.wset.puts()...)
	// use Else to prefetch keys in case of conflict to save a round trip
	txnresp, err := txn.Else(getops...).Commit()
	if err != nil {
		panic(stmError{err})
	}
	if txnresp.Succeeded {
		return txnresp
	}
	// load prefetch with Else data
	s.rset.add(keys, txnresp)
	s.prefetch = s.rset
	s.getOpts = nil
	return nil
}

// isKeyCurrent builds the comparison proving k's mod revision is unchanged
// since it was read (0 meaning the key did not exist).
func isKeyCurrent(k string, r *v3.GetResponse) v3.Cmp {
	if len(r.Kvs) != 0 {
		return v3.Compare(v3.ModRevision(k), "=", r.Kvs[0].ModRevision)
	}
	return v3.Compare(v3.ModRevision(k), "=", 0)
}

// respToValue extracts the string value from a get response, or "" when the
// response is nil or the key is absent.
func respToValue(resp *v3.GetResponse) string {
	if resp == nil || len(resp.Kvs) == 0 {
		return ""
	}
	return string(resp.Kvs[0].Value)
}

// NewSTMRepeatable is deprecated.
func NewSTMRepeatable(ctx context.Context, c *v3.Client, apply func(STM) error) (*v3.TxnResponse, error) {
	return NewSTM(c, apply, WithAbortContext(ctx), WithIsolation(RepeatableReads))
}

// NewSTMSerializable is deprecated.
func NewSTMSerializable(ctx context.Context, c *v3.Client, apply func(STM) error) (*v3.TxnResponse, error) {
	return NewSTM(c, apply, WithAbortContext(ctx), WithIsolation(Serializable))
}

// NewSTMReadCommitted is deprecated.
func NewSTMReadCommitted(ctx context.Context, c *v3.Client, apply func(STM) error) (*v3.TxnResponse, error) {
	return NewSTM(c, apply, WithAbortContext(ctx), WithIsolation(ReadCommitted))
}
package e100920.Server;

import java.io.PrintWriter;
import java.util.ArrayList;

/**
 * Sends random unique numbers (1-90) to the client, one every 500 ms, until
 * only 60 remain in the pool or an early termination is requested; either
 * way a final "+" line is sent to signal the end of the stream.
 */
class Sender implements Runnable { // only responsible for sending the number stream to the client

    // Pool of not-yet-sent numbers; consumed one entry per iteration.
    private final ArrayList<Integer> numberList = new ArrayList<>();
    private final PrintWriter output;

    // Set from another thread to request early termination; volatile so the
    // sending loop observes the change promptly.
    public volatile boolean interrupted = false;

    public Sender(PrintWriter output, ClientHandler cs) {
        this.output = output;
        for (int i = 1; i <= 90; i++) {
            numberList.add(i);
        }
    }

    @Override
    public void run() {
        while (numberList.size() > 60 && !interrupted) {
            int randomIndex = (int) (Math.random() * numberList.size());
            // remove(int) both consumes the pool entry and yields its value,
            // replacing the original get-then-remove double lookup.
            int drawn = numberList.remove(randomIndex);
            output.println(drawn);
            output.flush();
            System.out.println("Sent: " + drawn);
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        // Both exit paths send the same "+" terminator; only the log message
        // differs (the original duplicated the whole block in an if/else).
        output.println("+");
        output.flush();
        System.out.println(interrupted ? "Forced termination sent" : "Ending sent");
    }
}
from abc import ABC, abstractmethod
from typing import List

from src.domain.common.event.event import Event


class EventPublisher(ABC):
    """Abstract port for publishing domain events to interested consumers."""

    @abstractmethod
    def publish(self, events: List[Event]) -> None:
        """Deliver the given batch of domain events.

        Args:
            events: The events to publish, in order.
        """
        pass
;
define(function (require) {
    // Required for their registration side effects on the Angular module.
    require('../services/queryPluginsManager');
    require('../services/backendService');

    var backendModel = require('../models/backend');

    // Controller backing the backend list view: lists configured query
    // backends and lets the user create, edit and remove them.
    require('../ngModule').controller('BackendListController', function ($scope, backendService, queryPluginsManager, $modal) {
        $scope.list = [];

        // Opens the edit dialog pre-filled with a fresh backend model
        // pointing at a local default endpoint.
        $scope.createNew = function(){
            var newBackend = backendModel.factory({
                endPoint:'http://localhost:9090/query'
            });

            $scope.editInDialog(newBackend);
        };

        // Removes a backend after a native confirm() prompt.
        $scope.remove = function(backend){
            var decision = confirm('Are you sure you want to remove the backend ' + backend.name + '?');
            if(decision){
                backendService.removeById(backend.id);
            }
        };

        // Opens the edit modal for an existing (or freshly created) backend.
        // The inner controller gets its own $scope plus the resolved
        // `backend` and `plugins` values below.
        $scope.editInDialog = function (backend) {
            $modal.open({
                templateUrl: '/app/views/editBackend.html',
                controller: function ($scope, $modalInstance, backend, plugins) {
                    $scope.backend = backend;
                    $scope.plugins = plugins;

                    // Default the language to the first available plugin's
                    // query language when none is set yet.
                    if(!backend.language && plugins.length > 0){
                        backend.language = plugins[0].queryLanguage;
                    }

                    $scope.close = function () {
                        $modalInstance.close();
                    };

                    $scope.save = function () {
                        backendService.saveOne($scope.backend);
                        $modalInstance.close();
                    };
                },
                size: '',
                resolve: {
                    // Only plugins that actually need a backend are offered.
                    plugins: function(){
                        var plugins = queryPluginsManager.getAll().filter(function(p){
                            return p.needBackend === true;
                        });

                        return plugins;
                    },
                    backend: function () {
                        return backend;
                    }
                }
            });
        };

        // Initial load of the backend list.
        backendService.getAll().then(function (backend) {
            $scope.list = backend;
        });
    });
});
import { RuleConfiguration } from '../../../support/Rule'

// Option tuple accepted by ESLint's core `semi` rule: either the "never"
// form (optionally followed by an object allowing
// `beforeStatementContinuationChars`) or the "always" form (optionally
// followed by `omitLastInOneLineBlock`).
type Options = (("never") | { beforeStatementContinuationChars?: "always" | "any" | "never" })[] | (("always") | { omitLastInOneLineBlock?: boolean })[]

// Typed configuration entry binding Options to the `semi` rule of the core
// `eslint` plugin.
type Configuration = RuleConfiguration<'semi', 'eslint', Options>

export default Configuration
/*
 * $Id$
 *
 * Copyright (c) 2006
 */
package com.horowitz.mickey;

import java.io.Serializable;

import org.apache.commons.lang.builder.CompareToBuilder;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;

/**
 * A 2D point with an attached weight, ordered primarily by weight, then by
 * y, then by x. equals/hashCode are consistent with compareTo (all three
 * include weight, x and y).
 *
 * @author zhristov
 */
public class Pixel implements Comparable<Pixel>, Cloneable, Serializable {

  private static final long serialVersionUID = -5887402026506046524L;

  public int weight = 0;
  public int x;
  public int y;

  /** Copy constructor; note it copies only coordinates, not the weight. */
  public Pixel(Pixel p) {
    this(p.x, p.y);
  }

  public Pixel(int x, int y) {
    super();
    this.x = x;
    this.y = y;
    this.weight = 0;
  }

  public Pixel(int x, int y, int weight) {
    super();
    this.x = x;
    this.y = y;
    this.weight = weight;
  }

  public void setX(int x) {
    this.x = x;
  }

  public int getX() {
    return x;
  }

  public void setY(int y) {
    this.y = y;
  }

  public int getY() {
    return y;
  }

  @Override
  public int compareTo(Pixel o) {
    return new CompareToBuilder().append(weight, o.weight).append(y, o.y).append(x, o.x).toComparison();
  }

  // Added the missing @Override annotations on equals/toString/hashCode so
  // signature drift is caught at compile time.
  @Override
  public boolean equals(final Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof Pixel)) {
      return false;
    }
    Pixel castOther = (Pixel) other;
    return new EqualsBuilder().append(weight, castOther.weight).append(x, castOther.x).append(y, castOther.y)
        .isEquals();
  }

  @Override
  public String toString() {
    return "[" + x + "," + y + ":" + weight + "]";
  }

  @Override
  public int hashCode() {
    return new HashCodeBuilder(17, 37).append(weight).append(x).append(y).toHashCode();
  }

  @Override
  protected Object clone() throws CloneNotSupportedException {
    return super.clone();
  }
}
#!/usr/bin/env bash

# Copyright 2022 The Cockroach Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -o nounset
set -o errexit
set -o pipefail

# enable **/*.yaml
shopt -s globstar

if [[ -z "${TEST_WORKSPACE:-}" ]]; then
  echo 'Must be run with bazel via "bazel test //hack:verify-crds"' >&2
  exit 1
fi

FILES=(apis cmd config external go.mod go.sum hack pkg)

echo "Verifying generated CRD manifests are up-to-date..." >&2

# Copies the tracked sources into a scratch dir and snapshots config/ as
# config_ for the later diff. Intentionally leaves the scratch dir as the
# current directory (no popd): the following steps run inside it.
create_working_dir() {
  local dir="${TEST_TMPDIR}/files"
  mkdir -p "${dir}"

  # copy necessary files (quote the array expansion so it is word-safe)
  cp -RL "${FILES[@]}" "${dir}"

  # copy config to config_
  pushd "${dir}" >/dev/null
  cp -RHL config config_
}

# TODO: Ideally we'd be able to share this with the makefile
# $1 is the path to the controller-gen binary.
generate_crds() {
  HOME="${TEST_TMPDIR}/home" "${1}" crd:trivialVersions=true \
    rbac:roleName=role \
    webhook \
    paths=./... \
    output:crd:artifacts:config=config/crd/bases

  hack/boilerplaterize hack/boilerplate/boilerplate.yaml.txt config/**/*.yaml
}

# Diffs the regenerated config/ tree against the config_ snapshot and fails
# the test when they differ.
run_diff() {
  # Avoid diff -N so we handle empty files correctly
  echo "diffing"
  diff=$(diff -upr "config_" "config" 2>/dev/null || true)

  if [[ -n "${diff}" ]]; then
    echo "${diff}" >&2
    echo >&2
    # Fixed the unbalanced quote in the original message.
    echo "generated CRDs are out of date. Please run 'make dev/update-crds'" >&2
    exit 1
  fi

  echo "SUCCESS: generated CRDs up-to-date"
}

main() {
  create_working_dir
  # NOTE(review): the controller-gen path is taken from the script's second
  # argument — confirm callers actually pass it as $2.
  generate_crds "${2}"
  run_diff
}

main "$@"
<gh_stars>0 #include <stdio.h> #include <stdlib.h> #include <string.h> #include <openssl/evp.h> #include "arg.h" #define MIN(x, y) ((x) < (y)) ? (x) : (y) #define MAX(x, y) ((x) > (y)) ? (x) : (y) struct range { size_t min, max; struct range * next; }; char * argv0; static char * separators = "\t"; static struct range * list = NULL; static void usage() { fprintf(stderr, "Usage: %s [-s separators] [-d digest_algorithm] list\n", argv0); exit(1); } static void insert(struct range * r) { struct range * l, *p, *t; for (p = NULL, l = list; l; p = l, l = l->next) { if (r->max && r->max + 1 < l->min) { r->next = l; break; } else if (!l->max || r->min < l->max + 2) { l->min = MIN(r->min, l->min); for (p = l, t = l->next; t; p = t, t = t->next) if (r->max && r->max + 1 < t->min) break; l->max = (p->max && r->max) ? MAX(p->max, r->max) : 0; l->next = t; return; } } if (p) p->next = r; else list = r; } static void parselist(char * str) { char * s; size_t n = 1; struct range * r; if (!*str) { fprintf(stderr, "empty list\n"); exit(1); } for (s = str; *s; s++) if (*s == ',') n++; r = malloc(n * sizeof(struct range)); if (!r) { perror("Cannot allocate memory"); exit(1); } for (s = str; n; n--, s++) { r->min = (*s == '-') ? 1 : strtoul(s, &s, 10); r->max = (*s == '-') ? 
strtoul(s + 1, &s, 10) : r->min; r->next = NULL; if (!r->min || (r->max && r->max < r->min) || (*s && *s != ',')) { fprintf(stderr, "bad list value\n"); exit(1); } insert(r++); } } int main(int argc, char * argv[]) { char * line = NULL; char * digest_algorithm = NULL; char * tok; size_t cap = 0; ssize_t len; int i, c; unsigned char hash[EVP_MAX_MD_SIZE]; unsigned int hash_len; EVP_MD_CTX * ctx; const EVP_MD * md = EVP_ripemd160(); ARGBEGIN { case 's': separators = EARGF(usage()); if (!*separators) { fprintf(stderr, "Empty separator is forbiden\n"); exit(1); } break; case 'd': digest_algorithm = EARGF(usage()); break; default: usage(); } ARGEND if (argc != 1) usage(); parselist(*argv); if (digest_algorithm) { OPENSSL_init_crypto(OPENSSL_INIT_ADD_ALL_DIGESTS, NULL); md = EVP_get_digestbyname(digest_algorithm); if (!md) { fprintf(stderr, "Unknown message digest '%s'\n", digest_algorithm); exit(1); } } ctx = EVP_MD_CTX_create(); while ((len = getline(&line, &cap, stdin)) > 0) { EVP_DigestInit(ctx, md); struct range * r = list; tok = line + strspn(line, separators); for (c = 1; *tok; c++) { size_t tok_len = 0; tok_len = strcspn(tok, separators); if (!r) break; if (c >= r->min && c <= r->max) { #ifdef DEBUG fprintf(stderr, "W[%02d,%03ld]<", c, tok_len); fwrite(tok, 1, tok_len, stderr); fputs(">\n", stderr); #endif EVP_DigestUpdate(ctx, tok, tok_len); if (c == r->max) r = r->next; } tok += tok_len; if (!*tok) { break; } tok += strspn(tok, separators); } EVP_DigestFinal(ctx, hash, &hash_len); for (i = 0; i < hash_len; i++) { printf("%02x", hash[i]); } fwrite(separators, 1, 1, stdout); fwrite(line, 1, len, stdout); } free(line); free(list); EVP_MD_CTX_destroy(ctx); OPENSSL_cleanup(); return 0; }
package simplenet; import java.io.IOException; import java.net.InetSocketAddress; import java.net.StandardSocketOptions; import java.nio.channels.AlreadyBoundException; import java.nio.channels.AsynchronousChannelGroup; import java.nio.channels.AsynchronousServerSocketChannel; import java.nio.channels.AsynchronousSocketChannel; import java.nio.channels.Channel; import java.nio.channels.CompletionHandler; import java.util.Objects; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import simplenet.channel.Channeled; import simplenet.receiver.Receiver; /** * The entity that all {@link Client}s will connect to. * * @author <NAME>. * @since November 1, 2017 */ public final class Server extends Receiver<Consumer<Client>> implements Channeled<AsynchronousServerSocketChannel> { private static Logger logger = LoggerFactory.getLogger(Server.class); /** * The backing {@link ThreadPoolExecutor} used for I/O. */ private final ThreadPoolExecutor executor; /** * The backing {@link Channel} of the {@link Server}. */ private final AsynchronousServerSocketChannel channel; /** * Instantiates a new {@link Server} by attempting * to open the backing {@link AsynchronousServerSocketChannel}. * * @throws IllegalStateException If multiple {@link Server} instances are created. 
*/ public Server() { this(4096); } public Server(int bufferSize) { super(bufferSize); try { int numThreads = Math.max(1, Runtime.getRuntime().availableProcessors() - 1); executor = new ThreadPoolExecutor(numThreads, numThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), runnable -> { Thread thread = new Thread(runnable); thread.setDaemon(false); return thread; }); executor.prestartAllCoreThreads(); channel = AsynchronousServerSocketChannel.open(AsynchronousChannelGroup.withThreadPool(executor)); channel.setOption(StandardSocketOptions.SO_RCVBUF, bufferSize); } catch (IOException e) { logger.error("Unable to open the channel!"); } } /** * Attempts to bind the {@link Server} to a * specific {@code address} and {@code port}. * * @param address The IP address to bind to. * @param port The port to bind to {@code 0 <= port <= 65535}. * @throws IllegalArgumentException If {@code port} is less than 0 or greater than 65535. * @throws AlreadyBoundException If a server is already running on any address/port. * @throws RuntimeException If the server is unable to be bound to a specific * address or port. 
*/ public void bind(String address, int port) { Objects.requireNonNull(address); if (port < 0 || port > 65535) { logger.error("The port must be between 0 and 65535!"); return; } try { channel.bind(new InetSocketAddress(address, port)); final Client.Listener listener = new Client.Listener() { @Override public void failed(Throwable t, Client client) { client.close(); } }; channel.accept(null, new CompletionHandler<AsynchronousSocketChannel, Void>() { @Override public void completed(AsynchronousSocketChannel channel, Void attachment) { var client = new Client(bufferSize, channel); connectListeners.forEach(consumer -> consumer.accept(client)); Server.this.channel.accept(null, this); channel.read(client.getBuffer(), client, listener); } @Override public void failed(Throwable t, Void attachment) { } }); logger.info(String.format("Successfully bound to %s:%d!", address, port)); } catch (AlreadyBoundException e) { logger.error("A server is already running!"); } catch (IOException e) { logger.error("Unable to bind the server!"); } } @Override public void close() { Channeled.super.close(); executor.shutdownNow(); } /** * Gets the backing {@link Channel} of this {@link Server}. * * @return A {@link Channel}. */ @Override public AsynchronousServerSocketChannel getChannel() { return channel; } }
/*jshint node:true, white:true, undef:true, maxlen:100 */ var fs = require('fs'); exports.fixture = function (name) { return fs.readFileSync(__dirname + '/../unit/fixtures/' + name).toString(); };
<reponame>sergeytkachenko/siesta-template var AppDispatcher = require('../dispatchers/app.dispatcher'); var ArtworkConstants = require('../constants/artwork.constants'); // Define actions object var ArtWorkActions = { search: function (query) { AppDispatcher.handleViewAction({ actionType: ArtworkConstants.SEARCH_ART, query: query }); }, clearResult: function () { AppDispatcher.handleViewAction({ actionType: ArtworkConstants.CLEAR_RESULT }); } }; module.exports = ArtWorkActions;
module Nomis
  # Value object describing on which dates a prisoner can receive visits —
  # presumably populated from a NOMIS API response (confirm against callers).
  class PrisonerAvailability
    include MemoryModel

    # Whether the prisoner is available at all (coerced to boolean).
    attribute :available, :boolean
    # The individual available dates (coerced to a list of dates).
    attribute :dates, :date_list
  end
end
#!/usr/bin/env bash
# Lint the v3io-go sources: import ordering via impi, then static analysis
# via gometalinter. `set -e` aborts on the first failing step so CI fails
# fast; $GOPATH paths are quoted to survive spaces.
set -e

cd "$GOPATH/src/github.com/v3io/v3io-go"

echo Installing impi
go get -u github.com/pavius/impi/cmd/impi

echo Linting imports with impi
"$GOPATH/bin/impi" \
    --local github.com/v3io/v3io-go \
    --scheme stdLocalThirdParty \
    --skip=pkg/dataplane/schemas/node/common \
    ./pkg/...

echo Getting all packages
go get ./...

echo Installing gometalinter
go get -u gopkg.in/alecthomas/gometalinter.v2
"$GOPATH/bin/gometalinter.v2" --install

echo Linting with gometalinter
"$GOPATH/bin/gometalinter.v2" \
    --deadline=300s \
    --disable-all \
    --enable-gc \
    --enable=deadcode \
    --enable=goconst \
    --enable=gofmt \
    --enable=golint \
    --enable=gosimple \
    --enable=ineffassign \
    --enable=interfacer \
    --enable=misspell \
    --enable=staticcheck \
    --enable=unconvert \
    --enable=varcheck \
    --enable=vet \
    --enable=vetshadow \
    --enable=errcheck \
    --exclude="_test.go" \
    --exclude="comment on" \
    --exclude="error should be the last" \
    --exclude="should have comment" \
    --skip=pkg/platform/kube/apis \
    --skip=pkg/platform/kube/client \
    --skip=pkg/dataplane/schemas/node/common \
    ./pkg/...
/**
 * Exchanges the elements at {@code index1} and {@code index2} of {@code arr}
 * in place.
 *
 * @param arr    the array to modify
 * @param index1 position of the first element
 * @param index2 position of the second element
 */
public static void swap(int[] arr, int index1, int index2) {
    final int held = arr[index2];
    arr[index2] = arr[index1];
    arr[index1] = held;
}
#!/bin/bash
# Build a package with Nx and copy its compiled sources into the e2e test
# project's node_modules, so tmp/nx-e2e/proj exercises the fresh build.
#
# Usage: copy-build.sh <package-name>
set -euo pipefail

# The original used bare $1 everywhere: with no argument it silently
# operated on ".../@trafilea//src"; fail early instead.
if [ $# -ne 1 ]; then
    echo "usage: $0 <package-name>" >&2
    exit 1
fi

pkg="$1"
dest="tmp/nx-e2e/proj/node_modules/@trafilea/${pkg}/src"

nx build "$pkg" --skip-nx-cache
rm -rf "$dest"
mkdir -p "$dest"
cp -r "dist/packages/${pkg}/src/"* "$dest"
//
//  Graphics.hpp
//  walls3duino
//
//  Created by <NAME> on 4/24/20.
//  Copyright © 2020 <NAME>. All rights reserved.
//

#ifndef Graphics_hpp
#define Graphics_hpp

#include <string>
#include <cmath>
#include <cstdint>
#include <memory>
#include "SDLHeader.hpp"
#include "Vec2.hpp"

// this class simulates an SSD1306 128x64 black/white OLED display, and somewhat the
// Adafruit SSD1306 display driver, in one of two modes:
// 1) horizontal addressing mode, where the display is addressed left to right and then
//    top to bottom, in 8-bit pages which each represent a chunk of a vertical column, or
// 2) vertical addressing mode, where the display is addressed top to bottom and then
//    left to right, using the same 8-bit vertical pages
// see the SSD1306 datasheet for details
//
// this class uses a full screen pixel buffer for horizontal addressing mode, and only
// a single column pixel buffer for vertical addressing mode
class Graphics
{
public:
    // Base error type thrown by Graphics; GetMsg returns a printable description.
    class Exception
    {
    public:
        virtual std::string GetMsg() const = 0;
    };

    Graphics();
    // Non-copyable: owns SDL window/renderer/texture handles.
    Graphics(const Graphics&) = delete;
    Graphics& operator=(const Graphics&) = delete;
    // Presents a completed frame (horizontal addressing mode) — TODO confirm exact
    // presentation semantics against the .cpp; name implies end-of-frame flush.
    void EndFrame();
    // Presents/advances one column (vertical addressing mode) — TODO confirm.
    void EndColumn();
    // Raw page buffer for horizontal addressing mode drawing.
    uint8_t* GetScreenBuffer();
    // Raw single-column page buffer for vertical addressing mode drawing.
    uint8_t* GetColumnBuffer();
    ~Graphics();
private:
    // Concrete Exception carrying an SDL error string plus a caller message.
    class SDLException : public Exception
    {
    public:
        SDLException(std::string msg);
        std::string GetMsg() const override;
    private:
        std::string error;
        std::string msg;
    };

    // Expands one 8-pixel SSD1306 page into the scaled simulator pixel buffer
    // at column x, page row pageNum.
    void WritePageToSimScreenPixelBuf(uint8_t pageData, uint32_t x, uint32_t pageNum);
    // Uploads the simulator pixel buffer to the SDL texture and renders it.
    void FlushSimScreenPixelBuf();

    SDL_Window* pWindow;
    SDL_Renderer* pRenderer;
    SDL_Texture* pScreenTexture;
    std::unique_ptr<uint32_t[]> pSimScreenPixelBuf; // full-screen pixel buffer arranged as in SSD1306 horizontal addressing mode
    std::unique_ptr<uint8_t[]> pPixelBuf; // single-column pixel buffer arranged as in SSD1306 vertical addressing mode
    std::unique_ptr<uint8_t[]> pColumnBuf; // presumably scratch for the column being drawn — confirm against .cpp
    uint32_t currColumn; // used for column-drawing mode only
public:
    static constexpr uint32_t ScreenWidth {128u};
    static constexpr uint32_t ScreenHeight {64u};
    // Each page covers 8 vertical pixels, so 64/8 page rows.
    static constexpr uint32_t ScreenHeightPages {ScreenHeight/8};
    // Integer upscale factor from OLED pixels to simulator window pixels.
    static constexpr uint32_t SimScreenScale {4u};
    static constexpr uint32_t SimScreenWidth {ScreenWidth * SimScreenScale};
    static constexpr uint32_t SimScreenHeight {ScreenHeight * SimScreenScale};
};

#endif /* Graphics_hpp */
#!/usr/bin/env zsh
# Git introspection for a zsh prompt: git/vcs-details shells out to git once
# per data source and fills a set of global parameters (git_property_map,
# repo_status_unstaged/staged, repo_remotes, repo_submodules, ...).

# True when git integration is enabled and $PWD is inside a git work tree.
git/is-available() >/dev/null 2>/dev/null {
  git/is-enabled && \git rev-parse --is-inside-work-tree
}

# Hook point: redefine to return non-zero to disable git integration.
git/is-enabled() {
  return 0
}

(( ${+gitrp_safeparms} == 1 )) || local -ar gitrp_safeparms=(
  --show-toplevel
  --git-dir
  --is-bare-repository
  --show-superproject-working-tree
)
# NOTE: --show-superproject-working-tree This must be last; it returns nothing (not even a blank line) when there's no submodule

# Checks whether or not the last call to git/vcs-details occurred from $PWD
# Resets git_property_map to default values.
# NOTE(review): remote-branch, has-commits, has-remotes, ahead-by, behind-by,
# git-dir, is-bare and parent-repo expand REPO_CONFIG/git_remotes/git_props,
# which are locals of git/vcs-details and are normally unset here, so they
# collapse to their ':-' fallbacks — confirm this is intentional given that
# the first three keys use a plain ''.
git/default-git_property_map() {
  typeset -gA git_property_map=(
    nearest-root ''
    git-rev ''
    local-branch ''
    remote-branch "${${${${(M)REPO_CONFIG:#* on *}:+[none]}:-${REPO_CONFIG##*...}}%%[[:space:]\[]*}"
    has-commits "${${${(M)REPO_CONFIG:#No commits yet on *}:+0}:-1}"
    has-remotes "${${${(M)${#git_remotes[@]}:#0}:+no}:-yes}"
    ahead-by "${${${(M)REPO_CONFIG:#*\[ahead*}:+${${REPO_CONFIG##*\[ahead[[:space:]]}%%[\],]*}}:-0}"
    behind-by "${${${(M)REPO_CONFIG:#*(\[|, )behind*}:+${${REPO_CONFIG##*(\[|, )behind[[:space:]]}%%[\],]*}}:-0}"
    git-dir "${${${git_props[3]}:A}##${git_props[2]}/}"
    is-bare "${${${(M)${git_props[4]}:#true}:+1}:-0}"
    parent-repo "${git_props[5]:-}"
    git-prop-dir "${PWD}"
    is-submodule 0
  )
}

# Associations set - git_property_map repo_status_unstaged repo_status_staged repo_subtrees repo_submodule_branches
# Arrays set - repo_remotes git_status repo_submodules
git/vcs-details() {
  # Prints the four data sections (remotes; HEAD + safe rev-parse props;
  # submodule toplevel/branch pairs; porcelain status) separated by '--' lines.
  get-gitinfo() {
    \git remote -v 2>/dev/null || { return 1 }
    print -- --
    \git rev-parse HEAD 2>/dev/null || print detached
    \git rev-parse "${gitrp_safeparms[@]}" 2>/dev/null || {
      print -- 'not initialized'
      return 1
    }
    print -- --
    \git submodule --quiet foreach 'git rev-parse --show-toplevel --abbrev-ref HEAD' 2>/dev/null
    print -- --
    \git status --porcelain -b 2>/dev/null
  }

  # Copies the positional parameters up to (not including) the next '--' into
  # the array named by $1, then leaves the remaining arguments in new_argv.
  store-array-slice() {
    # If the first element in the array is a --, then the "section" was empty
    [[ "${2:---}" != '--' ]] || { set -A "$1"; shift 2; typeset -ga new_argv=( "$@" ); return 0 }
    local __AR_NAME="$1"; shift
    (( $# == 0 )) && set -A "$__AR_NAME" || set -A "$__AR_NAME" "${(@)${(@)argv[1,$(( ${argv[(i)--]} - 1 ))]}[@]}"
    shift $(( ${#${(P@)__AR_NAME}} + 1 ))
    typeset -ga new_argv=( "$@" )
  }

  # Collects absolute paths of nested .git directories (vendored subtrees).
  search-git-root-for-dotgit-subtrees() {
    typeset -g repo_subtrees=( **/.git(N/on) )
    (( ${#${(@)repo_subtrees}} == 0 )) || repo_subtrees=( ${(@)${(@)repo_subtrees##.git}:A} )
  }

  typeset -gA repo_status_unstaged=( ) repo_remote_url_to_name=( ) repo_submodule_branches=( ) \
    repo_status_staged=( ) repo_remote_name_to_url=( ) git_property_map=( )
  typeset -ga repo_remotes=( ) repo_submodules=( ) repo_subtrees=( )
  git/is-available || return $?
  local -a git_remotes=( ) git_props=( ) submod_result=( ) git_submodule_branches=( )

  # Anonymous function: receives get-gitinfo's output one line per argument
  # and slices it into the arrays declared above.
  () {
    if (( REFREZSH_IS_DEBUG == 1 )); then
      print -- "---- Output from git/vcs-details/get-gitinfo ----" >> ~/tmp/last_prompt.info
      print -l -- "$@" >> ~/tmp/last_prompt.info
      print -- "-------------------------------------------------" >> ~/tmp/last_prompt.info
    fi
    local -a new_argv=( ); store-array-slice git_remotes "$@"; argv=( "${new_argv[@]}" )
    [[ "$1" == '--' ]] && { shift; typeset -gxa git_props=( ) } || {
      [[ "${argv[1]}" != 'HEAD' ]] || shift
      store-array-slice git_props "$@"; argv=( "${new_argv[@]}" )
    }
    search-git-root-for-dotgit-subtrees
    store-array-slice submod_result "$@"; argv=( "${new_argv[@]}" )
    typeset -gxa git_status=( "$@" )
  } "${${(f)$(get-gitinfo)}[@]}" || return $?

  # The '## <local>...<remote> [ahead N, behind M]' header of porcelain -b.
  local -r REPO_CONFIG="${${(M)git_status[@]:#\#*}##\#\# }"
  local -ra git_status=( "${git_status[@]:#\#\# *}" )
  local -A prop_map=(
    nearest-root "${git_props[2]}"
    git-rev "${git_props[1]}"
    local-branch "${${${(M)REPO_CONFIG:#* on *}:+${REPO_CONFIG##* }}:-${REPO_CONFIG%%...*}}"
    remote-branch "${${${${(M)REPO_CONFIG:#* on *}:+[none]}:-${REPO_CONFIG##*...}}%%[[:space:]\[]*}"
    has-commits "${${${(M)REPO_CONFIG:#No commits yet on *}:+0}:-1}"
    has-remotes "${${${(M)${#git_remotes[@]}:#0}:+no}:-yes}"
    ahead-by "${${${(M)REPO_CONFIG:#*\[ahead*}:+${${REPO_CONFIG##*\[ahead[[:space:]]}%%[\],]*}}:-0}"
    behind-by "${${${(M)REPO_CONFIG:#*(\[|, )behind*}:+${${REPO_CONFIG##*(\[|, )behind[[:space:]]}%%[\],]*}}:-0}"
    git-dir "${${${git_props[3]}:A}##${git_props[2]}/}"
    is-bare "${${${(M)${git_props[4]}:#true}:+1}:-0}"
    parent-repo "${git_props[5]:-}"
    git-prop-dir "${PWD}"
    is-submodule 0
  )
  # A git-dir located under .../modules/<name> means this is a submodule checkout.
  [[ "${${prop_map[git-dir]}:h:t}" != "modules" ]] || prop_map[is-submodule]=1
  [[ "${prop_map[local-branch]}" != "${prop_map[remote-branch]}" ]] || prop_map[remote-branch]=''
  [[ -z "${submod_result}" ]] || {
    typeset -ga repo_submodules=( "${${(M@)submod_result[@]:#/*}[@]##${prop_map[nearest-root]}/}" )
    typeset -gA repo_submodule_branches=( "${submod_result[@]}" )
  }
  # Bucket porcelain lines by their two-character XY status code into
  # unstaged (u_*) / staged (s_*) path lists; '##???' strips "XY " prefixes.
  typeset -ga u_ren=( ${(@)${(M)git_status:#([AMDRU ]R *)}##???} ) s_ren=( ${(@)${(M)git_status:#R[AMDRU ] *}##???} ) \
    u_mod=( ${(@)${(M)git_status:#([AMDRU ]M *)}##???} ) s_mod=( ${(@)${(M)git_status:#M[AMDRU ] *}##???} ) \
    u_add=( ${(@)${(M)git_status:#([AMDRU ]A *)}##???} ) s_add=( ${(@)${(M)git_status:#A[AMDRU ] *}##???} ) \
    u_del=( ${(@)${(M)git_status:#([AMDRU ]D *)}##???} ) s_del=( ${(@)${(M)git_status:#D[AMDRU ] *}##???} ) \
    u_unm=( ${(@)${(M)git_status:#([AMDRU ]U *)}##???} ) s_unm=( ${(@)${(M)git_status:#U[AMDRU ] *}##???} ) \
    u_new=( ${(@)${(M)git_status:#\?\?*}##???} )
  local RP
  for RP in u_{ren,mod,add,del,new,unm}; do repo_status_unstaged+=( "${RP##u_}-paths" "${(j.:.)${(q@)${(P@)RP}}}" "${RP##u_}-len" ${#${(P@)RP}} ); done
  for RP in s_{ren,mod,add,del,unm}; do repo_status_staged+=( "${RP##s_}-paths" "${(j.:.)${(q@)${(P@)RP}}}" "${RP##s_}-len" ${#${(P@)RP}} ); done
  typeset -gA git_property_map=( "${(kv)prop_map[@]}" )
  local ITEM='' TAB=$'\t'
  # 'git remote -v' lines are "<name>\t<url> (fetch|push)": repo_remotes
  # collects the URLs, and the two maps translate name <-> url.
  for ITEM in ${git_remotes[@]}; do
    repo_remotes+=( "${${ITEM##*$TAB}%% *}" )
    repo_remote_url_to_name+=( "${repo_remotes[-1]}" "${ITEM%%$TAB*}" )
    repo_remote_name_to_url+=( "${repo_remote_url_to_name[${repo_remotes[-1]}]}" "${repo_remotes[-1]}" )
  done
  set +x # NOTE(review): looks like leftover debug tracing — confirm and remove
  if [[ "${git_property_map[remote-branch]}" == '[none]' ]]; then git_property_map[remote-branch]=''; fi
}
##
# Proc(Ext) Test

assert('Proc#source_location') do
  loc = Proc.new {}.source_location
  # source_location may be unsupported in this build (returns nil): pass then.
  next true if loc.nil?
  assert_equal loc[0][-7, 7], 'proc.rb'
  # NOTE(review): pins the Proc.new above to literal file line 5 — do not add
  # or remove lines before it, or this assertion breaks.
  assert_equal loc[1], 5
end

assert('Proc#inspect') do
  ins = Proc.new{}.inspect
  assert_kind_of String, ins
end

# lambda{} is a lambda, Proc.new{} is not.
assert('Proc#lambda?') do
  assert_true lambda{}.lambda?
  assert_true !Proc.new{}.lambda?
end

# Proc#=== invokes the proc (case/when support).
assert('Proc#===') do
  proc = Proc.new {|a| a * 2}
  assert_equal 20, (proc === 10)
end

assert('Proc#yield') do
  proc = Proc.new {|a| a * 2}
  assert_equal 20, proc.yield(10)
end

assert('Proc#curry') do
  b = proc {|x, y, z| (x||0) + (y||0) + (z||0) }
  # procs are lenient about arity: extra args spill into the next call.
  assert_equal 6, b.curry[1][2][3]
  assert_equal 6, b.curry[1, 2][3, 4]
  assert_equal 6, b.curry(5)[1][2][3][4][5]
  assert_equal 6, b.curry(5)[1, 2][3, 4][5]
  assert_equal 1, b.curry(1)[1]

  # lambdas enforce arity: over/under application raises.
  b = lambda {|x, y, z| (x||0) + (y||0) + (z||0) }
  assert_equal 6, b.curry[1][2][3]
  assert_raise(ArgumentError) { b.curry[1, 2][3, 4] }
  assert_raise(ArgumentError) { b.curry(5) }
  assert_raise(ArgumentError) { b.curry(1) }

  # curry preserves lambda-ness.
  assert_false(proc{}.curry.lambda?)
  assert_true(lambda{}.curry.lambda?)
end

assert('Proc#parameters') do
  assert_equal([], Proc.new {}.parameters)
  assert_equal([], Proc.new {||}.parameters)
  # non-lambda procs report required args as optional.
  assert_equal([[:opt, :a]], Proc.new {|a|}.parameters)
  assert_equal([[:req, :a]], lambda {|a|}.parameters)
  assert_equal([[:opt, :a]], lambda {|a=nil|}.parameters)
  assert_equal([[:req, :a]], ->(a){}.parameters)
  assert_equal([[:rest, :a]], Proc.new {|*a|}.parameters)
  assert_equal([[:opt, :a], [:opt, :b], [:opt, :c], [:opt, :d], [:rest, :e], [:opt, :f], [:opt, :g], [:block, :h]], Proc.new {|a,b,c=:c,d=:d,*e,f,g,&h|}.parameters)
  assert_equal([[:req, :a], [:req, :b], [:opt, :c], [:opt, :d], [:rest, :e], [:req, :f], [:req, :g], [:block, :h]], lambda {|a,b,c=:c,d=:d,*e,f,g,&h|}.parameters)
end

assert('Proc#to_proc') do
  proc = Proc.new {}
  assert_equal proc, proc.to_proc
end

assert('Kernel#proc') do
  assert_true !proc{|a|}.lambda?
end

# The following exercise C-level test helpers (ProcExtTest) compiled into mruby.
assert('mrb_proc_new_cfunc_with_env') do
  ProcExtTest.mrb_proc_new_cfunc_with_env(:test)
  ProcExtTest.mrb_proc_new_cfunc_with_env(:mruby)

  t = ProcExtTest.new

  assert_equal :test, t.test
  assert_equal :mruby, t.mruby
end

assert('mrb_cfunc_env_get') do
  ProcExtTest.mrb_cfunc_env_get :get_int, [0, 1, 2]

  t = ProcExtTest.new

  # a cfunc without an env cannot read env values; indexes are bounds-checked.
  assert_raise(TypeError) { t.cfunc_without_env }
  assert_raise(IndexError) { t.get_int(-1) }
  assert_raise(IndexError) { t.get_int(3) }

  assert_equal 1, t.get_int(1)
end
"""
Utils for data-driven method
"""
import xml.etree.ElementTree as ET

import pandas as pd


def save_image(
        img_filename, image, acc, model,
        score_thresh, top_left_crner, btm_right_crner):
    """Draw a red 1-px bounding box on ``image`` and write it to disk.

    The output name encodes the model, score threshold and accuracy so
    different runs do not overwrite each other.

    Parameters
    ----------
    img_filename : str
        Base name used in the output file name.
    image : numpy.ndarray
        BGR image to annotate (modified in place by cv2.rectangle).
    acc, model : str
        Accuracy label and model name embedded in the file name.
    score_thresh : float
        Detection threshold embedded in the file name.
    top_left_crner, btm_right_crner : tuple of int
        Box corners as (x, y) pixel coordinates.
    """
    # Imported lazily so the rest of this module is usable without OpenCV.
    import cv2
    cv2.rectangle(
        image,
        top_left_crner,
        btm_right_crner,
        color=(0, 0, 255),
        thickness=1)
    cv2.imwrite(
        '/Users/victor/Google Drive/detection_pics/' +
        model + "_" + str(score_thresh) + acc + "_" + img_filename + '.jpg',
        image)


def read_xml(path, in_file):
    """Parse one Pascal-VOC style annotation file into a numpy array.

    Parameters
    ----------
    path : str
        Directory containing the .xml files (must end with a separator,
        since it is concatenated directly with ``in_file``).
    in_file : str
        File name without the '.xml' extension.

    Returns
    -------
    numpy.ndarray
        One row per annotated object: [filename, xmin, ymin, xmax, ymax].
    """
    xml_list = []
    full_path = path + in_file + '.xml'
    tree = ET.parse(full_path)
    root = tree.getroot()
    for member in root.findall('object'):
        # The child count of 'object' varies with the annotation tool:
        # some files carry extra "pixels" info, shifting the bndbox index.
        if len(member) == 7:
            value = (root.find('filename').text,
                     int(member[6][0].text),
                     int(member[6][1].text),
                     int(member[6][2].text),
                     int(member[6][3].text))
        elif len(member) == 5:  # 1 object
            value = (root.find('filename').text,
                     int(member[4][0].text),
                     int(member[4][1].text),
                     int(member[4][2].text),
                     int(member[4][3].text))
        xml_list.append(value)
    column_name = ['filename', 'xmin', 'ymin', 'xmax', 'ymax']
    xml_df = pd.DataFrame(xml_list, columns=column_name)
    xml_np = xml_df.to_numpy()
    return xml_np


def get_iou(gt_bbx, pred_bbx):
    """
    Calculate the Intersection over Union (IoU) of two bounding boxes.
    Based on: https://stackoverflow.com/questions/25349178/
    calculating-percentage-of-bounding-box-overlap-for-image-detector-evaluation

    Parameters
    ----------
    gt_bbx : dict
        Keys: {'xmin', 'xmax', 'ymin', 'ymax'}
        The (xmin, ymin) position is at the top left corner,
        the (xmax, ymax) position is at the bottom right corner
    pred_bbx : dict
        Keys: {'xmin', 'xmax', 'ymin', 'ymax'}
        The (xmin, ymin) position is at the top left corner,
        the (xmax, ymax) position is at the bottom right corner

    Returns
    -------
    tuple (iou, intersection_area)
        iou is a float in [0, 1]; intersection_area is in squared pixels.
        Both are 0.0 when the boxes do not overlap.
    """
    assert gt_bbx['xmin'] < gt_bbx['xmax']
    assert gt_bbx['ymin'] < gt_bbx['ymax']
    assert pred_bbx['xmin'] < pred_bbx['xmax']
    assert pred_bbx['ymin'] < pred_bbx['ymax']

    # determine the coordinates of the intersection rectangle
    x_left = max(gt_bbx['xmin'], pred_bbx['xmin'])
    y_top = max(gt_bbx['ymin'], pred_bbx['ymin'])
    x_right = min(gt_bbx['xmax'], pred_bbx['xmax'])
    y_bottom = min(gt_bbx['ymax'], pred_bbx['ymax'])

    # No overlap: return immediately. (The original computed
    # (x_right - x_left) * (y_bottom - y_top) here too, which is negative or
    # spuriously positive for disjoint boxes and then leaked into the final
    # iou computation, tripping the assertions below.)
    if (x_right < x_left) or (y_bottom < y_top):
        return 0.0, 0.0

    # The intersection of two axis-aligned bounding boxes is always an
    # axis-aligned bounding box
    intersection_area = (x_right - x_left) * (y_bottom - y_top)

    # compute the area of both BBs
    gt_bbx_area = (gt_bbx['xmax']-gt_bbx['xmin']) * \
        (gt_bbx['ymax']-gt_bbx['ymin'])
    pred_bbx_area = (pred_bbx['xmax']-pred_bbx['xmin']) * \
        (pred_bbx['ymax']-pred_bbx['ymin'])

    # compute the intersection over union by taking the intersection
    # area and dividing it by the sum of prediction + ground-truth
    # areas - the interesection area
    iou = intersection_area / \
        float(gt_bbx_area + pred_bbx_area - intersection_area)
    assert iou >= 0.0
    assert iou <= 1.0
    return iou, intersection_area
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */

package storm.benchmark.benchmarks;

import org.apache.storm.Config;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.spout.SchemeAsMultiScheme;
import org.apache.storm.task.IMetricsContext;
import org.apache.storm.tuple.Fields;
import org.apache.storm.utils.Utils;
import com.google.common.collect.Lists;
import org.apache.log4j.Logger;
import storm.benchmark.metrics.DRPCMetricsCollector;
import storm.benchmark.metrics.IMetricsCollector;
import storm.benchmark.benchmarks.common.StormBenchmark;
import storm.benchmark.tools.PageViewGenerator;
import storm.benchmark.lib.operation.Distinct;
import storm.benchmark.lib.operation.Expand;
import storm.benchmark.lib.operation.One;
import storm.benchmark.lib.operation.Print;
import storm.benchmark.util.BenchmarkUtils;
import storm.benchmark.util.KafkaUtils;
import org.apache.storm.kafka.StringScheme;
import org.apache.storm.kafka.trident.TransactionalTridentKafkaSpout;
import org.apache.storm.trident.TridentState;
import org.apache.storm.trident.TridentTopology;
import org.apache.storm.trident.operation.builtin.MapGet;
import org.apache.storm.trident.operation.builtin.Sum;
import org.apache.storm.trident.spout.IPartitionedTridentSpout;
import org.apache.storm.trident.state.ReadOnlyState;
import org.apache.storm.trident.state.State;
import org.apache.storm.trident.state.StateFactory;
import org.apache.storm.trident.state.map.ReadOnlyMapState;
import org.apache.storm.trident.testing.MemoryMapState;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import static storm.benchmark.lib.spout.pageview.PageView.Extract;
import static storm.benchmark.lib.spout.pageview.PageView.Item;

/**
 * Storm DRPC "reach" benchmark: page views from Kafka are aggregated into a
 * url -&gt; distinct-user-set state, a static user -&gt; followers map is exposed
 * as read-only state, and the DRPC stream joins the two to count the reach
 * (distinct followers) of the requested URLs.
 */
public class DRPC extends StormBenchmark {
  private static final Logger LOG = Logger.getLogger(DRPC.class);

  public static final String FUNCTION = "reach";
  public static final List<String> ARGS =
          Arrays.asList("foo.com", "foo.news.com", "foo.contact.com");

  // Config keys for the DRPC server location (must be supplied by the caller).
  public static final String SERVER = "drpc.server";
  public static final String PORT = "drpc.port";

  // Component ids and parallelism config keys with their defaults.
  public static final String SPOUT_ID = "spout";
  public static final String SPOUT_NUM = "component.spout_num";
  public static final String PAGE_ID = "page";
  public static final String PAGE_NUM = "component.page_bolt_num";
  public static final String VIEW_ID = "view";
  public static final String VIEW_NUM = "component.view_bolt_num";
  public static final String USER_NUM = "component.user_bolt_num";
  public static final String FOLLOWER_NUM = "component.follower_bolt_num";

  public static final int DEFAULT_SPOUT_NUM = 4;
  public static final int DEFAULT_PAGE_BOLT_NUM = 8;
  public static final int DEFAULT_VIEW_BOLT_NUM = 8;
  public static final int DEFAULT_USER_BOLT_NUM = 4;
  public static final int DEFAULT_FOLLOWER_BOLT_NUM = 4;

  private IPartitionedTridentSpout spout;
  private String server;
  private int port;

  /**
   * Builds the Trident topology. Requires {@code drpc.server} and
   * {@code drpc.port} in the config; throws IllegalArgumentException otherwise.
   */
  @Override
  public StormTopology getTopology(Config config) {
    Object sObj = config.get(SERVER);
    if (null == sObj) {
      throw new IllegalArgumentException("must set a drpc server");
    }
    server = (String) sObj;
    config.put(Config.DRPC_SERVERS, Lists.newArrayList(server));

    Object pObj = config.get(PORT);
    if (null == pObj) {
      throw new IllegalArgumentException("must set a drpc port");
    }
    port = Utils.getInt(pObj);
    config.put(Config.DRPC_PORT, port);

    LOG.info("drpc server: " + server + "; drpc port: " + port);

    final int spoutNum = BenchmarkUtils.getInt(config, SPOUT_NUM, DEFAULT_SPOUT_NUM);
    final int pageNum = BenchmarkUtils.getInt(config, PAGE_NUM, DEFAULT_PAGE_BOLT_NUM);
    final int viewNum = BenchmarkUtils.getInt(config, VIEW_NUM, DEFAULT_VIEW_BOLT_NUM);
    final int userNum = BenchmarkUtils.getInt(config, USER_NUM, DEFAULT_USER_BOLT_NUM);
    final int followerNum = BenchmarkUtils.getInt(config, FOLLOWER_NUM, DEFAULT_FOLLOWER_BOLT_NUM);

    spout = new TransactionalTridentKafkaSpout(
            KafkaUtils.getTridentKafkaConfig(config, new SchemeAsMultiScheme(new StringScheme())));

    TridentTopology trident = new TridentTopology();

    // Persistent state: url -> set of distinct users that viewed it.
    TridentState urlToUsers =
            trident.newStream("drpc", spout).parallelismHint(spoutNum).shuffle()
            .each(new Fields(StringScheme.STRING_SCHEME_KEY), new Extract(Arrays.asList(Item.URL, Item.USER)),
                    new Fields("url", "user")).parallelismHint(pageNum)
            .groupBy(new Fields("url"))
            .persistentAggregate(new MemoryMapState.Factory(), new Fields("url", "user"), new Distinct(), new Fields("user_set"))
            .parallelismHint(viewNum);

    /** debug
     * 1. this proves that the aggregated result has successfully persisted
    urlToUsers.newValuesStream()
            .each(new Fields("url", "user_set"), new Print("(url, user_set)"), new Fields("url2", "user_set2"));
     */

    PageViewGenerator generator = new PageViewGenerator();

    // Read-only state: user -> list of followers, backed by a generated map.
    TridentState userToFollowers = trident.newStaticState(new StaticSingleKeyMapState.Factory(generator.genFollowersDB()));

    /** debug
     * 2. this proves that MemoryMapState could be read correctly
    trident.newStream("urlToUsers", new PageViewSpout(false))
            .each(new Fields("page_view"), new Extract(Arrays.asList(Item.URL)), new Fields("url"))
            .each(new Fields("url"), new Print("url"), new Fields("url2"))
            .groupBy(new Fields("url2"))
            .stateQuery(urlToUsers, new Fields("url2"), new MapGet(), new Fields("users"))
            .each(new Fields("users"), new Print("users"), new Fields("users2"));
     */

    /** debug
     * 3. this proves that StaticSingleKeyMapState could be read correctly
    trident.newStream("userToFollowers", new PageViewSpout(false))
            .each(new Fields("page_view"), new Extract(Arrays.asList(Item.USER)), new Fields("user"))
            .each(new Fields("user"), new Print("user"), new Fields("user2"))
            .stateQuery(userToFollowers, new Fields("user2"), new MapGet(), new Fields("followers"))
            .each(new Fields("followers"), new Print("followers"), new Fields("followers2"));
     */

    // DRPC pipeline: url -> viewers -> their followers -> distinct count (reach).
    trident.newDRPCStream(FUNCTION, null)
            .each(new Fields("args"), new Print("args"), new Fields("url"))
            .groupBy(new Fields("url"))
            .stateQuery(urlToUsers, new Fields("url"), new MapGet(), new Fields("users"))
            .each(new Fields("users"), new Expand(), new Fields("user")).parallelismHint(userNum)
            .groupBy(new Fields("user"))
            .stateQuery(userToFollowers, new Fields("user"), new MapGet(), new Fields("followers"))
            .each(new Fields("followers"), new Expand(), new Fields("follower")).parallelismHint(followerNum)
            .groupBy(new Fields("follower"))
            .aggregate(new One(), new Fields("one"))
            .aggregate(new Fields("one"), new Sum(), new Fields("reach"));
    return trident.build();
  }

  @Override
  public IMetricsCollector getMetricsCollector(Config config, StormTopology topology) {
    return new DRPCMetricsCollector(config, FUNCTION, ARGS, server, port);
  }

  /**
   * Read-only Trident state over a single in-memory Map; multiGet looks up
   * the first component of each key.
   */
  public static class StaticSingleKeyMapState extends ReadOnlyState implements ReadOnlyMapState<Object> {
    public static class Factory implements StateFactory {
      Map map;

      public Factory(Map map) {
        this.map = map;
      }

      @Override
      public State makeState(Map conf, IMetricsContext metrics, int partitionIndex, int numPartitions) {
        return new StaticSingleKeyMapState(map);
      }
    }

    Map map;

    public StaticSingleKeyMapState(Map map) {
      this.map = map;
    }

    @Override
    public List<Object> multiGet(List<List<Object>> keys) {
      List<Object> ret = new ArrayList();
      for (List<Object> key : keys) {
        Object singleKey = key.get(0);
        Object value = map.get(singleKey);
        LOG.debug("get " + value + " for " + singleKey);
        ret.add(value);
      }
      return ret;
    }
  }
}
#!/usr/bin/env bash
# End-to-end tests for extra-container: each section creates/updates NixOS
# containers named test-* and glob-matches the tool's output. All test
# containers are destroyed on exit.
set -euo pipefail
shopt -s nullglob

scriptDir="$(dirname "$(readlink -f "$0")")"
PATH=$scriptDir:$PATH

# Destroy every container whose name starts with "test-"; tolerant of errors.
cleanup() {
    set +e
    for container in $(extra-container list | grep ^test-); do
        extra-container destroy $container
    done
    set -e
}
trap "cleanup" EXIT

reportError() {
    echo "Error on line $1"
}
trap 'reportError $LINENO' ERR

# Glob-match $1 against pattern $2; dump both and fail on mismatch.
testMatches() {
    actual="$1"
    expected="$2"
    if [[ $actual != $expected ]]; then
        echo
        echo 'Pattern does not match'
        echo 'Expected:'
        echo "$expected"
        echo
        echo 'Actual:'
        echo "$actual"
        echo
        return 1
    fi
}

# Start from a clean slate in case a previous run left containers behind.
cleanup

#
echo "Test attr arg and container starting "
output=$(extra-container create -A 'a b' --start <<'EOF'
{
  "a b" = { config, pkgs, ... }: {
    containers.test-1 = {
      config = {};
    };
  };
}
EOF
)
testMatches "$output" "*Installing*test-1*Starting*test-1*"

#
echo "Test starting and updating"
output=$(extra-container create -s <<'EOF'
{ config, pkgs, ... }: {
  containers.test-1 = {
    config.environment.variables.foo = "a";
  };
  containers.test-2 = {
    config = {};
  };
}
EOF
)
testMatches "$output" "*Starting*test-2*Updating*test-1*"

#
echo "Test unchanged"
output=$(extra-container create -s <<'EOF'
{ config, pkgs, ... }: {
  containers.test-1 = {
    config.environment.variables.foo = "a";
  };
}
EOF
)
testMatches "$output" "*test-1 (unchanged, skipped)*"

#
echo "Test updating and restarting"
output=$(extra-container create -u <<'EOF'
{ config, pkgs, ... }: {
  containers.test-1 = {
    config.environment.variables.foo = "b";
  };
  containers.test-2 = {
    privateNetwork = true;
    config = {};
  };
}
EOF
)
testMatches "$output" "*Updating*test-1*Restarting*test-2*"

#
echo "Test manual build"
storePath=$(extra-container build <<'EOF'
{ config, pkgs, ... }: {
  containers.test-3 = {
    config = {};
  };
}
EOF
)
testMatches "$storePath" "/nix/store/*"
output=$(extra-container create -s $storePath)
testMatches "$output" "*Starting*test-3*"

#
echo "Test list"
output=$(extra-container list | grep ^test- || true)
testMatches "$output" "test-1*test-2*test-3"

#
echo "Test destroy"
# The containers created above must have state directories...
[[ $(echo /var/lib/containers/test-*) ]]
cleanup
output=$(extra-container list | grep ^test- || true)
testMatches "$output" ""
# ...and destroy must have removed them (nullglob makes this empty).
[[ ! $(echo /var/lib/containers/test-*) ]]
import { MovePoint, template_config_bullet} from "stg/entity/MovePoint";
import { Scheduler } from "stg/stage/Scheuler";
import * as Res from "stg/util/sprites";
import * as SRes from "stg/util/shaped_sprites";
import { EntityPool } from "stg/stage/EntityPool";
import { StageEntry } from "stg/stage/StageInit";
import { repeat } from "stg/data/stage/StageBase";

/**
 * Stage 000: "波与粒的境界".
 * Forever spawns rings of red oval bullets; the ring's base angle drifts
 * quadratically with the wave index, producing a spiralling wave pattern.
 */
export const stage_000: StageEntry = {
    name: "波与粒的境界",
    default_scale: 3,
    init: (time_scale: number) => {
        const ring_size = 5;
        const sprite = Res.get_middle(Res.M_Type.Oval, Res.M_Color.Red, Res.Sprite_Mode.Overlay);
        const shaped = SRes.getSSCircle(sprite, 1);

        // Launch angle: quadratic drift per wave plus an even split around the ring.
        const ring_angle = (wave: number, slot: number) =>
            0.003 / time_scale * wave * wave + Math.PI * 2 / ring_size * slot;

        // Thunk that spawns a single bullet from the origin.
        const spawn = (wave: number, slot: number) => () =>
            EntityPool.INSTANCE.add(
                new MovePoint(shaped, template_config_bullet)
                    .simpleInit(0, 0, 8 / time_scale, ring_angle(wave, slot)));

        return [new Scheduler([
            30 * time_scale,
            repeat((wave) => [
                repeat((slot) => [spawn(wave, slot)], ring_size),
                1 * time_scale
            ], Infinity)
        ])];
    }
}
#!/bin/bash
# Verify cmake is installed, then generate Unix Makefiles in .build/.
set -e

# The original wrote `if [ ! command -v cmake &> /dev/null ]`, which is an
# invalid test expression: `[` always errors out (silently, due to the
# redirect), so the missing-cmake message was never printed. Use the command's
# own exit status instead.
if ! command -v cmake &> /dev/null; then
    echo "Could not find cmake. Make sure it is installed." >&2
    # Bare `exit` would return 0 (success) here.
    exit 1
fi

# mkdir -p is idempotent, replacing the explicit existence check.
mkdir -p .build
cd .build
cmake .. -G "Unix Makefiles"
#include <bits/stdc++.h> #define endl '\n' using namespace std; int main() { // ios::sync_with_stdio(false); // cin.tie(0); int k, n; cin>>k>>n; long long v[70]={0, 1}; for(int q=1; q<=n; q++){ for(int w=q; w>=1; w--){ v[w]+=v[w-1]; } if(q>=k+1){ for(int w=0; w<n-q; w++){ cout<<" "; } for(int w=1; w<=q; w++){ cout<<v[w]<<" "; } cout<<endl; } } }
#!/bin/bash
# Launch the zippy application inside its virtualenv.
# Without `set -e`, a failed `source` silently fell through and ran
# run.py with whatever python happened to be on PATH.
set -e

source /usr/local/zippy/venv/bin/activate
python run.py
package flesch_test

import (
	"github.com/PaluMacil/flesch-index/flesch"
	"testing"
)

// TestTypeOfRune checks that TypeOfRune classifies representative runes of
// every category: vowels, consonants, whitespace, word stops (quote-like and
// punctuation that ends a word but not a sentence), digits, sentence stops,
// and "other" (non-Latin letters, em dash).
func TestTypeOfRune(t *testing.T) {
	vowels := []string{"a", "e", "i", "o", "u", "A", "E", "I", "O", "U"}
	for _, vowel := range vowels {
		r := []rune(vowel)[0]
		if flesch.TypeOfRune(r) != flesch.RuneTypeVowel {
			t.Errorf("For %v, didn't get vowel", string(r))
		}
	}
	consonants := []string{"b", "B", "L", "f", "T", "Q"}
	for _, consonant := range consonants {
		r := []rune(consonant)[0]
		if flesch.TypeOfRune(r) != flesch.RuneTypeConsonant {
			t.Errorf("For %v, didn't get consonant", string(r))
		}
	}
	whitespace := []string{"\n", "\t", " ", "\r"}
	for _, ws := range whitespace {
		r := []rune(ws)[0]
		if flesch.TypeOfRune(r) != flesch.RuneTypeWhiteSpace {
			t.Errorf("For %v, didn't get whitespace", string(r))
		}
	}
	// Word stops include a typographic right double quote (multi-byte rune).
	wordStops := []string{"”", "\"", ",", ")"}
	for _, ws := range wordStops {
		r := []rune(ws)[0]
		if flesch.TypeOfRune(r) != flesch.RuneTypeWordStop {
			t.Errorf("For %s, didn't get word stop", string(r))
		}
	}
	numbers := []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"}
	for _, number := range numbers {
		r := []rune(number)[0]
		if flesch.TypeOfRune(r) != flesch.RuneTypeNumber {
			t.Errorf("For %s, didn't get number", string(r))
		}
	}
	// Semicolon is treated as a sentence stop alongside . ! ?
	sentenceStop := []string{".", ";", "!", "?"}
	for _, stop := range sentenceStop {
		r := []rune(stop)[0]
		if flesch.TypeOfRune(r) != flesch.RuneTypeSentenceStop {
			t.Errorf("For %s, didn't get sentence stop", string(r))
		}
	}
	// CJK ideograph and em dash fall through to "other".
	others := []string{"語", "—"}
	for _, stop := range others {
		r := []rune(stop)[0]
		if flesch.TypeOfRune(r) != flesch.RuneTypeOther {
			t.Errorf("For %s, didn't get other", string(r))
		}
	}
}

// SyllableTestResult pairs a word with its expected syllable count.
type SyllableTestResult struct {
	Word     string
	Expected int
}

// TestSyllablesFromString spot-checks the syllable counter, including
// silent-e words ("care") and a double vowel ("beer").
func TestSyllablesFromString(t *testing.T) {
	testCases := []SyllableTestResult{
		{"car", 1},
		{"test", 1},
		{"beer", 1},
		{"care", 1},
		{"carrot", 2},
		{"consecrate", 3},
		{"abraham", 3},
	}
	for _, test := range testCases {
		result := flesch.SyllablesFromString(test.Word)
		if test.Expected != result {
			t.Errorf("%s: expected %d syllables, got %d", test.Word, test.Expected, result)
		}
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.smartloli.kafka.eagle.web.controller;

import java.util.concurrent.Callable;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import org.smartloli.kafka.eagle.common.util.KConstants;
import org.smartloli.kafka.eagle.web.service.BScreenService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.ModelAndView;

/**
 * Big screen controller to viewer data.
 *
 * @author smartloli.
 *
 *         Created by Aug 28, 2019.
 *
 */
@Controller
public class BScreenController {

	@Autowired
	private BScreenService bscreen;

	/** Big screen viewer. */
	@RequestMapping(value = "/bs", method = RequestMethod.GET)
	public ModelAndView indexView(HttpSession session) {
		ModelAndView mav = new ModelAndView();
		mav.setViewName("/bscreen/bscreen");
		return mav;
	}

	/**
	 * Read the cluster alias stored in the HTTP session. Kept outside the
	 * try/catch (as in the original handlers) so a missing attribute still
	 * propagates as an unchecked exception.
	 */
	private String clusterAlias(HttpServletRequest request) {
		HttpSession session = request.getSession();
		return session.getAttribute(KConstants.SessionAlias.CLUSTER_ALIAS).toString();
	}

	/**
	 * Fetch data from the supplier and write it to the response as bytes.
	 * Any exception is logged to stderr, matching the previous per-handler
	 * try/catch behavior.
	 */
	private void writeResponse(HttpServletResponse response, Callable<String> data) {
		try {
			byte[] output = data.call().getBytes();
			BaseController.response(output, response);
		} catch (Exception ex) {
			ex.printStackTrace();
		}
	}

	/** Get producer and consumer real rate data by ajax. */
	@RequestMapping(value = "/bs/brokers/ins/outs/realrate/ajax", method = RequestMethod.GET)
	public void getProducerAndConsumerRealRateAjax(HttpServletResponse response, HttpServletRequest request) {
		String clusterAlias = clusterAlias(request);
		writeResponse(response, () -> bscreen.getProducerAndConsumerRate(clusterAlias));
	}

	/** Get the total topic log size by ajax. */
	@RequestMapping(value = "/bs/topic/total/logsize/ajax", method = RequestMethod.GET)
	public void getTopicTotalLogSizeAjax(HttpServletResponse response, HttpServletRequest request) {
		String clusterAlias = clusterAlias(request);
		writeResponse(response, () -> bscreen.getTopicTotalLogSize(clusterAlias));
	}

	/** Get producer history bar data by ajax. */
	@RequestMapping(value = "/bs/{type}/history/ajax", method = RequestMethod.GET)
	public void getProducerOrConsumerHistoryAjax(@PathVariable("type") String type, HttpServletResponse response, HttpServletRequest request) {
		String clusterAlias = clusterAlias(request);
		writeResponse(response, () -> bscreen.getProducerOrConsumerHistory(clusterAlias, type));
	}

	/** Get today consumer and producer data and lag data by ajax. */
	@RequestMapping(value = "/bs/{dtype}/day/ajax", method = RequestMethod.GET)
	public void getTodayOrHistoryConsumerProducerAjax(@PathVariable("dtype") String dtype, HttpServletResponse response, HttpServletRequest request) {
		String clusterAlias = clusterAlias(request);
		writeResponse(response, () -> bscreen.getTodayOrHistoryConsumerProducer(clusterAlias, dtype));
	}
}
// Client-side socket.io glue for the domotica dashboard: emits light-toggle
// and RGB events from the UI, and reacts to server pushes (camera stream,
// button state, room-entry alerts, color changes, temperature readings).

// Make connection
let socket = io.connect(window.location.hostname + ':' + 80);

/* Event emitters */

// Light emitter event
$(".lightBtn").on('click', (e) => {
  // BUG FIX: was `e.preventDefault;` — a bare property access that never
  // invoked the method. It must be called to suppress the default action.
  e.preventDefault();
  let btn = e.target.id;
  console.log("boton: ", btn);
  // The button toggles: after this click it is "on" iff it was not pressed.
  const status = $('#' + btn).attr('aria-pressed') != 'true';
  console.log('Button status:', status);
  // Emit event to server with the according status
  socket.emit('toggleLight', { status: status, btnNum: btn });
});

// RGB control event
$(".colorPicker").on('change', (e) => {
  // BUG FIX: same missing-call defect as above.
  e.preventDefault();
  // Separates the hex into 3, the RGB values in hex
  let cpid = e.target.id;
  let btnRGB = document.getElementById(cpid).value;
  let hexvals = btnRGB.split("#");
  let hexval = hexvals[1].match(/.{1,2}/g);
  let color = "#";
  // Inverts each hex portion (255 - component), zero-padding to two digits.
  hexval.forEach(element => {
    let digi = (255 - (parseInt(element, 16))).toString(16);
    if (digi.length < 2) {
      color += "0" + digi;
    } else {
      color += digi;
    }
  });
  // Emitts an event to the server
  console.log('color: ' + color);
  socket.emit('RGBcontrol', { invHex: color, origHex: btnRGB, id: cpid });
});

/* Event listeners */

// Listen for stream: each message carries one camera frame as an image URI.
socket.on("playStream", (image) => {
  document.getElementById("play").src = image;
});

// Light class button listener: mirror the server-confirmed state in the UI
// and play the matching on/off sound.
socket.on('toggleBtn', (data) => {
  console.log('listener status:', data.status);
  console.log('listener btn:', data.btnNum);
  if (data.status == true) {
    $('#' + data.btnNum).attr('aria-pressed', 'true');
    $('#' + data.btnNum).addClass('active');
    document.getElementById("soundON" + data.btnNum).play();
  } else {
    $('#' + data.btnNum).attr('aria-pressed', 'false');
    $('#' + data.btnNum).removeClass('active');
    document.getElementById("soundOFF" + data.btnNum).play();
  }
});

// Input pin listener: someone entered the room — log it, sound the alert,
// and force the RGB strip to red.
socket.on('inputEvent', () => {
  let dt = new Date();
  let time = dt.getHours() + ":" + dt.getMinutes() + ":" + dt.getSeconds();
  $('#logRoom').prepend("<div class='alert alert-info'>Se ingresó al cuarto el " + time + "</div>");
  document.getElementById("soundAlert").play();
  // Emitts an event to the server
  socket.emit('RGBcontrol', { origHex: "#FF0000" });
  $('#CP1').val("#FF0000");
});

// Color change listener: keep all clients' pickers in sync.
socket.on('colorChangeInput', (data) => {
  let id = data.id;
  let color = data.origHex;
  document.getElementById("colorChange").play();
  // Sets the color picker's color to the one the emitter sent
  $('#' + id).val(color);
});

// Temperature listener: render the reading in °C / °F / °K.
socket.on('showtemperature', (temp) => {
  document.getElementById("temp").innerHTML = "<div class='alert alert-warning'>La temperatura del cuarto es: " + temp.C + "°C / " + temp.F + "°F / " + temp.K + "°K</div>";
})
import torch
import logging.config
import math
from math import floor
from copy import deepcopy
from six import string_types
from .regime import Regime
from .param_filter import FilterParameters
from . import regularization
import torch.nn as nn
from torch.optim.lr_scheduler import _LRScheduler

# Name -> class lookup tables built from torch's public optimizer/scheduler modules.
_OPTIMIZERS = {name: func for name, func in torch.optim.__dict__.items()}
_LRSCHEDULERS = {name: func for name, func in torch.optim.lr_scheduler.__dict__.items()}

try:
    from adabound import AdaBound
    _OPTIMIZERS['AdaBound'] = AdaBound
except ImportError:
    pass


def cosine_anneal_lr(lr0, lrT, T, t0=0):
    """Return a `step_lambda` source string implementing cosine annealing
    from lr0 (at t0) down to lrT (at T)."""
    return f"lambda t: {{'lr': {lrT} + {(lr0 - lrT)} * (1 + math.cos(math.pi * (t - {t0}) / {T-t0})) / 2}}"


def linear_scale_lr(lr0, lrT, T, t0=0):
    """Return a `step_lambda` source string implementing a linear ramp from
    lr0 towards lrT over T steps, clamped at 0."""
    rate = (lrT - lr0) / T
    return f"lambda t: {{'lr': max({lr0} + (t - {t0}) * {rate}, 0)}}"


class _EmptySchedule(torch.optim.lr_scheduler._LRScheduler):
    """A no-op scheduler used when no lr_scheduler is configured."""

    def __init__(self, optimizer, last_epoch=-1):
        super(_EmptySchedule, self).__init__(optimizer, last_epoch=-1)
        self.last_epoch = 0

    def step(self, epoch=None):
        # NOTE(review): the computed epoch is discarded and last_epoch never
        # advances — presumably intentional for a placeholder schedule; confirm.
        if epoch is None:
            epoch = self.last_epoch + 1


def copy_params(param_target, param_src):
    """Copy values from param_src into param_target, element-wise, no grad."""
    with torch.no_grad():
        for p_src, p_target in zip(param_src, param_target):
            p_target.copy_(p_src)


def copy_params_grad(param_target, param_src):
    """Copy (or accumulate via backward) gradients from param_src into
    param_target, converting dtype as needed."""
    for p_src, p_target in zip(param_src, param_target):
        if p_target.grad is None:
            p_target.backward(p_src.grad.to(dtype=p_target.dtype))
        else:
            p_target.grad.detach().copy_(p_src.grad)


class ModuleFloatShadow(nn.Module):
    """Wraps a module with a float32 deep copy; `parameters()` etc. expose the
    float copy while `original_*` accessors expose the wrapped module."""

    def __init__(self, module):
        super(ModuleFloatShadow, self).__init__()
        self.original_module = module
        self.float_module = deepcopy(module)
        self.float_module.to(dtype=torch.float)

    def parameters(self, *kargs, **kwargs):
        return self.float_module.parameters(*kargs, **kwargs)

    def named_parameters(self, *kargs, **kwargs):
        return self.float_module.named_parameters(*kargs, **kwargs)

    def modules(self, *kargs, **kwargs):
        return self.float_module.modules(*kargs, **kwargs)

    def named_modules(self, *kargs, **kwargs):
        return self.float_module.named_modules(*kargs, **kwargs)

    def original_parameters(self, *kargs, **kwargs):
        return self.original_module.parameters(*kargs, **kwargs)

    def original_named_parameters(self, *kargs, **kwargs):
        return self.original_module.named_parameters(*kargs, **kwargs)

    def original_modules(self, *kargs, **kwargs):
        return self.original_module.modules(*kargs, **kwargs)

    def original_named_modules(self, *kargs, **kwargs):
        return self.original_module.named_modules(*kargs, **kwargs)


class OptimRegime(Regime):
    """
    Reconfigures the optimizer according to setting list.
    Exposes optimizer methods - state, step, zero_grad, add_param_group

    Examples for regime:

    1)  "[{'epoch': 0, 'optimizer': 'Adam', 'lr': 1e-3},
          {'epoch': 2, 'optimizer': 'Adam', 'lr': 5e-4},
          {'epoch': 4, 'optimizer': 'Adam', 'lr': 1e-4},
          {'epoch': 8, 'optimizer': 'Adam', 'lr': 5e-5}
         ]"
    2)
        "[{'step_lambda':
            "lambda t: {
            'optimizer': 'Adam',
            'lr': 0.1 * min(t ** -0.5, t * 4000 ** -1.5),
            'betas': (0.9, 0.98), 'eps':1e-9}
         }]"
    """

    def __init__(self, model, regime, defaults={}, filter=None,
                 use_float_copy=False, log=True):
        super(OptimRegime, self).__init__(regime, defaults)
        if filter is not None:
            model = FilterParameters(model, **filter)
        if use_float_copy:
            model = ModuleFloatShadow(model)
            self._original_parameters = list(model.original_parameters())
        self.parameters = list(model.parameters())
        # Placeholder optimizer; `adjust` replaces it per the regime settings.
        self.optimizer = torch.optim.SGD(self.parameters, lr=0)
        self.regularizer = regularization.Regularizer(model)
        self.use_float_copy = use_float_copy
        self.lr_scheduler = _EmptySchedule(self.optimizer, last_epoch=-1)
        self.schedule_time_frame = 'epoch'
        self.log = log

    def update(self, epoch=None, train_steps=None, metrics=None):
        """adjusts optimizer according to current epoch or steps and training
        regime.
        """
        updated = False
        if super(OptimRegime, self).update(epoch, train_steps):
            self.adjust(self.setting)
            updated = True
        if self.schedule_time_frame == 'epoch':
            time = int(floor(epoch)) + 1
        elif self.schedule_time_frame == 'step':
            time = train_steps + 1
        else:
            raise ValueError
        if time != self.lr_scheduler.last_epoch:
            prev_lr = self.get_lr()[0]
            # BUG FIX: the two step() calls were previously unconditional, so a
            # ReduceLROnPlateau scheduler was stepped twice and the second call
            # (step(epoch=time)) lacked the required `metrics` positional arg,
            # raising TypeError. Only one branch must run.
            if isinstance(self.lr_scheduler, torch.optim.lr_scheduler.ReduceLROnPlateau):
                self.lr_scheduler.step(metrics, epoch=time)
            else:
                self.lr_scheduler.step(epoch=time)
            updated = True
            if prev_lr != self.get_lr()[0] and self.log:
                logging.debug('OPTIMIZER - lr scheduled = %s'
                              % self.get_lr()[0])
        return updated

    def adjust(self, setting):
        """adjusts optimizer according to a setting dict.
        e.g: setting={optimizer': 'Adam', 'lr': 5e-4}
        """
        reset = setting.get('reset', False)
        if 'optimizer' in setting or reset:
            optim_method = _OPTIMIZERS[setting.get('optimizer', 'SGD')]
            if reset:
                # reset the optimizer cache:
                self.optimizer = torch.optim.SGD(self.parameters, lr=0)
                if self.log:
                    logging.debug('OPTIMIZER - reset setting')
            if not isinstance(self.optimizer, optim_method):
                self.optimizer = optim_method(self.optimizer.param_groups)
                if self.log:
                    logging.debug('OPTIMIZER - setting method = %s' %
                                  setting['optimizer'])
        for param_group in self.optimizer.param_groups:
            for key in param_group.keys():
                if key in setting:
                    new_val = setting[key]
                    if new_val != param_group[key]:
                        if self.log:
                            logging.debug('OPTIMIZER - setting %s = %s' %
                                          (key, setting[key]))
                        param_group[key] = setting[key]
                        if key == 'lr':
                            # Keep scheduler base lrs in sync with the new lr.
                            param_group['initial_lr'] = param_group['lr']
                            base_lrs = list(map(lambda group: group['lr'],
                                                self.optimizer.param_groups))
                            self.lr_scheduler.base_lrs = base_lrs
                            # fix for AdaBound
                            if hasattr(self.optimizer, 'base_lrs'):
                                self.optimizer.base_lrs = base_lrs
        if 'regularizer' in setting:
            reg_list = deepcopy(setting['regularizer'])
            if not (isinstance(reg_list, list) or isinstance(reg_list, tuple)):
                reg_list = (reg_list,)
            regularizers = []
            for reg in reg_list:
                if isinstance(reg, dict):
                    name = reg.pop('name')
                    regularizers.append((regularization.__dict__[name], reg))
                elif isinstance(reg, regularization.Regularizer):
                    regularizers.append(reg)
                else:  # callable on model
                    regularizers.append(reg(self.regularizer._model))
            self.regularizer = regularization.RegularizerList(
                self.regularizer._model, regularizers)
        if 'lr_scheduler' in setting:
            schedule_config = setting['lr_scheduler']
            if isinstance(schedule_config, _LRScheduler):
                self.lr_scheduler = schedule_config
            elif isinstance(schedule_config, dict):
                name = schedule_config.pop('name')
                self.schedule_time_frame = schedule_config.pop('time_frame',
                                                               'epoch')
                schedule_config['last_epoch'] = self.lr_scheduler.last_epoch
                self.lr_scheduler = _LRSCHEDULERS[name](self.optimizer,
                                                        **schedule_config)
            elif schedule_config is None:
                self.lr_scheduler = _EmptySchedule(
                    self.optimizer, last_epoch=self.lr_scheduler.last_epoch)
            else:  # invalid config
                raise NotImplementedError

    def __getstate__(self):
        return {
            'optimizer_state': self.optimizer.__getstate__(),
            'regime': self.regime,
        }

    def __setstate__(self, state):
        self.regime = state.get('regime')
        self.optimizer.__setstate__(state.get('optimizer_state'))

    def state_dict(self):
        """Returns the state of the optimizer as a :class:`dict`.
        """
        return self.optimizer.state_dict()

    def load_state_dict(self, state_dict):
        """Loads the optimizer state.

        Arguments:
            state_dict (dict): optimizer state. Should be an object returned
                from a call to :meth:`state_dict`.
        """
        # deepcopy, to be consistent with module API
        self.optimizer.load_state_dict(state_dict)

    def zero_grad(self):
        """Clears the gradients of all optimized :class:`Variable` s."""
        self.optimizer.zero_grad()
        if self.use_float_copy:
            for p in self._original_parameters:
                if p.grad is not None:
                    p.grad.detach().zero_()

    def step(self):
        """Performs a single optimization step (parameter update).
        """
        if self.use_float_copy:
            copy_params_grad(self.parameters, self._original_parameters)
        self.regularizer.pre_step()
        self.optimizer.step()
        self.regularizer.post_step()
        if self.use_float_copy:
            copy_params(self._original_parameters, self.parameters)

    def pre_forward(self):
        """ allows modification pre-forward pass - e.g for regularization
        """
        self.regularizer.pre_forward()

    def pre_backward(self):
        """ allows modification post-forward pass and pre-backward - e.g for
        regularization
        """
        self.regularizer.pre_backward()

    def get_value(self, key):
        return [group[key] for group in self.optimizer.param_groups]

    def get_lr(self):
        return self.get_value('lr')


class MultiOptimRegime(OptimRegime):
    """Aggregates several OptimRegime instances and fans every call out to
    each of them in order."""

    def __init__(self, *optim_regime_list, log=True):
        self.optim_regime_list = []
        for optim_regime in optim_regime_list:
            assert isinstance(optim_regime, OptimRegime)
            self.optim_regime_list.append(optim_regime)
        self.log = log

    def update(self, epoch=None, train_steps=None):
        """adjusts optimizer according to current epoch or steps and training
        regime.
        """
        updated = False
        for i, optim in enumerate(self.optim_regime_list):
            current_updated = optim.update(epoch, train_steps)
            if current_updated and self.log:
                logging.debug('OPTIMIZER #%s was updated' % i)
            updated = updated or current_updated
        return updated

    def zero_grad(self):
        """Clears the gradients of all optimized :class:`Variable` s."""
        for optim in self.optim_regime_list:
            optim.zero_grad()

    def step(self):
        """Performs a single optimization step (parameter update).
        """
        for optim in self.optim_regime_list:
            optim.step()

    def pre_forward(self):
        for optim in self.optim_regime_list:
            optim.pre_forward()

    def pre_backward(self):
        for optim in self.optim_regime_list:
            optim.pre_backward()

    def __repr__(self):
        return str([str(optim) for optim in self.optim_regime_list])

    def get_value(self, key):
        return [[group[key] for group in optim.optimizer.param_groups]
                for optim in self.optim_regime_list]

    def get_lr(self):
        return self.get_value('lr')
#!/bin/bash
#
# Copyright (c) 2015 Red Hat, Inc
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.

# Captures live OpenShift API responses into mock_jsons/<version>/ so they can
# be replayed as test fixtures.
set -ex

# /*
#  * CONFIGURATION
#  */
#
# You should set these opts:
#
# export OS_PROXY_URL="http://auth_proxy:9443"
# export OS_URL="https://openshift_master:8443"
# export CURL_AUTH_OPTS="-u test_user:password"
# export OS_BUILD="name-of-build-within-openshift"

OS_VERSION="${OS_VERSION:-3.9.41}"

# URLs derived from the optional auth proxy endpoint.
if [ -n "$OS_PROXY_URL" ] ; then
    HTTPD_OSAPI_URL="${OS_PROXY_URL}/osapi/v1beta3"
    HTTPD_AUTH_URL="${OS_PROXY_URL}/oauth/authorize?client_id=openshift-challenging-client&response_type=token"
fi

# URLs derived from the optional direct master endpoint.
if [ -n "$OS_URL" ] ; then
    OS_OSAPI_URL="${OS_URL}/osapi/v1beta3"
    OS_AUTH_URL="${OS_URL}/oauth/authorize?client_id=openshift-challenging-client&response_type=token"
fi

CURL_OPTS="--insecure -vsS"
ACCESS_TOKEN=""
CURL="curl ${CURL_OPTS}"
CURL_AUTH="${CURL} ${CURL_AUTH_OPTS}"

# /*
#  * RUNTIME
#  */

DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
STORE_PATH="${DIR}/mock_jsons/${OS_VERSION}"
mkdir -p $STORE_PATH

# Read stdin and store it under $1.$2 (extension defaults to json);
# JSON payloads are pretty-printed via python's json.tool first.
save_output_to() {
    local content=$(cat)
    local ext=${2:-json}
    local file_name="${1}.${ext}"
    if [ "$ext" == "json" ] ; then
        echo "$content" | python -m json.tool >${STORE_PATH}/${file_name}
    else
        echo "$content" >${STORE_PATH}/${file_name}
    fi
}

# Hit the OAuth endpoint, record the raw exchange, and extract the
# access_token query fragment for later API calls.
set_access_token() {
    if [ -n "$HTTPD_AUTH_URL" ] ; then
        local curl_output="$(${CURL_AUTH} ${HTTPD_AUTH_URL} 2>&1)"
        printf "${curl_output}" | save_output_to authorize txt
        ACCESS_TOKEN=`${CURL_AUTH} ${HTTPD_AUTH_URL} 2>&1 | egrep -o 'access_token=[^&]+'`
    fi
}

set_access_token

# Capture the user, build list, one build's detail, and a PUT round-trip.
if [ -n "$OS_OSAPI_URL" ] ; then
    $CURL "${OS_OSAPI_URL}/users/~?${ACCESS_TOKEN}" | save_output_to "get_user"
    $CURL $OS_OSAPI_URL/namespaces/default/builds/ | save_output_to "builds_list"
    $CURL $OS_OSAPI_URL/namespaces/default/builds/${OS_BUILD}/ | save_output_to "build_${OS_BUILD}"
    $CURL -X PUT -H "Expect:" -H "Content-Type: application/json" -d @${STORE_PATH}/build_${OS_BUILD}.json $OS_OSAPI_URL/namespaces/default/builds/${OS_BUILD} | save_output_to "build_put"
fi
#! /bin/bash export REMOTE_USER=ssg export REMOTE_IP=100.64.176.19 export cinventory=demo_scenarios/common/common.yaml export playbook=owca/workloads/run_workloads.yaml ansible-playbook -l $REMOTE_IP -i $cinventory $playbook --tags=clean_jobs -v ansible -u $REMOTE_USER -b all -i $REMOTE_IP, -msystemd -a'name=owca state=stopped' ansible -u $REMOTE_USER -b all -i $REMOTE_IP, -a'rm -f /var/lib/owca/lc-util.csv /var/lib/owca/workload-meta.json /var/lib/owca/workload-data.csv /var/lib/owca/threshold.json' ansible-playbook -l $REMOTE_IP -i demo_scenarios/complex_mbw.0/inventory.yaml -i $cinventory $playbook --tags=specjbb,tensorflow_benchmark_prediction,tensorflow_benchmark_train,cassandra_stress
docker exec -i schema-registry /usr/bin/kafka-avro-console-producer --topic ratings --broker-list broker:9092 --property value.schema="$(< src/main/avro/rating.avsc)"
package view import ( "time" ) const ( // ConfigFormatToml .. ConfigFormatToml = "toml" // ConfigFormatYaml .. ConfigFormatYaml = "yaml" // INI格式 ConfigFormatINI = "ini" // ConfigureUsedType .. ConfigureUsedTypeSupervisor = 1 ConfigureUsedTypeSystemd = 2 ) var ( // ConfigFormats Verified list ConfigFormats = []ConfigFormat{ConfigFormatToml, ConfigFormatYaml, ConfigFormatINI} ) type ( // ReqListConfig .. ReqListConfig struct { AppName string `query:"app_name" validate:"required"` Env string `query:"env" validate:"required"` } // RespListConfig .. RespListConfig []RespListConfigItem // RespListConfigItem Does not contain configuration content to prevent configuration from being too long RespListConfigItem struct { ID uint `json:"id"` AID uint `json:"aid"` Name string `json:"name"` Format string `json:"format"` // Yaml/Toml Env string `json:"env"` // 环境 Zone string `json:"zone"` // 机房Zone CreatedAt time.Time `json:"created_time"` UpdatedAt time.Time `json:"update_time"` DeletedAt *time.Time `json:"deleted_at"` PublishedAt *time.Time `json:"published"` // 未发布/发布时间 ConfigStatus uint32 `json:"config_status"` // 配置发布状态,0:未知 1:已发布 2:未发布 } // ReqDetailConfig .. ReqDetailConfig struct { ID uint `json:"id" validate:"required"` } // RespDetailConfig Contains configuration content RespDetailConfig struct { ID uint `json:"id"` AID uint `json:"aid"` Name string `json:"name"` Content string `json:"content"` Format string `json:"format"` // Yaml/Toml Env string `json:"env"` // 环境 Zone string `json:"zone"` // 机房Zone CreatedAt time.Time `json:"created_time"` UpdatedAt time.Time `json:"update_time"` PublishedAt *time.Time `json:"published"` // 未发布/发布时间 CurrentEditUser *User `json:"current_edit_user"` //当前正在编辑的用户名 } // ReqCreateConfig .. 
ReqCreateConfig struct { AppName string `json:"app_name" validate:"required"` Env string `json:"env" validate:"required"` Zone string `json:"zone" validate:"required"` FileName string `json:"file_name" validate:"required"` // 文件名(不带后缀) Format ConfigFormat `json:"format" validate:"required,oneof=yaml toml ini"` // 格式后缀名(比如: toml, yaml) } // ReqUpdateConfig .. ReqUpdateConfig struct { ID uint `json:"id" validate:"required"` Message string `json:"message" validate:"required"` Content string `json:"content" validate:"required"` } // ReqPublishConfig .. ReqPublishConfig struct { ID uint `json:"id" validate:"required"` // 配置ID HostName []string `json:"host_name"` // 发布的实例机器名称的列表,如果为空,则发布所有机器 Version *string `json:"version"` // 版本号 PubK8S bool `json:"pub_k8s"` // 是否发布集群 } // ReqHistoryConfig .. ReqHistoryConfig struct { ID uint `json:"id" validate:"required"` // 配置文件ID Size uint `json:"size" validate:"required"` Page uint `json:"page"` } // RespHistoryConfig .. RespHistoryConfig struct { Pagination Pagination `json:"pagination"` List []RespHistoryConfigItem `json:"list"` } // RespHistoryConfigItem .. RespHistoryConfigItem struct { ID uint `json:"id"` UID uint `json:"uid"` // 发布人ID AccessTokenID uint `json:"access_token_id"` AccessTokenName string `json:"access_token_name"` UserName string `json:"user_name"` ChangeLog string `json:"change_log"` ConfigurationID uint `json:"configuration_id"` Version string `json:"version"` // 发布到Juno Proxy的版本号 CreatedAt time.Time `json:"created_at"` } // ReqDiffConfig .. ReqDiffConfig struct { ID uint `query:"id"` // 配置ID HistoryID uint `query:"history_id"` // 版本ID AppName string `query:"appName"` // 应用名称 Env string `query:"env"` // 环境 ServiceVersion string `query:"serviceVersion"` // 服务器版本 PublishVersion string `query:"publishVersion"` // 发布版本 } // RespDiffConfig .. RespDiffConfig struct { Origin *RespDetailConfig `json:"origin,omitempty"` Modified RespDetailConfig `json:"modified"` } // ReqDiffReleaseConfig .. 
ReqDiffReleaseConfig struct { AppName string `query:"appName" validate:"required"` // 应用名 Env string `query:"env" validate:"required"` // 环境 ConfigName string `query:"configName"` // 配置文件名称 IpList []string `query:"ipList"` // ips } // RespDiffReleaseConfig .. RespDiffReleaseConfig struct { HasNew bool `json:"hasNew" query:"hasNew"` // 服务器配置与发布配置是否一致;true:有更新,false:没更新 DiffUrlList []DiffUrlList `json:"diffUrlList" query:"diffUrlList"` } DiffUrlList struct { Name string `json:"name" query:"name"` // 配置文件名称 DiffUrl string `json:"diffUrl" query:"diffUrl"` // diff链接地址 } // ReqDeleteConfig .. ReqDeleteConfig struct { ID uint `json:"id" validate:"required"` } ReqConfigInstanceList struct { ConfigurationID uint `json:"id" query:"id" validate:"required"` Env string `json:"env" query:"env" validate:"required"` ZoneCode string `json:"zone_code" query:"zone_code" validate:"required"` } ReqAppAction struct { Action string `json:"action" query:"action" validate:"required"` Typ uint `json:"typ" query:"typ" validate:"required"` NodeName string `json:"node_name" query:"node_name" validate:"required"` AppName string `json:"app_name" query:"app_name" validate:"required"` ZoneCode string `json:"zone_code" query:"zone_code" validate:"required"` Env string `json:"env" query:"env" validate:"required"` } RespAppAction struct { Code int `json:"code"` Msg string `json:"msg"` Data interface{} `json:"data"` } RespConfigInstanceList []RespConfigInstanceItem // RespConfigInstanceItem .. 
RespConfigInstanceItem struct { ConfigurationStatusID uint `json:"configuration_status_id"` Env string `json:"env"` IP string `json:"ip"` HostName string `json:"host_name"` DeviceID int `json:"device_id"` RegionCode string `json:"region_code"` RegionName string `json:"region_name"` ZoneCode string `json:"zone_code"` ZoneName string `json:"zone_name"` ConfigFilePath string `json:"config_file_path"` ConfigFileUsed uint `json:"config_file_used"` // 1 supervisor 2 systemd ConfigFileSynced uint `json:"config_file_synced"` ConfigFileTakeEffect uint `json:"config_file_take_effect"` SyncAt string `json:"sync_at"` ChangeLog string `json:"change_log"` Version string `json:"version"` // 发布到Juno Proxy的版本号 } // ConfigFormat .. ConfigFormat string // ReqConfigPublish configuration publish request struct ReqConfigPublish struct { AppName string `json:"app_name"` ZoneCode string `json:"zone_code"` Port string `json:"port"` FileName string `json:"file_name"` Format string `json:"format"` Content string `json:"content"` InstanceList []string `json:"instance_list"` Env string `json:"env"` Version string `json:"version"` PubK8S bool `json:"pub_k8s"` } ReqReadInstanceConfig struct { ConfigID uint `query:"id" validate:"required"` HostName string `query:"host_name" validate:"required"` } ReqLockConfig struct { ConfigID uint `query:"id" validate:"required"` } RespReadInstanceConfigItem struct { ConfigID uint `json:"config_id"` FileName string `json:"file_name"` Content string `json:"content"` Error string `json:"error"` } // ConfigurationPublishData .. ConfigurationPublishData struct { Content string `json:"content"` Metadata Metadata `json:"metadata"` } // Metadata .. Metadata struct { Timestamp int64 `json:"timestamp"` Version string `json:"version"` Format string `json:"format"` Paths []string `json:"paths"` } // ConfigurationStatus .. 
ConfigurationStatus struct { // etcd store data FileName string `json:"file_name"` Version string `json:"md5"` Hostname string `json:"hostname"` Env string `json:"env"` Timestamp int64 `json:"timestamp"` IP string `json:"ip"` HealthPort string `json:"health_port"` // attach key ZoneCode string `json:"zone_code"` EffectVersion string `json:"effect_version"` } // JunoAgent .. JunoAgent struct { HostName string `json:"host_name"` IPPort string `json:"ip"` } // UsedStatusResp .. UsedStatusResp struct { IsUsed int `json:"is_used"` } // EnvStatic .. EnvStatic struct { Env string `json:"env"` Cnt int `json:"cnt"` } // CmcCnt .. CmcCnt struct { DayTime string `json:"day_time" gorm:"day_time"` Cnt int `gorm:"cnt" json:"cnt"` } AppAction struct { Action string `json:"action" query:"action"` AppName string `json:"app_name" query:"app_name"` NodeName string `json:"node_name" query:"node_name"` Typ uint `json:"typ" query:"typ"` } // ClusterInfo .. ClusterInfo struct { Name string `json:"name"` Env []string `json:"env"` ZoneCode string `json:"zone_code"` ZoneName string `json:"zone_name"` } // ClusterList .. ClusterList struct { List []ClusterInfo `json:"list"` } // ClusterConfigInfo .. ClusterConfigInfo struct { Doc string `json:"doc"` SyncStatus string `json:"sync_status"` ChangeLog string `json:"change_log"` Version string `json:"version"` // 发布到Juno Proxy的版本号 CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` ConfigFilePath string `json:"config_file_path"` } ) //CheckConfigFormat 检查配置文件格式是否符合要求 func CheckConfigFormat(format ConfigFormat) bool { for _, item := range ConfigFormats { if item == format { return true } } return false }
SCRIPT_NAME=elf OUTPUT_FORMAT="elf32-littlemips" BIG_OUTPUT_FORMAT="elf32-bigmips" LITTLE_OUTPUT_FORMAT="elf32-littlemips" TEXT_START_ADDR=0x0400000 DATA_ADDR=0x10000000 MAXPAGESIZE=0x40000 NONPAGED_TEXT_START_ADDR=0x0400000 OTHER_READONLY_SECTIONS='.reginfo : { *(.reginfo) }' OTHER_READWRITE_SECTIONS=' _gp = . + 0x8000; .lit8 : { *(.lit8) } .lit4 : { *(.lit4) } ' TEXT_START_SYMBOLS='_ftext = . ;' DATA_START_SYMBOLS='_fdata = . ;' OTHER_BSS_SYMBOLS='_fbss = .;' EXECUTABLE_SYMBOLS='_DYNAMIC_LINK = 0;' OTHER_SECTIONS=' .gptab.sdata : { *(.gptab.data) *(.gptab.sdata) } .gptab.sbss : { *(.gptab.bss) *(.gptab.sbss) } ' ARCH=mips TEMPLATE_NAME=elf32 GENERATE_SHLIB_SCRIPT=yes DYNAMIC_LINK=false
<filename>raspberrypi.js var five = require("raspi-io"); var Firebase = require("firebase"); var board = new five.Board(); var firebase = new Firebase("https://robots.firebaseio.com/robots"); board.on("ready", function() { var red = new five.Button(24); var blue = new five.Button(26); red.on("up", function () { firebase.child("red").once("value", function(snap) { firebase.child("red").set(snap.val() + 1); }); blue.on("up", function () { firebase.child("blue").once("value", function(snap) { firebase.child("blue").set(snap.val() + 1); }); });
package com.alipay.api.domain;

import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;

/**
 * Query invoice information by external order number.
 *
 * @author <NAME>
 * @since 1.0, 2020-08-29 14:30:33
 */
public class AlipayEbppInvoiceOrderQueryModel extends AlipayObject {

	private static final long serialVersionUID = 6198278758985856349L;

	/**
	 * The merchant's first-level short name, identifying the merchant brand;
	 * corresponds to the "merchant brand short name" entered at merchant
	 * onboarding. E.g. KFC for Kentucky Fried Chicken.
	 */
	@ApiField("m_short_name")
	private String mShortName;

	/**
	 * The order number supplied when the invoice was requested; not limited to
	 * Alipay transaction order numbers. E.g. 20200520110046966071.
	 */
	@ApiField("order_no")
	private String orderNo;

	/**
	 * The merchant's second-level short name, identifying a branch (e.g. a
	 * store) under the brand; corresponds to the "merchant store short name"
	 * entered at merchant onboarding. E.g. KFC-HZ-19003 for the KFC store on
	 * Wenyi West Road, Xihu District, Hangzhou.
	 * Requirement: "brand short name + store short name" is the unique
	 * identifier of a merchant and its branch, and must not be duplicated.
	 */
	@ApiField("sub_m_short_name")
	private String subMShortName;

	public String getmShortName() {
		return this.mShortName;
	}
	public void setmShortName(String mShortName) {
		this.mShortName = mShortName;
	}
	public String getOrderNo() {
		return this.orderNo;
	}
	public void setOrderNo(String orderNo) {
		this.orderNo = orderNo;
	}
	public String getSubMShortName() {
		return this.subMShortName;
	}
	public void setSubMShortName(String subMShortName) {
		this.subMShortName = subMShortName;
	}

}
#!/usr/bin/env bash

# Resolve, download (or restore from cache) and activate the node runtime
# requested by the app's package.json. Relies on helpers (`status`, `protip`)
# and variables ($BUILD_DIR, $bp_dir, $NODE_INSTALL_TARGET,
# $NODE_INSTALL_TARGET_CACHE) defined by the surrounding buildpack.

# Look in package.json's engines.node field for a semver range
semver_range=$(cat $BUILD_DIR/package.json | $bp_dir/vendor/jq -r .engines.node)

# Resolve node version using semver.io
node_version=$(curl --silent --get --data-urlencode "range=${semver_range}" https://semver.io/node/resolve)

# Recommend using semver ranges in a safe manner
if [[ "$semver_range" == "null" ]]; then
  protip "Specify a node version in package.json"
  semver_range=""
elif [[ "$semver_range" == "*" ]]; then
  protip "Avoid using semver ranges like '*' in engines.node"
elif [[ ${semver_range:0:1} == ">" ]]; then
  protip "Avoid using semver ranges starting with '>' in engines.node"
fi

# Output info about requested range and resolved node version
if [[ "$semver_range" == "" ]]; then
  status "Defaulting to latest stable node: $node_version"
else
  status "Requested node range: $semver_range"
  status "Resolved node version: $node_version"
fi

# Test if a correct node is already in cache
do_install_node=true

# Fetch the cached node version
if [[ -f "$NODE_INSTALL_TARGET_CACHE/node-version" ]]; then
  cached_node_version=$(cat "$NODE_INSTALL_TARGET_CACHE/node-version")
  status "The cached node version is $cached_node_version"
  status "The current node version is $node_version"

  # Test against desired node version
  if [[ "$cached_node_version" == "$node_version" ]]; then
    do_install_node=false
  fi
fi

if [[ "$do_install_node" = "true" ]]; then
  # Download node from Heroku's S3 mirror of nodejs.org/dist
  status "Downloading and installing node"
  # SECURITY FIX: fetch the runtime over https (was plain http), and make
  # curl fail on HTTP errors instead of piping an error page into tar.
  node_url="https://s3pository.heroku.com/node/v$node_version/node-v$node_version-linux-x64.tar.gz"
  curl --fail --silent "$node_url" -o - | tar xzf - -C $NODE_INSTALL_TARGET

  # Move node (and npm)
  rm -rf $NODE_INSTALL_TARGET/node # Ensure the dest folder does not exist
  mv $NODE_INSTALL_TARGET/node-v$node_version-linux-x64 $NODE_INSTALL_TARGET/node
  chmod +x $NODE_INSTALL_TARGET/node/bin/* # Make all the things executable

  # Cache the node executable for future use
  rm -rf $NODE_INSTALL_TARGET_CACHE/node
  status "Caching node executable for future builds"
  cp -r $NODE_INSTALL_TARGET/node $NODE_INSTALL_TARGET_CACHE/node
  echo "$node_version" > $NODE_INSTALL_TARGET_CACHE/node-version
else
  # Copy from cache
  status "Fetching node runtime from cache"
  rm -rf $NODE_INSTALL_TARGET/node
  cp -r $NODE_INSTALL_TARGET_CACHE/node $NODE_INSTALL_TARGET/node
fi

# Add to path
export PATH=$NODE_INSTALL_TARGET/node/bin:$PATH
echo "export PATH=$NODE_INSTALL_TARGET/node/bin:\$PATH" >> $BUILD_DIR/.profile.d/node.sh
#!/usr/bin/env bash

# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Common environment setup sourced by the build scripts: output paths,
# shared hack/lib helpers, and the canonical API group/version lists.

set -o errexit
set -o nounset
set -o pipefail

# Unset CDPATH so that path interpolation can work correctly
# https://github.com/kubernetes/kubernetes/issues/52255
unset CDPATH

# The root of the build/dist directory
KUBE_ROOT="$(cd "$(dirname "${BASH_SOURCE}")/../.." && pwd -P)"

KUBE_OUTPUT_SUBPATH="${KUBE_OUTPUT_SUBPATH:-_output/local}"
KUBE_OUTPUT="${KUBE_ROOT}/${KUBE_OUTPUT_SUBPATH}"
KUBE_OUTPUT_BINPATH="${KUBE_OUTPUT}/bin"

# This controls rsync compression. Set to a value > 0 to enable rsync
# compression for build container
KUBE_RSYNC_COMPRESS="${KUBE_RSYNC_COMPRESS:-0}"

# Set no_proxy for localhost if behind a proxy, otherwise,
# the connections to localhost in scripts will time out
export no_proxy=127.0.0.1,localhost

# This is a symlink to binaries for "this platform", e.g. build tools.
THIS_PLATFORM_BIN="${KUBE_ROOT}/_output/bin"

source "${KUBE_ROOT}/hack/lib/util.sh"
source "${KUBE_ROOT}/hack/lib/logging.sh"

kube::log::install_errexit

source "${KUBE_ROOT}/hack/lib/version.sh"
source "${KUBE_ROOT}/hack/lib/golang.sh"
source "${KUBE_ROOT}/hack/lib/etcd.sh"

KUBE_OUTPUT_HOSTBIN="${KUBE_OUTPUT_BINPATH}/$(kube::util::host_platform)"

# list of all available group versions. This should be used when generated code
# or when starting an API server that you want to have everything.
# most preferred version for a group should appear first
KUBE_AVAILABLE_GROUP_VERSIONS="${KUBE_AVAILABLE_GROUP_VERSIONS:-\
v1 \
admissionregistration.k8s.io/v1alpha1 \
admissionregistration.k8s.io/v1beta1 \
admission.k8s.io/v1beta1 \
apps/v1beta1 \
apps/v1beta2 \
apps/v1 \
authentication.k8s.io/v1 \
authentication.k8s.io/v1beta1 \
authorization.k8s.io/v1 \
authorization.k8s.io/v1beta1 \
autoscaling/v1 \
autoscaling/v2beta1 \
batch/v1 \
batch/v1beta1 \
batch/v2alpha1 \
certificates.k8s.io/v1beta1 \
coordination.k8s.io/v1beta1 \
extensions/v1beta1 \
events.k8s.io/v1beta1 \
imagepolicy.k8s.io/v1alpha1 \
networking.k8s.io/v1 \
policy/v1beta1 \
rbac.authorization.k8s.io/v1 \
rbac.authorization.k8s.io/v1beta1 \
rbac.authorization.k8s.io/v1alpha1 \
scheduling.k8s.io/v1alpha1 \
scheduling.k8s.io/v1beta1 \
settings.k8s.io/v1alpha1 \
storage.k8s.io/v1beta1 \
storage.k8s.io/v1 \
storage.k8s.io/v1alpha1 \
}"

# not all group versions are exposed by the server. This list contains those
# which are not available so we don't generate clients or swagger for them
KUBE_NONSERVER_GROUP_VERSIONS="
abac.authorization.kubernetes.io/v0 \
abac.authorization.kubernetes.io/v1beta1 \
componentconfig/v1alpha1 \
imagepolicy.k8s.io/v1alpha1\
admission.k8s.io/v1beta1\
"

# This emulates "readlink -f" which is not available on MacOS X.
# Test:
# T=/tmp/$$.$RANDOM
# mkdir $T
# touch $T/file
# mkdir $T/dir
# ln -s $T/file $T/linkfile
# ln -s $T/dir $T/linkdir
# function testone() {
#   X=$(readlink -f $1 2>&1)
#   Y=$(kube::readlinkdashf $1 2>&1)
#   if [ "$X" != "$Y" ]; then
#     echo readlinkdashf $1: expected "$X", got "$Y"
#   fi
# }
# testone /
# testone /tmp
# testone $T
# testone $T/file
# testone $T/dir
# testone $T/linkfile
# testone $T/linkdir
# testone $T/nonexistant
# testone $T/linkdir/file
# testone $T/linkdir/dir
# testone $T/linkdir/linkfile
# testone $T/linkdir/linkdir
# Portable stand-in for GNU "readlink -f": prints the physical path of $1.
# Note: for a symlink to a file it prints the link target verbatim (it does
# not recurse), matching the commented test expectations above.
function kube::readlinkdashf {
  # run in a subshell for simpler 'cd'
  (
    if [[ -d "$1" ]]; then # This also catch symlinks to dirs.
      cd "$1"
      pwd -P
    else
      cd "$(dirname "$1")"
      local f
      f=$(basename "$1")
      if [[ -L "$f" ]]; then
        readlink "$f"
      else
        echo "$(pwd -P)/${f}"
      fi
    fi
  )
}

# This emulates "realpath" which is not available on MacOS X
# Test:
# T=/tmp/$$.$RANDOM
# mkdir $T
# touch $T/file
# mkdir $T/dir
# ln -s $T/file $T/linkfile
# ln -s $T/dir $T/linkdir
# function testone() {
#   X=$(realpath $1 2>&1)
#   Y=$(kube::realpath $1 2>&1)
#   if [ "$X" != "$Y" ]; then
#     echo realpath $1: expected "$X", got "$Y"
#   fi
# }
# testone /
# testone /tmp
# testone $T
# testone $T/file
# testone $T/dir
# testone $T/linkfile
# testone $T/linkdir
# testone $T/nonexistant
# testone $T/linkdir/file
# testone $T/linkdir/dir
# testone $T/linkdir/linkfile
# testone $T/linkdir/linkdir
# Like kube::readlinkdashf, but errors out (status 1, message on stderr)
# when the path does not exist — mirroring `realpath` semantics.
kube::realpath() {
  if [[ ! -e "$1" ]]; then
    echo "$1: No such file or directory" >&2
    return 1
  fi
  kube::readlinkdashf "$1"
}
#!/bin/bash

# Copyright 2019 The Volcano Authors.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Fetches a pinned helm binary (if not cached) and renders the Volcano
# deployment yaml into ${RELEASE_FOLDER}/${YAML_FILENAME}.
# Requires BIN_DIR, RELEASE_DIR (and optionally HELM_VER, TAG) to be set by
# the caller — `set -o nounset` aborts otherwise.

set -o errexit
set -o nounset
set -o pipefail

VK_ROOT=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/..
export HELM_BIN_DIR=${VK_ROOT}/${BIN_DIR}
export RELEASE_FOLDER=${VK_ROOT}/${RELEASE_DIR}
export HELM_VER=${HELM_VER:-v2.13.0}
export VOLCANO_IMAGE_TAG=${TAG:-"latest"}
export YAML_FILENAME=volcano-${VOLCANO_IMAGE_TAG}.yaml

# Normalize $OSTYPE (e.g. "linux-gnu", "darwin18") to the helm release names.
LOCAL_OS=${OSTYPE}
case $LOCAL_OS in
  "linux"*)
    LOCAL_OS='linux'
    ;;
  "darwin"*)
    LOCAL_OS='darwin'
    ;;
  *)
    echo "This system's OS ${LOCAL_OS} isn't recognized/supported"
    exit 1
    ;;
esac

# Step1. install helm binary
# The marker file version.helm.${HELM_VER} records which version is cached.
if [[ ! -f "${HELM_BIN_DIR}/version.helm.${HELM_VER}" ]] ; then
  TD=$(mktemp -d)
  cd "${TD}" && \
    curl -Lo "${TD}/helm.tgz" "https://storage.googleapis.com/kubernetes-helm/helm-${HELM_VER}-${LOCAL_OS}-amd64.tar.gz" && \
    tar xfz helm.tgz && \
    mv ${LOCAL_OS}-amd64/helm "${HELM_BIN_DIR}/helm-${HELM_VER}" && \
    cp "${HELM_BIN_DIR}/helm-${HELM_VER}" "${HELM_BIN_DIR}/helm" && \
    chmod +x ${HELM_BIN_DIR}/helm
  rm -rf "${TD}" && \
    touch "${HELM_BIN_DIR}/version.helm.${HELM_VER}"
fi

# Step2. generate yaml in folder
if [[ ! -d ${RELEASE_FOLDER} ]];then
    # BUG FIX: was plain `mkdir`, which fails when parent directories of
    # ${RELEASE_FOLDER} do not exist yet; -p also makes this race-safe.
    mkdir -p "${RELEASE_FOLDER}"
fi

DEPLOYMENT_FILE=${RELEASE_FOLDER}/${YAML_FILENAME}
echo "Generating volcano yaml file into ${DEPLOYMENT_FILE}"
if [[ -f ${DEPLOYMENT_FILE} ]];then
    rm ${DEPLOYMENT_FILE}
fi

# The namespace manifest goes first; the rendered chart templates are appended.
cat ${VK_ROOT}/installer/namespace.yaml > ${DEPLOYMENT_FILE}

${HELM_BIN_DIR}/helm template ${VK_ROOT}/installer/helm/chart/volcano --namespace volcano-system \
      --name volcano --set basic.image_tag_version=${VOLCANO_IMAGE_TAG} \
      -x templates/admission.yaml \
      -x templates/batch_v1alpha1_job.yaml \
      -x templates/bus_v1alpha1_command.yaml \
      -x templates/controllers.yaml \
      -x templates/scheduler.yaml \
      -x templates/scheduling_v1alpha1_podgroup.yaml \
      -x templates/scheduling_v1alpha1_queue.yaml \
      -x templates/scheduling_v1alpha2_podgroup.yaml \
      -x templates/scheduling_v1alpha2_queue.yaml \
      --notes >> ${DEPLOYMENT_FILE}
/*
 * Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
 *
 * This file is part of Orfeo Toolbox
 *
 *     https://www.orfeo-toolbox.org/
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mvdHistogramController.h"

/*****************************************************************************/
/* INCLUDE SECTION                                                           */

//
// Qt includes (sorted by alphabetic order)
//// Must be included before system/custom includes.

//
// System includes (sorted by alphabetic order)

//
// ITK includes (sorted by alphabetic order)

//
// OTB includes (sorted by alphabetic order)

//
// Monteverdi includes (sorted by alphabetic order)
#include "mvdVectorImageModel.h"
#include "mvdHistogramWidget.h"

namespace mvd
{
/*
  TRANSLATOR mvd::HistogramController

  Necessary for lupdate to be aware of C++ namespaces.

  Context comment for translator.
*/

/*****************************************************************************/
/* CLASS IMPLEMENTATION SECTION                                              */

/*******************************************************************************/
// Controller mediating between a VectorImageModel's HistogramModel and the
// HistogramWidget that renders it (AbstractModelController framework).
HistogramController
::HistogramController( HistogramWidget* widget, QObject* p ) :
  AbstractModelController( widget, p )
{
}

/*******************************************************************************/
HistogramController
::~HistogramController()
{
}

/*******************************************************************************/
// No signal/slot wiring is needed on model attach (kept for the framework
// contract); the scaffold below documents where connections would go.
void
HistogramController
::Connect( AbstractModel * )
{
  // HistogramWidget* widget = GetWidget< HistogramWidget >();

  //
  // Connect GUI to controller.

  //
  // Connect controller to model.
}

/*******************************************************************************/
// Counterpart of Connect(): nothing to undo on model detach.
void
HistogramController
::Disconnect( AbstractModel * )
{
  // HistogramWidget* widget = GetWidget< HistogramWidget >();

  //
  // Disconnect controller to model.

  //
  // Disconnect GUI from controller.
}

/*******************************************************************************/
// Clears all curves/markers from the attached widget.
void
HistogramController
::ClearWidget()
{
  assert( GetWidget()==GetWidget< HistogramWidget >() );
  HistogramWidget* widget = GetWidget< HistogramWidget >();
  assert( widget!=NULL );

  widget->Clear();
}

/*******************************************************************************/
// Framework hook: a full reset repopulates every RGBW channel.
void
HistogramController
::virtual_ResetWidget( bool )
{
  ResetWidget( RGBW_CHANNEL_ALL );
}

/*******************************************************************************/
// Rebuilds the histogram curves and low/high intensity markers for the
// channel(s) covered by `channel`, reading data from the image model's
// HistogramModel. No-op while the histogram model is not yet valid.
void
HistogramController
::ResetWidget( RgbwChannel channel )
{
  assert( GetModel()==GetModel< VectorImageModel >() );
  VectorImageModel* imageModel = GetModel< VectorImageModel >();
  assert( imageModel!=NULL );

  HistogramModel* model = imageModel->GetHistogramModel();
  assert( model!=NULL );

  // Histogram may still be computing; skip until it is valid.
  if( !model->IsValid() )
    return;

  assert( GetWidget()==GetWidget< HistogramWidget >() );
  HistogramWidget* widget = GetWidget< HistogramWidget >();
  assert( widget!=NULL );

  // Translate the requested channel into a [begin, end) channel range.
  CountType begin = 0;
  CountType end = 0;

  if( !RgbwBounds( begin, end, channel ) )
    return;

  const VectorImageSettings & settings = imageModel->GetSettings();

  widget->SetGrayscaleActivated( settings.IsGrayscaleActivated() );

  assert( std::numeric_limits< double >::has_quiet_NaN );

  for( CountType i=begin; i<end; ++i )
    {
    RgbwChannel chan = static_cast< RgbwChannel >( i );

    // Band index currently mapped to this display channel.
    VectorImageSettings::ChannelVector::value_type band =
      settings.GetRgbwChannel( chan );

    size_t size = model->GetDataCount( band );

    double* x = new double[ size ];
    double* y = new double[ size ];

    // NaN sentinels are overwritten by GetData() with the actual bounds.
    double xMin = std::numeric_limits< double >::quiet_NaN();
    double yMin = std::numeric_limits< double >::quiet_NaN();
    double xMax = std::numeric_limits< double >::quiet_NaN();
    double yMax = std::numeric_limits< double >::quiet_NaN();

    model->GetData( band, x, y, xMin, xMax, yMin, yMax );

    widget->SetData( chan, x, y, size, xMin, yMin, xMax, yMax );
    widget->SetPrecision( HistogramModel::GetEpsilon() );
    widget->SetLowMarker( chan, settings.GetLowIntensity( chan ) );
    widget->SetHighMarker( chan, settings.GetHighIntensity( chan ) );

    // NOTE(review): x/y are deleted immediately after SetData(), which
    // implies HistogramWidget copies the arrays — confirm in its API.
    delete[] x;
    x = NULL;

    delete[] y;
    y = NULL;
    }

  widget->RefreshScale( true );

  widget->Replot();
}

/*****************************************************************************/
/* SLOTS                                                                     */
/*****************************************************************************/
// Redraw a single channel when its band mapping changes.
void
HistogramController
::OnRgbChannelIndexChanged( RgbwChannel channel, int )
{
  /*
  qDebug()
    << this
    << "::OnRgbChannelIndexChanged("
    << RGBW_CHANNEL_NAMES[ channel ] << ", " << band
    << ")";
  */

  ResetWidget( channel );
}

/*****************************************************************************/
// Redraw the white (grayscale) channel when its band mapping changes.
void
HistogramController
::OnGrayChannelIndexChanged( int )
{
  /*
  qDebug()
    << this
    << "::OnGrayChannelIndexChanged(" << band << ")";
  */

  ResetWidget( RGBW_CHANNEL_WHITE );
}

/*****************************************************************************/
// Toggle grayscale display mode and refresh the plot.
void
HistogramController
::OnGrayscaleActivated( bool activated )
{
  /*
  qDebug()
    << this
    << "::OnGrayscaleActivated(" << activated << ")";
  */

  assert( GetWidget()==GetWidget< HistogramWidget >() );
  HistogramWidget* widget = GetWidget< HistogramWidget >();
  assert( widget!=NULL );

  widget->SetGrayscaleActivated( activated );

  widget->RefreshScale( true );

  widget->Replot();
}

/*****************************************************************************/
// Move the low-intensity marker; `refresh` defers replotting when false
// (e.g. while the user is still dragging).
void
HistogramController
::OnLowIntensityChanged( RgbwChannel channel, double value, bool refresh )
{
  assert( GetWidget()==GetWidget< HistogramWidget >() );
  HistogramWidget* widget = GetWidget< HistogramWidget >();
  assert( widget!=NULL );

  widget->SetLowMarker( channel, value );

  if( refresh )
    widget->Replot();
}

/*****************************************************************************/
// Move the high-intensity marker; see OnLowIntensityChanged for `refresh`.
void
HistogramController
::OnHighIntensityChanged( RgbwChannel channel, double value, bool refresh )
{
  assert( GetWidget()==GetWidget< HistogramWidget >() );
  HistogramWidget* widget = GetWidget< HistogramWidget >();
  assert( widget!=NULL );

  widget->SetHighMarker( channel, value );

  if( refresh )
    widget->Replot();
}

/*****************************************************************************/
// Full refresh once the histogram model reports new data.
void
HistogramController
::OnHistogramRefreshed()
{
  ResetWidget( RGBW_CHANNEL_ALL );
}

/*****************************************************************************/

} // end namespace 'mvd'
<reponame>teal-tigers/grace-shopper import React from 'react' import PropTypes from 'prop-types' import {connect} from 'react-redux' import {Link} from 'react-router-dom' import {logout, clearCart} from '../store' import NavBar from 'react-bootstrap/Navbar' import Nav from 'react-bootstrap/Nav' import {FontAwesomeIcon} from '@fortawesome/react-fontawesome' import Badge from 'react-bootstrap/Badge' const Navbar = ({handleClick, isLoggedIn, fullName}) => ( <NavBar bg="light justify-content-between" expand="lg" sticky="top" style={{marginBottom: '2rem'}} > <NavBar.Brand as={Link} to="/" style={{font: 'Yatra One', color: '#D2691E'}} > MADE FOR WALKING </NavBar.Brand> <NavBar.Toggle aria-controls="responsive-navbar-nav" /> <NavBar.Collapse id="responsive-navbar-nav"> <Nav className="mr-auto"> <Nav.Link as={Link} to="/home"> Western Boots </Nav.Link> </Nav> <Nav> <React.Fragment> {isLoggedIn ? ( <React.Fragment> <Nav.Link as={Link} to="/account"> {fullName} </Nav.Link> <Nav.Link href="#" onClick={handleClick}> Logout </Nav.Link> </React.Fragment> ) : ( <React.Fragment> <Nav.Link as={Link} to="/login"> Login </Nav.Link> <Nav.Link as={Link} to="/signup"> Sign Up </Nav.Link> </React.Fragment> )} </React.Fragment> <Nav.Link as={Link} to="/cart"> <FontAwesomeIcon icon="shopping-cart" size="1x" /> {/* <Badge pill variant="light"> <p>0</p> </Badge> */} </Nav.Link> </Nav> </NavBar.Collapse> </NavBar> ) /** * CONTAINER */ const mapState = state => { return { isLoggedIn: !!state.user.id, fullName: state.user.fullName } } const mapDispatch = dispatch => { return { handleClick() { dispatch(logout()) //SSW: clearCart action will restore the local //redux cart state to empty after a user logs out dispatch(clearCart()) } } } export default connect(mapState, mapDispatch)(Navbar) /** * PROP TYPES */ Navbar.propTypes = { handleClick: PropTypes.func.isRequired, isLoggedIn: PropTypes.bool.isRequired }
#!/usr/bin/env bash ## i2pd模组 i2pd moudle install_i2pd(){ set +e if [[ ${dist} == debian ]]; then wget -q -O - https://repo.i2pd.xyz/.help/add_repo | sudo bash -s - apt-get update apt-get install minissdpd -y #curl -LO https://github.com/PurpleI2P/i2pd/releases/download/2.39.0/i2pd_2.39.0-1bullseye1_amd64.deb #dpkg -i i2pd_2.39.0-1bullseye1_amd64.deb apt-get install i2pd -y elif [[ ${dist} == ubuntu ]]; then add-apt-repository ppa:purplei2p/i2pd -y apt-get update apt-get install i2pd -y else echo "fail" fi cat > '/etc/i2pd/i2pd.conf' << EOF ## Configuration file for a typical i2pd user ## See https://i2pd.readthedocs.io/en/latest/user-guide/configuration/ ## for more options you can use in this file. ## Lines that begin with "## " try to explain what's going on. Lines ## that begin with just "#" are disabled commands: you can enable them ## by removing the "#" symbol. ## Tunnels config file ## Default: ~/.i2pd/tunnels.conf or /var/lib/i2pd/tunnels.conf # tunconf = /var/lib/i2pd/tunnels.conf ## Tunnels config files path ## Use that path to store separated tunnels in different config files. ## Default: ~/.i2pd/tunnels.d or /var/lib/i2pd/tunnels.d # tunnelsdir = /var/lib/i2pd/tunnels.d ## Where to write pidfile (default: i2pd.pid, not used in Windows) # pidfile = /run/i2pd.pid ## Logging configuration section ## By default logs go to stdout with level 'info' and higher ## ## Logs destination (valid values: stdout, file, syslog) ## * stdout - print log entries to stdout ## * file - log entries to a file ## * syslog - use syslog, see man 3 syslog log = syslog ## Path to logfile (default - autodetect) # logfile = /var/log/i2pd/i2pd.log ## Log messages above this level (debug, info, *warn, error, none) ## If you set it to none, logging will be disabled loglevel = error ## Write full CLF-formatted date and time to log (default: write only time) # logclftime = true ## Daemon mode. 
Router will go to background after start # daemon = true ## Specify a family, router belongs to (default - none) # family = ## External IP address to listen for connections ## By default i2pd sets IP automatically # host = 1.2.3.4 ## Port to listen for connections ## By default i2pd picks random port. You MUST pick a random number too, ## don't just uncomment this # port = 9000 ## Enable communication through ipv4 ipv4 = true ## Enable communication through ipv6 ipv6 = true ## Enable NTCP transport (default = true) # ntcp = true ## If you run i2pd behind a proxy server, you can only use NTCP transport with ntcpproxy option ## Should be http://address:port or socks://address:port # ntcpproxy = http://127.0.0.1:8118 ## Enable SSU transport (default = true) ssu = true bandwidth = X share = 100 notransit = false floodfill = false [http] ## Web Console settings ## Uncomment and set to 'false' to disable Web Console # enabled = true ## Address and port service will listen on address = 0.0.0.0 port = 7070 strictheaders = false ## Path to web console, default "/" webroot = /${password1}_i2p/ ## Uncomment following lines to enable Web Console authentication # auth = true # user = i2pd # pass = changeme [httpproxy] ## Uncomment and set to 'false' to disable HTTP Proxy # enabled = true ## Address and port service will listen on address = 127.0.0.1 port = 4444 ## Enable address helper for adding .i2p domains with "jump URLs" (default: true) # addresshelper = true ## Address of a proxy server inside I2P, which is used to visit regular Internet # outproxy = http://false.i2p ## httpproxy section also accepts I2CP parameters, like "inbound.length" etc. 
#inbound.length = 2 #inbound.quantity = 16 #outbound.length = 2 #outbound.quantity = 16 [socksproxy] ## Uncomment and set to 'false' to disable SOCKS Proxy # enabled = true ## Address and port service will listen on address = 127.0.0.1 port = 4447 ## Optional keys file for proxy local destination # keys = socks-proxy-keys.dat ## Socks outproxy. Example below is set to use Tor for all connections except i2p ## Uncomment and set to 'true' to enable using of SOCKS outproxy # outproxy.enabled = true ## Address and port of outproxy # outproxy = 127.0.0.1 # outproxyport = 9050 ## socksproxy section also accepts I2CP parameters, like "inbound.length" etc. # inbound.length = 2 # inbound.quantity = 16 # outbound.length = 2 # outbound.quantity = 16 [sam] ## Uncomment and set to 'true' to enable SAM Bridge # enabled = true # singlethread = false ## Address and port service will listen on # address = 127.0.0.1 # port = 7656 [bob] ## Uncomment and set to 'true' to enable BOB command channel # enabled = true ## Address and port service will listen on # address = 127.0.0.1 # port = 2827 [i2cp] ## Uncomment and set to 'true' to enable I2CP protocol enabled = true # singlethread = false ## Address and port service will listen on # address = 127.0.0.1 # port = 7654 [i2pcontrol] ## Uncomment and set to 'true' to enable I2PControl protocol # enabled = on ## Address and port service will listen on # address = 127.0.0.1 # port = 7650 ## Authentication password. "itoopie" by default # password = ${password1} [precomputation] ## Enable or disable elgamal precomputation table ## By default, enabled on i386 hosts # elgamal = true [upnp] ## Enable or disable UPnP: automatic port forwarding (enabled by default in WINDOWS, ANDROID) enabled = true ## Name i2pd appears in UPnP forwardings list (default = I2Pd) # name = I2Pd [reseed] ## Options for bootstrapping into I2P network, aka reseeding ## Enable or disable reseed data verification. 
verify = true ## URLs to request reseed data from, separated by comma ## Default: "mainline" I2P Network reseeds # urls = https://reseed.i2p-projekt.de/,https://i2p.mooo.com/netDb/,https://netdb.i2p2.no/ ## Path to local reseed data file (.su3) for manual reseeding # file = /path/to/i2pseeds.su3 ## or HTTPS URL to reseed from # file = https://legit-website.com/i2pseeds.su3 ## Path to local ZIP file or HTTPS URL to reseed from # zipfile = /path/to/netDb.zip ## If you run i2pd behind a proxy server, set proxy server for reseeding here ## Should be http://address:port or socks://address:port # proxy = http://127.0.0.1:8118 ## Minimum number of known routers, below which i2pd triggers reseeding. 25 by default # threshold = 25 [addressbook] ## AddressBook subscription URL for initial setup ## Default: inr.i2p at "mainline" I2P Network # defaulturl = http://xk6ypey2az23vtdkitjxvanlshztmjs2ekd6sp77m4obszf6ocfq.b32.i2p/hosts.txt ## Optional subscriptions URLs, separated by comma # subscriptions = http://xk6ypey2az23vtdkitjxvanlshztmjs2ekd6sp77m4obszf6ocfq.b32.i2p/alive-hosts.txt,http://kqypgjpjwrphnzebod5ev3ts2vtii6e5tntrg4rnfijqc7rypldq.b32.i2p/cgi-bin/newhosts.txt,http://gh6655arkncnbrzq5tmq4xpn36734d4tdza6flbw5xppye2dt6ga.b32.i2p/hosts.txt,http://udhdrtrcetjm5sxzskjyr5ztpeszydbh4dpl3pl4utgqqw2v4jna.b32.i2p/hosts.txt,http://rus.i2p/hosts.txt [limits] ## Maximum active transit sessions (default:2500) transittunnels = 65535 ## Limit number of open file descriptors (0 - use system limit) # openfiles = 0 ## Maximum size of corefile in Kb (0 - use system limit) # coresize = 0 ## Threshold to start probabalistic backoff with ntcp sessions (0 - use system limit) # ntcpsoft = 0 ## Maximum number of ntcp sessions (0 - use system limit) # ntcphard = 0 [trust] ## Enable explicit trust options. false by default # enabled = true ## Make direct I2P connections only to routers in specified Family. # family = MyFamily ## Make direct I2P connections only to routers specified here. 
Comma separated list of base64 identities. # routers = ## Should we hide our router from other routers? false by default # hidden = true [exploratory] ## Exploratory tunnels settings with default values # inbound.length = 3 # inbound.quantity = 16 # outbound.length = 3 # outbound.quantity = 16 [ntcp2] enabled = true published = true [persist] ## Save peer profiles on disk (default: true) profiles = true [cpuext] ## Use CPU AES-NI instructions set when work with cryptography when available (default: true) # aesni = true ## Use CPU AVX instructions set when work with cryptography when available (default: true) # avx = true ## Force usage of CPU instructions set, even if they not found ## DO NOT TOUCH that option if you really don't know what are you doing! # force = false EOF cat > '/etc/i2pd/tunnels.conf.d/mywebsite.conf' << EOF [my-website] type = http host = 127.0.0.1 port = 80 inbound.length = 1 outbound.length = 1 #inbound.quantity = 16 #outbound.quantity = 16 keys = my-website.dat EOF cd cat > '/lib/systemd/system/i2pd.service' << EOF [Unit] Description=I2P Router written in C++ Documentation=man:i2pd(1) https://i2pd.readthedocs.io/en/latest/ After=network.target [Service] User=i2pd Group=i2pd #RuntimeDirectory=i2pd #RuntimeDirectoryMode=0700 #LogsDirectory=i2pd #LogsDirectoryMode=0700 Type=forking ExecStart=/usr/sbin/i2pd --conf=/etc/i2pd/i2pd.conf --tunconf=/etc/i2pd/tunnels.conf --tunnelsdir=/etc/i2pd/tunnels.conf.d --pidfile=/run/i2pd/i2pd.pid --logfile=/var/log/i2pd/i2pd.log --daemon --service ExecReload=/bin/sh -c "kill -HUP $MAINPID" PIDFile=/run/i2pd/i2pd.pid KillSignal=SIGQUIT # If you have the patience waiting 10 min on restarting/stopping it, uncomment this. # i2pd stops accepting new tunnels and waits ~10 min while old ones do not die. 
#KillSignal=SIGINT #TimeoutStopSec=10m LimitNOFILE=65536 Restart=always RestartSec=3s [Install] WantedBy=multi-user.target EOF systemctl daemon-reload systemctl enable i2pd systemctl restart i2pd #cd /etc/i2pd/ #depend="libboost-chrono-dev \ # libboost-date-time-dev \ # libboost-filesystem-dev \ # libboost-program-options-dev \ # libboost-system-dev \ # libboost-thread-dev \ # libssl-dev \ # zlib1g-dev" #apt-get install -y $depend #git clone --recursive https://github.com/purplei2p/i2pd-tools #cd /etc/i2pd/i2pd-tools #make cd }
<gh_stars>0 var path = require('path'); var webpack = require('webpack'); var ExtractTextPlugin = require('extract-text-webpack-plugin'); var extractCSS = new ExtractTextPlugin('css/[name].min.css'); var autoprefixer = require('autoprefixer'); //tu dong fix css voi cac trinh duyet var _ = require('lodash'); var HtmlWebpackPlugin = require('html-webpack-plugin'); var root = path.resolve(__dirname); module.exports = { entry: { app: ['./src/main.css', './src/main.js'], "publicJS/ex2": "./src/publicJS/ex2.js", "publicJS/ex1": "./src/publicJS/ex1.js", "app-home": "./src/app-home" }, output: { path: path.resolve(__dirname, 'dist'), filename: '[name].min.js' // publicPath: '/dist/' }, resolve: { extension: ['', '.js', '.css'] }, devServer: { progress: true, port: 82, //default: 8080 inline: true, contentBase: './dist', }, module: { loaders: [ { test: /\.js$/, exclude: /(node_module| bower_components)/, loader: 'babel', include: root }, { test: /\.css$/, // loader: extractCSS.extract(["css"]) loaders: ['style', 'css', 'resolve-url'] }, { test: /\.(png|jpg)$/, loader: 'url-loader', query: { limit: 10000, name: '[name]-[hash:7].[ext]' } }, { test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: "file-loader?name=[path]-[name].[ext]" } ] }, plugins: [ extractCSS, /*Co the tao nhieu common chunk plugin*/ new webpack.optimize.CommonsChunkPlugin({ name: "publicJS/ex-common", chunks: ["publicJS/ex2", "publicJS/ex1"] }), /*Tao 1 banner chung*/ new webpack.BannerPlugin("Author: ManhNV11 -MasterJs"), new HtmlWebpackPlugin({ template: path.resolve(root, 'src/index.html'), hash: true, //them thẻ <script> với đường link đính kèm 1 mã hash cache: true, //cache file nếu có ko co thay đổi thì ko bundle lại showErrors: false, //neu co loi sẽ ghi vào file html minify: false, //false: ko minify html ngước lại tru: minify html filename: 'index.html', favicon: 'src/favicon.ico', //them file favicon vào trang html /*nạp các nhánh javascript bundle vào file html*/ chunks: ['app', 
'publicJS/ex-common', 'publicJS/ex2', 'publicJS/ex1', 'app-home'], chunksSortMode: function (a, b) { return (a.names[0] > b.names[0]) ? 1 : -1; }, //sắp xếp lại các file script chèn vào theo đúng thứ tự inject: 'body' //có 2 gia trị là body và head (chèn mã script vào nơi tương ứng) }) ] };
#!/bin/sh
# Build a multi-resolution Windows icon: ImageMagick `convert` packs the
# 64/32/16-pixel PNG renderings of the logo into a single favicon.ico.
convert logo64.png logo32.png logo16.png favicon.ico
#!/usr/bin/perl
use strict;
use warnings;

# Caesar-encrypt $text by rotating each ASCII letter $shift positions,
# preserving case and leaving non-letters untouched. Returns the ciphertext.
sub caesar_encrypt {
    my ($text, $shift) = @_;
    my $encrypted = '';
    foreach my $char (split //, $text) {
        my $ascii = ord($char);
        if ($ascii >= 97 && $ascii <= 122) {          # lowercase a-z
            $encrypted .= chr(($ascii - 97 + $shift) % 26 + 97);
        } elsif ($ascii >= 65 && $ascii <= 90) {      # uppercase A-Z
            $encrypted .= chr(($ascii - 65 + $shift) % 26 + 65);
        } else {                                      # not a letter
            $encrypted .= $char;
        }
    }
    return $encrypted;
}

# Same observable behavior as before: encrypt the demo string with shift 3.
print("Encrypted message: " . caesar_encrypt("Hello, World!", 3) . "\n");
"""Development settings - unsuitable for production

See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
"""

from __future__ import unicode_literals

# Green ANSI banner so it is obvious in the console which settings loaded.
print('\033[00;32mLoading development settings\033[0;00m')

DEBUG = True

# Pull in the base settings; everything below overrides them.
# NOTE(review): this wildcard import runs AFTER `DEBUG = True`, so a DEBUG
# defined inside .settings would silently override the value above — confirm
# that is intended.
from .settings import *

# Log all sql queries
MIDDLEWARE_CLASSES.append('collective_blog.middleware.TerminalLoggingMiddleware')

# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
    # SQLite file in the project root for local development.
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    },
    # 'default': {
    #     'ENGINE': 'django.db.backends.postgresql',
    #     'NAME': 'blog',
    #     'USER': 'zelta',
    #     'PASSWORD': '',
    #     'HOST': '127.0.0.1',
    # }
}

# Yellow warning: recaptcha validation is bypassed in this mode.
print('\033[01;33mWarning: recaptcha is running in the debug mode!\033[0;00m')
os.environ['RECAPTCHA_TESTING'] = 'True'
package com.ramusthastudio.mymultilanguageapp;

import android.content.Intent;
import android.os.Bundle;
import android.provider.Settings;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.TextView;

/**
 * Demo activity for localized string resources: a formatted string with
 * positional arguments, a plurals resource, and an xliff-style resource.
 * The options menu opens the system locale settings so the user can switch
 * languages and observe the translations.
 */
public class MainActivity extends AppCompatActivity {
  private TextView helloTv;
  private TextView pluralTv;
  private TextView xliffTv;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    helloTv = findViewById(R.id.tv_hello);
    pluralTv = findViewById(R.id.tv_plural);
    xliffTv = findViewById(R.id.tv_xliff);

    // Formatted resource with three positional arguments.
    // NOTE(review): "<NAME>" placeholders look like anonymized sample data.
    int pokeCount = 3;
    String hello = String.format(getResources().getString(R.string.hello_world), "<NAME>", pokeCount, "<NAME>");
    helloTv.setText(hello);

    // Quantity-aware string: the resource framework picks the right plural
    // form for the current locale.
    int songCount = 5;
    String pluralText = getResources().getQuantityString(R.plurals.numberOfSongsAvailable, songCount, songCount);
    pluralTv.setText(pluralText);

    xliffTv.setText(getResources().getString(R.string.app_homeurl));
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.main_menu, menu);
    return super.onCreateOptionsMenu(menu);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    // Open the system locale settings so the user can change the language.
    if (item.getItemId() == R.id.action_change_settings) {
      Intent mIntent = new Intent(Settings.ACTION_LOCALE_SETTINGS);
      startActivity(mIntent);
    }
    return super.onOptionsItemSelected(item);
  }
}
#!/usr/bin/env bash
# This is for the arguments
# -v is useful to run the same test multiple time without changing the shell script name.
# Interactive is there as a reference for future implementation. Thanks to http://linuxcommand.org/ for the tips.

# FIX: usage() was referenced by the -h branch and the unknown-flag fallthrough
# but never defined, so those paths died with "usage: command not found"
# instead of printing help.
usage() {
    echo "USAGE: $(basename "$0") [-v|--version <version>] [-p|--play] [-h|--help]"
}

play=0
version=
while [ "$1" != "" ]; do
    case $1 in
        -v | --version )        shift
                                version=$1
                                ;;
        -p | --play )           play=2
                                ;;
        -h | --help )           usage
                                exit
                                ;;
        * )                     usage
                                exit 1
    esac
    shift
done

# Run name: script name plus optional version suffix (selects result dirs).
me=`basename "$0"`$version
cd ..

if [ $play -gt 1 ]
then
    # Replay a trained policy instead of training.
    echo Now playing
    python3 run.py --alg=ppo2 --env=ServobulletInvertedPendulum-play-v0 --network=mlp2x32 --num_timesteps=5e5 \
        --load_path results/${me}/save/save \
        ent_coef=10 \
        --play
else
    # Name of the current file + version
    result_dir=results/${me}
    python3 run.py --alg=ppo2 --env=ServobulletInvertedPendulum-v0 --network=mlp2x32 --num_timesteps=6e4 \
        --save_interval=1 --num_env=1 \
        --load_path results/launch_cartpole_06.sh/save/save \
        --save_path results/${me}/save/save \
        --nsteps=2048 \
        --nminibatches=32 \
        --lam=0.95 \
        --gamma=0.99 \
        --noptepochs=10 \
        --log_interval=1 \
        --lr=3e-4 \
        --progress_dir=results/${me}
    # Render training curves alongside the run artifacts.
    python3 servorobots/visualize/plot_results.py --dirs results/${me} --task_name ${me} \
        --no-show --save_dir results/${me}
fi
package com.foxconn.iot.controller;

import javax.validation.Valid;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.web.PageableDefault;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import com.foxconn.iot.dto.DeviceAddDto;
import com.foxconn.iot.dto.DeviceCompanyDto;
import com.foxconn.iot.dto.DeviceDto;
import com.foxconn.iot.service.DeviceService;
import com.foxconn.iot.support.CommonResponse;

/**
 * Admin REST endpoints for device management under /api/admin/device.
 * This controller is a thin delegation layer: every handler forwards directly
 * to {@link DeviceService}; responses are wrapped by {@code @CommonResponse}.
 */
@RestController
@RequestMapping(value = "/api/admin/device")
public class AdminDeviceController {

	@Autowired
	private DeviceService deviceService;

	/** Create a new device from the validated request body. */
	@CommonResponse
	@PostMapping(value = "/")
	public void create(@Valid @RequestBody DeviceAddDto device, BindingResult result) {
		deviceService.create(device);
	}

	/** Fetch a single device by numeric id. */
	@CommonResponse
	@GetMapping(value = "/{id:\\d+}")
	public DeviceDto query(@PathVariable(value = "id") long id) {
		return deviceService.findById(id);
	}

	/** Page through devices of a given device type. */
	@CommonResponse
	@GetMapping(value = "/by/type/{id:\\d+}")
	public Page<DeviceDto> queryByModel(@PathVariable(value = "id") long deviceType, @PageableDefault Pageable pageable) {
		return deviceService.queryByDeviceType(deviceType, pageable);
	}

	/** Page through devices with a given firmware/device version. */
	@CommonResponse
	@GetMapping(value = "/by/version/{version:\\d+}")
	public Page<DeviceDto> queryByVersion(@PathVariable("version") long versionId, @PageableDefault Pageable pageable) {
		return deviceService.queryByDeviceVersion(versionId, pageable);
	}

	/** Search devices by model name or serial number. */
	@CommonResponse
	@GetMapping(value = "/search/{search}")
	public Page<DeviceDto> search(@PathVariable(value = "search") String search, @PageableDefault Pageable pageable) {
		return deviceService.queryByModelOrSn(search, pageable);
	}

	/**
	 * Assign a company/department to a device.
	 *
	 * @param dc device/company pairing payload
	 */
	@CommonResponse
	@PutMapping(value = "/set/company/")
	public void setCompany(@Valid @RequestBody DeviceCompanyDto dc, BindingResult result) {
		deviceService.updateCompany(dc);
	}

	/** Assign a device to a group within a company. */
	@CommonResponse
	@PutMapping(value = "/set/group/{id:\\d+}/{company:\\d+}/{group:\\d+}")
	public void setGroup(@PathVariable(value = "id") long id, @PathVariable(value = "company") long companyId,
			@PathVariable(value = "group") long groupId) {
		deviceService.updateGroup(id, companyId, groupId);
	}

	/** Enable/disable a device; the path constrains status to 0 or 1. */
	@CommonResponse
	@PutMapping(value = "/disable/{id:\\d+}/{status:^[01]$}")
	public void disable(@PathVariable(value = "id") long id, @PathVariable(value = "status") int status) {
		deviceService.updateStatusById(status, id);
	}

	/** Delete a device by id (PUT rather than DELETE, mirroring the service API). */
	@CommonResponse
	@PutMapping(value = "/delete/{id:\\d+}")
	public void delete(@PathVariable(value = "id") long id) {
		deviceService.deleteById(id);
	}

	/** Bind a device to an application. */
	@CommonResponse
	@PutMapping(value = "/set/app/{id:\\d+}/{app:\\d+}")
	public void setApplication(@PathVariable(value = "id") long id, @PathVariable(value = "app") long appid) {
		deviceService.setApplication(id, appid);
	}

	/** Page through devices bound to a given application. */
	@CommonResponse
	@GetMapping(value = "/by/app/{id:\\d+}")
	public Page<DeviceDto> queryByApplication(@PathVariable("id") long appid, @PageableDefault Pageable pageable) {
		return deviceService.queryByApplication(appid, pageable);
	}
}
/**
 * Count occurrences of each space-separated word in a string.
 *
 * @param {string} str - Input text; tokens are split on single spaces.
 * @returns {Object<string, number>} Map from word to its occurrence count.
 */
function wordCount(str) {
  let wordCounts = {};
  for (let word of str.split(' ')) {
    // FIX: calling wordCounts.hasOwnProperty(word) directly threw a TypeError
    // once the input contained the literal word "hasOwnProperty" (the counter
    // value 1 shadowed the inherited method). Invoke the prototype method
    // explicitly instead.
    if (Object.prototype.hasOwnProperty.call(wordCounts, word)) {
      wordCounts[word]++;
    } else {
      wordCounts[word] = 1;
    }
  }
  return wordCounts;
}
#!/usr/bin/env bash # Copyright (c) 2016 The Vendetta Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. export LC_ALL=C set -e INPUTFILE="Xcode_7.3.1.dmg" HFSFILENAME="5.hfs" SDKDIR="Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk" 7z x "${INPUTFILE}" "${HFSFILENAME}" SDKNAME="$(basename "${SDKDIR}")" SDKDIRINODE=$(ifind -n "${SDKDIR}" "${HFSFILENAME}") fls "${HFSFILENAME}" -rpF ${SDKDIRINODE} | while read type inode filename; do inode="${inode::-1}" if [ "${filename:0:14}" = "usr/share/man/" ]; then continue fi filename="${SDKNAME}/$filename" echo "Extracting $filename ..." mkdir -p "$(dirname "$filename")" if [ "$type" = "l/l" ]; then ln -s "$(icat "${HFSFILENAME}" $inode)" "$filename" else icat "${HFSFILENAME}" $inode >"$filename" fi done echo "Building ${SDKNAME}.tar.gz ..." MTIME="$(istat "${HFSFILENAME}" "${SDKDIRINODE}" | perl -nle 'm/Content Modified:\s+(.*?)\s\(/ && print $1')" find "${SDKNAME}" | sort | tar --no-recursion --mtime="${MTIME}" --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > "${SDKNAME}.tar.gz" echo 'All done!'
// Teaching example: this program is INTENTIONALLY broken. It demonstrates
// mutating a Swing component after it has been realized (made visible) from
// a thread other than the Event Dispatch Thread. Do not "fix" it — its
// purpose is to show the anti-pattern.

class MyFrame extends JFrame {
	public MyFrame() {
		setSize(640, 480);
		setTitle("BrokenSwing");
	}
}

public class BrokenSwing {
	private static void doStuff(MyFrame frame) {
		// BAD: Direct call to a Swing component after it has been realized
		frame.setTitle("Title");
	}

	public static void main(String[] args) {
		MyFrame frame = new MyFrame();
		frame.setVisible(true);
		// The frame is now realized; the call below touches it off the EDT.
		doStuff(frame);
	}
}
package net.jworkflow.sample04;

import net.jworkflow.sample04.steps.*;
import net.jworkflow.kernel.interfaces.*;

/**
 * Sample workflow demonstrating the foreach construct: iterates once over a
 * collection taken from the workflow data (data.value1) and once over an
 * inline array, running DoSomething for each item, with Hello/Goodbye steps
 * in between.
 */
public class ForeachWorkflow implements Workflow<MyData> {

    /** Unique identifier the runtime uses to register this workflow. */
    @Override
    public String getId() {
        return "foreach-sample";
    }

    @Override
    public Class getDataType() {
        return MyData.class;
    }

    @Override
    public int getVersion() {
        return 1;
    }

    /** Declares the step graph; executed by the workflow engine, not directly. */
    @Override
    public void build(WorkflowBuilder<MyData> builder) {
        builder
            .startsWith(Hello.class)
            // First loop: items come from the workflow's own data.
            .foreach(data -> data.value1)
                .Do(each -> each
                    .startsWith(DoSomething.class))
            .then(Hello.class)
            // Second loop: items are a fixed inline collection.
            .foreach(data -> new String[] { "item 1", "item 2", "item 3" })
                .Do(each -> each
                    .startsWith(DoSomething.class))
            .then(Goodbye.class);
    }
}
def sort_strings(strings):
    """Return a new list containing *strings* in ascending order.

    The input sequence is left unmodified.
    """
    ordered = list(strings)
    ordered.sort()
    return ordered


if __name__ == '__main__':
    languages = ['Python', 'Java', 'C++', 'C#']
    print(sort_strings(languages))
import PyPDF2

# Open the PDF in binary mode. FIX: the original used a bare open()/close()
# pair, which leaked the file handle if PdfFileReader or text extraction
# raised; the context manager guarantees the file is closed.
with open('document.pdf', 'rb') as file:
    reader = PyPDF2.PdfFileReader(file)

    # Print the extracted text of every page, in order.
    for page in range(reader.numPages):
        print(reader.getPage(page).extractText())
/**
 * Minimal saga: logs a greeting once when the saga middleware runs it.
 * Yields no effects.
 */
export function* helloSaga() {
  const greeting = 'Hello Saga!';
  console.log(greeting);
}
#!/bin/sh
# Provisioning script: installs node/elasticdump, Python scientific stack,
# Postgres, Elasticsearch plugins, then bootstraps the "dossiers" index and
# the application database.

# Install nodejs, npm, and elasticdump
sudo apt-get -y update
sudo apt-get -y install nodejs
sudo apt-get -y install npm
sudo npm install elasticdump -g

# create a symlink for nodejs
sudo ln -s /usr/bin/nodejs /usr/bin/node

# Install pip, virtualenv, setup python environment
sudo apt-get -y install python-pip
#sudo pip install virtualenv
#virtualenv env
#source env/bin/activate

# Install sci-kit learn dependencies
sudo apt-get -y install build-essential python-dev python-setuptools \
     python-numpy python-scipy \
     libatlas-dev libatlas3gf-base

# Install sci-kit learn
sudo pip install scikit-learn

# Install postgresql and psycopg2
sudo apt-get -y install postgresql postgresql-contrib
sudo apt-get -y install python-psycopg2
sudo apt-get -y install libpq-dev

# Install unoconv
sudo apt-get install -y unoconv

# Install packages for SSL
sudo apt-get install -y libssl-dev libffi-dev

# Install psycopg dependencies
# FIX: this line previously ran plain `apt-get install libpq-dev` — without
# sudo it fails for non-root users, and without -y it blocks on a prompt in
# unattended provisioning. (It duplicates the install above; apt is
# idempotent, so keeping it is harmless.)
sudo apt-get -y install libpq-dev

# Install Python dependencies
sudo pip install -r requirements.txt

# Install NLTK dependencies
python nltk_deps.py

# Initialize unoconv
unoconv -l &

# Run elasticsearch as a service set up script
sudo bash ElasticSearch.sh 1.5.2

# stop elasticsearch
sudo service elasticsearch stop

# install the elasticsearch mapper attachment plugin
sudo /usr/share/elasticsearch/bin/plugin --install elasticsearch/elasticsearch-mapper-attachments/2.5.0

# install the elasticsearch carrot2 plugin
sudo /usr/share/elasticsearch/bin/plugin --install org.carrot2/elasticsearch-carrot2/1.9.0

# Reboot elasticsearch as a service
sudo service elasticsearch start

# Sleep for 10 seconds while Elasticsearch boots up
sleep 10

# delete any index call dossiers then recreate it (wipe it)
curl -XDELETE "http://localhost:9200/dossiers/" ; curl -XPUT "http://localhost:9200/dossiers/"

# check the index size (should be 0 documents)
curl 'localhost:9200/_cat/indices?v'

# use elasticdump to build our index. First, read in the mapping, then read in the data
sudo elasticdump \
    --input=dossiers_mapping.json \
    --output=http://localhost:9200/dossiers \
    --type=mapping

# print the mapping and make sure it looks like a large JSON object
curl -XGET 'http://localhost:9200/dossiers/_mapping/attachment?pretty'

# finally, load some dummy data
sudo elasticdump \
    --bulk=true \
    --input=dossiers.json \
    --output=http://localhost:9200/dossiers \
    --type=data

bash db_setup.sh
python createdb.py
python run.py

###### uncomment the below if you do not want to initialize with any sample data! ####
# curl -XDELETE "http://localhost:9200/dossiers/" ; curl -XPUT "http://localhost:9200/dossiers/"
#sudo elasticdump \
#    --bulk=true \
#    --input=dossiers_mapping.json \
#    --output=http://localhost:9200/dossiers \
#    --type=mapping \
#    --debug=true
/*
 * Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
 *
 * This file is part of Orfeo Toolbox
 *
 * https://www.orfeo-toolbox.org/
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef otbPersistentFilterStreamingDecorator_hxx
#define otbPersistentFilterStreamingDecorator_hxx

#include "otbPersistentFilterStreamingDecorator.h"

namespace otb
{
/**
 * Constructor: instantiates the decorated persistent filter and the streamer
 * that will drive it piece by piece.
 */
template <class TFilter>
PersistentFilterStreamingDecorator<TFilter>
::PersistentFilterStreamingDecorator()
{
  m_Filter = FilterType::New();
  m_Streamer = StreamerType::New();
}

// Runs one full streamed pass: Reset() the persistent filter's accumulators,
// stream its output through the streamer, then Synthetize() the final result.
template <class TFilter>
void
PersistentFilterStreamingDecorator<TFilter>
::GenerateData(void)
{
  // Reset the filter before the generation.
  this->GetFilter()->Reset();

  /*
  for (unsigned int idx = 0; idx < this->GetFilter()->GetNumberOfOutputs(); ++idx)
    {
    this->GetStreamer()->SetNthInput(idx, this->GetFilter()->GetOutput(idx));
    }
  */
  // Only the first output is streamed (see the commented-out multi-output
  // variant above).
  this->GetStreamer()->SetInput(this->GetFilter()->GetOutput());
  this->GetStreamer()->Update();

  // Synthetize data after the streaming of the whole image.
  this->GetFilter()->Synthetize();
}

// Public entry point; delegates directly to GenerateData().
template <class TFilter>
void
PersistentFilterStreamingDecorator<TFilter>
::Update(void)
{
  this->GenerateData();
}

/**
 * PrintSelf Method
 */
template <class TFilter>
void
PersistentFilterStreamingDecorator<TFilter>
::PrintSelf(std::ostream& os, itk::Indent indent) const
{
  Superclass::PrintSelf(os, indent);
}

} // End namespace otb

#endif
package com.createchance.imageeditor;

import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;

import com.createchance.imageeditor.gles.EglCore;
import com.createchance.imageeditor.gles.WindowSurface;
import com.createchance.imageeditor.utils.Logger;
import com.createchance.imageeditor.utils.UiThreadUtil;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.IntBuffer;
import java.util.concurrent.Semaphore;

/**
 * Render target to save image.
 *
 * Renders off-screen into FBO-backed textures, then — when a frame becomes
 * available on the save texture's SurfaceTexture — reads the requested pixel
 * region back, flips it vertically, and compresses it to the output file.
 *
 * @author createchance
 * @date 2018/12/27
 */
public class ImageSaver implements IRenderTarget, SurfaceTexture.OnFrameAvailableListener {

    private static final String TAG = "ImageSaver";

    // Two framebuffers: [0] for ping-pong filtering, [1] as the "default"
    // target that the save texture is attached to.
    private int[] mOffScreenFrameBuffer = new int[2];
    // Ping-pong texture pair used as filter input/output.
    private int[] mOffScreenTextureIds = new int[2];
    // Texture backing the SurfaceTexture whose frames get saved.
    private int[] mSaveTextureId = new int[1];
    private int mInputTextureIndex = 0, mOutputTextureIndex = 1;

    private WindowSurface mWindowSurface;

    private int mSurfaceWidth, mSurfaceHeight;

    // Region of the surface (origin + size) that is read back and saved.
    private int mSaveX, mSaveY, mSaveWidth, mSaveHeight;

    private int mSaveFormat = IEManager.IMG_FORMAT_JPEG, mSaveQuality = 100;

    private File mOutputFile;

    private SaveListener mListener;

    public ImageSaver(int surfaceWidth,
                      int surfaceHeight,
                      int saveX,
                      int saveY,
                      int saveWidth,
                      int saveHeight,
                      int saveFormat,
                      int saveQuality,
                      File outputFile,
                      SaveListener saveListener) {
        mSurfaceWidth = surfaceWidth;
        mSurfaceHeight = surfaceHeight;
        mSaveX = saveX;
        mSaveY = saveY;
        mSaveWidth = saveWidth;
        mSaveHeight = saveHeight;
        mSaveFormat = saveFormat;
        mSaveQuality = saveQuality;
        mOutputFile = outputFile;
        mListener = saveListener;
    }

    /**
     * Creates the GL resources (FBOs + textures), wraps the save texture in a
     * SurfaceTexture (this object listens for its frames), and makes the
     * backing window surface current. Must be called on the GL thread.
     */
    @Override
    public void init(EglCore eglCore) {
        createOffScreenFrameBuffer();
        createOffScreenTextures();
        createSaveTexture();
        bindDefaultFrameBuffer();
        attachOffScreenTexture(mSaveTextureId[0]);
        SurfaceTexture surfaceTexture = new SurfaceTexture(mSaveTextureId[0]);
        surfaceTexture.setOnFrameAvailableListener(this);
        mWindowSurface = new WindowSurface(eglCore, surfaceTexture);
        mWindowSurface.makeCurrent();
    }

    @Override
    public int getInputTextureId() {
        return mOffScreenTextureIds[mInputTextureIndex];
    }

    @Override
    public int getOutputTextureId() {
        return mOffScreenTextureIds[mOutputTextureIndex];
    }

    @Override
    public void bindOffScreenFrameBuffer() {
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mOffScreenFrameBuffer[0]);
    }

    @Override
    public void attachOffScreenTexture(int textureId) {
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
                GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
    }

    // "Default" here means the second FBO (the save target), not FBO 0.
    @Override
    public void bindDefaultFrameBuffer() {
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mOffScreenFrameBuffer[1]);
    }

    @Override
    public int getSurfaceWidth() {
        return mSurfaceWidth;
    }

    @Override
    public int getSurfaceHeight() {
        return mSurfaceHeight;
    }

    /** Swaps the roles of the ping-pong input/output textures. */
    @Override
    public void swapTexture() {
        int tmp = mInputTextureIndex;
        mInputTextureIndex = mOutputTextureIndex;
        mOutputTextureIndex = tmp;
    }

    @Override
    public void makeCurrent() {
        if (mWindowSurface != null) {
            mWindowSurface.makeCurrent();
        }
    }

    @Override
    public void swapBuffers() {
        if (mWindowSurface != null) {
            mWindowSurface.swapBuffers();
        }
    }

    /** Frees the FBOs, the ping-pong textures and the window surface.
     *  NOTE(review): mSaveTextureId is not deleted here — confirm whether
     *  that is released elsewhere. */
    @Override
    public void release() {
        deleteOffScreenFrameBuffer();
        GLES20.glDeleteTextures(mOffScreenTextureIds.length, mOffScreenTextureIds, 0);
        if (mWindowSurface != null) {
            mWindowSurface.release();
        }
    }

    private void createOffScreenFrameBuffer() {
        GLES20.glGenFramebuffers(mOffScreenFrameBuffer.length, mOffScreenFrameBuffer, 0);
    }

    private void createOffScreenTextures() {
        GLES20.glGenTextures(mOffScreenTextureIds.length, mOffScreenTextureIds, 0);
        for (int mTextureId : mOffScreenTextureIds) {
            // bind to fbo texture cause we are going to do setting.
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mSurfaceWidth, mSurfaceHeight,
                    0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
            // Minification filter: nearest — use the single closest texel.
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            // Magnification filter: linear — weighted average of nearby texels.
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            // Clamp S to edge so sampling never blends with a border.
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            // Clamp T to edge so sampling never blends with a border.
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            // unbind fbo texture.
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        }
    }

    private void createSaveTexture() {
        GLES20.glGenTextures(mSaveTextureId.length, mSaveTextureId, 0);
        for (int mTextureId : mSaveTextureId) {
            // bind to texture cause we are going to do setting.
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mSurfaceWidth, mSurfaceHeight,
                    0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
            // Minification filter: nearest — use the single closest texel.
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            // Magnification filter: linear — weighted average of nearby texels.
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            // Clamp S to edge so sampling never blends with a border.
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            // Clamp T to edge so sampling never blends with a border.
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            // unbind fbo texture.
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        }
    }

    private void deleteOffScreenFrameBuffer() {
        GLES20.glDeleteFramebuffers(mOffScreenFrameBuffer.length, mOffScreenFrameBuffer, 0);
    }

    /**
     * Frame ready on the save texture: read the save region back, flip it
     * vertically (GL rows are bottom-up), and hand it to saveBitmap().
     * Runs on whichever thread owns the GL context.
     */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // save image file here.
        Logger.d(TAG, "Save image, onFrameAvailable, we are going to save it, width: "
                + mSurfaceWidth
                + ", height: "
                + mSurfaceHeight
                + ", output file: "
                + mOutputFile.getAbsolutePath());
        // NOTE(review): this semaphore is released and immediately acquired on
        // the same thread, so it never blocks — it looks vestigial from an
        // earlier asynchronous version; confirm before removing.
        final Semaphore waiter = new Semaphore(0);

        // Take picture on OpenGL thread
        final int[] pixelMirroredArray = new int[mSaveWidth * mSaveHeight];
        final IntBuffer pixelBuffer = IntBuffer.allocate(mSaveWidth * mSaveHeight);
        GLES20.glReadPixels(mSaveX, mSaveY, mSaveWidth, mSaveHeight,
                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
        int[] pixelArray = pixelBuffer.array();

        // Convert upside down mirror-reversed image to right-side up normal image.
        for (int i = 0; i < mSaveHeight; i++) {
            for (int j = 0; j < mSaveWidth; j++) {
                pixelMirroredArray[(mSaveHeight - i - 1) * mSaveWidth + j] = pixelArray[i * mSaveWidth + j];
            }
        }
        waiter.release();
        try {
            waiter.acquire();
        } catch (InterruptedException e) {
            e.printStackTrace();
            UiThreadUtil.post(new Runnable() {
                @Override
                public void run() {
                    if (mListener != null) {
                        mListener.onSaveFailed();
                    }
                }
            });
            return;
        }
        Bitmap bitmap = Bitmap.createBitmap(mSaveWidth, mSaveHeight, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(IntBuffer.wrap(pixelMirroredArray));
        saveBitmap(bitmap, mOutputFile);
    }

    /**
     * Compresses the bitmap to disk in the configured format/quality, then
     * notifies the listener on the UI thread and releases GL resources on
     * success. Failures are reported via onSaveFailed().
     */
    private void saveBitmap(Bitmap bitmap, final File picFile) {
        try {
            FileOutputStream out = new FileOutputStream(picFile);
            // Map the IEManager format constant to the Bitmap compress format;
            // JPEG is the default fallback.
            Bitmap.CompressFormat format = Bitmap.CompressFormat.JPEG;
            switch (mSaveFormat) {
                case IEManager.IMG_FORMAT_PNG:
                    format = Bitmap.CompressFormat.PNG;
                    break;
                case IEManager.IMG_FORMAT_JPEG:
                    format = Bitmap.CompressFormat.JPEG;
                    break;
                case IEManager.IMG_FORMAT_WEBP:
                    format = Bitmap.CompressFormat.WEBP;
                    break;
                default:
                    break;
            }
            bitmap.compress(format, mSaveQuality, out);
            out.flush();
            out.close();
            UiThreadUtil.post(new Runnable() {
                @Override
                public void run() {
                    if (mListener != null) {
                        mListener.onSaved(picFile);
                    }
                }
            });

            // save done, release resource now.
            release();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            UiThreadUtil.post(new Runnable() {
                @Override
                public void run() {
                    if (mListener != null) {
                        mListener.onSaveFailed();
                    }
                }
            });
        } catch (IOException e) {
            e.printStackTrace();
            UiThreadUtil.post(new Runnable() {
                @Override
                public void run() {
                    if (mListener != null) {
                        mListener.onSaveFailed();
                    }
                }
            });
        }
    }
}
#!/bin/bash
# Minify main.js via Google's Closure Compiler web service and wrap the
# result in a javascript: bookmarklet.

DIR=$(dirname $0)
FILE="$DIR/main.js"
COMPILE_TO="bookmarklet.js"

echo "Compiling " $FILE"..."

echo -n "javascript:(function(){" > $COMPILE_TO
# FIX: the flag was misspelled "--data-urlencod"; it only worked by accident
# through curl's unambiguous long-option prefix matching. Newlines are
# squashed to spaces so the bookmarklet stays a single line.
curl --data-urlencode "js_code@$FILE" -d compilation_level=SIMPLE_OPTIMIZATIONS -d output_format=text -d output_info=compiled_code https://closure-compiler.appspot.com/compile | tr '\n' ' ' >> $COMPILE_TO
echo -n 'main()})()' >> $COMPILE_TO

echo "Successfully processed into $COMPILE_TO!"
# frozen_string_literal: true

# Shared examples for CurveHandler processors. Included by concrete handler
# specs; `described_class` must respond to .new(curve) and .from_string(str),
# and instances expose #valid?, #errors, #sanitized_curve and
# #curve_for_storage.
RSpec.shared_examples_for 'a CurveHandler processor' do
  let(:handler) { described_class.new(curve) }

  context 'with an empty curve' do
    let(:curve) { [] }

    it 'is not valid' do
      expect(handler).not_to be_valid
    end

    it 'has an error message' do
      handler.valid?

      expect(handler.errors).to include(
        'Curve must have 8760 numeric values, one for each hour in a typical year'
      )
    end

    it 'does not return a sanitized curve' do
      expect(handler.sanitized_curve).to be_nil
    end

    it 'does not return a curve_for_storage' do
      expect(handler.curve_for_storage).to be_nil
    end
  end

  context 'with nil instead of a curve' do
    let(:curve) { nil }

    it 'is not valid' do
      expect(handler).not_to be_valid
    end

    it 'has an error message' do
      handler.valid?

      expect(handler.errors).to include(
        'Curve must be a file containing 8760 numeric values, ' \
        'one for each hour in a typical year'
      )
    end

    it 'does not return a sanitized curve' do
      expect(handler.sanitized_curve).to be_nil
    end

    it 'does not return a curve_for_storage' do
      expect(handler.curve_for_storage).to be_nil
    end
  end

  context 'with a curve containing nil' do
    # Alternating valid/invalid entries; 2 * 4380 = 8760 values total.
    let(:curve) { [1.0, nil] * 4380 }

    it 'is not valid' do
      expect(handler).not_to be_valid
    end

    it 'has an error message' do
      handler.valid?

      expect(handler.errors).to include(
        'Curve must only contain numeric values'
      )
    end

    it 'does not return a sanitized curve' do
      expect(handler.sanitized_curve).to be_nil
    end

    it 'does not return a curve_for_storage' do
      expect(handler.curve_for_storage).to be_nil
    end
  end

  context 'with a curve containing a string' do
    let(:curve) { [1.0, '1,0'] * 4380 }

    it 'is not valid' do
      expect(handler).not_to be_valid
    end

    it 'has an error message' do
      handler.valid?

      expect(handler.errors).to include(
        'Curve must only contain numeric values'
      )
    end

    it 'does not return a sanitized curve' do
      expect(handler.sanitized_curve).to be_nil
    end

    it 'does not return a curve_for_storage' do
      expect(handler.curve_for_storage).to be_nil
    end
  end

  context 'with a curve containing arrays' do
    # Simulates a multi-column CSV parsed into row arrays.
    let(:curve) { [[1.0, 1.0], [2.0, 2.0]] * 4380 }

    it 'is not valid' do
      expect(handler).not_to be_valid
    end

    it 'has an error message' do
      handler.valid?

      expect(handler.errors).to include(
        'Curve must contain only a single numeric value on each line; multiple values separated ' \
        'by commas are not permitted'
      )
    end

    it 'does not return a sanitized curve' do
      expect(handler.sanitized_curve).to be_nil
    end

    it 'does not return a curve_for_storage' do
      expect(handler.curve_for_storage).to be_nil
    end
  end

  describe '.from_string' do
    let(:handler) { described_class.from_string(input) }

    context 'when given an empty string' do
      let(:input) { +'' }

      it 'is not valid' do
        expect(handler).not_to be_valid
      end
    end

    context 'when given a string with commas' do
      let(:input) { +"1,2\n3,4" }

      it 'is not valid' do
        expect(handler).not_to be_valid
      end

      it 'has a relevant error message' do
        handler.valid?

        expect(handler.errors).to include(
          'Curve must contain only a single numeric value on each line; multiple values ' \
          'separated by commas are not permitted'
        )
      end
    end
  end
end

# Shared examples for handlers that keep input values as-is (no
# normalization), beyond converting integers to floats.
RSpec.shared_examples_for 'a non-normalizing CurveHandler processor' do
  let(:handler) { described_class.new(curve) }

  context 'with a curve containing 8760 floats' do
    let(:curve) { [1.0] * 8760 }

    it 'is valid' do
      expect(handler).to be_valid
    end

    it 'changes no values when sanitizing' do
      expect(handler.sanitized_curve).to eq(curve)
    end

    it 'has an identical sanitized_curve and curve_for_storage' do
      expect(handler.curve_for_storage).to eq(handler.sanitized_curve)
    end
  end

  context 'with a curve containing 8760 integers' do
    let(:curve) { [1] * 8760 }

    it 'is valid' do
      expect(handler).to be_valid
    end

    it 'converts each value to a float' do
      expect(handler.sanitized_curve).to eq(curve.map(&:to_f))
    end

    it 'has an identical sanitized_curve and curve_for_storage' do
      expect(handler.curve_for_storage).to eq(handler.sanitized_curve)
    end
  end

  context 'with a curve containing 10 floats' do
    let(:curve) { [1.0] * 10 }

    it 'is not valid' do
      expect(handler).not_to be_valid
    end

    it 'has an error message' do
      handler.valid?

      expect(handler.errors).to include(
        'Curve must have 8760 numeric values, one for each hour in a typical year'
      )
    end

    it 'does not return a sanitized curve' do
      expect(handler.sanitized_curve).to be_nil
    end
  end

  describe '.from_string' do
    let(:handler) { described_class.from_string(input) }

    context 'when given a string containing 8760 floats' do
      let(:input) { "1.2\n" * 8760 }

      it 'is valid' do
        expect(handler).to be_valid
      end

      it 'sanitizes the curve' do
        expect(handler.sanitized_curve).to eq([1.2] * 8760)
      end
    end

    context 'when given a string containing 8760 integers' do
      let(:input) { "1\n" * 8760 }

      it 'is valid' do
        expect(handler).to be_valid
      end

      it 'sanitizes the curve' do
        expect(handler.sanitized_curve).to eq([1.0] * 8760)
      end

      it 'has an identical sanitized_curve and curve_for_storage' do
        expect(handler.curve_for_storage).to eq(handler.sanitized_curve)
      end
    end

    context 'when given a string containing 8760 values with trailing commas' do
      let(:input) { "1.2,\n" * 8760 }

      it 'is valid' do
        expect(handler).to be_valid
      end

      it 'sanitizes the curve' do
        expect(handler.sanitized_curve).to eq([1.2] * 8760)
      end
    end

    context 'when given a string with mixed values' do
      let(:input) { "1.2\nnope\n" * 4380 }

      it 'is not valid' do
        expect(handler).not_to be_valid
      end
    end

    context 'when given a malformed CSV' do
      # Bare carriage return inside a record.
      let(:input) { "1.2\n1\r2\n" * 4380 }

      it 'is not valid' do
        expect(handler).not_to be_valid
      end
    end

    context 'with given a string with a byte order mark and 8760 floats' do
      let(:input) { "\xEF\xBB\xBF" + ("1.2\n" * 8760) }

      it 'is valid' do
        expect(handler).to be_valid
      end

      it 'sanitizes the curve' do
        expect(handler.sanitized_curve).to eq([1.2] * 8760)
      end
    end
  end
end

# Shared examples for handlers that clamp negative values to zero rather
# than rejecting the curve.
RSpec.shared_examples_for 'a CurveHandler which disallows negatives' do
  let(:handler) { described_class.new(curve) }

  context 'with a curve containing negatives' do
    let(:curve) { [1.0, -1.0] * 4380 }

    it 'is valid' do
      expect(handler).to be_valid
    end

    it 'converts negatives to zero' do
      expect(handler.sanitized_curve.take(4)).to eq([1.0, 0.0, 1.0, 0.0])
    end
  end
end
#!/usr/bin/env bash set -euxo pipefail # Check tar is in PATH command -v jar source Version.txt OUTPUT_DIR="docs/javadoc" rm -rf ${OUTPUT_DIR} mkdir -p ${OUTPUT_DIR} ARCHIVE=$(find "temp_java/ortools-java/target" -iname "ortools-java-${OR_TOOLS_MAJOR}.${OR_TOOLS_MINOR}.*-javadoc.jar") (cd ${OUTPUT_DIR} && jar -xvf "../../${ARCHIVE}")
// Jest tests for the DebugHelpers module: delog (conditional console
// logging gated by DEBUG_MODE) and ApiError (custom Error subclass).
import * as DebugHelpers from '../index';

beforeEach(() => {
  // Spy on console.log so each test can count calls without real output.
  jest.spyOn(console, 'log');
});
afterEach(() => {
  jest.clearAllMocks();
});

describe('DebugHelpers', () => {
  describe('delog', () => {
    it('should log body of request in debug mode', () => {
      const body = 'Hello Snappmarket!';
      // NOTE(review): env vars are strings; DEBUG_MODE is set per-test and
      // never restored, so test order could matter — confirm isolation.
      process.env.DEBUG_MODE = 'true';
      expect(console.log.mock.calls).toHaveLength(0);
      DebugHelpers.delog(body);
      expect(console.log.mock.calls).toHaveLength(1);
      // delog prefixes the payload with '--'.
      expect(console.log.mock.calls[0][0]).toBe('--');
      expect(console.log.mock.calls[0][1]).toBe(body);
    });

    it('should not log body of request in non-debug mode', () => {
      const body = 'Hello Snappmarket!';
      process.env.DEBUG_MODE = 'false';
      expect(console.log.mock.calls).toHaveLength(0);
      DebugHelpers.delog(body);
      expect(console.log.mock.calls).toHaveLength(0);
    });
  });

  describe('ApiError', () => {
    it('should throw an API error', () => {
      const message = 'something bad happened!';
      try {
        throw new DebugHelpers.ApiError(message);
      } catch (e) {
        expect(e.message).toEqual(message);
      }
    });
  });
});
// Shared string-key constants for the author-facing web UI, grouped by
// feature area (buy/sell order book, trading, user profile, documents,
// message signing, account management). Values are used as lookup keys —
// presumably element ids / storage keys; confirm against the consumers.
//////////////////////////////////////////////////////////// BUYER
const _buyPostNum = '_buyPostNum';
const _buyDone = '_buyDone';
const _buyTpe = '_buyTpe';
const _buyVol = '_buyVol';
const _buyNum = '_buyNum';
//////////////////////////////////////////////////////////// SELLER
const _sellPostNum = '_sellPostNum';
const _sellDone = '_sellDone';
const _sellTpe = '_sellTpe';
const _sellVol = '_sellVol';
const _sellNum = '_sellNum';
//////////////////////////////////////////////////////////// BUYING
const _sellerAddress = 'sellerAddress';
const _sellerPostNum = 'sellerPostNum';
const _sellingDone = '_sellingDone'; // traded
const _sellingTpe = '_sellingTpe'; // price
const _sellingVol = '_sellingVol'; // trading
const _sellingNum = '_sellingNum'; // count
const _xut2gain = 'xut2gain';
const _eth2send = 'eth2send';
//////////////////////////////////////////////////////////// SELLING
const _buyerAddress = 'buyerAddress';
const _buyerPostNum = 'buyerPostNum';
const _buyingDone = '_buyingDone'; // traded
const _buyingTpe = '_buyingTpe'; // price
const _buyingVol = '_buyingVol'; // trading
const _buyingNum = '_buyingNum'; // count
const _xut2send = 'xut2send';
const _eth2gain = 'eth2gain';
//////////////////////////////////////////////////////////// BUY
const _buyrate = 'buyrate';
const _xut2buy = 'xut2buy';
const _eth2pay = 'eth2pay';
//////////////////////////////////////////////////////////// SELL
const _sellrate = 'sellrate';
const _xut2sell = 'xut2sell';
const _eth2recv = 'eth2recv';
////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////// USER
const _userName = 'userName';
const _userForm = 'userForm';
const _userText = 'userText';
//////////////////////////////////////////////////////////// DOCS
const _inBlock = 'inBlock';
const _inHash = 'inHash';
const _inTyp = 'inType';
const _inFmt = 'inForm';
const _inTxt = 'inText';
const _inTag = 'inTags';
const _inHed = 'inHead';
const _inFut = 'inFoot';
const _inAut = 'inAuth';
const _inRef = 'inRef';
const _inUtc = 'inUtc';
const _inFrm = 'inFrm';
const _inTo = 'inTo';
////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////// MSGS
const _keystore = 'keystore';
const _smessage = 'message';
const _accverify = 'verify';
const _signature = 'signature';
const _signatory = 'signatory';
//////////////////////////////////////////////////////////// ACCS
const _newaddress = 'new_address';
const _newprivate = 'new_private';
const _oldaddress = 'old_address';
const _oldprivate = 'old_private';
const _expprivate = 'exp_private';
////////////////////////////////////////////////////////////
#!/bin/bash
# Pick a random file from the wallpaper directory and set it as the
# desktop background with feh. DISPLAY is exported explicitly so the
# script also works when launched from cron/systemd (no X session env).
export DISPLAY=:0.0

wallpaperdir="$HOME/Pictures/Wallpapers"

# Quote the directory so paths containing spaces are not word-split,
# and quote the result everywhere it is used.
randompic=$(find "$wallpaperdir" -maxdepth 1 -type f | shuf -n1)

# Guard: without this, an empty directory would invoke feh with an
# empty argument and fail with a confusing error.
if [ -z "$randompic" ]; then
    echo "No wallpaper files found in $wallpaperdir" >&2
    exit 1
fi

echo "$randompic"
feh --bg-scale "$randompic"

# Emit a UTC timestamp so each run can be traced in cron mail / logs.
datetime=$(date -u)
echo "$datetime"
#!/usr/bin/env bash
### Default Parameters Set within subjectService ##
#
# SERVER='localhost:8888'
# Retry Connection Interval: 5 sec

# Copy the command line so it can be scanned for -h and then forwarded
# verbatim to the Python service.
args=("${@}")
for i in "${!args[@]}"; do
    if [[ ${args[i]} = "-h" ]]; then
        echo "USAGE: $0 [-s <server>] [-u <username>] [-p <password>]"
        echo -e "\t[-i <retry-connection-interval>] [--test]"
        exit 0
    fi
    #echo "$i = ${args[i]}"
done

# activate conda python env
source ~/.bashrc
conda deactivate
conda activate rtcloud

export PYTHONPATH=./rtCommon/:$PYTHONPATH
echo "python rtCommon/subjectService.py ${args[@]}"
# Quote the array expansion: the original unquoted ${args[@]} re-split
# any argument containing whitespace (e.g. -p 'pass word') before it
# reached Python.
python rtCommon/subjectService.py "${args[@]}"
"""Initial-location generator for drifters: module setup, CLI parsing,
and the multiprocessing helpers used to prune/combine candidate paths.

NOTE(review): several helpers communicate through the module-level
globals ``manager`` and ``resdict`` (a multiprocessing Manager dict);
the exact create/assign order matters, so nothing here is reordered.
"""
import copy
import sys
sys.path.append('SetsClustering')  # make the local SetsClustering package importable
from multiprocessing import Process ,Manager
import numpy as np
import LinearProgrammingInTheDarkClassVersion as LPD
from multiprocessing import Pool
from jgrapht.algorithms.shortestpaths import johnson_allpairs
import jgrapht
from SetsClustering import Utils, PointSet, KMeansAlg
from SetsClustering import KMeansForSetsSensitivityBounder as SensBounder
from SetsClustering import Coreset as CS
from scipy.spatial.distance import cdist
import seaborn as sns
from copy import deepcopy
import itertools
from scipy.ndimage import convolve
from timeit import default_timer as timer
from tqdm import tqdm
import dill
import matplotlib
matplotlib.use('agg')  # headless backend: figures are only saved, never shown
import matplotlib.pyplot as plt
import matplotlib.pylab as pl
from scipy.linalg import null_space
import scipy.ndimage as ndi
from scipy.spatial import ConvexHull
import argparse, os, pickle
from scipy.io import netcdf

POWER = 4
# Scale factor applied to spatial coordinates before clustering so that
# spatial proximity dominates the velocity-direction features.
FORCE_NEIGHBORING = 20
import psutil
CPUS = psutil.cpu_count()  # worker count for the Process pools below
# import multiprocessing
# # from pathos.multiprocessing import ProcessingPool as Pool
# # from sklearn.externals.joblib import Parallel, delayed
# from multiprocessing import Process

parser = argparse.ArgumentParser(description='Initial Location Generator')
parser.add_argument('-d', type=str, default=None, help='Directory containing all maps')
parser.add_argument('-pp', default=False, action='store_true', help='preprocess map')
parser.add_argument('-ft', default='.nc', type=str, help='Type of map file')
parser.add_argument('-nf', default=1, type=int, help='Number of files describing a map of velocities')
parser.add_argument('-eps_g', default=None, type=float, help=r'resolution of the \varepsilon-grid')
parser.add_argument('-eps_b', default=0.08, type=float, help=r'epsilon approximation for each of the patches of the currents')
parser.add_argument('-k', default=10, type=int, help='Desired number of drifters')
parser.add_argument('-bs', default=2, type=int, help='size of the blob prior to the clustering phase')
parser.add_argument('-coreset_sample_size', default=1000, type=int, help='The size of the coreset for the clustering phase')
parser.add_argument('-time', default=False, action='store_true', help='Apply our system over time')
parser.add_argument('-tol', default=0.2, type=float, help='Tolerance for minimum volume ellipsoid')
parser.add_argument('-resume', default=False, action='store_true', help='In case of code being killed, you can resume from last map')
parser.add_argument('-show', default=False, action='store_true', help='Show only our segementation and clustering. Must have preporcessed these data before')


class bcolors:
    """ANSI escape sequences used to colorize terminal progress output."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
    NORMAL = '\033[0m'


plt.rcParams.update({'font.size': 16})
manager = Manager()


def removeInclusionsJob(lst, ids, path_str):
    """Worker: for each path-string in ``lst``, record in the shared
    ``resdict`` whether it is NOT a substring of ``path_str`` (i.e. it
    survives inclusion pruning). ``ids`` gives each entry's global index.
    """
    global resdict
    for i in range(len(lst)):
        resdict[ids[i]] = True
        # A path that appears verbatim inside a longer path is redundant.
        if lst[i] in path_str:
            resdict[ids[i]] = False


def removeInclusions(unified_paths, file_path='', file_prefix=''):
    """Drop every path that is contained in a longer path.

    Paths are grouped by vertex count; each group is only compared
    against the strictly longer groups. Above 10k paths the groups are
    checked in parallel processes via a Manager dict; the boolean
    survival mask is saved to ``<file_path>mask_unified_paths_<prefix>.npy``
    and the surviving paths are returned as lists of ints.

    NOTE(review): ``global A`` below is never assigned or read here —
    presumably leftover; confirm before removing.
    """
    global manager
    global resdict
    global A
    unified_paths_strings = [str(x[0]).strip('[]') for x in unified_paths]
    # Sort (and group) by path length measured in vertices.
    unified_paths_strings.sort(key=(lambda x: len(x.split(','))))
    lst = [list(grp) for i, grp in itertools.groupby(unified_paths_strings, key=(lambda x: len(x.split(','))))]
    sizes = np.cumsum([len(x) for x in lst])
    # Global index ranges of each length-group within the sorted list.
    unique_ids = [list(range(sizes[i-1], sizes[i]) if i > 0 else range(sizes[i])) for i in range(len(sizes))]
    if len(unified_paths_strings) > 10000:
        with Manager() as manager:
            proc_list = []
            resdict = manager.dict()
            for i, item in enumerate(lst):
                # The longest group cannot be included in anything; skip it.
                if i != (len(lst) - 1):
                    proc_list.append(
                        Process(target=removeInclusionsJob,
                                args=(item, unique_ids[i], '\n'.join(unified_paths_strings[sizes[i]:])))
                    )
                    proc_list[-1].start()
            for proc in proc_list:
                proc.join()
            # NOTE(review): assumes resdict.items() is keyed consistently
            # with the sorted order — verify ordering before relying on it.
            mask = [x[1] for x in resdict.items()]
    else:
        resdict = dict()
        for i, item in enumerate(lst):
            if i != (len(lst) - 1):
                removeInclusionsJob(item, unique_ids[i], '\n'.join(unified_paths_strings[sizes[i]:]))
        mask = [x[1] for x in resdict.items()]
    # Every member of the longest group always survives.
    mask.extend([True for _ in range(len(lst[-1]))])
    np.save('{}mask_unified_paths_{}.npy'.format(file_path, file_prefix), mask)
    return [[int(y) for y in x.split(', ')] for x in list(itertools.compress(unified_paths_strings, mask))]


def removeDuplicates(list_1):
    """Return ``list_1`` with duplicates removed, preserving first-seen order."""
    list2 = list(set(list_1))
    list2.sort(key=list_1.index)
    return list2


def makedir(dir_path):
    """mkdir that logs (rather than raises) when the directory exists."""
    try:
        os.mkdir(dir_path)
    except OSError as error:
        print(error)


def saveVels(data, file_path, smoothed=True):
    """Dump the x/y velocity components of ``data`` (H x W x 2), each
    tiled 10x along a new time axis, into Smoothed_Vel/ or Original_Vel/.
    """
    if smoothed:
        file_path += 'Smoothed_Vel/'
    else:
        file_path += 'Original_Vel/'
    makedir(file_path)
    temp = np.tile(data[:, :, 0][:, :, np.newaxis], 10)
    temp.dump(file_path + 'matrix_vel_x.dat')
    temp = np.tile(data[:, :, 1][:, :, np.newaxis], 10)
    temp.dump(file_path + 'matrix_vel_y.dat')


def readNetCDFFile(file_path, over_time):
    """Read u/v current-velocity fields from a NetCDF file.

    Values with |v| > 1e3 are treated as fill values and zeroed out.
    When ``over_time`` is False only the first time slice is returned.
    """
    file2read = netcdf.NetCDFFile(file_path, 'r')
    U = file2read.variables['u'].data  # velocity in x-axis
    V = file2read.variables['v'].data  # velocity in y-axis
    mask = np.logical_and(np.abs(U) <= 1e3, np.abs(V) <= 1e3)
    V = np.multiply(V, mask)
    U = np.multiply(U, mask)
    if not over_time:
        U = U[0, :, :, :]
        V = V[0, :, :, :]
    return U,V


def innerFunction(current_possible_combs, unique_keys):
    """Worker: concatenate each pair of (vertices, lengths) path tuples,
    de-duplicating the merged vertex list, into the shared ``resdict``.
    """
    global resdict
    for i, element in enumerate(current_possible_combs):
        resdict[unique_keys[i]] = (removeDuplicates(element[0][0] + element[1][0]), element[0][1] + element[1][1])


def getAllPossiblePaths(list1, list2):
    """Cartesian-product merge of two path lists; parallelized across
    CPUS processes when the product exceeds 10k combinations.
    """
    global CPUS
    global manager
    global resdict
    if len(list1) * len(list2) > 10000:
        manager = Manager()
        resdict = manager.dict()
        all_possible_combs = np.array_split(list(itertools.product(list1, list2)), CPUS)
        unique_ids = np.array_split(np.arange(sum([x.size for x in all_possible_combs])), CPUS)
        proc_list = []
        for i, item in enumerate(all_possible_combs):
            proc_list.append(
                Process(target=innerFunction, args=(item, unique_ids[i]))
            )
proc_list[-1].start() for proc in proc_list: proc.join() temp = list(resdict.values()) else: temp = [] for element in itertools.product(list1, list2): temp.append((removeDuplicates(element[0][0] + element[1][0]), element[0][1] + element[1][1])) return temp class CurrentEstimation(object): def __init__(self, grid, k=10, epsilon_grid=0.06, tolerance=0.001, epsilon_body=2, is_grid=True, is_data_vectorized=True, blob_size=3, sens_file_name='sens.npz', coreset_sample_size = int(1e3), save_mode=True, matrix_of_velocities=True, save_path='', file_prefix='', show=False, verbose=False): self.grid = grid self.is_grid=is_grid self.d = (self.grid.ndim - 1) if matrix_of_velocities else self.grid.ndim self.epsilon_grid = epsilon_grid self.epsilon_body = epsilon_body self.tolerance = tolerance self.g = jgrapht.create_graph(directed=True) self.cost_func = (lambda x: self.grid[tuple(x.astype("int") if is_grid else x)]) # create a simple membership cost function self.iocsAlg = None self.segments = [] self.eps_star = None self.bodies = [] self.full_bodies = [] self.is_data_vectorized = is_data_vectorized self.k = k self.blob_size = blob_size self.coreset_sample_size = coreset_sample_size self.save_mode = save_mode self.binary_grid = None self.matrix_of_velocities = matrix_of_velocities self.sens_file_name = sens_file_name self.ellipsoids = [] self.convex_hulls = [] self.verbose = verbose self.save_path = save_path self.file_prefix = file_prefix self.show = show def polynomialGridSearchParallelizedVersion(self): with Pool() as pool: pass def checkIfContained(self, point): for i,body in enumerate((self.full_bodies if self.epsilon_body == 0 else self.bodies)): if body.ndim > 1: temp_in_body = np.equal(body, point).all(1).any() temp_in_CH = False if self.convex_hulls[i] is not None: temp_in_CH = np.all(self.convex_hulls[i][:,:-1].dot(point) <= -self.convex_hulls[i][:,-1]) if temp_in_body or temp_in_CH: return True else: if np.linalg.norm(body - point) == 0: return True return False def 
IOCS(self, p): cost_func = lambda x: 0.85 <= np.dot(np.nan_to_num(self.grid[tuple(p)]/np.linalg.norm(self.grid[tuple(p)])), np.nan_to_num(self.grid[tuple(x)]/np.linalg.norm(self.grid[tuple(x)]))) \ <= 1 and 0.5 <= np.linalg.norm(self.grid[tuple(p)])/np.linalg.norm(self.grid[tuple(x)]) <= 2 self.iocsAlg = LPD.LinearProgrammingInTheDark(P=self.grid,cost_func=cost_func, point=p, d=self.d, epsilon=self.tolerance, hull_hyper=None, matrix_of_vecs=True) if self.iocsAlg.lower_d <= 1: if self.iocsAlg.lower_d == 0: self.bodies.append(p) self.full_bodies.append(p) self.ellipsoids.append(None) self.convex_hulls.append(None) else: idxs = np.where(self.iocsAlg.oracle.flattened_data == 1)[0] Z = np.empty((idxs.shape[0], p.shape[0])) Z[:, self.iocsAlg.irrelevant_dims] = p[self.iocsAlg.irrelevant_dims] Z[:, self.iocsAlg.dims_to_keep[0]] = \ np.arange(*(self.iocsAlg.oracle.bounding_box[self.iocsAlg.dims_to_keep].flatten() + np.array([0, 1])).tolist())[idxs] self.bodies.append(Z) self.full_bodies.append(Z) self.ellipsoids.append(None) self.convex_hulls.append(None) elif self.iocsAlg.get_all_points: idxs = np.where(self.iocsAlg.oracle.flattened_data == 1)[0] Z = self.iocsAlg.oracle.coordinates[:-1, idxs].T self.bodies.append(Z) self.full_bodies.append(Z) self.ellipsoids.append(None) self.convex_hulls.append(None) else: self.ellipsoids.append(self.iocsAlg.computeAMVEE() + (p, )) if self.epsilon_body > 0: s = timer() self.approximateBody(self.ellipsoids[-1][0][-1], self.ellipsoids[-1][0][-2], idx_dims_retrieve=self.ellipsoids[-1][-3], dims_value=self.ellipsoids[-1][-1], rest_dims=self.ellipsoids[-1][-2]) else: self.attainWholeBody(self.ellipsoids[-1][0][-1], self.ellipsoids[-1][0][-2], idx_dims_retrieve=self.ellipsoids[-1][-3], dims_value=self.ellipsoids[-1][-1], rest_dims=self.ellipsoids[-1][-2]) def polynomialGridSearch(self): dims = list(self.grid.shape[:-1] if self.matrix_of_velocities else self.grid.shape) for i in range(len(dims)): dims[i] = np.arange(0, dims[i], 
int(np.round(dims[i] * self.epsilon_grid))) try: X = np.array(np.meshgrid(*dims)).T.reshape(-1, len(dims)) return X except MemoryError: raise MemoryError("Cant handle this much data! Lower your epsilon or simply run the parallelized version") @staticmethod def semiBinarizeGrid(grid, kernel_size=None): # Apply Mean-Filter kernel = np.ones(tuple([grid.ndim if kernel_size is None else kernel_size for i in range(grid.ndim)]), np.float32) / (kernel_size ** grid.ndim if kernel_size is not None else grid.ndim ** grid.ndim) return convolve(grid, kernel, mode='constant', cval=0) def generateEpsilonStar(self, degree=None): if degree is None: degree = self.epsilon_body Z = np.arange(0, 2*np.pi, degree * np.pi) V = np.array(np.meshgrid(*[Z for i in range(self.d)])).T.reshape(-1, self.d) V = np.divide(V, np.linalg.norm(V, axis=1)[:, np.newaxis], out=np.zeros_like(V), where=(V != 0)) V = np.unique(np.around(np.unique(V[1:], axis=0), self.d+1), axis=0) return V @staticmethod def run_dill_encoded(payload): fun, args = dill.loads(payload) return fun(*args) @staticmethod def apply_async(pool, fun, args): payload = dill.dumps((fun, args)) return pool.apply_async(CurrentEstimation.run_dill_encoded, (payload,)) def attainWholeBody(self, E, c, idx_dims_retrieve=None, dims_value=None, rest_dims=None): if self.iocsAlg.oracle.checkIfInsidePixelStyleNumpyVer(np.round(c)) > 1.0: raise ValueError('Something is wrong with the ellipsoid!') bounding_box = self.iocsAlg.oracle.bounding_box indices = np.vstack(map(np.ravel, np.meshgrid(*[np.arange(bounding_box[x, 0], bounding_box[x, 1]+1) for x in range(bounding_box.shape[0])]))).T body = [] temp = 0 for idx in indices: if self.iocsAlg.oracle.checkIfInsidePixelStyleNumpyVer(idx) == 1 and np.linalg.norm(E.dot(idx - c)) <= 1 \ and not self.checkIfContained(idx): temp += 1 if np.linalg.norm(self.grid[tuple(idx)]) > 1e-10: body.append(idx) if len(body) > 0: self.full_bodies.append(np.vstack(body)) def approximateBody(self, E, c, idx_dims_retrieve=None, 
dims_value=None, rest_dims=None): bounding_box = self.iocsAlg.oracle.bounding_box indices_of_lengths = np.argsort([x[0] - x[1] for x in bounding_box]) coeffs = np.zeros((indices_of_lengths.shape[0],)) for i in range(coeffs.shape[0]): if i == (coeffs.shape[0] - 1): coeffs[indices_of_lengths[i]] = 1 else: coeffs[indices_of_lengths[i]] = max(((bounding_box[indices_of_lengths[i],1] - bounding_box[indices_of_lengths[i],0]) * self.epsilon_body),1) V = np.vstack(map(np.ravel, np.meshgrid(*[np.arange(start=x[0], stop=x[1], step=coeffs[j]) for (j,x) in enumerate(bounding_box)]))).T V = np.unique(V.astype("int"), axis=0) body = [] for v in V: if (self.iocsAlg.oracle.checkIfInsidePixelStyleNumpyVer(v) <= 1.0) and\ (np.linalg.norm(E.dot(v - c)) <= np.sqrt(1 + (1 + self.iocsAlg.eps) * E.shape[0])) and\ (np.linalg.norm(self.grid[tuple(v)]) > 0) and (not self.checkIfContained(v)): body.append(v) if len(body) > 0: self.bodies.append(np.vstack(body)) if len(body) > (self.d + 1): try: self.convex_hulls.append(ConvexHull(self.bodies[-1]).equations) except: self.convex_hulls.append(None) else: self.convex_hulls.append(None) def createBlobs(self, body): if body.ndim == 1: return [PointSet.PointSet(body[np.newaxis,:])] elif body.shape[0] < self.blob_size: return [PointSet.PointSet(body)] else: blob = [] for x_val in np.unique(body[:,0]): idxs = np.where(body[:, 0] == x_val)[0] if body[idxs].shape[0] < self.blob_size: blob.extend([PointSet.PointSet(body[idxs])]) else: splitted_array = np.array_split(body[idxs], int(body[idxs].shape[0] / self.blob_size)) blob.extend([PointSet.PointSet(x) for x in splitted_array]) return blob def clusteringAssignment(self, set_P, Q): assignments_per_point = [] assignments_per_blob = [] for P in set_P: dists = cdist(P.P, Q) cols_idxs = np.argmin(dists, axis=1) min_idx = np.argmin(np.min(dists, axis=1)) assignments_per_point.extend([cols_idxs[min_idx] for p in P.P]) assignments_per_blob.append(cols_idxs[min_idx]) return assignments_per_point, 
assignments_per_blob def clusterWaves(self, continue_from=0,return_full_bodies=True): P = [] blobs = [] if self.epsilon_body != 0: for body in self.bodies: P = [] # need to make a way to make sure that there is a trade-off between the first 3 entries and last two if body.ndim == 1: body = body[np.newaxis, :] for point in body: a = self.grid[tuple(point.astype("int"))] b = np.linalg.norm(a) P.append( np.hstack((point*FORCE_NEIGHBORING, np.divide(a,b, out=np.zeros_like(a), where=b!=0) * np.linalg.norm(point)))) blobs.extend(self.createBlobs(np.array(deepcopy(P)))) else: for body in self.full_bodies: # need to make a way to make sure that there is a trade-off between the first 3 entries and last two P = [] if body.ndim == 1: body = body[np.newaxis, :] for point in body: P.append( np.hstack((point*FORCE_NEIGHBORING, self.grid[tuple(point.astype("int"))] / np.linalg.norm(self.grid[tuple(point.astype("int"))]) * np.linalg.norm(point)))) blobs.extend(self.createBlobs(np.array(deepcopy(P)))) set_P_indiced = [(P, idx) for (idx, P) in enumerate(blobs)] # taking the full! 
if continue_from > 0 or self.show: sensitivity = np.load(self.save_path + self.file_prefix + self.sens_file_name)['s'] print("Loaded sensitivity for sets clustering!") else: k_means_sens_bounder = SensBounder.KMeansForSetsSensitivityBounder(set_P_indiced, self.k, None, None) sensitivity = k_means_sens_bounder.boundSensitivity() if self.save_mode: np.savez(self.save_path + self.file_prefix + self.sens_file_name, s=sensitivity) print('Sum of sensitivity is {}'.format(np.sum(sensitivity))) print("Saved sensitivity for sets clustering!") if continue_from <= 1 and not self.show: k_means_alg = KMeansAlg.KMeansAlg(blobs[0].d, self.k) coreset = CS.Coreset() C = coreset.computeCoreset(set_P_indiced, sensitivity, int(self.coreset_sample_size)) _, Q, _ = k_means_alg.computeKmeans(C[0], False) np.savez('{}Optimal_clustering_{}.npz'.format(self.save_path, self.file_prefix), Q=Q) else: Q = np.load('{}Optimal_clustering_{}.npz'.format(self.save_path,self.file_prefix))['Q'] print("Loaded optimal clustering of coreset") assignments_per_point, assignments_per_blob = self.clusteringAssignment(blobs, Q) return np.array(blobs), np.array(assignments_per_blob), assignments_per_point def addConnections(self, pairs, g_all, i, j, list_of_vertices, shift_idx_root, shift_idx_leaf, is_leaf=None, enable_weights=False, connections=[]): dists = np.linalg.norm(self.clustered_bodies[i][pairs[:,0]] - self.clustered_bodies[j][pairs[:,1]], axis=1) pairs_of_interest = pairs[np.where(dists <= 2)[0]] if len(pairs_of_interest) != 0: if enable_weights: for pair in pairs_of_interest: root_of_path_of_interest = self.clustered_bodies[i][pair[0]] leaf_of_path_of_interest = self.clustered_bodies[j][pair[1]] direction = root_of_path_of_interest - leaf_of_path_of_interest direction = direction / np.linalg.norm(direction) target_direction = self.grid[tuple(root_of_path_of_interest.astype("int"))] alpha = np.dot(direction, target_direction/np.linalg.norm(target_direction)) if alpha > 0.7: try: 
g_all.add_edge(int(pair[0] + shift_idx_root), int(pair[1] + shift_idx_leaf)) list_of_vertices = np.delete(list_of_vertices, np.where(list_of_vertices == (pair[1]+shift_idx_leaf))) if is_leaf is not None: is_leaf = np.delete(is_leaf, np.where(is_leaf == (pair[0] + shift_idx_root))) except: continue else: roots = np.unique(pairs_of_interest[:, 0]) for root in roots: try: idxs_of_interest = np.where(pairs_of_interest[:, 0] == root)[0] pairs_of_interest_per_root = pairs_of_interest[idxs_of_interest, :] root_of_path_of_interest = self.clustered_bodies[i][root][np.newaxis, :] leaf_of_path_of_interest = self.clustered_bodies[j][pairs_of_interest_per_root[:, 1]] directions = leaf_of_path_of_interest - root_of_path_of_interest directions = np.divide(directions, np.linalg.norm(directions, axis=1)[:, np.newaxis], out=np.zeros_like(directions), where=np.linalg.norm(directions, axis=1)[:, np.newaxis]!=0, casting="unsafe") target_direction = self.grid[tuple(root_of_path_of_interest.flatten().astype("int"))] alpha = np.dot(directions, target_direction / np.linalg.norm(target_direction)) l = np.argmax(alpha) if alpha[l] >= 0.7: g_all.add_edge(int(root + shift_idx_root), int(pairs_of_interest[idxs_of_interest[l]][1] + shift_idx_leaf)) list_of_vertices = \ np.delete(list_of_vertices, np.where(list_of_vertices == (pairs_of_interest[idxs_of_interest[l]][1] + shift_idx_leaf))) if is_leaf is not None: is_leaf = np.delete(is_leaf, np.where(is_leaf == (root + shift_idx_root))) connections.append((i, int(root), j, int(pairs_of_interest[idxs_of_interest[l]][1]))) except: continue return g_all, list_of_vertices, is_leaf, connections def containedInMap(self, point): temp = point + self.grid[tuple(point.astype("int"))] if np.any(temp < 0) or np.any(temp >= np.array(list(self.grid.shape[:-1]))): return False return True def attainDiameterOfSetOfPoints(self, P): return np.max(np.linalg.norm(P - P[np.argmax(np.linalg.norm(P - np.mean(P, axis=0)[np.newaxis, :], axis=1))][np.newaxis, :], axis=1)) 
def avoidRedundantConnection(self, point, P, orig_idxs): norms = np.linalg.norm(P - point[np.newaxis, :], axis=1) idxs = np.argsort(norms) temp = P - point[np.newaxis, :] temp = np.around(np.multiply(temp[idxs], (1 / norms[idxs])[:, np.newaxis]), 2) _, idx2 = np.unique(temp, axis=0, return_index=True) return orig_idxs[idxs[idx2]] def generateGraph(self, is_full=True, enable_weights=False, enable_all=False): leaves = [] roots = [] all_others = [] roots_all = np.array([]) leaves_all = np.array([]) idx_shift = 0 g_all = jgrapht.create_graph(directed=True, weighted=False) graphs = [jgrapht.create_graph(directed=True, weighted=False) for i in range(self.k)] counter_bad_vertices = np.zeros((self.k, )) cnt = 0 for body_idx,body in enumerate(self.clustered_bodies): idxs_leafs = np.arange(body.shape[0]) idxs_roots = np.arange(body.shape[0]) idxs_all_others = np.arange(body.shape[0]) for i in range(idx_shift, idx_shift + body.shape[0]): graphs[body_idx].add_vertex(i-idx_shift) g_all.add_vertex(i) for i, point in enumerate(body): temp = body-point[np.newaxis, :] norms = np.linalg.norm(temp, axis=1)[:, np.newaxis] if is_full: norms = norms.flatten() neighbors = np.where(np.logical_and(norms.flatten() <= np.sqrt(2), norms.flatten() > 0))[0] norms = norms.flatten()[neighbors][:, np.newaxis] temp = temp[neighbors,:] else: norms = norms.flatten() min_dist = self.attainDiameterOfSetOfPoints(body) * self.epsilon_body neighbors = np.where(np.logical_and(norms.flatten() <= min_dist, norms.flatten() > 0))[0] norms = norms.flatten()[neighbors][:, np.newaxis] temp = temp[neighbors, :] dots = np.clip(np.dot(np.multiply(temp, np.divide(1, norms, out=np.zeros_like(norms), where=norms != 0)), self.grid[tuple(point)] / np.linalg.norm(self.grid[tuple(point)])), -1,1) vals = np.arccos(dots) normal = null_space((self.grid[tuple(point)] / np.linalg.norm(self.grid[tuple(point)]))[np.newaxis, :]) vals2 = np.linalg.norm(np.dot(np.multiply(temp, np.divide(1, norms, out=np.zeros_like(norms), 
where=norms != 0)), normal), axis=1) try: if not self.containedInMap(point): counter_bad_vertices[body_idx] += 1 idxs_roots = np.delete(idxs_roots, np.where(idxs_roots ==i)) idxs_all_others = np.delete(idxs_all_others, np.where(idxs_all_others == i)) raise ValueError('Will not consider coordinates {} as root.'.format(point)) idxs = np.where(np.logical_and(dots >= 0, np.logical_and(vals <= (15 * np.pi/180), vals2 <= 0.3)))[0] if idxs.size == 0: raise ValueError('Continue to next point') sign_temp = np.sign(temp[idxs]) idxs = idxs[np.where(sign_temp.dot(np.sign(self.grid[tuple(point)])) == point.size)[0]] idxs = idxs[np.argsort(vals[idxs])[:min(1, idxs.shape[0])]] if not is_full: if not enable_all: l = [np.argmin(vals[idxs.astype("int")])] # take all the points that might be reached from # current vertex via the dominating direction # of the body else: l = np.arange(idxs.shape[0]).astype("int") idxs = np.unique(self.avoidRedundantConnection(point, body[neighbors[idxs], :], neighbors[idxs])) for j in idxs: edge_endpoint = j graphs[body_idx].add_edge(int(i), int(edge_endpoint)) idxs_leafs = np.delete(idxs_leafs, np.where(idxs_leafs == i)) idxs_roots = np.delete(idxs_roots, np.where(idxs_roots == edge_endpoint)) g_all.add_edge(int(i+idx_shift), int(edge_endpoint+idx_shift)) cnt+=1 else: if enable_weights: for j in idxs: # This requires a graph with weights edge_endpoint = neighbors[j] graphs[body_idx].add_edge(int(i), int(edge_endpoint)) idxs_leafs = np.delete(idxs_leafs, np.where(idxs_leafs == (i+idx_shift))) idxs_roots = np.delete(idxs_roots, np.where(idxs_roots == (edge_endpoint + idx_shift))) g_all.add_edge(int(i + idx_shift), int(edge_endpoint + idx_shift)) else: if not enable_all: l = np.argmin(vals[idxs]) else: l = np.arange(idxs.shape[0]).astype("int") for j in l: edge_endpoint = neighbors[idxs[j]] graphs[body_idx].add_edge(int(i), int(edge_endpoint)) idxs_leafs = np.delete(idxs_leafs, np.where(idxs_leafs == (i + idx_shift))) idxs_roots = np.delete(idxs_roots, 
np.where(idxs_roots == (edge_endpoint + idx_shift))) g_all.add_edge(int(i + idx_shift), int(edge_endpoint + idx_shift)) except: continue idx_shift += body.shape[0] idxs_leafs = np.array(list(set(idxs_leafs) - set(idxs_roots))) idxs_all_others = np.array(list(set(idxs_all_others) - (set(idxs_leafs).union(set(idxs_roots))))) leaves.append(deepcopy(idxs_leafs)) roots.append(deepcopy(idxs_roots)) all_others.append(deepcopy(idxs_all_others)) roots_all = np.hstack((roots_all, idxs_roots+idx_shift)) leaves_all = np.hstack((leaves_all, idxs_leafs+idx_shift)) print(bcolors.BOLD + "Graph {} contains {} vertices and {} edges".format(body_idx, graphs[body_idx].number_of_vertices, graphs[body_idx].number_of_edges)) print(bcolors.NORMAL) shifts = np.cumsum([x.shape[0] for x in self.clustered_bodies]) connections = [] for i in range(len(graphs)): for j in range(len(graphs)): if i == j: continue else: from_roots = np.array(np.meshgrid(roots[i], np.unique(np.hstack((roots[j], leaves[j], all_others[j]))))).T.reshape(-1, 2) from_leaves = np.array(np.meshgrid(leaves[i], np.unique(np.hstack((roots[j], leaves[j], all_others[j]))))).T.reshape(-1, 2) from_others = np.array(np.meshgrid(all_others[i], np.unique(np.hstack((roots[j], leaves[j],all_others[j]))))).T.reshape(-1, 2) g_all, roots_all, _, connections= \ self.addConnections(from_roots, g_all, i,j,roots_all, shift_idx_root=(0 if i == 0 else shifts[i-1]), shift_idx_leaf=(0 if j == 0 else shifts[j-1]), enable_weights=enable_weights, connections=connections) g_all, roots_all, leaves_all, connections = \ self.addConnections(from_leaves, g_all, i,j,roots_all, shift_idx_root=(0 if i == 0 else shifts[i-1]), shift_idx_leaf=(0 if j == 0 else shifts[j-1]), is_leaf=leaves_all, enable_weights=enable_weights, connections=connections) g_all, roots_all, leaves_all, connections = \ self.addConnections(from_others, g_all, i,j, roots_all, shift_idx_root=(0 if i == 0 else shifts[i-1]), shift_idx_leaf=(0 if j == 0 else shifts[j-1]), 
enable_weights=enable_weights, connections=connections) np.savez('{}Graphs_{}.npz'.format(self.save_path, self.file_prefix), g_all=jgrapht.io.exporters.generate_csv(g_all), graphs=[jgrapht.io.exporters.generate_csv(x) for x in graphs], leaves=leaves, roots=roots, roots_all=roots_all, leaves_all=leaves_all, connections=connections) return g_all, graphs, roots, leaves, roots_all, leaves_all, connections def findTheStartingVertexOfLongestPathInGraph(self, graph=None, return_all=False): all_paths_alg = johnson_allpairs(self.g if graph is None else graph) path_lengths = [] all_paths = [] for root in (self.g if graph is None else graph).vertices: longest_path_len = 0 longest_path = None for leaf in (self.g if graph is None else graph).vertices: if root == leaf: continue path = all_paths_alg.get_path(root, leaf) if path is not None: all_paths.append((path.vertices, len(path.vertices))) if path is not None and longest_path_len <= len(path.vertices): longest_path,longest_path_len = path.vertices, len(path.vertices) if longest_path_len > 0: path_lengths.append((longest_path, longest_path_len)) if not return_all: i = np.argmax(np.array([x[1] for x in path_lengths])) return path_lengths[i][0][0], all_paths else: return all_paths,path_lengths def saveFile(self, file_name, data): with open(file_name, 'wb') as outfile: pickle.dump(data, outfile, protocol=pickle.HIGHEST_PROTOCOL) def loadFile(self, file_name): with open(file_name, 'rb') as outfile: return pickle.load(outfile) def plotResults(self): x_min, x_max, y_min, y_max = 31.0583, 33.6917, 31.5100, 35.4300 ax = plotMap(self.grid, x_min=x_min, x_max=x_max, y_min=y_min, y_max=y_max) if self.epsilon_body > 0: bodies = self.bodies else: bodies = self.full_bodies colors = pl.cm.jet(np.linspace(0, 1, len(bodies))) for i in range(len(bodies)): if bodies[i].ndim > 1: ax.scatter(bodies[i][:, 0] / (self.grid.shape[0] - 1) * (x_max - x_min) + x_min, bodies[i][:, 1]/ (self.grid.shape[1] - 1) * (y_max - y_min) + y_min, color=colors[i]) 
else: ax.scatter(bodies[i][0]/ (self.grid.shape[0] - 1) * (x_max - x_min) + x_min, bodies[i][1]/ (self.grid.shape[1] - 1) * (y_max - y_min) + y_min, color=colors[i]) plt.xticks(np.arange(31.5, 34 , 0.5)) plt.yticks(np.arange(32, 35.5 , 0.5)) plt.xlabel('Latitude') plt.ylabel('Longitude') plt.gcf().tight_layout() plt.savefig('{}Segmentation_{}.png'.format(self.save_path,self.file_prefix)) # plot clustering ax = plotMap(self.grid, x_min=x_min, x_max=x_max, y_min=y_min, y_max=y_max) colors = pl.cm.jet(np.linspace(0, 1, self.k)) for i in range(self.k): ax.scatter(self.clustered_bodies[i][:,0]/ (self.grid.shape[0] - 1) * (x_max - x_min) + x_min, self.clustered_bodies[i][:,1]/ (self.grid.shape[1] - 1) * (y_max - y_min) + y_min, color=colors[i]) plt.xticks(np.arange(31.5, 34 , 0.5)) plt.yticks(np.arange(32, 35.5 , 0.5)) plt.xlabel('Latitude') plt.ylabel('Longitude') plt.gcf().tight_layout() plt.savefig('{}Clustering_{}.png'.format(self.save_path,self.file_prefix)) # close all figures plt.close('all') def findSubOptimalPlacing(self, continue_from=-1): if continue_from == -1 and not self.show: start_ellip = timer() points = self.polynomialGridSearch() for point in tqdm(points,ncols=100): if np.linalg.norm(self.grid[tuple(point)]) > 0 and not self.checkIfContained(point): self.IOCS(point) end_ellip = timer() print(bcolors.BOLD + bcolors.OKGREEN + 'IOCS ended in {} seconds'.format(end_ellip - start_ellip)) print(bcolors.NORMAL) self.saveFile(file_name=('{}Ellipsoids_{}.dat'.format(self.save_path, self.file_prefix)), data=dict(zip(['ellipsoids','bodies','full_bodies'], [self.ellipsoids, self.bodies, self.full_bodies]))) else: temp = self.loadFile(file_name=('{}Ellipsoids_{}.dat'.format(self.save_path, self.file_prefix))) self.ellipsoids = temp['ellipsoids'] self.bodies = temp['bodies'] self.full_bodies = temp['full_bodies'] start_clustering = timer() blobs, assignments_per_blob, assignments_per_point = self.clusterWaves(continue_from) self.clustered_bodies = [] for idx in 
range(self.k): cluster_idx = np.where(assignments_per_blob == idx)[0].astype("int") self.clustered_bodies.append(np.unique(np.vstack([(x.P[:, [0, 1]] / FORCE_NEIGHBORING).astype("int") for x in blobs[cluster_idx]]), axis=0)) print(bcolors.BOLD + bcolors.OKGREEN + 'Total time for clustering WC is {} seconds'.format(timer() - start_clustering)) print(bcolors.NORMAL) self.plotResults() if self.show: exit(-9) start_graph_based = timer() if continue_from < 3: g_all, graphs, roots, leaves, roots_all, leaves_all, connections = self.generateGraph(enable_all=True, is_full=(self.epsilon_body == 0.0)) else: G = np.load('{}Graphs_{}.npz'.format(self.save_path,self.file_prefix), allow_pickle=True) g_all = jgrapht.create_graph(directed=True, weighted=False) graphs_strings = G['graphs'] graphs = [jgrapht.create_graph(directed=True,weighted=False) for i in range(self.k)] jgrapht.io.importers.parse_csv(g_all, str(G['g_all'])) for i in range(self.k): jgrapht.io.importers.parse_csv(graphs[i], str(graphs_strings[i])) roots = G['roots'].tolist() leaves = G['leaves'].tolist() roots_all = G['roots_all'].tolist() leaves_all = G['leaves_all'].tolist() connections = G['connections'].tolist() # retrieve only $k$ largest paths where for any two paths, no path is a subpath of the other positions = np.empty((3,self.k, self.d)) if continue_from < 4: # Heuristic choice for i,body in enumerate(self.clustered_bodies): A = np.vstack([self.grid[tuple(x)] for x in body]) u_vecs, counts = np.unique(np.sign(A), return_counts=True, axis=0) dominating_vec = u_vecs[np.argmax(counts)] / np.linalg.norm(u_vecs[np.argmax(counts)]) idxs = np.where(np.sign(A).dot(np.sign(dominating_vec)) == dominating_vec.shape[0])[0] vecs = body[idxs] - np.mean(body[idxs], axis=0) vals = np.dot(vecs, dominating_vec) positions[0, i, :] = body[idxs[int(np.argmin(vals))]] print(bcolors.OKGREEN + 'Finished computing initial positions for drifters via heuristical methods' + bcolors.ENDC) # Find longest path in each graph seperately 
paths_in_graph = [[] for i in range(len(graphs))] for i,graph in enumerate(graphs): idx, paths_in_graph[i] = self.findTheStartingVertexOfLongestPathInGraph(graph=graph, return_all=False) positions[1, i, :] = self.clustered_bodies[i][idx] print(bcolors.OKGREEN + 'Finished computing initial positions for drifters via graph based methods' + bcolors.ENDC) np.savez('{}paths_in_graphs_{}.npz'.format(self.save_path, self.file_prefix), positions=positions, paths_in_graph=paths_in_graph) # Find k longest paths in the combined graph # old technique else: temp = np.load('{}paths_in_graphs_{}.npz'.format(self.save_path, self.file_prefix), allow_pickle=True) positions = temp['positions'] paths_in_graph = temp['paths_in_graph'].tolist() print(bcolors.BOLD + 'Starting to compute initial positions for drifters via inter-connected graphs' + bcolors.ENDC) if continue_from < 5: parsed_paths = [item for sublist in paths_in_graph for item in sublist] johnson_graphs = [johnson_allpairs(x) for x in graphs] shift_idxs = np.hstack((0,np.cumsum([x.number_of_vertices for x in graphs]))) unified_paths = [] for connection in connections: i, vertex_i, j, vertex_j = connection temp_paths_from_j = [x for x in paths_in_graph[j] if x[0][0] == vertex_j] temp_paths_to_i = [x for x in paths_in_graph[i] if x[0][-1] == vertex_i] unified_temp_paths_to_i = [] # shift indices for list_i in range(len(temp_paths_to_i)): if len(temp_paths_to_i) > 0: temp_paths_to_i[list_i] = ([x + shift_idxs[i] for x in temp_paths_to_i[list_i][0]], temp_paths_to_i[list_i][1]) for list_j in range(len(temp_paths_from_j)): if len(temp_paths_from_j) > 0: temp_paths_from_j[list_j] = ([x + shift_idxs[j] for x in temp_paths_from_j[list_j][0]], temp_paths_from_j[list_j][1]) # check if there are inter_graph paths including vertex_i temp_paths_to_i = [x for x in paths_in_graph[i] if x[0][-1] == vertex_i] if len(unified_paths) > 0: unified_temp_paths_to_i = [x for x in unified_paths if x[0][-1] == (vertex_i + shift_idxs[i])] if 
len(temp_paths_to_i) > 0 and len(temp_paths_from_j) > 0: temp = getAllPossiblePaths(temp_paths_to_i, temp_paths_from_j) unified_paths.extend(copy.deepcopy(temp)) if len(unified_temp_paths_to_i) > 0: temp2 = getAllPossiblePaths(unified_temp_paths_to_i, temp) print('Length of temp_2 is {}'.format(len(temp2))) unified_paths.extend(copy.deepcopy(temp2)) unified_paths.sort(key = lambda x: x[1]) i = 0 if False: while True: advance_i = True if i < (len(unified_paths) - 1): for j in range(i+1, len(unified_paths)): if set(unified_paths[i][0]).issubset(unified_paths[j][0]): advance_i = False del(unified_paths[i]) break if advance_i: i += 1 else: break else: print('Removing Inclusions has been initiated') unified_paths = removeInclusions(unified_paths, self.save_path, self.file_prefix) print('Number of possibe paths is {}'.format(len(unified_paths))) np.save('{}unified_paths_{}.npy'.format(self.save_path, self.file_prefix), unified_paths) print('Saved unified paths') else: temp = np.load('{}unified_paths_{}.npy'.format(self.save_path, self.file_prefix), allow_pickle=True) unified_paths = temp.tolist() print('length of Connections is {}'.format(len(connections))) print('length of Unified Paths are {}'.format(len(unified_paths))) if False: unified_graph = johnson_allpairs(g_all) paths = [] for root in roots_all: for leaf in leaves_all: try: path = unified_graph.get_path(int(root), int(leaf)) except: continue dont = False replace_idx = None if path is not None: if len(paths) > 0: if np.any([set(path.vertices).issubset(x[0]) for x in paths]): dont = True if np.any([set(x[0]).issubset(path.vertices) for x in paths]): replace_idx = np.where([set(x[0]).issubset(path.vertices) for x in paths])[0][0] if not dont: if replace_idx is None: paths.append((path.vertices, len(path.vertices))) else: paths[replace_idx] = (path.vertices, len(path.vertices)) # make sure that paths chosen that start from different nodes temp = copy.deepcopy(unified_paths) while True: len_paths = np.array([len(x) 
for x in temp]) sorted_idxs = np.argsort((-1) * len_paths) idxs = sorted_idxs[:self.k].astype("int") initials = [temp[x][0] for x in idxs] to_delete = [] for i in range(self.k): for j in range(i+1, self.k): if initials[i] == initials[j]: to_delete.append(temp[idxs[j]]) if len(to_delete) == 0 or len(len_paths) == self.k: break else: for element in to_delete: try: temp.remove(element) except: continue unified_paths = copy.deepcopy(temp) len_paths = np.array([len(x) for x in unified_paths]) sorted_idxs = np.argsort((-1) * len_paths) sizes = np.cumsum([x.shape[0] for x in self.clustered_bodies]) idxs = sorted_idxs[:self.k].astype("int") raw_paths = [unified_paths[x] for x in idxs] raw_paths_initial_pos = [x[0] for x in raw_paths] print('raw paths initial are {}'.format(raw_paths_initial_pos)) print('Sizes are {}'.format(sizes)) for i in range(len(raw_paths_initial_pos)): idx_shift = np.where(raw_paths_initial_pos[i] < sizes)[0][0] if self.clustered_bodies[idx_shift].shape[0] == (raw_paths_initial_pos[i] -sizes[idx_shift-1]): idx_shift += 1 positions[2, i, :] = self.clustered_bodies[idx_shift][0] else: positions[2, i, :] = self.clustered_bodies[idx_shift][raw_paths_initial_pos[i] - (0 if idx_shift == 0 else sizes[idx_shift-1])] print(bcolors.BOLD + 'Finished computing initial positions for drifters via inter-connected graphs' + bcolors.ENDC) np.save('{}initial_locations_{}.npy'.format(self.save_path,self.file_prefix), positions) print(bcolors.BOLD + bcolors.OKGREEN + 'Time for finding suboptimal dropping positions is {} seconds'.format(timer() - start_graph_based)) print(bcolors.NORMAL) return positions def plotEllipsoid(self, ellipsoid, center): """ This function serves only for plotting a 2D ellipsoid. :param ellipsoid: An orthogonal matrix representing the ellipsoid's axes lenghts and rotation :param center: The center of ellipsoid represented by a numpy array. :return: None. """ N = 10000 # numer of points on the boundary of the ellipsoid. 
_, D, V = np.linalg.svd(ellipsoid, full_matrices=True) # attain the axes lengthes and rotation of the ellipsoid a = 1.0 / D[0] b = 1.0 / D[1] theta = np.expand_dims(np.arange(start=0, step=1.0 / N, stop=2.0*np.pi + 1.0/N), 1).T state = np.vstack((a * np.cos(theta), b * np.sin(theta))) X = np.dot(V, state) + center[:,np.newaxis] plt.plot(X[0, :], X[1, :], color='blue') def plotMap(grid, indices=None, x_min=None, x_max=None, y_min=None, y_max=None): positions = np.indices((grid.shape[0], grid.shape[1])).T.reshape(-1, 2) fig, ax = plt.subplots() idxs = [i for i in range(positions.shape[0]) if np.linalg.norm(grid[tuple(positions[i])]) > 0] if indices is None: if x_min is None: q = ax.quiver(positions[idxs,0], positions[idxs,1], grid[positions[idxs,0], positions[idxs,1],0], grid[positions[idxs,0], positions[idxs,1],1], angles='xy') else: q = ax.quiver(positions[idxs,0] / (grid.shape[0] - 1) * (x_max - x_min) + x_min, positions[idxs,1] / (grid.shape[1] - 1) * (y_max - y_min) + y_min, grid[positions[idxs,0], positions[idxs,1],0], grid[positions[idxs,0], positions[idxs,1],1], angles='xy') else: q = ax.quiver(indices[:, 0], indices[:, 1], grid[indices[:, 0], indices[:, 1],0], grid[indices[:,0], indices[:, 1], 1], angles='xy') return ax def main(data_folder, preprocess=True, file_type='.dat', number_of_files=1, eps_g=None, eps_b=0, k=10, coreset_sample_size=1000, over_time=False, tol=0.02, resume=False, show=False): paths = [x for x in os.walk(data_folder)] done = [] if resume: with open("resume_from_maps_init.pkl", "rb") as open_file: paths = pickle.load(open_file) for i, file_path_tuple in enumerate(paths): for file_name in file_path_tuple[-1]: if file_name.endswith(file_type): if file_type =='.nc': with open('resume_from_maps_init.pkl', "wb") as open_file: pickle.dump(paths[i:], open_file) start_main = timer() print(bcolors.WARNING + '****************************************************************************') print(bcolors.BOLD + bcolors.WARNING + "Proccessing File: 
{}".format(file_name)) print(bcolors.NORMAL) U, V = readNetCDFFile(file_path_tuple[0]+'/'+file_name, over_time=over_time) if not over_time: preprocessed_files = [ndi.correlate(np.mean(x,0), np.full((3, 3), 1 / 9)).T[None] for x in [U,V]] preprocessed_files_2 = [np.mean(x,0).T[None] for x in [U,V]] grid = np.append(*preprocessed_files, axis=0).T grid2 = np.append(*preprocessed_files_2, axis=0).T saveVels(grid, file_path=file_path_tuple[0]+'/', smoothed=True) saveVels(grid2, file_path=file_path_tuple[0]+'/', smoothed=False) if eps_g is None: eps_g = np.around(10 / grid.shape[0], 2) drifter_placer = CurrentEstimation(grid, epsilon_grid=eps_g, k=k, epsilon_body=eps_b, coreset_sample_size=coreset_sample_size, tolerance=tol, save_path=file_path_tuple[0]+'/', show=show) drifter_placer.findSubOptimalPlacing(continue_from=-1) end_main = timer() print(bcolors.HEADER + bcolors.OKGREEN + 'Whole program took {} seconds'.format(end_main - start_main)) print(bcolors.NORMAL) np.save(file_path_tuple[0] + '/' + 'Time.npy', end_main - start_main) if __name__ == '__main__': ns = parser.parse_args() # parser main(data_folder=ns.d, preprocess=ns.pp, file_type=ns.ft, number_of_files=ns.nf, eps_g=ns.eps_g, eps_b=ns.eps_b, k=ns.k, coreset_sample_size = ns.coreset_sample_size, over_time=ns.time, tol=ns.tol, resume=ns.resume, show=ns.show)
#!/bin/sh
# Deploy helper: loads environment variables from /tmp/.env, then hard-resets
# and updates the app's PHP container working copy from origin/master.
set -eu

# Load KEY=VALUE pairs from /tmp/.env, stripping '#' comments and blank lines.
# NOTE: word splitting is intentional here (one exported assignment per word),
# so values must not contain spaces -- same constraint as the original script.
export $(sed 's/#.*//g' /tmp/.env | xargs)

# Fail early with a clear message instead of running "docker exec -i -php ...".
if [ -z "${APP_NAME:-}" ]; then
    echo "APP_NAME is not set in /tmp/.env" >&2
    exit 1
fi

docker exec -i "${APP_NAME}-php" bash -c "git reset --hard"
docker exec -i "${APP_NAME}-php" bash -c "git pull origin master"
// <gh_stars>10-100
package io.dronefleet.mavlink;

import java.util.List;

/**
 * Serves as an index of a Mavlink dialect.
 */
public interface MavlinkDialect {

    /**
     * Returns the name of this dialect. The returned name is a lower-case version of the
     * XML filename without the {@code .xml} extension.
     *
     * @return The lower-case dialect name.
     */
    String name();

    /**
     * Resolves the class of a message by its ID.
     *
     * @param messageId The ID of the message to resolve.
     * @return The class of the message of the specified ID.
     */
    Class resolve(int messageId);

    /**
     * Checks whether this dialect supports the message of the specified ID.
     *
     * @param messageId The ID of the message to check support for.
     * @return {@code true} if this dialect supports the message of the specified ID,
     *         or {@code false} otherwise.
     */
    boolean supports(int messageId);

    /**
     * Returns a list of all of the message types supported by this dialect.
     *
     * @return Every message class this dialect can resolve.
     */
    List<Class> messageTypes();
}
// <reponame>nokia/jspy
/*
 * Copyright 2015 Nokia Solutions and Networks
 * Licensed under the Apache License, Version 2.0,
 * see licence.txt file for details.
 */
package spyAgent;

import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;

/**
 * Mouse listener the spy agent attaches to inspected Swing components.
 * Clicks and hover events are reported to the remote inspector (via
 * {@code Communicator}), and the hovered component is highlighted by
 * temporarily recolouring its background.
 *
 * NOTE(review): the highlight state ({@code flag},
 * {@code highlightedComponentColor}) is static and therefore shared across
 * all listener instances -- only one component can be highlighted at a time.
 */
public class CompMouseListner implements MouseListener {

    // Original background colour of the currently highlighted component,
    // restored in mouseExited().
    public static Color highlightedComponentColor;

    // Index of the listened component within the inspected window.
    String myIndex;

    // Depth of the component in the containment hierarchy (-1 = unknown).
    int compHierarchy = -1;

    // Title of the window that owns the component.
    String winTitle;

    // Class/type description of the component.
    String classType;

    // Pre-rendered property string for the component (name is expected
    // between the first '[' and the first ',' -- see mouseEntered()).
    String compProps = "";

    // True while some component is currently highlighted (shared globally).
    static boolean flag = false;

    // Global on/off switch for all listeners of this type.
    static boolean setActive = true;

    /**
     * @param index         index of the component within the window
     * @param compHierarchy depth of the component in the hierarchy
     * @param title         owning window title
     * @param objType       component class description
     * @param srcCompProps  pre-rendered component property string
     */
    public CompMouseListner(String index, int compHierarchy, String title, String objType, String srcCompProps) {
        myIndex = index;
        winTitle = title;
        classType = objType;
        compProps = srcCompProps;
        this.compHierarchy = compHierarchy;
    }

    /**
     * Reports selection details to the server for clicks on tabbed panes
     * (selected tab index/title) and trees (lead row and selection path).
     * Clicks on any other component type are ignored here.
     */
    public void mouseClicked(MouseEvent arg0) {
        if (!setActive) {
            return;
        }
        Component comp = arg0.getComponent();
        if (comp instanceof JTabbedPane) {
            JTabbedPane tabpane = (JTabbedPane) comp;
            int tabIndex = tabpane.getSelectedIndex();
            String tabName = tabpane.getTitleAt(tabIndex);
            String tabText = ",Tab Index - " + tabIndex + ",Tab Title- " + tabName;
            Communicator.writeToServer("Window Title - " + winTitle + ",Index - " + myIndex
                    + ",Instance Of - " + classType + ",Comp. Hierarchy - " + compHierarchy
                    + "," + tabpane.toString() + tabText);
        }
        if (comp instanceof JTree) {
            JTree tree = (JTree) comp;
            int nodeCount = tree.getLeadSelectionRow();
            // NOTE(review): getSelectionPath() returns null when nothing is
            // selected, which would raise an uncaught NullPointerException here.
            String nodePath = tree.getSelectionPath().toString();
            String tabText = ",Node Count - " + nodeCount + ",Node Path- " + nodePath;
            Communicator.writeToServer("Window Title - " + winTitle + ",Index - " + myIndex
                    + ",Instance Of - " + classType + ",Comp. Hierarchy - " + compHierarchy
                    + "," + tree.toString() + tabText);
        }
    }

    public void mousePressed(MouseEvent arg0) {
    }

    public void mouseReleased(MouseEvent arg0) {
    }

    /**
     * Highlights the entered component (only when no other component is
     * currently highlighted) and reports its properties to the server.
     */
    public void mouseEntered(MouseEvent arg0) {
        if (!flag && setActive) {
            try {
                // Extract the component name from compProps: the text between
                // the first '[' and the first ','.
                int index = compProps.indexOf("[");
                String name = compProps.substring(index + 1, compProps.indexOf(','));
                KeyboardListener.highlightedComponentName = name;
                Component comp = arg0.getComponent();
                Communicator.writeToServer("Window Title - " + winTitle + ",Index - " + myIndex
                        + ",Instance Of - " + classType + ",Comp. Hierarchy - " + compHierarchy
                        + "," + compProps);
                // Remember the original colour so mouseExited() can restore it.
                highlightedComponentColor = comp.getBackground();
                comp.setBackground(new Color(90, 100, 210));
                flag = true;
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Restores the original background colour of the highlighted component
     * and clears the shared highlight flag.
     */
    public void mouseExited(MouseEvent arg0) {
        if (flag) {
            try {
                Component comp = arg0.getComponent();
                comp.setBackground(highlightedComponentColor);
                flag = false;
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
package it.feio.android.omninotes.utils;

import java.util.List;

import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;

public class IntentChecker {

	/**
	 * Checks whether an intent can be resolved and whether all requested
	 * system features are present on the device.
	 *
	 * @param ctx      context used to obtain the {@link PackageManager}
	 * @param intent   intent whose availability is checked
	 * @param features system feature names to require, or {@code null}
	 *                 to skip the feature check
	 * @return {@code true} if at least one activity resolves the intent and
	 *         every requested feature is available, {@code false} otherwise
	 */
	public static boolean isAvailable(Context ctx, Intent intent, String[] features) {
		final PackageManager mgr = ctx.getPackageManager();

		// At least one activity must be able to handle the intent.
		List<ResolveInfo> list = mgr.queryIntentActivities(intent, PackageManager.MATCH_DEFAULT_ONLY);
		if (list.isEmpty()) {
			return false;
		}

		// Every requested system feature must be present. Early return keeps
		// the original short-circuit behaviour of the accumulated && chain.
		if (features != null) {
			for (String feature : features) {
				if (!mgr.hasSystemFeature(feature)) {
					return false;
				}
			}
		}
		return true;
	}
}
# frozen_string_literal: true

require 'rspec/expectations'

# Custom matcher: asserts that the Clearance session attached to a response
# has `user` as its current user.
RSpec::Matchers.define :have_current_user do |user|
  match do |response|
    current_user(response).present? && current_user(response) == user
  end

  description do
    "have current user #{user&.id}"
  end

  # BUG FIX: the original message interpolated `user&.id` twice, so the
  # "but got" part always printed the *expected* id. Report the actual
  # current user from the response instead.
  failure_message_for_should do |response|
    "have current user #{user&.id}, but got #{current_user(response)&.id}"
  end

  # Extracts the current user from the Clearance session stored in the
  # request env.
  def current_user(response)
    response.request.env[:clearance].current_user
  end
end
#!/bin/bash
#PBS -l pmem=1gb
#PBS -l nodes=1
#PBS -l walltime=2:00:00

# Runs one matlab-job-manager task under PBS. The job name is expected to be
# passed into the environment by the submitter (e.g. qsub -v job_name=...).

if [ -e "/etc/profile.d/modules.sh" ]; then
    source /etc/profile.d/modules.sh
    module load matlab
fi

# BUG FIX: guard against an unset/empty job_name. Previously the unquoted,
# unchecked expansion below would make "rm -r .../in-progress/$job_name"
# delete the entire in-progress directory when job_name was empty.
if [ -z "${job_name:-}" ]; then
    echo "job_name is not set (expected via qsub -v job_name=...)" >&2
    exit 1
fi

echo "Starting Matlab..."
matlab -singleCompThread -r "jobmgr.qsub.job('$job_name')"

# Rely on the memoise framework to save the result
rm -r "$HOME/scratch/cache/matlab-job-manager/qsub/in-progress/$job_name"
""" Test Stts Services class """ import unittest from flask_sqlalchemy import get_state from app.main.service.stats_service import * from app.test.base import BaseTestCase class TestStatsServices(BaseTestCase): def test_stats_serivces_create_new_stats(self): """ [ Test checks if creation stats process conducted properly. ] """ stats = get_all_stats() self.assertEqual(len(stats), 0) create_new_stats(1) stats = get_all_stats() self.assertEqual(len(stats), 1) self.assertEqual(stats[0].userid, 1) self.assertEqual(stats[0].total, 0) self.assertEqual(stats[0].fails, 0) self.assertEqual(stats[0].wins, 0) delete_all_stats() def test_stats_serivces_get_all_stats(self): """ [ Test checks if get all stats process conducted properly. ] """ stats = get_all_stats() self.assertEqual(len(stats), 0) create_new_stats(1) create_new_stats(2) stats = get_all_stats() self.assertEqual(len(stats), 2) delete_all_stats() def test_stats_serivces_get_stats_by_id(self): """ [ Test checks if get stats by id process conducted properly. ] """ stats = get_all_stats() self.assertEqual(len(stats), 0) create_new_stats(1) create_new_stats(2) stats = get_user_stats(1) self.assertEqual(stats.userid, 1) self.assertEqual(stats.total, 0) self.assertEqual(stats.fails, 0) self.assertEqual(stats.wins, 0) stats = get_user_stats(2) self.assertEqual(stats.userid, 2) self.assertEqual(stats.total, 0) self.assertEqual(stats.fails, 0) self.assertEqual(stats.wins, 0) delete_all_stats() def test_stats_serivces_put_stats_by_id_200(self): """ [ Test checks if put stats by id process conducted properly and return 200 status code. 
] """ create_new_stats(1) request_dict = { "total" : 3, "wins" : 2, "fails" : 1 } status_code = stats_put(1, request_dict) stats = get_user_stats(1) self.assertEqual(status_code, 200) self.assertEqual(stats.userid, 1) self.assertEqual(stats.total, 3) self.assertEqual(stats.wins, 2) self.assertEqual(stats.fails, 1) delete_all_stats() def test_stats_serivces_put_stats_by_id_400(self): """ [ Test checks if put stats by id process handle error and return 400 status code. ] """ create_new_stats(1) request_dict = {} status_code = stats_put(1, request_dict) stats = get_user_stats(1) self.assertEqual(status_code, 400) self.assertEqual(stats.userid, 1) self.assertEqual(stats.total, 0) self.assertEqual(stats.wins, 0) self.assertEqual(stats.fails, 0) delete_all_stats() def test_stats_serivces_put_stats_by_id_404(self): """ [ Test checks if put stats by id process handle error and return 404 status code. ] """ request_dict = { "total" : 3, "wins" : 2, "fails" : 1 } status_code = stats_put(1, request_dict) stats = get_user_stats(1) self.assertEqual(status_code, 404) self.assertEqual(stats, None) def test_stats_serivces_delete_all_stats(self): """ [ Test checks if delete all stats process conducted properly. ] """ create_new_stats(1) create_new_stats(2) create_new_stats(3) stats = get_all_stats() self.assertEqual(len(stats), 3) delete_all_stats() stats = get_all_stats() self.assertEqual(len(stats), 0) def test_stats_serivces_delete_stats_by_id_200(self): """ [ Test checks if delete stats by id process conducted properly and return 200 status code. ] """ create_new_stats(1) create_new_stats(2) stats = get_all_stats() self.assertEqual(len(stats), 2) status_code = delete_stats(1) stats = get_all_stats() self.assertEqual(stats[0].userid, 2) self.assertEqual(status_code, 200) delete_all_stats() def test_stats_serivces_delete_stats_by_id_404(self): """ [ Test checks if delete stats by id process handle error and return 404 status code. 
] """ status_code = delete_stats(1) stats = get_all_stats() self.assertEqual(len(stats), 0) self.assertEqual(status_code, 404) delete_all_stats() def test_stats_serivces_patch_stats_by_id_200(self): """ [ Test checks if patch stats by id process conducted properly and return 200 status code. ] """ create_new_stats(1) request_dict = { "total" : 3, "wins" : 2, "fails" : 1 } status_code = stats_patch(1, request_dict) stats = get_user_stats(1) self.assertEqual(status_code, 200) self.assertEqual(stats.userid, 1) self.assertEqual(stats.total, 3) self.assertEqual(stats.wins, 2) self.assertEqual(stats.fails, 1) delete_all_stats() def test_stats_serivces_patch_stats_by_id_400(self): """ [ Test checks if patch stats by id process handle error and return 400 status code. ] """ create_new_stats(1) request_dict = {} status_code = stats_patch(None, request_dict) self.assertEqual(status_code, 400) delete_all_stats() def test_stats_serivces_patch_stats_by_id_404(self): """ [ Test checks if patch stats by id process handle error and return 404 status code. ] """ request_dict = { "total" : 3, "wins" : 2, "fails" : 1 } status_code = stats_patch(1, request_dict) stats = get_user_stats(1) self.assertEqual(status_code, 404) self.assertEqual(stats, None) def test_stats_serivces_add_win_200(self): """ [ Test checks if add_win process conducted properly and return 200 status code. ] """ create_new_stats(1) status_code = stats_add_win(1) stats = get_user_stats(1) self.assertEqual(status_code, 200) self.assertEqual(stats.userid, 1) self.assertEqual(stats.total, 1) self.assertEqual(stats.wins, 1) self.assertEqual(stats.fails, 0) status_code = stats_add_win(1) stats = get_user_stats(1) self.assertEqual(status_code, 200) self.assertEqual(stats.userid, 1) self.assertEqual(stats.total, 2) self.assertEqual(stats.wins, 2) self.assertEqual(stats.fails, 0) delete_all_stats() def test_stats_serivces_add_win_400(self): """ [ Test checks if add_win process handle error and return 400 status code. 
] """ status_code = stats_add_win(None) self.assertEqual(status_code, 400) def test_stats_serivces_add_win_404(self): """ [ Test checks if add_win process handle error and return 404 status code. ] """ status_code = stats_add_win(1) self.assertEqual(status_code, 404) def test_stats_serivces_add_fail_200(self): """ [ Test checks if add_fail process conducted properly and return 200 status code. ] """ create_new_stats(1) status_code = stats_add_fail(1) stats = get_user_stats(1) self.assertEqual(status_code, 200) self.assertEqual(stats.userid, 1) self.assertEqual(stats.total, 1) self.assertEqual(stats.wins, 0) self.assertEqual(stats.fails, 1) status_code = stats_add_fail(1) stats = get_user_stats(1) self.assertEqual(status_code, 200) self.assertEqual(stats.userid, 1) self.assertEqual(stats.total, 2) self.assertEqual(stats.wins, 0) self.assertEqual(stats.fails, 2) delete_all_stats() def test_stats_serivces_add_fail_400(self): """ [ Test checks if add_fail process handle error and return 400 status code. ] """ status_code = stats_add_fail(None) self.assertEqual(status_code, 400) def test_stats_serivces_add_fail_404(self): """ [ Test checks if add_fail process handle error and return 404 status code. ] """ status_code = stats_add_fail(1) self.assertEqual(status_code, 404) if __name__ == '__main__': unittest.main()
/**
 * Checks whether `str` consists only of correctly matched bracket pairs.
 *
 * @param {string} str - The string to validate.
 * @param {Array<[string, string]>} bracketsConfig - Pairs of [open, close]
 *   bracket characters (open and close may be the same character).
 * @returns {boolean} true if every bracket in `str` is matched, else false.
 */
function check(str, bracketsConfig) {
  // Pre-compute each "openclose" pair string once instead of re-joining
  // inside the loop.
  const pairs = bracketsConfig.map((pair) => pair.join(''));

  // Repeatedly strip innermost matched pairs until the string stops
  // changing. (The original ran a fixed str.length + 1 passes; each
  // productive pass removes at least two characters, so looping until
  // stable is exactly equivalent and never does extra work.)
  let remaining = str;
  let changed = true;
  while (changed) {
    changed = false;
    for (const pair of pairs) {
      if (remaining.includes(pair)) {
        remaining = remaining.replace(pair, '');
        changed = true;
      }
    }
  }

  // A fully reducible input means every bracket was matched.
  return remaining === '';
}

// Export for CommonJS consumers; guarded so the file is also valid when
// evaluated as an ES module. (The stray "<gh_stars>" dataset marker that
// preceded this file was a syntax error and has been removed.)
if (typeof module !== 'undefined' && module.exports !== undefined) {
  module.exports = check;
}
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import exp, sin, sqrt
from numpy import linspace, zeros, array, meshgrid
from numpy.random import multivariate_normal as mvn
from numpy.random import normal, random, seed
from inference.gp import GpRegressor
# NOTE(review): `cm`, `meshgrid` and `random` are only used by the
# commented-out 2D example at the bottom of this file.

# fix the random seed so the demo output is reproducible
seed(4)

"""
Code demonstrating the use of the GpRegressor class found in inference.gp_tools
"""

# create some testing data: two separated clusters of x-points,
# leaving a gap in the middle for the GP to interpolate across
Nx = 24*2
x = list( linspace(-3,1,Nx//2) )
x.extend( list( linspace(4,9,Nx//2) ) )
x = array(x)

# generate points q at which to evaluate the
# GP regression estimate
Nq = 200
q = linspace(-4, 10, Nq)  # cover whole range, including the gap

sig = 0.05  # assumed normal error on the data points
y_c = ( 1. / (1 + exp(-q)) ) + 0.1*sin(2*q)  # underlying function
y = ( 1. / (1 + exp(-x)) ) + 0.1*sin(2*x) + sig*normal(size=len(x))  # sampled y data
errs = zeros(len(y)) + sig  # y data errors (uniform, equal to sig)

# plot the data points plus the underlying function
# from which they are sampled
fig = plt.figure( figsize = (9,6) )
ax = fig.add_subplot(111)
ax.plot(q, y_c, lw = 2, color = 'black', label = 'test function')
ax.plot(x, y, 'o', color = 'red', label = 'sampled data')
ax.errorbar(x, y, yerr = errs, fmt = 'none', ecolor = 'red')
ax.set_ylim([-0.5, 1.5])
ax.set_xlim([-4, 10])
ax.set_title('Generate simulated data from a test function', fontsize = 12)
ax.set_ylabel('function value', fontsize = 12)
ax.set_xlabel('spatial coordinate', fontsize = 12)
ax.grid()
ax.legend(loc=2, fontsize = 12)
plt.tight_layout()
plt.savefig('sampled_data.png')
plt.close()

# initialise the class with the data and errors
GP = GpRegressor(x, y, y_err = errs)

# call the instance to get estimates for the points in q
mu_q, sig_q = GP(q)

# now plot the regression estimate and the data together
c1 = 'red'; c2 = 'blue'; c3 = 'green'  # (c3 is unused below)
fig = plt.figure( figsize = (9,6) )
ax = fig.add_subplot(111)
ax.plot(q, mu_q, lw = 2, color = c2, label = 'posterior mean')
# the 1-to-2 sigma band is drawn as two lighter strips above and below the
# 1-sigma band
ax.fill_between(q, mu_q-sig_q, mu_q-sig_q*2, color = c2, alpha = 0.15, label = r'$\pm 2 \sigma$ interval')
ax.fill_between(q, mu_q+sig_q, mu_q+sig_q*2, color = c2, alpha = 0.15)
ax.fill_between(q, mu_q-sig_q, mu_q+sig_q, color = c2, alpha = 0.3, label = r'$\pm 1 \sigma$ interval')
ax.plot(x, y, 'o', color = c1, label = 'data', markerfacecolor = 'none', markeredgewidth = 2)
ax.set_ylim([-0.5, 1.5])
ax.set_xlim([-4, 10])
ax.set_title('Prediction using posterior mean and covariance', fontsize = 12)
ax.set_ylabel('function value', fontsize = 12)
ax.set_xlabel('spatial coordinate', fontsize = 12)
ax.grid()
ax.legend(loc=2, fontsize = 12)
plt.tight_layout()
plt.savefig('regression_estimate.png')
plt.close()

# As the estimate itself is defined by a multivariate normal distribution,
# we can draw samples from that distribution.
# to do this, we need to build the full covariance matrix and mean for the
# desired set of points using the 'build_posterior' method:
mu, sigma = GP.build_posterior(q)

# now draw samples
samples = mvn(mu, sigma, 100)

# and plot all the samples
fig = plt.figure( figsize = (9,6) )
ax = fig.add_subplot(111)
for i in range(100):
    ax.plot(q, samples[i,:], lw = 0.5)
ax.set_title('100 samples drawn from the posterior distribution', fontsize = 12)
ax.set_ylabel('function value', fontsize = 12)
ax.set_xlabel('spatial coordinate', fontsize = 12)
ax.set_xlim([-4, 10])
plt.grid()
plt.tight_layout()
plt.savefig('posterior_samples.png')
plt.close()

# The gradient of the Gaussian process estimate also has a multivariate normal distribution.
# The mean vector and covariance matrix of the gradient distribution for a series of points
# can be generated using the GP.gradient() method:
gradient_mean, gradient_variance = GP.gradient(q)
# in this example we have only one spatial dimension, so the covariance matrix has size 1x1
sigma = sqrt(gradient_variance)  # get the standard deviation at each point in 'q'

# plot the distribution of the gradient
fig = plt.figure( figsize = (9,6) )
ax = fig.add_subplot(111)
ax.plot(q, gradient_mean, lw = 2, color = 'blue', label = 'gradient mean')
ax.fill_between(q, gradient_mean-sigma, gradient_mean+sigma, alpha = 0.3, color = 'blue', label = r'$\pm 1 \sigma$ interval')
ax.fill_between(q, gradient_mean+sigma, gradient_mean+2*sigma, alpha = 0.15, color = 'blue', label = r'$\pm 2 \sigma$ interval')
ax.fill_between(q, gradient_mean-sigma, gradient_mean-2*sigma, alpha = 0.15, color = 'blue')
ax.set_title('Distribution of the gradient of the GP', fontsize = 12)
ax.set_ylabel('function gradient value', fontsize = 12)
ax.set_xlabel('spatial coordinate', fontsize = 12)
ax.set_xlim([-4, 10])
ax.grid()
ax.legend(fontsize = 12)
plt.tight_layout()
plt.savefig('gradient_prediction.png')
plt.close()

# """
# 2D example
# """
# from mpl_toolkits.mplot3d import Axes3D

# # define an 2D function as an example
# def solution(v):
#     x, y = v
#     f = 0.5
#     return sin(x*0.5*f)+sin(y*f)

# # Sample the function value at some random points
# # to use as our data
# N = 50
# x = random(size=N) * 15
# y = random(size=N) * 15

# # build coordinate list for all points in the data grid
# coords = list(zip(x,y))

# # evaluate the test function at all points
# z = list(map(solution, coords))

# # build a colormap for the points
# colmap = cm.viridis((z - min(z)) / (max(z) - min(z)))

# # now 3D scatterplot the test data to visualise
# fig = plt.figure()
# ax = fig.add_subplot(111, projection='3d')
# ax.scatter([i[0] for i in coords], [i[1] for i in coords], z, color = colmap)
# plt.tight_layout()
# plt.show()

# # Train the GP on the data
# GP = GpRegressor(coords, z)

# # if we provide no error data, a small value is used (compared with
# # spread of values in the data) such that the estimate is forced to
# # pass (almost) through each data point.

# # make a set of axes on which to evaluate the GP estimate
# gp_x = linspace(0,15,40)
# gp_y = linspace(0,15,40)

# # build a coordinate list from these axes
# gp_coords = [ (i,j) for i in gp_x for j in gp_y ]

# # evaluate the estimate
# mu, sig = GP(gp_coords)

# # build a colormap for the surface
# Z = mu.reshape([40,40]).T
# Z = (Z-Z.min())/(Z.max()-Z.min())
# colmap = cm.viridis(Z)
# rcount, ccount, _ = colmap.shape

# # surface plot the estimate
# fig = plt.figure()
# ax = fig.add_subplot(111, projection='3d')
# surf = ax.plot_surface(*meshgrid(gp_x, gp_y), mu.reshape([40,40]).T, rcount=rcount,
#                        ccount=ccount, facecolors=colmap, shade=False)
# surf.set_facecolor((0,0,0,0))

# # overplot the data points
# ax.scatter([i[0] for i in coords], [i[1] for i in coords], z, color = 'black')
# plt.tight_layout()
# plt.show()
import { createRouter, createWebHashHistory } from 'vue-router'
import HomeView from '../views/HomeView.vue'
import VideosView from '../views/VideosView.vue'

// Route table: each entry maps a hash path to the view that renders it.
const routeTable = [
  {
    path: '/',
    name: 'home',
    component: HomeView,
  },
  {
    path: '/videos',
    name: 'videos',
    component: VideosView,
  },
]

// Hash-based history keeps the app servable from static hosting
// without any server-side rewrite rules.
export default createRouter({
  history: createWebHashHistory(),
  routes: routeTable,
})
// <gh_stars>0

// Clicking any service-listing button opens the panel for the service
// named in its "nombre" attribute.
$(".btnListadoServicios").click(function () {
    var nombreServicio = $(this).attr("nombre");
    MostrarServicio(nombreServicio);
});

/**
 * Shows the detail panel belonging to the given service name.
 * Unknown names resolve to an empty selector, so nothing extra is shown.
 */
function MostrarServicio(nombreServicio) {
    var nombreDiv = ObtenerNombreDivServicio(nombreServicio);
    productosBuscado = false;
    OcultarServicios();
    ActivarPanelServicios();
    $(nombreDiv).show();
}

/**
 * Switches the page into "services" mode: hides the product, search and
 * home panels and reveals the services container.
 */
function ActivarPanelServicios() {
    var panelsToHide = [
        "#dvProductosBuscados",
        "#dvProductosListado",
        "#dvDetalleCompra",
        "#promotions",
        "#dvIncio",
        "#slider"
    ];
    for (var i = 0; i < panelsToHide.length; i++) {
        $(panelsToHide[i]).hide();
    }
    $("#dvServicios").show();
}

/**
 * Resolves a service code to the id selector of its detail <div>.
 * Returns an empty string for unknown codes.
 */
function ObtenerNombreDivServicio(nombre) {
    var divPorServicio = {
        "SERVICIO_CARITASPINTADAS": "#dvServicioCaritasPintadas",
        "SERVICIO_COFFEBREAK": "#dvServicioCoffeBreak",
        "SERVICIO_COMIDASEVENTOS": "#dvServicioComidaEventos",
        "SERVICIO_DECORACION_Y_MENAJE": "#dvServicioDecoraciones",
        "SERVICIO_DEGUSTACIONES": "#dvServicioDegustaciones",
        "SERVICIO_MOZOS_Y_BARMAN": "#dvServicioBarmanyMozos",
        "SERVICIO_SANCK_CART": "#dvServicioSnackCart"
    };
    // Own-property check keeps inherited names (e.g. "toString") from
    // leaking through the lookup, matching the original switch behaviour.
    return Object.prototype.hasOwnProperty.call(divPorServicio, nombre)
        ? divPorServicio[nombre]
        : "";
}

/**
 * Hides the services container and every individual service panel.
 */
function OcultarServicios() {
    var serviciosDivs = [
        "#dvServicios",
        "#dvServicioCaritasPintadas",
        "#dvServicioCoffeBreak",
        "#dvServicioComidaEventos",
        "#dvServicioDecoraciones",
        "#dvServicioDegustaciones",
        "#dvServicioBarmanyMozos",
        "#dvServicioSnackCart"
    ];
    for (var i = 0; i < serviciosDivs.length; i++) {
        $(serviciosDivs[i]).hide();
    }
}
package com.devculture.tools.AppleSalesReporter.Data;

import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Date;

import com.devculture.util.DateConverter;

/**
 * One token of a report-filter expression: either a comparison
 * (a DATE/TEXT field compared against a user-supplied value) or a
 * structural token (AND/OR operator, parenthesis). Comparison tokens
 * decide per-row whether a value should be filtered out via
 * {@link #isFiltered(String)}.
 */
public class ReportFilter implements Serializable {

	/** variables **/
	private static final long serialVersionUID = 1L;

	// Display strings for the structural (non-comparison) tokens.
	public static final String FILTER_COMMAND_INVALID = "-";
	public static final String FILTER_COMMAND_AND = "AND";
	public static final String FILTER_COMMAND_OR = "OR";
	public static final String FILTER_COMMAND_LPAREN = "(";
	public static final String FILTER_COMMAND_RPAREN = ")";

	// Label of the field this token applies to (or the command string itself).
	private String text;
	// User-supplied comparison value; null means "match everything".
	private String filter;
	private FilterType type;
	private FilterComparison compare;

	public static final String[] DEFAULT_FILTER_COMMANDS = {
		FILTER_COMMAND_INVALID,
		FILTER_COMMAND_AND,
		FILTER_COMMAND_OR,
		FILTER_COMMAND_LPAREN,
		FILTER_COMMAND_RPAREN,
	};

	public static enum FilterType {
		DATE,		/* >, <, >=, <= */
		TEXT,		/* ==, != */
		OPERATOR,	/* AND, OR */
		PARENTHESIS,	/* (, ) */
	}

	public static enum FilterComparison {
		NONE,
		LESS_THAN,
		LESS_THAN_AND_EQUAL,
		GREATER_THAN,
		GREATER_THAN_AND_EQUAL,
		EQUAL,
		NOT_EQUAL,
	}

	/** constructor **/

	public ReportFilter(String text, FilterType type, FilterComparison compare) {
		this.text = text;
		this.type = type;
		this.compare = compare;
	}

	/** methods **/

	public void setFilter(String filter) {
		// we can only set the filter value if TEXT/DATE type;
		// OPERATOR/PARENTHESIS tokens silently ignore the call.
		if(type == FilterType.DATE || type == FilterType.TEXT) {
			this.filter = filter;
		}
	}

	/**
	 * Human-readable comparison symbol for this token. Returns ""
	 * for NONE (no explicit case; falls through to the final return).
	 */
	public String getInequality() {
		switch(compare) {
		case LESS_THAN: return "<";
		case LESS_THAN_AND_EQUAL: return "<=";
		case GREATER_THAN: return ">";
		case GREATER_THAN_AND_EQUAL: return ">=";
		case EQUAL: return "==";
		case NOT_EQUAL: return "!=";
		}
		return "";
	}

	public String getText() {
		return text;
	}

	public String getFilter() {
		return filter;
	}

	public FilterType getType() {
		return type;
	}

	/** filtering **/

	/**
	 * Decide whether a row value should be filtered OUT.
	 *
	 * Note the inverted logic: `compare` describes the rows to KEEP, so
	 * each branch returns true (filter it) exactly when the keep-condition
	 * fails — hence the negated compareTo expressions below.
	 *
	 * @param string the row value being tested against this token's filter
	 * @return true if the value fails the comparison and should be dropped
	 */
	public boolean isFiltered(String string) {
		if(filter == null) {
			// if filter is null, filter nothing
			return false;
		} else if(type == FilterType.TEXT) {
			// lexicographic comparison of raw strings
			switch(compare) {
			case LESS_THAN: return !(filter.compareTo(string) > 0);
			case LESS_THAN_AND_EQUAL: return !(filter.compareTo(string) >= 0);
			case GREATER_THAN: return !(filter.compareTo(string) < 0);
			case GREATER_THAN_AND_EQUAL: return !(filter.compareTo(string) <= 0);
			case EQUAL: return filter.compareTo(string) != 0;
			case NOT_EQUAL: return filter.compareTo(string) == 0;
			}
		} else if(type == FilterType.DATE) {
			// apple date format
			Date stringDate = DateConverter.getDate(string);
			Date filterDate = DateConverter.getDate(filter);
			// if string or filter are NOT valid dates, filter them out
			if(stringDate == null || filterDate == null) {
				return true;
			}
			switch(compare) {
			case LESS_THAN: return !(filterDate.compareTo(stringDate) > 0);
			case LESS_THAN_AND_EQUAL: return !(filterDate.compareTo(stringDate) >= 0);
			case GREATER_THAN: return !(filterDate.compareTo(stringDate) < 0);
			case GREATER_THAN_AND_EQUAL: return !(filterDate.compareTo(stringDate) <= 0);
			case EQUAL: return filterDate.compareTo(stringDate) != 0;
			case NOT_EQUAL: return filterDate.compareTo(stringDate) == 0;
			}
		}
		// by default, filter nothing
		return false;
	}

	/** debug **/

	public String toString() {
		String typeStr = null;
		switch(type) {
		case DATE: typeStr = "Type:DATE"; break;
		case TEXT: typeStr = "Type:TEXT"; break;
		case OPERATOR: typeStr = "Type:OPER"; break;
		case PARENTHESIS: typeStr = "Type:PARE"; break;
		}
		return "[" + text + " " + getInequality() + " " + filter+ " :: " + typeStr + "]";
	}

	/** serializable **/

	// NOTE(review): Java serialization only invokes readObject/writeObject
	// when they are declared `private void readObject(ObjectInputStream)
	// throws IOException, ClassNotFoundException` (and the matching
	// writeObject signature). These public variants are never called by
	// the serialization machinery — confirm intent before relying on them.
	public void readObject(ObjectInputStream stream) throws Exception {
		stream.defaultReadObject();
	}

	public void writeObject(ObjectOutputStream stream) throws Exception {
		stream.defaultWriteObject();
	}
}
package spring_data.ex_spring_data_intro.utils;

import spring_data.ex_spring_data_intro.entities.Author;

/**
 * Supplier of randomly chosen {@link Author} entities.
 */
public interface RandomAuthorUtil {

    /**
     * @return a randomly selected Author
     */
    Author getRandom();
}
// Fixed list of demo strings to print.
const strings = ["hello", "world", "foo", "bar"];

// Log each entry on its own line (for...of instead of an index loop).
for (const entry of strings) {
  console.log(entry);
}
# Fine-tune a GPT-2 model (custom 1536-token config) on WikiText-103 raw,
# with a sentence-shuffling augmentation applied to the first two thirds
# of each training example. Evaluates during training; keeps at most 20
# checkpoints, saving every 500 steps.
python transformers/examples/language-modeling/run_language_modeling.py --model_type gpt2 --tokenizer_name model-configs/1536-config --config_name model-configs/1536-config/config.json --train_data_file ../data/wikitext-103-raw/wiki.train.raw --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir train-outputs/1024+0+512-SS/model --do_train --do_eval --evaluate_during_training --per_device_train_batch_size 3 --per_device_eval_batch_size 3 --num_train_epochs 10 --dataloader_drop_last --save_steps 500 --save_total_limit 20 --augmented --augmentation_function shuffle_sentences_first_two_thirds --train_function augmented_training --eval_function augmented_eval
from utils import AdjacencyList, points_adjacent, value_is_integer


class ValidationSummary:
    """Not yet used. Summarizes all diagnostic strings from
    GameSpecValidator into a single object.

    Each failure condition is a ("severity", "description") tuple.
    Access overall severity with `self.severity_code`.
    Add warnings and errors with `self.add_failure_condition`.
    """

    def __init__(self, failure_conditions=None):
        # A mutable default argument ([]) was shared across every instance;
        # default to None and copy so each summary owns its own list.
        self.failure_conditions = list(failure_conditions) if failure_conditions is not None else []
        self._severity_to_code_map = {"OK": 0, "warning": 1, "error": 2}
        self.severity_code = self._severity_to_code_map["OK"]
        for unique_severity in set(cond[0] for cond in self.failure_conditions):
            self._update_severity(unique_severity)

    def __repr__(self):
        severity_code_to_string_map = {
            code: string for string, code in self._severity_to_code_map.items()
        }
        severity_string = severity_code_to_string_map[self.severity_code].upper()
        return "{}\n{}".format(
            severity_string, "\n".join([cond[1] for cond in self.failure_conditions])
        )

    def add_failure_condition(self, condition):
        """Add failure condition to the list of conditions.
        Elevate severity where needed.

        `condition` is a ("severity", "description") tuple.
        """
        severity, _diagnosis_string = condition
        # Store the full tuple: __repr__ reads cond[1] and __init__ reads
        # cond[0], so appending only the string (as before) broke __repr__
        # for conditions added after construction.
        self.failure_conditions.append(condition)
        self._update_severity(incoming_severity=severity)

    def _update_severity(self, incoming_severity):
        """Set severity to maximum of current and incoming severities."""
        current_code = self.severity_code
        incoming_code = self._severity_to_code_map[incoming_severity]
        self.severity_code = max([current_code, incoming_code])
        return self.severity_code


class GameSpecValidator:
    def validate_spec(self, spec):
        """Assert that `spec` creates a valid game instance.

        Return a list of conditions of format:
            ("error_severity_string", "descriptor_string")
        for every condition that invalidates provided specification `spec`
        """
        shape = tuple(spec["shape"])
        start = tuple(spec["start"])
        goal = tuple(spec["goal"])
        inaccessible = spec["inaccessible"]
        pirate_routes = spec["pirate_routes"]

        shape_conditions = self._validate_shape(shape)
        inaccessible_conditions = self._validate_inaccessible(
            shape=shape, inaccessible=inaccessible
        )
        start_field_conditions = self._validate_start(
            start_point=start, shape=shape, inaccessible=inaccessible
        )
        goal_field_conditions = self._validate_goal(
            start_point=start, goal_point=goal, shape=shape, inaccessible=inaccessible
        )
        pirate_conditions = self._validate_pirates(
            shape=shape, pirates_dict=pirate_routes
        )

        # Flatten the per-check condition lists into one list.
        all_conditions = sum(
            [
                shape_conditions,
                inaccessible_conditions,
                start_field_conditions,
                goal_field_conditions,
                pirate_conditions,
            ],
            [],
        )
        return all_conditions

    def _validate_shape(self, shape):
        """Asserts input shape is valid.

        For each prerequisite failed, adds a condition to output list:
            (error_severity_string, descriptor_string)
        """
        failure_conditions = []

        components_are_nonnegative = (shape[0] >= 0) and (shape[1] >= 0)
        if not components_are_nonnegative:
            failure_conditions.append(("error", "Shape must be non-negative"))

        is_integer = self._point_in_integer_space(point=shape)
        if not is_integer:
            failure_conditions.append(("error", "Shape must be integer"))

        return failure_conditions

    def _validate_start(self, start_point, shape, inaccessible):
        """Asserts input start_point is valid.

        For each prerequisite failed, adds a condition to output list:
            (error_severity_string, descriptor_string)
        """
        failure_conditions = []

        is_int = self._point_in_integer_space(point=start_point)
        if not is_int:
            failure_conditions.append(("error", "Start coordinates must be integer"))

        in_grid = self._point_in_grid(point=start_point, grid_shape=shape)
        if not in_grid:
            failure_conditions.append(
                ("error", "Start point must be located within input grid")
            )

        is_accessible = self._point_accessible(
            point=start_point, inaccessible=inaccessible
        )
        if not is_accessible:
            failure_conditions.append(
                ("error", "Start point must not be in list of inaccessible fields")
            )

        return failure_conditions

    def _validate_goal(self, start_point, goal_point, shape, inaccessible):
        """Asserts input goal_point is valid and reachable.

        For each prerequisite failed, adds a condition to output list:
            (error_severity_string, descriptor_string)
        """
        failure_conditions = []

        is_int = self._point_in_integer_space(point=goal_point)
        if not is_int:
            failure_conditions.append(("error", "Goal coordinates must be integer"))

        in_grid = self._point_in_grid(point=goal_point, grid_shape=shape)
        if not in_grid:
            failure_conditions.append(
                ("error", "Goal point must be located within input grid")
            )

        is_accessible = self._point_accessible(
            point=goal_point, inaccessible=inaccessible
        )
        if not is_accessible:
            failure_conditions.append(
                ("error", "Goal point must not be in list of inaccessible fields")
            )

        accessible_from_start = self._goal_reachable_from_start(
            start_point=start_point,
            goal_point=goal_point,
            grid_shape=shape,
            inaccessible=inaccessible,
        )
        if not accessible_from_start:
            failure_conditions.append(("error", "No path between start and goal"))

        return failure_conditions

    def _validate_inaccessible(self, shape, inaccessible):
        """Assert all inaccessible points are valid.

        Return list of failed prerequisites."""
        in_grid = [
            self._point_in_grid(point, grid_shape=shape) for point in inaccessible
        ]
        if sum(in_grid) == len(inaccessible) - 1:
            # Exactly one point is out of bounds; name it in the message.
            point_outside_grid = inaccessible[in_grid.index(False)]
            return [
                (
                    "error",
                    f"Point {point_outside_grid} marked inaccessible, but is outside grid",
                )
            ]
        elif sum(in_grid) < len(inaccessible) - 1:
            return [
                ("error", "Multiple points marked inaccessible, but are outside grid")
            ]
        else:
            return []

    def _validate_pirates(self, shape, pirates_dict):
        """Assert that all pirate routes are valid.

        For each prerequisite failed, adds a condition to output list:
            (error_severity_string, descriptor_string)
        """
        failure_conditions = []
        is_integer = {}
        in_grid = {}
        is_circular = {}
        for id, route in pirates_dict.items():
            is_integer[id] = all(
                [self._point_in_integer_space(point) for point in route]
            )
            if not is_integer[id]:
                failure_conditions.append(
                    ("error", f"Pirate: {id} has points not in integer space")
                )

            in_grid[id] = all(
                [self._point_in_grid(point, grid_shape=shape) for point in route]
            )
            if not in_grid[id]:
                failure_conditions.append(
                    ("error", f"Pirate: {id} has points outside grid")
                )

            is_circular[id] = self._route_is_circular(route)
            if not is_circular[id]:
                failure_conditions.append(
                    ("error", f"Pirate: {id} route is not circular")
                )
        return failure_conditions

    def _point_in_integer_space(self, point):
        """Assert that all components in `point` are integer-valued.

        Warning: Does not raise errors for nonsense inputs;
        simply returns False.
        """
        return all([self._value_is_integer(comp) for comp in point])

    def _point_in_grid(self, point, grid_shape):
        """Assert that point exists in grid."""
        row_coord_valid = (point[0] < grid_shape[0]) and (point[0] >= 0)
        # Fixed copy-paste bug: the column check previously re-tested
        # point[0] >= 0, so points with a negative column passed validation.
        col_coord_valid = (point[1] < grid_shape[1]) and (point[1] >= 0)
        return row_coord_valid and col_coord_valid

    def _point_accessible(self, point, inaccessible):
        return point not in inaccessible

    def _goal_reachable_from_start(
        self, start_point, goal_point, grid_shape, inaccessible
    ):
        """Depth-first search to validate inaccessible fields"""
        graph = AdjacencyList(grid_shape=grid_shape, inaccessible=inaccessible)
        return graph.depth_first_search(start=start_point, end=goal_point)

    def _route_is_circular(self, route):
        """Check that points in `route` form a continuous loop.

        Each point must differ from the next by exactly -1/1 in
        row or column direction. (Index 0 is compared against route[-1],
        which closes the loop.)
        """
        adjacent_to_previous = [
            self._points_adjacent(a=point, b=route[i - 1])
            for i, point in enumerate(route)
        ]
        return all(adjacent_to_previous)

    @staticmethod
    def _points_adjacent(a, b):
        return points_adjacent(a, b)

    @staticmethod
    def _value_is_integer(n):
        return value_is_integer(n)
class SocketIOError(Exception):
    """Base class for all Socket.IO related errors."""
    pass


class ConnectionError(SocketIOError):
    """Raised for connection-level failures."""
    pass


class ConnectionRefusedError(ConnectionError):
    """Connection refused exception.

    This exception can be raised from a connect handler when the
    connection is not accepted. The positional arguments provided with
    the exception are returned with the error packet to the client.
    """

    def __init__(self, *args):
        # Guard clause: no arguments means a generic rejection message.
        if not args:
            self.error_args = {'message': 'Connection rejected by server'}
            return
        # First argument always becomes the message.
        self.error_args = {'message': str(args[0])}
        # A single extra argument is attached as-is; several extras are
        # attached together as a tuple.
        if len(args) == 2:
            self.error_args['data'] = args[1]
        elif len(args) > 2:
            self.error_args['data'] = args[1:]


class TimeoutError(SocketIOError):
    """Raised when an operation does not complete in time."""
    pass


class BadNamespaceError(SocketIOError):
    """Raised when an unknown namespace is used."""
    pass