text
stringlengths
1
1.05M
import string


def count_word_occurrences(file_name):
    """Count how often each word occurs in a text file.

    Lines are lower-cased and stripped of ASCII punctuation before being
    split on whitespace, so "Hello," and "hello" count as the same word.

    Args:
        file_name: Path of the text file to scan.

    Returns:
        dict mapping each word to its number of occurrences.
    """
    # Build the punctuation-removal table once instead of per line.
    strip_punct = str.maketrans('', '', string.punctuation)
    word_counts = {}
    with open(file_name, 'r') as handle:
        for line in handle:
            for word in line.lower().translate(strip_punct).split():
                word_counts[word] = word_counts.get(word, 0) + 1
    return word_counts
#!/bin/bash

# Build the "barrier" binary (optionally with barrier protection compiled in
# via $1, e.g. "-DPROTECTED") and run it 1000 times, checking that every run
# prints the expected two lines "1" and "2".
routine () {
    rm -rf barrier
    # hard coded NUM_THREADS
    make NUM_THREADS="2" PROTECTED="$1" barrier
    if [ ! -f "barrier" ]; then
        echo "The build for binary file \"barrier\" failed!"
        exit 1
    fi
    for i in {1..1000}; do
        # Compare the program's output against the expected "1\n2\n".
        if ! diff --suppress-common-lines -c <(./barrier) <(printf "1\n2\n"); then
            return 2
        fi
    done
    echo "Success: all running instances outputted the same correct result"
    rm -rf barrier
}

echo "=== Testing without barrier protection ==="
routine
echo "=== Testing with barrier protection ==="
routine "-DPROTECTED"
# Fetch the current Telegram MTProto TL schema and regenerate the Go types.
echo "Downloading latest MTProto scheme..."
# -O scheme.tl: fixed output name; -q: quiet mode (no progress output)
wget -O scheme.tl -q https://raw.githubusercontent.com/telegramdesktop/tdesktop/master/Telegram/Resources/scheme.tl
echo "Generating Go file..."
# generate.sh presumably reads scheme.tl from the working directory and
# emits Go source on stdout — TODO confirm against generate.sh itself.
./generate.sh > types.go
import random

# Simulate one roll of two six-sided dice and report the type of their sum.
dice1 = random.randint(1, 6)  # first die: uniform integer in [1, 6]
dice2 = random.randint(1, 6)  # second die: uniform integer in [1, 6]
sumadeambos = dice1 + dice2   # total shown by the two dice

# Sum of two ints is an int, so this always prints <class 'int'>.
print(type(sumadeambos))

# Author credit line (placeholder name kept verbatim).
print("# Este programa fue escrito por <Your Name>")
#!/bin/bash
##################################
# Zabbix monitoring script
#
# iostat:
#  - IO
#  - running / blocked processes
#  - swap in / out
#  - block in / out
#
# Info:
#  - vmstat data are gathered via cron job
##################################
# Contact:
#  vincent.viallet@gmail.com
##################################
# ChangeLog:
#  20100922  VV  initial creation
##################################

# Zabbix requested parameter
ZBX_REQ_DATA="$2"
ZBX_REQ_DATA_MOUNT_POINT="$1"

# Source data: strip the nfsiostat banner, the header line and blank lines,
# and drop everything up to the mount-point colon so each remaining line is
# "<mount> <rkB_nor/s> <wkB_nor/s> ... <wops/s>".
NFS_IO_STAT_CMD=$(nfsiostat-sysstat | grep -v Linux | grep -v Filesystem | grep -v -e "^$" | cut -f2 -d':')

#
# Error handling:
#  - need to be displayable in Zabbix (avoid NOT_SUPPORTED)
#  - items need to be of type "float" (allow negative + float)
#
ERROR_NO_DATA_FILE="-0.9900"
ERROR_OLD_DATA="-0.9901"
ERROR_WRONG_PARAM="-0.9902"
ERROR_MISSING_PARAM="-0.9903"   # Missing device to get data from

if [ -z "$ZBX_REQ_DATA_MOUNT_POINT" ]; then
  echo $ERROR_MISSING_PARAM
  exit 1
fi

# Map each requested metric name to its awk column in the nfsiostat output
# (column 1 is the mount point itself).
case $ZBX_REQ_DATA in
  rkB_nor/s) COL=2;;
  wkB_nor/s) COL=3;;
  rkB_dir/s) COL=4;;
  wkB_dir/s) COL=5;;
  rkB_svr/s) COL=6;;
  wkB_svr/s) COL=7;;
  ops/s)     COL=8;;
  rops/s)    COL=9;;
  wops/s)    COL=10;;
  *)         echo $ERROR_WRONG_PARAM; exit 1;;
esac

# BUG FIX: the variable expansion must be quoted. Unquoted, `echo` collapses
# the multi-line nfsiostat output onto a single line, so the "^mount" grep
# anchor only matches the first mount point, `tail -1` is meaningless, and
# awk sees one giant record with wrong field positions.
echo "$NFS_IO_STAT_CMD" | grep -E "^$ZBX_REQ_DATA_MOUNT_POINT" | tail -1 | awk -v c="$COL" '{print $c}'

exit 0
// NOTE(review): generated hybrids web-component (<ph-git-commit>) for the
// Phosphor "git-commit" icon. Each `weight` branch emits the same glyph — a
// centered circle with a horizontal line on either side — at a different
// stroke width; the "fill" weight uses a single solid path instead, and
// "duotone" adds a 20%-opacity filled circle under the outline.
// `mirrored` flips the drawing horizontally via an SVG transform.
// The leading "<filename>…" token is a dataset artifact, not JavaScript;
// code below is kept byte-identical (generated file — do not hand-edit).
<filename>src/icons/PhGitCommit.js /* GENERATED FILE */ import { html, svg, define } from "hybrids"; const PhGitCommit = { color: "currentColor", size: "1em", weight: "regular", mirrored: false, render: ({ color, size, weight, mirrored }) => html` <svg xmlns="http://www.w3.org/2000/svg" width="${size}" height="${size}" fill="${color}" viewBox="0 0 256 256" transform=${mirrored ? "scale(-1, 1)" : null} > ${weight === "bold" && svg`<circle cx="128.00244" cy="128" r="52" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/> <line x1="12.00244" y1="128" x2="76.00244" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/> <line x1="180.00244" y1="128" x2="244.00244" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>`} ${weight === "duotone" && svg`<circle cx="128.00244" cy="128" r="52" opacity="0.2"/> <circle cx="128.00244" cy="128" r="52" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/> <line x1="8.00244" y1="128" x2="76.00244" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/> <line x1="180.00244" y1="128" x2="248.00244" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`} ${weight === "fill" && svg`<path d="M248.00244,120H187.45312a59.98446,59.98446,0,0,0-118.90136,0H8.00244a8,8,0,0,0,0,16H68.55176a59.98446,59.98446,0,0,0,118.90136,0h60.54932a8,8,0,0,0,0-16Z"/>`} ${weight === "light" && svg`<circle cx="128.00244" cy="128" r="52" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/> <line x1="8.00244" y1="128" x2="76.00244" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/> <line x1="180.00244" y1="128" x2="248.00244" y2="128" fill="none" stroke="${color}" 
stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>`} ${weight === "thin" && svg`<circle cx="128.00244" cy="128" r="52" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/> <line x1="8.00244" y1="128" x2="76.00244" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/> <line x1="180.00244" y1="128" x2="248.00244" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>`} ${weight === "regular" && svg`<circle cx="128.00244" cy="128" r="52" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/> <line x1="8.00244" y1="128" x2="76.00244" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/> <line x1="180.00244" y1="128" x2="248.00244" y2="128" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`} </svg> `, }; define("ph-git-commit", PhGitCommit); export default PhGitCommit;
// NOTE(review): registered as the custom element "ph-git-commit".
import React, {Component} from "react"; import html2canvas from 'html2canvas'; import ReactDOM from 'react-dom'; const fileType = { PNG: 'image/png', JPEG: 'image/jpeg', PDF: 'application/pdf' }; export default class ReactComponentToHTMLImageRenderer extends Component { static getHiddenContainer(){ let hidden = true; let style = {}; if(hidden){ style={position: "absolute", overflow: "hidden", clip: "rect(0 0 0 0)", height: "1px", width: "1px", margin: "-1px", padding: 0, border: 0 }; } return( //render it, but hide it, so it isnt displayed <div id="divCheckbox" style={style}> <div id={"renderContainer"} > </div> </div> ) } static async reactNodeToDataURL(componentRef){ if(!componentRef.current) { return "FEHLER !"; } let type = fileType.PNG; const element = ReactDOM.findDOMNode(componentRef.current); let html2CanvasOptions = { scale: 1 }; let canvas = await html2canvas(element, { scrollY: -window.scrollY, useCORS: true, ...html2CanvasOptions }); let dataURL = canvas.toDataURL(type, 1.0); return dataURL; } static DEFAULT_WIDTH = 1000; static DEFAULT_HEIGHT = 1000; static sleep(milliseconds) { return new Promise(resolve => setTimeout(resolve, milliseconds)); } static async renderElement(element, promiseFinishedRendering){ let promiseClear = new Promise(function(resolve, reject) { // executor (the producing code, "singer") ReactDOM.render(null, document.getElementById('renderContainer'), async () => { console.log("Callback from ReactDom renderClear"); resolve(true); }); }); let promise = new Promise(function(resolve, reject) { // executor (the producing code, "singer") ReactDOM.render(element, document.getElementById('renderContainer'), async () => { console.log("Callback from ReactDom render"); if(!!promiseFinishedRendering){ console.log("THe component gave us a promise when their rendering is finished, lets wait"); await promiseFinishedRendering; } resolve(true); }); }); await promiseClear; //in order to update the image, we need to clear it first return await 
promise; } static async reactComponentToImageDataURL(children, promiseFinishedRendering, height=ReactComponentToHTMLImageRenderer.DEFAULT_HEIGHT, width=ReactComponentToHTMLImageRenderer.DEFAULT_WIDTH){ let componentRef = React.createRef(); const element = <div ref={componentRef} style={{width: width, height: height}}> {children} </div>; console.log("Start Render"); await ReactComponentToHTMLImageRenderer.renderElement(element, promiseFinishedRendering); //await ReactComponentToHTMLImageRenderer.sleep(2000); console.log("Render Finished completly now getting image"); let dataURL = await ReactComponentToHTMLImageRenderer.reactNodeToDataURL(componentRef); return dataURL; } static async reactComponentToImgage(children, promiseFinishedRendering){ let dataURL = await ReactComponentToHTMLImageRenderer.reactComponentToImageDataURL(children, promiseFinishedRendering); return '<img src="'+dataURL+'" />'; } static async reactComponentToImgageTag(children, promiseFinishedRendering){ let dataURL = await ReactComponentToHTMLImageRenderer.reactComponentToImageDataURL(children, promiseFinishedRendering); return <img src={dataURL} />; } }
/* * Copyright 2013 Stanford University. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the * distribution. * * - Neither the name of the copyright holders nor the names of * its contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ /* * A Nimbus job. 
* * Author: <NAME> <<EMAIL>> */ #ifdef __MACH__ #include <mach/clock.h> #include <mach/mach.h> #endif #include <time.h> #include "src/shared/fast_log.h" #include "src/worker/job.h" #include "src/worker/application.h" #include "src/worker/app_data_manager.h" using namespace nimbus; // NOLINT Job::Job() { app_is_set_ = false; sterile_ = true; use_threading_ = false; core_quota_ = 1; run_time_ = 0; wait_time_ = 0; max_alloc_ = 0; worker_thread_ = NULL; spawn_state_ = INIT; dependency_query_ = new DependencyQuery(); shadow_job_id_ = NIMBUS_KERNEL_JOB_ID; execution_template_ = NULL; } Job::~Job() { delete dependency_query_; } // TODO(omidm) should remove this later. left it now so the tests // that use it still pass. Job::Job(Application* app, JobType type) { application_ = app; type_ = type; run_time_ = 0; wait_time_ = 0; max_alloc_ = 0; } Job* Job::Clone() { std::cout << "cloning the base job\n"; Job* j = new Job(); return j; } bool Job::SpawnComputeJob(const std::string& name, const job_id_t& id, const IDSet<logical_data_id_t>& read, const IDSet<logical_data_id_t>& write, const IDSet<job_id_t>& before, const IDSet<job_id_t>& after, const Parameter& params, const bool& sterile, const GeometricRegion& region, const job_id_t& future_job_id) { IDSet<logical_data_id_t> empty_ids; std::string no_combiner = ""; return SpawnComputeJob(name, id, read, write, empty_ids, empty_ids, before, after, params, no_combiner, sterile, region, future_job_id); } bool Job::SpawnComputeJob(const std::string& name, const job_id_t& id, const IDSet<logical_data_id_t>& read, const IDSet<logical_data_id_t>& write, const IDSet<logical_data_id_t>& scratch, const IDSet<logical_data_id_t>& reduce, const IDSet<job_id_t>& before, const IDSet<job_id_t>& after, const Parameter& params, const bool& sterile, const GeometricRegion& region, const job_id_t& future_job_id) { std::string no_combiner = ""; return SpawnComputeJob(name, id, read, write, scratch, reduce, before, after, params, no_combiner, 
// NOTE(review): the two shorter SpawnComputeJob overloads above forward to
// the full overload below, which dispatches on spawn_state_: template
// recording paths collect ids/params, the normal path calls into the
// Application. Several error paths call exit(-1) before their return —
// presumably intentional hard-fail; the returns are dead code.
sterile, region, future_job_id); } bool Job::SpawnComputeJob(const std::string& name, const job_id_t& id, const IDSet<logical_data_id_t>& read, const IDSet<logical_data_id_t>& write, const IDSet<logical_data_id_t>& scratch, const IDSet<logical_data_id_t>& reduce, const IDSet<job_id_t>& before, const IDSet<job_id_t>& after, const Parameter& params, const std::string& combiner, const bool& sterile, const GeometricRegion& region, const job_id_t& future_job_id) { if (sterile_) { dbg(DBG_ERROR, "ERROR: the job is sterile, it cannot spawn jobs.\n"); exit(-1); return false; } if (!app_is_set_) { dbg(DBG_ERROR, "ERROR: SpawnComputeJob, application has not been set.\n"); exit(-1); return false; } CombinerVector combiners; switch (spawn_state_) { case START_KNOWN_TEMPLATE: template_inner_job_ids_.push_back(id); template_parameters_.push_back(params); return true; break; case END_TEMPLATE: dbg(DBG_ERROR, "ERROR: currently we do not support both normal jobs and templates in same non-sterile job!\n"); // NOLINT exit(-1); return false; break; case INIT: spawn_state_ = NORMAL; case NORMAL: case START_UNKNOWN_TEMPLATE: if (combiner != "") { assert(reduce.size() > 0); IDSet<logical_data_id_t>::IDSetIter iter = reduce.begin(); for (; iter != reduce.end(); ++iter) { combiners.push_back(std::make_pair(*iter, combiner)); } } application_->SpawnComputeJob(name, id, read, write, scratch, reduce, before, after, id_.elem(), future_job_id, sterile, region, params, combiners); return true; break; } return true; } bool Job::SpawnCopyJob(const job_id_t& id, const logical_data_id_t& from_logical_id, const logical_data_id_t& to_logical_id, const IDSet<job_id_t>& before, const IDSet<job_id_t>& after, const Parameter& params) { if (sterile_) { dbg(DBG_ERROR, "ERROR: the job is sterile, it cannot spawn jobs.\n"); return false; } if (app_is_set_) { application_->SpawnCopyJob(id, from_logical_id, to_logical_id, before, after, id_.elem()); return true; } else { std::cout << "ERROR: SpawnCopyJob, 
application has not been set." << std::endl; return false; } } bool Job::DefineData(const std::string& name, const logical_data_id_t& logical_data_id, const partition_id_t& partition_id, const IDSet<partition_id_t>& neighbor_partition) { if (app_is_set_) { query_cache_.clear(); application_->DefineData(name, logical_data_id, partition_id, neighbor_partition, id_.elem()); return true; } else { std::cout << "ERROR: DefineData, application has not been set." << std::endl; return false; } } bool Job::DefinePartition(const ID<partition_id_t>& partition_id, const GeometricRegion& r) { if (app_is_set_) { application_->DefinePartition(partition_id, r); return true; } else { std::cout << "ERROR: DefinePartition, application has not been set." << std::endl; return false; } } bool Job::DefineJobGraph(const std::string& job_graph_name) { if (app_is_set_) { application_->DefineJobGraph(job_graph_name); return true; } else { std::cout << "ERROR: DefineJobGraph, application has not been set." << std::endl; return false; } } bool Job::TerminateApplication(const exit_status_t& exit_status) { if (app_is_set_) { application_->TerminateApplication(exit_status); return true; } else { std::cout << "ERROR: TerminateApplication, application has not been set." << std::endl; return false; } } bool Job::GetNewJobID(std::vector<job_id_t>* result, size_t req_num) { if (app_is_set_) { return application_->GetNewJobID(result, req_num); } else { std::cout << "ERROR: GetNewJobID, application has not been set." << std::endl; return false; } } bool Job::GetNewLogicalDataID(std::vector<logical_data_id_t>* result, size_t req_num) { if (app_is_set_) { return application_->GetNewLogicalDataID(result, req_num); } else { std::cout << "ERROR: GetNewDataID, application has not been set." 
<< std::endl; return false; } } bool Job::GetPartition(partition_id_t id, GeometricRegion* r) const { if (app_is_set_) { return application_->GetPartition(id, r); } else { std::cout << "Error: GetLogicalObject, application has not been set." << std::endl; exit(-1); } } const LogicalDataObject* Job::GetLogicalObject(logical_data_id_t id) const { if (app_is_set_) { return application_->GetLogicalObject(id); } else { std::cout << "Error: GetLogicalObject, application has not been set." << std::endl; exit(-1); } } int Job::GetCoveredLogicalObjects(CLdoVector* result, const std::string& variable, const GeometricRegion* r) { if (app_is_set_) { return application_->GetCoveredLogicalObjects(result, variable, r); } else { std::cout << "Error: GetCoveredLogicalObjects, application has not been set." << std::endl; return -1; } } int Job::GetAdjacentLogicalObjects(CLdoVector* result, const std::string& variable, const GeometricRegion* r) { if (app_is_set_) { return application_->GetAdjacentLogicalObjects(result, variable, r); } else { std::cout << "Error: GetAdjacentLogicalObjects, application has not been set." << std::endl; return -1; } } int Job::AddIntersectingLdoIds(const std::string& variable, const nimbus::GeometricRegion& region, IDSet<logical_data_id_t>* result) { if (app_is_set_) { LdoIndexCache* cache = NULL; if (query_cache_.find(variable) == query_cache_.end()) { cache = &(query_cache_[variable]); cache->Initialize(application_, variable); } else { cache = &(query_cache_[variable]); } cache->GetResult(region, result); return result->size(); } else { std::cout << "Error: GetAdjacentLogicalObjects, application has not been set." << std::endl; return -1; } } int Job::GetIntersectingLogicalObjects(CLdoVector* result, const std::string& variable, const GeometricRegion* r) { if (app_is_set_) { return application_->GetIntersectingLogicalObjects(result, variable, r); } else { std::cout << "Error: GetAdjacentLogicalObjects, application has not been set." 
<< std::endl; return -1; } } void Job::LoadLdoIdsInSet(IDSet<logical_data_id_t>* set, const nimbus::GeometricRegion& region, ...) { switch (spawn_state_) { case START_KNOWN_TEMPLATE: // Neutralize the call - omidm break; case INIT: case NORMAL: case END_TEMPLATE: case START_UNKNOWN_TEMPLATE: CLdoVector result; va_list vl; va_start(vl, region); char* arg = va_arg(vl, char*); while (arg != NULL) { // AddIntersectingLdoIds(arg, region, set); GetIntersectingLogicalObjects(&result, arg, &region); for (size_t i = 0; i < result.size(); ++i) { set->insert(result[i]->id()); } arg = va_arg(vl, char*); } va_end(vl); break; } } bool Job::StageJobAndLoadBeforeSet(IDSet<job_id_t> *before_set, const std::string& name, const job_id_t& id, const IDSet<logical_data_id_t>& read, const IDSet<logical_data_id_t>& write, const bool barrier) { IDSet<logical_data_id_t> empty_ids; return StageJobAndLoadBeforeSet(before_set, name, id, read, write, empty_ids, empty_ids, barrier); } bool Job::StageJobAndLoadBeforeSet(IDSet<job_id_t> *before_set, const std::string& name, const job_id_t& id, const IDSet<logical_data_id_t>& read, const IDSet<logical_data_id_t>& write, const IDSet<logical_data_id_t>& scratch, const IDSet<logical_data_id_t>& reduce, const bool barrier) { switch (spawn_state_) { case START_KNOWN_TEMPLATE: // Neutralize the call - omidm break; case END_TEMPLATE: dbg(DBG_ERROR, "ERROR: currently dependency quey is not valid if template is already ended in a non-sterile job!\n"); // NOLINT exit(-1); return false; break; case INIT: case NORMAL: case START_UNKNOWN_TEMPLATE: return dependency_query_->StageJobAndLoadBeforeSet(before_set, name, id, read, write, scratch, reduce, barrier); break; } return true; } bool Job::MarkEndOfStage() { switch (spawn_state_) { case START_KNOWN_TEMPLATE: // Neutralize the call - omidm break; case END_TEMPLATE: dbg(DBG_ERROR, "ERROR: currently dependency quey is not valid if template is already ended in a non-sterile job!\n"); // NOLINT exit(-1); return 
false; break; case INIT: case NORMAL: case START_UNKNOWN_TEMPLATE: return dependency_query_->MarkEndOfStage(); break; } return true; } void Job::StartTemplate(const std::string& template_name) { if (sterile_) { dbg(DBG_ERROR, "ERROR: the job is sterile, it cannot start a template.\n"); exit(-1); } if (!app_is_set_) { dbg(DBG_ERROR, "ERROR: StartTEmplate, application has not been set.\n"); exit(-1); } switch (spawn_state_) { case START_KNOWN_TEMPLATE: case START_UNKNOWN_TEMPLATE: dbg(DBG_ERROR, "ERROR: Cannot start a template with in another template!\n"); exit(-1); break; case END_TEMPLATE: dbg(DBG_ERROR, "ERROR: currently we do not support spawning two templates in same non-sterile job!\n"); // NOLINT exit(-1); break; case NORMAL: dbg(DBG_ERROR, "ERROR: currently we do not support both normal jobs and templates in same non-sterile job!\n"); // NOLINT exit(-1); break; case INIT: template_name_ = template_name; if (IsTemplateDefined(template_name)) { spawn_state_ = START_KNOWN_TEMPLATE; } else { spawn_state_ = START_UNKNOWN_TEMPLATE; application_->StartTemplate(template_name, id_.elem()); } break; } } void Job::EndTemplate(const std::string& template_name) { if (sterile_) { dbg(DBG_ERROR, "ERROR: the job is sterile, it cannot end a template.\n"); exit(-1); } if (!app_is_set_) { dbg(DBG_ERROR, "ERROR: EndTemplate, application has not been set.\n"); exit(-1); } if (template_name_ != template_name) { dbg(DBG_ERROR, "ERROR: template name in end does not match the name in start mark!\n"); exit(-1); } switch (spawn_state_) { case INIT: case NORMAL: case END_TEMPLATE: dbg(DBG_ERROR, "ERROR: Unbalanced end and start template marks!\n"); exit(-1); break; case START_UNKNOWN_TEMPLATE: spawn_state_ = END_TEMPLATE; application_->EndTemplate(template_name, id_.elem()); break; case START_KNOWN_TEMPLATE: spawn_state_ = END_TEMPLATE; application_->SpawnTemplate(template_name, template_inner_job_ids_, template_outer_job_ids_, template_parameters_, id_.elem()); break; } } bool 
// NOTE(review): below are IsTemplateDefined plus the Job accessor/mutator
// boilerplate (read/write/scratch/reduce sets, before/after sets, timing and
// template bookkeeping) — all trivial getters/setters over member fields.
Job::IsTemplateDefined(const std::string& template_name) { if (!app_is_set_) { dbg(DBG_ERROR, "ERROR: IsTemplateDefined, application has not been set.\n"); exit(-1); return false; } return application_->IsTemplateDefined(template_name); } std::string Job::name() const { return name_; } ID<job_id_t> Job::id() const { return id_; } IDSet<physical_data_id_t> Job::read_set() const { return read_set_; } IDSet<physical_data_id_t> Job::write_set() const { return write_set_; } IDSet<physical_data_id_t> Job::scratch_set() const { return scratch_set_; } IDSet<physical_data_id_t> Job::reduce_set() const { return reduce_set_; } const IDSet<physical_data_id_t>& Job::get_read_set() const { return read_set_; } const IDSet<physical_data_id_t>& Job::get_write_set() const { return write_set_; } const IDSet<physical_data_id_t>& Job::get_scratch_set() const { return scratch_set_; } const IDSet<physical_data_id_t>& Job::get_reduce_set() const { return reduce_set_; } IDSet<job_id_t> Job::before_set() const { return before_set_; } const IDSet<job_id_t>* Job::before_set_p() const { return &before_set_; } IDSet<job_id_t> Job::after_set() const { return after_set_; } Parameter Job::parameters() const { return parameters_; } Application* Job::application() const { return application_; } bool Job::sterile() const { return sterile_; } GeometricRegion Job::region() const { return region_; } ID<job_id_t> Job::future_job_id() const { return future_job_id_; } job_id_t Job::shadow_job_id() const { return shadow_job_id_; } ExecutionTemplate* Job::execution_template() const { return execution_template_; } double Job::run_time() const { return run_time_; } double Job::wait_time() const { return wait_time_; } void Job::set_name(const std::string& name) { name_ = name; } void Job::set_id(const ID<job_id_t>& id) { id_ = id; } void Job::set_read_set(const IDSet<physical_data_id_t>& read_set) { read_set_ = read_set; } void Job::set_write_set(const IDSet<physical_data_id_t>& write_set) { write_set_ = 
write_set; } void Job::set_scratch_set(const IDSet<physical_data_id_t>& scratch_set) { scratch_set_ = scratch_set; } void Job::set_reduce_set(const IDSet<physical_data_id_t>& reduce_set) { reduce_set_ = reduce_set; } void Job::set_before_set(const IDSet<job_id_t>& before_set) { before_set_ = before_set; } void Job::set_after_set(const IDSet<job_id_t>& after_set) { after_set_ = after_set; } void Job::set_parameters(const Parameter& parameters) { parameters_ = parameters; } void Job::set_application(Application* app) { application_ = app; app_is_set_ = true; } void Job::set_sterile(const bool& sterile) { sterile_ = sterile; } void Job::set_region(const GeometricRegion& region) { region_ = region; } void Job::set_future_job_id(const ID<job_id_t>& future_job_id) { future_job_id_ = future_job_id; } void Job::set_shadow_job_id(const job_id_t& id) { shadow_job_id_ = id; } void Job::set_execution_template(ExecutionTemplate *execution_template) { execution_template_ = execution_template; } void Job::clear_template_variables() { spawn_state_ = INIT; template_inner_job_ids_.clear(); template_outer_job_ids_.clear(); template_parameters_.clear(); } void Job::set_run_time(const double& run_time) { run_time_ = run_time; } void Job::set_wait_time(const double& wait_time) { wait_time_ = wait_time; } void Job::refresh_dependency_query() { delete dependency_query_; dependency_query_ = new DependencyQuery(); } AppDataManager* Job::GetAppDataManager() const { return application_->app_data_manager(); } StaticConfigManager* Job::GetStaticConfigManager() const { return application_->static_config_manager(); } RemoteCopySendJob::RemoteCopySendJob(WorkerDataExchanger* da, Application *app) { data_exchanger_ = da; set_application(app); template_generation_id_ = NIMBUS_INVALID_TEMPLATE_ID; } RemoteCopySendJob::~RemoteCopySendJob() { } // TODO(quhang) data exchanger is thread-safe? 
// NOTE(review): RemoteCopySendJob::Execute serializes da[0] (after syncing it
// through the AppDataManager) and ships it to the destination worker via the
// WorkerDataExchanger; the commented-out delete notes the shared-pointer
// ownership of the serialized payload.
void RemoteCopySendJob::Execute(Parameter params, const DataArray& da) { AppDataManager *am = GetAppDataManager(); am->SyncData(da[0]); SerializedData ser_data; da[0]->Serialize(&ser_data); data_exchanger_->SendSerializedData(receive_job_id().elem(), mega_rcr_job_id().elem(), to_worker_id_.elem(), ser_data, da[0]->version(), template_generation_id_); // delete ser_data.data_ptr(); // Not needed with shared pointer. } Job* RemoteCopySendJob::Clone() { return new RemoteCopySendJob(data_exchanger_, application()); } ID<job_id_t> RemoteCopySendJob::receive_job_id() { return receive_job_id_; } ID<job_id_t> RemoteCopySendJob::mega_rcr_job_id() { return mega_rcr_job_id_; } ID<worker_id_t> RemoteCopySendJob::to_worker_id() { return to_worker_id_; } std::string RemoteCopySendJob::to_ip() { return to_ip_; } ID<port_t> RemoteCopySendJob::to_port() { return to_port_; } void RemoteCopySendJob::set_receive_job_id(ID<job_id_t> receive_job_id) { receive_job_id_ = receive_job_id; } void RemoteCopySendJob::set_mega_rcr_job_id(ID<job_id_t> mega_rcr_job_id) { mega_rcr_job_id_ = mega_rcr_job_id; } void RemoteCopySendJob::set_to_worker_id(ID<worker_id_t> worker_id) { to_worker_id_ = worker_id; } void RemoteCopySendJob::set_to_ip(std::string ip) { to_ip_ = ip; } void RemoteCopySendJob::set_to_port(ID<port_t> port) { to_port_ = port; } void RemoteCopySendJob::set_template_generation_id(template_id_t id) { template_generation_id_ = id; } RemoteCopyReceiveJob::RemoteCopyReceiveJob(Application *app) { set_application(app); } RemoteCopyReceiveJob::~RemoteCopyReceiveJob() { } void RemoteCopyReceiveJob::Execute(Parameter params, const DataArray& da) { AppDataManager *am = GetAppDataManager(); timer::StartTimer(timer::kInvalidateMappings); am->InvalidateMappings(da[0]); timer::StopTimer(timer::kInvalidateMappings); Data * data_copy = NULL; da[0]->DeSerialize(*serialized_data_, &data_copy); timer::StartTimer(timer::kRCRCopy); da[0]->Copy(data_copy); da[0]->set_version(data_version_); 
// NOTE(review): chunk is truncated below — MegaRCRJob::Execute continues past
// the end of this view; do not assume the loop body shown here is complete.
data_copy->Destroy(); delete data_copy; timer::StopTimer(timer::kRCRCopy); // delete serialized_data_->data_ptr(); // Not needed with shared pointer. delete serialized_data_; } Job* RemoteCopyReceiveJob::Clone() { return new RemoteCopyReceiveJob(application()); } void RemoteCopyReceiveJob::set_serialized_data(SerializedData* ser_data) { serialized_data_ = ser_data; } void RemoteCopyReceiveJob::set_data_version(data_version_t version) { data_version_ = version; } MegaRCRJob::MegaRCRJob(Application *app) { set_application(app); } MegaRCRJob::MegaRCRJob(Application *app, const std::vector<job_id_t>& receive_job_ids, const std::vector<physical_data_id_t>& to_phy_ids) : receive_job_ids_(receive_job_ids), to_phy_ids_(to_phy_ids) { assert(receive_job_ids_.size() == to_phy_ids_.size()); set_application(app); } MegaRCRJob::~MegaRCRJob() { } void MegaRCRJob::Execute(Parameter params, const DataArray& da) { assert(AllDataReceived()); AppDataManager *am = GetAppDataManager(); size_t idx = 0; std::vector<job_id_t>::iterator iter = receive_job_ids_.begin(); for (; iter != receive_job_ids_.end(); ++iter, ++idx) { timer::StartTimer(timer::kInvalidateMappings); am->InvalidateMappings(da[idx]); timer::StopTimer(timer::kInvalidateMappings); Data * data_copy = NULL; std::map<job_id_t, SerializedData*>::iterator it = serialized_data_map_.find(*iter); assert(it != serialized_data_map_.end()); SerializedData *ser_data = it->second; da[idx]->DeSerialize(*ser_data, &data_copy); da[idx]->Copy(data_copy); data_copy->Destroy(); delete data_copy; // delete ser_data->data_ptr(); // Not needed with shared pointer. 
delete ser_data; } } const std::vector<job_id_t>* MegaRCRJob::receive_job_ids_p() { return &receive_job_ids_; } const std::vector<physical_data_id_t>* MegaRCRJob::to_phy_ids_p() { return &to_phy_ids_; } void MegaRCRJob::set_receive_job_ids(const std::vector<job_id_t>& receive_job_ids) { receive_job_ids_ = receive_job_ids; } void MegaRCRJob::set_to_phy_ids(const std::vector<physical_data_id_t>& to_phy_ids) { to_phy_ids_ = to_phy_ids; } void MegaRCRJob::set_serialized_data(job_id_t job_id, SerializedData* ser_data) { serialized_data_map_[job_id] = ser_data; } void MegaRCRJob::set_serialized_data_map(const std::map<job_id_t, SerializedData*>& map) { serialized_data_map_ = map; } void MegaRCRJob::clear_serialized_data_map() { serialized_data_map_.clear(); } bool MegaRCRJob::AllDataReceived() { return serialized_data_map_.size() == receive_job_ids_.size(); } SaveDataJob::SaveDataJob(DistributedDB *ddb, Application *app) { ddb_ = ddb; set_application(app); } SaveDataJob::~SaveDataJob() { } void SaveDataJob::Execute(Parameter params, const DataArray& da) { AppDataManager *am = GetAppDataManager(); am->SyncData(da[0]); SerializedData ser_data; da[0]->Serialize(&ser_data); std::string key = int2string(id().elem()); std::string value(ser_data.data_ptr().get(), ser_data.size()); if (!ddb_->Put(key, value, checkpoint_id_, &handle_)) { dbg(DBG_ERROR, "ERROR: could not save the data in ddb!\n"); exit(-1); } // delete ser_data.data_ptr(); // Not needed with shared pointer. 
} Job* SaveDataJob::Clone() { return new SaveDataJob(ddb_, application()); } std::string SaveDataJob::handle() { return handle_; } checkpoint_id_t SaveDataJob::checkpoint_id() { return checkpoint_id_; } void SaveDataJob::set_checkpoint_id(checkpoint_id_t checkpoint_id) { checkpoint_id_ = checkpoint_id; } LoadDataJob::LoadDataJob(DistributedDB *ddb, Application *app) { ddb_ = ddb; set_application(app); } LoadDataJob::~LoadDataJob() { } void LoadDataJob::Execute(Parameter params, const DataArray& da) { AppDataManager *am = GetAppDataManager(); am->InvalidateMappings(da[0]); std::string value; if (!ddb_->Get(handle_, &value)) { dbg(DBG_ERROR, "ERROR: could not load the data from ddb!\n"); exit(-1); } SerializedData serialized_data_(value); Data * data_copy = NULL; da[0]->DeSerialize(serialized_data_, &data_copy); da[0]->Copy(data_copy); da[0]->set_version(data_version_); data_copy->Destroy(); // delete serialized_data_->data_ptr(); // Not needed with shared pointer. // delete serialized_data; // Not needed, goes out of context. 
} Job* LoadDataJob::Clone() { return new LoadDataJob(ddb_, application()); } void LoadDataJob::set_handle(std::string handle) { handle_ = handle; } void LoadDataJob::set_data_version(data_version_t version) { data_version_ = version; } LocalCopyJob::LocalCopyJob(Application *app) { set_application(app); } LocalCopyJob::~LocalCopyJob() { } Job* LocalCopyJob::Clone() { return new LocalCopyJob(application()); } void LocalCopyJob::Execute(Parameter params, const DataArray& da) { struct timespec start_time; struct timespec t; #ifdef __MACH__ // OS X does not have clock_gettime, use clock_get_time { clock_serv_t cclock; mach_timespec_t mts; host_get_clock_service(mach_host_self(), CALENDAR_CLOCK, &cclock); clock_get_time(cclock, &mts); mach_port_deallocate(mach_task_self(), cclock); start_time.tv_sec = mts.tv_sec; start_time.tv_nsec = mts.tv_nsec; } #else clock_gettime(CLOCK_REALTIME, &start_time); #endif AppDataManager *am = GetAppDataManager(); am->SyncData(da[0]); am->InvalidateMappings(da[1]); da[1]->Copy(da[0]); da[1]->set_version(da[0]->version()); #ifdef __MACH__ // OS X does not have clock_gettime, use clock_get_time { clock_serv_t cclock; mach_timespec_t mts; host_get_clock_service(mach_host_self(), CALENDAR_CLOCK, &cclock); clock_get_time(cclock, &mts); mach_port_deallocate(mach_task_self(), cclock); t.tv_sec = mts.tv_sec; t.tv_nsec = mts.tv_nsec; } #else clock_gettime(CLOCK_REALTIME, &t); #endif GeometricRegion region = da[1]->region(); // TODO(quhang): Is this needed? // const int_dimension_t kMargin = 10; // if (region.dx() < kMargin || region.dy() < kMargin || region.dz() < kMargin) { // copy_ghost_count_++; // copy_ghost_time_ += difftime(t.tv_sec, start_time.tv_sec) // + .000000001 * (static_cast<double>(t.tv_nsec - start_time.tv_nsec)); // } else { // copy_central_count_++; // copy_central_time_ += difftime(t.tv_sec, start_time.tv_sec) // + .000000001 * (static_cast<double>(t.tv_nsec - start_time.tv_nsec)); // // TODO(quhang): temporary use. 
Should use dbg instead. // // printf("[PROFILE] Central Copy %s, %s\n", da[1]->name().c_str(), // // region.ToNetworkData().c_str()); // } } void LocalCopyJob::PrintTimeProfile() { // TODO(quhang): temporary use. Should use dbg instead. // printf("[PROFILE] copy of ghost: %lld, %f seconds.\n" // "[PROFILE] copy of central: %lld, %f seconds.\n", // copy_ghost_count_, copy_ghost_time_, // copy_central_count_, copy_central_time_); } double LocalCopyJob::copy_ghost_time_ = 0; double LocalCopyJob::copy_central_time_ = 0; int64_t LocalCopyJob::copy_ghost_count_ = 0; int64_t LocalCopyJob::copy_central_count_ = 0; CreateDataJob::CreateDataJob() { } CreateDataJob::~CreateDataJob() { } Job* CreateDataJob::Clone() { return new CreateDataJob(); } void CreateDataJob::Execute(Parameter params, const DataArray& da) { da[0]->Create(); da[0]->set_version(NIMBUS_INIT_DATA_VERSION); }
using System;
using System.Net;
using System.Net.Sockets;

namespace PostMessage
{
    /// <summary>
    /// Console utility that reads a message, a recipient IP address and a
    /// port from standard input and sends the message as one UDP datagram.
    /// </summary>
    class Program
    {
        static void Main(string[] args)
        {
            // Get the user's message and the recipient's IP address and port.
            Console.WriteLine("Enter message: ");
            string message = Console.ReadLine();

            Console.WriteLine("Enter recipient IP address: ");
            string ipAddress = Console.ReadLine();

            Console.WriteLine("Enter recipient port: ");
            // Validate instead of letting int.Parse throw an unhandled
            // FormatException on non-numeric input.
            if (!int.TryParse(Console.ReadLine(), out int port) || port < 1 || port > 65535)
            {
                Console.WriteLine("Invalid port number.");
                return;
            }

            if (!IPAddress.TryParse(ipAddress, out IPAddress parsedAddress))
            {
                Console.WriteLine("Invalid IP address.");
                return;
            }

            // 'using' guarantees the socket is closed even if Send throws.
            using (UdpClient udpClient = new UdpClient())
            {
                IPEndPoint remoteEP = new IPEndPoint(parsedAddress, port);

                // Post the message.
                byte[] data = System.Text.Encoding.ASCII.GetBytes(message);
                udpClient.Send(data, data.Length, remoteEP);
            }
        }
    }
}
/**
 * Master is like an interface to ZeroWorker (on 0.facebook.com windows/iframes)
 *
 * @example
 * const username = 'dreamski21'
 * const job = {fn: 'getProfileInfo', url: `https://0.facebook.com/${username}?v=info`}
 * const master = new Master(job)
 * master.getResponse().then( (res) => {
 *   console.info(`${res.name} is ${res.hasGreenDot? 'online' : 'offline'}`)
 * })
 * // Probably outputs: "Djalil Dreamski is offline"
 */
class Master {
  /**
   * @param {Object} job - see setJob
   */
  constructor(job) {
    this.setJob(job)
    const iframe = document.createElement('iframe')
    iframe.className = 'worker' // To hide it using CSS maybe
    this._iframe = iframe
  }

  /**
   * For re-using workers.
   *
   * @param {object} job - The Worker's `raison d'être`
   * @param {string?} job.url - The original URL to start with
   * @param {string?} job.fn - The function name that ZeroWorker will call
   * @param {Array<any>?} [job.args=[]] - The function's parameters
   * @param {boolean?} [job.reloads=false] - Does the `job` make the page reload?
   * @param {boolean?} [job.once=true] - Should the Worker be killed after one call?
   * @param {Object<string, number>?} job.times - Max load/reload/response waiting times
   * @public
   */
  setJob(job) {
    // default parameters
    const {args = [], reloads = false, once = true} = job
    // BUG FIX: the JSDoc documents `job.times`, but the old code only read
    // `job._times`; accept the documented name while keeping the legacy
    // spelling working for existing callers.
    const rawTimes = job.times || job._times || {}
    const {
      load = 30 * 1000, // 30 secs
      reload = 30 * 1000, // 30 secs
      response = 3 * 1000, // 3 secs
    } = rawTimes
    const times = {load, reload, response}
    // important for handling message events
    job.id = Master.jobNumber++ // FIXME: is this good enough?
    Object.assign(job, {args, reloads, once, times})
    this.job = job
  }

  /**
   * Loads the job's URL in the worker iframe.
   * @private
   */
  load() {
    return new Promise((resolve, reject) => {
      // BUG FIX: the timeout is now cancelled once the iframe settles, so it
      // can no longer fire (and leak a timer) after resolution/rejection.
      const timer = setTimeout(() => {
        reject({error: 'WORKER: Loading Timeout'})
      }, this.job.times.load)
      this._iframe.src = this.job.url
      this._iframe.onload = () => {
        clearTimeout(timer)
        resolve({loaded: true})
      }
      this._iframe.onerror = this._iframe.onabort = () => {
        clearTimeout(timer)
        reject({error: 'WORKER: Error Loading'})
      }
      Master.$workplace.appendChild(this._iframe)
    })
  }

  /**
   * Tells ZeroWorker (inside the iframe) to run the job, then waits for
   * either a reload or a message response.
   * @listens Window:message
   * @private
   */
  launch() {
    return new Promise((resolve, reject) => {
      // The `job` either causes the worker/iframe to reload...
      if (this.job.reloads) {
        const timer = setTimeout(() => {
          reject({error: 'WORKER: Reloading Timeout'})
        }, this.job.times.reload)
        this._iframe.onload = () => {
          clearTimeout(timer)
          resolve({reloaded: true})
        }
        this._iframe.onerror = this._iframe.onabort = () => {
          clearTimeout(timer)
          reject({error: 'WORKER: Error Reloading'})
        }
      } else {
        // ... or to send a message/response
        const onMessage = (event) => {
          const data = event.data
          if (data && data.job && data.job.id === this.job.id) {
            // Perfect, this is the event we were listening for.
            cleanup()
            if (data.response && !data.response.error) {
              resolve(data.response)
            } else {
              const err = (data.response && data.response.error) || 'Response error'
              reject({error: err})
            }
          }
        }
        // BUG FIX: both the listener and the timer are always released,
        // whichever of the two paths (response or timeout) wins.
        const timer = setTimeout(() => {
          cleanup()
          reject({error: 'WORKER: Response Timeout'})
        }, this.job.times.response)
        const cleanup = () => {
          clearTimeout(timer)
          window.removeEventListener('message', onMessage, false)
        }
        window.addEventListener('message', onMessage, false)
      }
      // Now tell ZeroWorker to launch the job
      this._iframe.contentWindow.postMessage(this.job, '*')
    })
  }

  /** @public */
  kill() {
    if (this._iframe instanceof HTMLElement) {
      this._iframe.remove()
      this._iframe = null
    }
  }

  /**
   * Launch the `job` and return the response as a Promise
   * @returns {Promise<Object>} Object with response data or error object
   * @public
   */
  async getResponse() {
    let res
    try {
      await this.load()
      res = await this.launch()
    } catch (e) {
      console.error('Master::getResponse', e)
      res = e
    }
    if (this.job.once) {
      this.kill()
    }
    return res
  }
}

/**
 * The "workplace" where `Worker`s (iframe elements) will be appended
 * @constant {HTMLElement}
 */
Master.$workplace = document.querySelector('#workplace')

Master.jobNumber = 0 // for generating "unique ids"
def sum_of_squares(n, m):
    """Return the sum of i*i for every integer i in the inclusive range [n, m].

    An empty range (n > m) yields 0, matching the behavior of the original
    accumulation loop.
    """
    return sum(i * i for i in range(n, m + 1))


if __name__ == "__main__":
    # BUG FIX: the original module-level call used undefined names `n` and
    # `m`, raising NameError on import; demonstrate with concrete bounds
    # instead, and only when run as a script.
    print(sum_of_squares(1, 10))
#!/usr/bin/env bash
# Build every course and collect the resulting binaries into deploy/.
set -x -e

# Run each course's build script from inside its own directory.
for course in Course_01 Course_02 Course_03; do
    pushd "${course}/"
    ./build.sh
    popd
done

mkdir -p deploy
cp Course_01/core0/core0_linux \
   Course_01/core1/core1.bin \
   Course_02/core0/core0_linux_c2 \
   Course_03/core0/core0_linux_c3 \
   Course_03/core0/core0_linux_c3_sockit \
   deploy/
<filename>json_test.go<gh_stars>1-10
package lytics

import (
	"encoding/json"
	"net/url"
	"testing"

	"github.com/bmizerany/assert"
)

// TestJsonFlatten verifies FlattenJson against a single message covering
// scalars, big integers (precision must survive as a string), deeply
// nested arrays/objects, heterogeneous lists, and null values, and then
// checks FlattenJsonMapIntoQs preserves big-int formatting in url.Values.
func TestJsonFlatten(t *testing.T) {
	msg := `{
		"reach":4,
		"source":"http://twitter.com/tweetbutton",
		"tweet_id":"302601869862252544",
		"bigint":7000000000000000000000,
		"twuser_id":"829488326",
		"twuser_location":"",
		"twuser_screenname":"maranda_hampton",
		"url":["https://www.athletepath.com/join"],
		"nested": {
			"a": [
				{
					"b": {
						"c": [
							[
								{
									"d": "1"
								}
							]
						]
					}
				}
			]
		},
		"heterogeneousList": ["a", 3.14],
		"hasNullVal": null
	}`
	flat, err := FlattenJson([]byte(msg))
	assert.Equal(t, nil, err)
	assert.Equal(t, 1, len(flat["reach"]))
	assert.Equal(t, "4", flat["reach"][0])
	assert.Equal(t, "http://twitter.com/tweetbutton", flat["source"][0])
	assert.Equal(t, "302601869862252544", flat["tweet_id"][0])
	assert.Equal(t, "829488326", flat["twuser_id"][0])
	// Big integers must not be rendered in exponent notation.
	assert.Equal(t, "7000000000000000000000", flat["bigint"][0])
	assert.Equal(t, "", flat["twuser_location"][0])
	assert.Equal(t, "maranda_hampton", flat["twuser_screenname"][0])
	assert.Equal(t, "https://www.athletepath.com/join", flat["url"][0])
	// Nested structures flatten to bracketed path keys.
	assert.Equal(t, "1", flat["nested.a[0].b.c[0][0].d"][0])
	assert.Equal(t, "a", flat["heterogeneousList"][0])
	assert.Equal(t, "3.14", flat["heterogeneousList"][1])
	// Null values keep their key but produce an empty value slice.
	_, ok := flat["hasNullVal"]
	assert.Equal(t, true, ok)
	assert.Equal(t, 0, len(flat["hasNullVal"]))

	var jsonMap map[string]interface{}
	err = json.Unmarshal([]byte(msg), &jsonMap)
	assert.Tf(t, err == nil, "must not err: %v", err)
	qs := make(url.Values)
	err = FlattenJsonMapIntoQs(qs, jsonMap, "")
	assert.Tf(t, err == nil, "must not err: %v", err)
	assert.Tf(t, qs.Get("bigint") == "7000000000000000000000", "must not have exponent format")
}
<filename>sysinv/cgts-client/cgts-client/cgtsclient/v1/helm.py
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# -*- encoding: utf-8 -*-
#

from cgtsclient.common import base


class Helm(base.Resource):
    # Thin wrapper around a helm-chart API resource returned by the server.
    def __repr__(self):
        return "<helm %s>" % self._info


class HelmManager(base.Manager):
    # REST client manager for the /v1/helm_charts endpoints.
    resource_class = Helm

    @staticmethod
    def _path(name=''):
        # Builds the helm-charts URL, optionally scoped to an app name.
        return '/v1/helm_charts/%s' % name

    def list_charts(self, app):
        """Get list of charts

        :param app: name of the application

        For each chart it will show any overrides for that chart along
        with the namespace of the overrides.
        """
        return self._list(self._path() + '?app_name=' + app, 'charts')

    def get_overrides(self, app, name, namespace):
        """Get overrides for a given chart.

        :param app: name of the application
        :param name: name of the chart
        :param namespace: namespace for the chart overrides

        This will return the end-user, system, and combined overrides for
        the specified chart, or None when the chart is not found.
        """
        try:
            return self._list(self._path(app) + '?name=' + name +
                              '&namespace=' + namespace)[0]
        except IndexError:
            # Empty result list: no such chart/namespace combination.
            return None

    def update_overrides(self, app, name, namespace,
                         flag='reset', override_values=None):
        """Update overrides for a given chart.

        :param app: name of the application
        :param name: name of the chart
        :param namespace: namespace for the chart overrides
        :param flag: 'reuse' or 'reset' to indicate how to handle existing
                     user overrides for this chart
        :param override_values: a dict representing the overrides

        This will return the end-user overrides for the specified chart.
        """
        if override_values is None:
            override_values = {}
        body = {'flag': flag, 'values': override_values, 'attributes': {}}
        return self._update(self._path(app) + '?name=' + name +
                            '&namespace=' + namespace, body)

    def delete_overrides(self, app, name, namespace):
        """Delete overrides for a given chart.

        :param app: name of the application
        :param name: name of the chart
        :param namespace: namespace for the chart overrides
        """
        return self._delete(self._path(app) + '?name=' + name +
                            '&namespace=' + namespace)

    def update_chart(self, app, name, namespace, attributes=None):
        """Update non-override attributes for a given chart.

        :param app: name of the application
        :param name: name of the chart
        :param namespace: namespace for the chart overrides
        :param attributes: dict of chart attributes to be updated

        This will return the updated attributes for the specified chart.
        """
        if not attributes:
            attributes = {}
        # flag=None / empty values signals "attributes-only" update to the API.
        body = {'flag': None, 'values': {}, 'attributes': attributes}
        return self._update(self._path(app) + '?name=' + name +
                            '&namespace=' + namespace, body)
// NOTE(review): despite the original `// @flow` pragma, this file uses
// TypeScript syntax (access modifiers, `?:` annotations, generics); treat
// it as TypeScript.
import type {
    AxiosRequestConfig,
    AxiosError,
    AxiosResponse,
    AxiosInstance,
} from 'axios';
import axios from 'axios';
import Router from 'next/router';
import * as cookie from 'js-cookie';

import { JWT_TOKEN } from '../definations';
import { IsSSR } from '../utils';

/**
 * Static helper around axios for calls to the `/api` backend.
 * Attaches a JWT bearer header from cookies (browser only), unwraps
 * `response.data.data` on success, and redirects to /logout on 401.
 */
export class HttpRequest {
    // Builds the common request headers; the Authorization header is only
    // added client-side, when a non-"undefined" token cookie exists.
    private static buildHeader = (): any => {
        const headers: any = {
            'Access-Control-Allow-Origin': '*',
            'Content-Type': 'application/json',
        };

        let token: string | undefined = '';
        if (!IsSSR()) {
            token = cookie.get(JWT_TOKEN);
        }
        if (token && token !== 'undefined') {
            headers.Authorization = `Bearer ${token}`;
        }
        return headers;
    };

    // Creates a fresh axios instance per request so headers are recomputed
    // (e.g. after login changes the token cookie).
    private static buildRequest = (
        params?: AxiosRequestConfig
    ): AxiosInstance => {
        return axios.create({
            baseURL: '/api',
            headers: HttpRequest.buildHeader(),
            ...params,
        });
    };

    // On success resolves with the unwrapped `data.data` payload; on a 401
    // redirects to /logout (preserving the current route for redirect-back);
    // any other error rethrows the server's response body.
    private static catchRequest = (
        requestFnc: () => Promise<AxiosResponse<any>>
    ): Promise<any> => {
        return requestFnc()
            .then(({ data }: any) => Promise.resolve(data.data))
            .catch((err: AxiosError<any>): boolean | Promise<boolean> => {
                if (err.response && err.response.status === 401) {
                    const currentRoute = Router.pathname;
                    return Router.push(
                        `/logout${
                            !currentRoute ? '' : `?redirect=${currentRoute}`
                        }`
                    );
                }
                throw err.response?.data;
            });
    };

    /** HTTP GET; resolves with the unwrapped response payload. */
    static Get<T>(url: string, config?: AxiosRequestConfig): Promise<T> {
        return HttpRequest.catchRequest(() =>
            HttpRequest.buildRequest().get(url, config)
        );
    }

    /** HTTP POST; resolves with the unwrapped response payload. */
    static Post<T>(
        url: string,
        data?: object,
        config?: AxiosRequestConfig
    ): Promise<T> {
        return HttpRequest.catchRequest(() =>
            HttpRequest.buildRequest().post(url, data, config)
        );
    }

    /** HTTP PUT; resolves with the unwrapped response payload. */
    static Put<T>(
        url: string,
        data?: object,
        config?: AxiosRequestConfig
    ): Promise<T> {
        return HttpRequest.catchRequest(() =>
            HttpRequest.buildRequest().put(url, data, config)
        );
    }

    /** HTTP DELETE; resolves with the unwrapped response payload. */
    static Delete<T>(url: string, config?: AxiosRequestConfig): Promise<T> {
        return HttpRequest.catchRequest(() =>
            HttpRequest.buildRequest().delete(url, config)
        );
    }
}
// Copyright (c) FIRST and other WPILib contributors. // Open Source Software; you can modify and/or share it under the terms of // the WPILib BSD license file in the root directory of this project. package edu.wpi.first.wpiutil.math; public final class MathUtil { private MathUtil() { throw new AssertionError("utility class"); } /** * Returns value clamped between low and high boundaries. * * @param value Value to clamp. * @param low The lower boundary to which to clamp value. * @param high The higher boundary to which to clamp value. */ public static int clamp(int value, int low, int high) { return Math.max(low, Math.min(value, high)); } /** * Returns value clamped between low and high boundaries. * * @param value Value to clamp. * @param low The lower boundary to which to clamp value. * @param high The higher boundary to which to clamp value. */ public static double clamp(double value, double low, double high) { return Math.max(low, Math.min(value, high)); } /** * Constrains theta to within the range (-pi, pi]. * * @param theta The angle to normalize. * @return The normalized angle. */ @SuppressWarnings("LocalVariableName") public static double normalizeAngle(double theta) { // Constraint theta to within (-3pi, pi) int nPiPos = (int) ((theta + Math.PI) / 2.0 / Math.PI); theta -= nPiPos * 2.0 * Math.PI; // Cut off the bottom half of the above range to constrain within // (-pi, pi] int nPiNeg = (int) ((theta - Math.PI) / 2.0 / Math.PI); theta -= nPiNeg * 2.0 * Math.PI; return theta; } }
#!/bin/bash
# https://adventofcode.com/days/day/7 puzzle #1
# See README.md in the parent directory

in="${1:-${0%-[0-9].*}.input}"; [[ -e $in ]] || exit 1
#TEST: example 37
#TEST: input 336131

# brute force: we compute the rule for each position inside the crabs

# array of positions of each crab
read -r -a crabs < <(tr ',' ' ' <"$in")

# compute the range of possible positions, min&max of the occupied ones
positions="$(tr ',' '\n' <"$in" |sort -n |uniq)"
minpos=$(echo "$positions" | head -1)
maxpos=$(echo "$positions" | tail -1)

# start with valid, but maybe non-optimal values
optimalpos="$minpos"
optimalfuel=$(( (maxpos - minpos) * ${#crabs[@]} ))

# iterate by computing fuel costs for all possible positions
for ((pos=minpos; pos <= maxpos; pos++)); do
    fuel=0
    for crab in "${crabs[@]}"; do
        # The arithmetic ternary yields |crab - pos| directly, replacing the
        # `A && B || C` construct that needed a shellcheck SC2015 waiver.
        ((fuel += crab >= pos ? crab - pos : pos - crab))
    done
    if ((fuel < optimalfuel)); then
        optimalpos="$pos"
        optimalfuel="$fuel"
    fi
done

echo "Position: $optimalpos, fuel: $optimalfuel"
#! /bin/sh
#PBS -l nodes=1:ppn=1
#PBS -l walltime=40:00:00
#PBS -j oe

# When running under PBS/TORQUE (job name set), load the user environment,
# change to the submission directory, and load the compiler module.
if [ -n "$PBS_JOBNAME" ]
then
    source "${PBS_O_HOME}/.bash_profile"
    cd "$PBS_O_WORKDIR"
    module load gcc/5.3.0
fi

# Output prefix for this analysis run.
# NOTE(review): $prefix is defined but the command below repeats the
# literal path instead of reusing it — consider substituting "$prefix".
prefix=../../gekko-output/run-0

# Fixed-seed ecoevolity run; stdout and stderr are captured alongside the
# run outputs.
ecoevolity --seed 56718824 --prefix ../../gekko-output/run-0 --relax-missing-sites --relax-constant-sites --relax-triallelic-sites ../../configs/gekko-conc044-rate100.yml 1>../../gekko-output/run-0-gekko-conc044-rate100.out 2>&1
const { execSync } = require('child_process');
const path = require('path');

const packJson = require(path.resolve(process.cwd(), './package.json'));

// App name and build number are required positional arguments.
const [appName, buildToPromote] = process.argv.slice(2);
if (!appName || !buildToPromote) {
  console.log(`Missing required parameters, usage: node ${process.argv[1]} [app name] [build to promote]`);
  process.exit(1);
}

// Obligatory version from package.json.
// BUG FIX: the original read `version.length`, which threw a TypeError
// (instead of printing the friendly message) when the field was absent.
const version = packJson.version;
if (!version) {
  console.log('missing version in package.json ?');
  process.exit(1);
}

// Copy from the staging folder to the production folder on the CDN.
const cdnSourcePath = `s3://cdn.openfin.co/services-staging/openfin/${appName}/${version}-${buildToPromote}`;
const cdnDestPath = `s3://cdn.openfin.co/services/openfin/${appName}/${version}`;
const deployCmd = `aws s3 cp ${cdnSourcePath} ${cdnDestPath} --recursive`;
console.log(`dep cmd: ${deployCmd}`);
// NOTE(review): the actual copy was already disabled in the original;
// uncomment the next line to perform the promotion for real.
//execSync(deployCmd, { stdio: [0,1,2]});
/**
 * Copyright 2020 The Magma Authors.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @flow strict-local
 * @format
 */

import typeof SvgIcon from '@material-ui/core/@@SvgIcon';
import type {
  ErrorHandlingProps,
  PermissionHandlingProps,
} from '../Form/FormAction';

import * as React from 'react';
import FormAction from '@fbcnms/ui/components/design-system/Form/FormAction';
import FormElementContext from '@fbcnms/ui/components/design-system/Form/FormElementContext';
import Text from '../Text';
import classNames from 'classnames';
import symphony from '../../../theme/symphony';
import {makeStyles} from '@material-ui/styles';

const useStyles = makeStyles(() => ({
  option: {
    display: 'flex',
    alignItems: 'center',
    padding: '8px 16px',
    cursor: 'pointer',
    '&:not($disabled)&:hover': {
      backgroundColor: symphony.palette.background,
    },
    // Tighter padding when a left adornment is present.
    '&$optionWithLeftAux': {
      paddingLeft: '12px',
      paddingTop: '6px',
      paddingBottom: '6px',
    },
  },
  optionWithLeftAux: {},
  disabled: {
    opacity: 0.38,
    cursor: 'not-allowed',
  },
  label: {
    flexGrow: 1,
  },
  checkIcon: {
    marginLeft: '6px',
    color: symphony.palette.primary,
  },
  leftAux: {
    display: 'inline-flex',
    marginRight: '8px',
  },
  contentContainer: {
    display: 'flex',
    flexDirection: 'column',
  },
}));

// Optional leading adornment for a menu item: either an SvgIcon component
// type or an arbitrary React node.
export type MenuItemLeftAux = $ReadOnly<
  | {|
      type: 'icon',
      icon: SvgIcon,
    |}
  | {
      type: 'node',
      node: React.Node,
    },
>;

export type SelectMenuItemBaseProps<TValue> = $ReadOnly<{|
  label: React.Node,
  value: TValue,
  isSelected?: boolean,
  className?: ?string,
  leftAux?: MenuItemLeftAux,
  secondaryText?: React.Node,
  disabled?: boolean,
  skin?: 'inherit' | 'red',
  ...PermissionHandlingProps,
  ...ErrorHandlingProps,
|}>;

type Props<TValue> = $ReadOnly<{|
  ...SelectMenuItemBaseProps<TValue>,
  onClick: (value: TValue) => void,
|}>;

/**
 * A single selectable row of a select menu. Renders an optional left
 * adornment, the main label with optional secondary text, and calls
 * `onClick(value)` on click unless the surrounding FormElementContext
 * reports the element as disabled.
 */
const SelectMenuItem = <TValue>({
  label,
  value,
  onClick,
  isSelected = false,
  hideOnMissingPermissions = false,
  className,
  leftAux,
  secondaryText,
  skin = 'inherit',
  disabled: disabledProp = false,
  ...actionProps
}: Props<TValue>) => {
  const classes = useStyles();
  const LeftIcon = leftAux?.type === 'icon' ? leftAux.icon : null;
  // Disabled items always render in the neutral skin; 'red' maps to the
  // Text component's 'error' color.
  const coercedSkin = disabledProp
    ? 'inherit'
    : skin === 'red'
    ? 'error'
    : skin;
  return (
    <FormAction
      {...actionProps}
      disabled={disabledProp}
      hideOnMissingPermissions={hideOnMissingPermissions}>
      <FormElementContext.Consumer>
        {({disabled}) => {
          return (
            <div
              className={classNames(classes.option, className, {
                [classes.disabled]: disabled,
                [classes.optionWithLeftAux]: leftAux != null,
              })}
              onClick={disabled ? null : () => onClick(value)}>
              {leftAux != null && (
                <div className={classes.leftAux}>
                  {leftAux.type === 'icon'
                    ? LeftIcon != null && (
                        <LeftIcon
                          color={isSelected ? 'primary' : coercedSkin}
                          size="small"
                        />
                      )
                    : leftAux.node}
                </div>
              )}
              <div className={classes.contentContainer}>
                <Text
                  className={classes.label}
                  variant="body2"
                  color={isSelected ? 'primary' : coercedSkin}>
                  {label}
                </Text>
                {secondaryText != null && (
                  <Text color="gray" variant="caption">
                    {secondaryText}
                  </Text>
                )}
              </div>
            </div>
          );
        }}
      </FormElementContext.Consumer>
    </FormAction>
  );
};

export default SelectMenuItem;
import asyncio

import msgpack
import pytest
import websockets

import wsrpc


class SampleHandler:
    # RPC handler exposed to clients; its `rpc_`-prefixed coroutines are the
    # remotely callable methods.
    def __init__(self, rpc):
        self.remote = rpc

    async def rpc_add(self, a, b):
        await asyncio.sleep(0.001)
        return a + b

    async def rpc_foo(self):
        await asyncio.sleep(1)


async def run_client(port):
    # Client-side scenario: successful requests, a notification, a malformed
    # frame, a call to a missing method, and a clean shutdown.
    async with websockets.connect(f"ws://127.0.0.1:{port}/") as ws:
        client = wsrpc.WebsocketRPC(ws=ws, client_mode=True)
        # NOTE(review): presumably caps request ids so the 10 calls below
        # exercise id wraparound — confirm against wsrpc internals.
        client.max_id = 5
        for i in range(10):
            r = await client.request.add(3, i)
            assert r == 3 + i
        # Notifications do not return a result.
        r = await client.notify.add(3, 4)
        assert r is None
        # Malformed frame: the server must survive it.
        await ws.send(msgpack.packb([3, "a"]))
        # Calling an unknown method must raise RemoteCallError.
        with pytest.raises(wsrpc.RemoteCallError):
            r = await client.request.none(3, 5)
        await client.notify.foo()
        await client.close()


async def go(ws, path):
    # Server-side connection handler; registers both a sync and an async
    # exception hook before serving.
    rpc = wsrpc.WebsocketRPC(ws, SampleHandler)

    @rpc.exception
    def exc_handler(e):
        pass

    @rpc.exception
    async def async_exc_handler(e):
        pass

    await rpc.run()


def test_rpc(unused_tcp_port):
    # Start a websocket server, run the client scenario against it, then
    # tear everything down.
    start_server = websockets.serve(go, "127.0.0.1", unused_tcp_port)
    loop = asyncio.get_event_loop()
    server = loop.run_until_complete(start_server)
    loop.run_until_complete(run_client(unused_tcp_port))
    server.close()
    loop.run_until_complete(server.wait_closed())
    loop.close()
<reponame>songkanggit/GodViewMap<filename>mapui/src/main/java/com/fhalo/application/adapter/IndexPagerAdapter.java package com.fhalo.application.adapter; import java.util.ArrayList; import java.util.List; import android.support.v4.view.PagerAdapter; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; public class IndexPagerAdapter extends PagerAdapter { List<ImageView> imageList=new ArrayList<ImageView>(); public IndexPagerAdapter(List<ImageView> imageList){ this.imageList=imageList; } @Override public int getCount() { // TODO Auto-generated method stub return imageList.size(); } @Override public boolean isViewFromObject(View arg0, Object arg1) { // TODO Auto-generated method stub return arg0==arg1; } @Override public Object instantiateItem(ViewGroup container, int position) { // TODO Auto-generated method stub container.addView(imageList.get(position)); return imageList.get(position); } @Override public void destroyItem(ViewGroup container, int position, Object object) { // TODO Auto-generated method stub container.removeView((View) object); } }
#!/bin/bash
set -e

# Fix: quote scalar and array expansions throughout so paths containing
# spaces cannot word-split or glob (shellcheck SC2068/SC2086).
DEPLOY_DIR="__deploy"
TMP_DIR="$DEPLOY_DIR/__tmp"   # NOTE(review): defined but currently unused
COMMON_DIR="../docker-common"
COMMON_OUT_DIR="common"
COGSTACK_OUT_DIR="cogstack"
DB_DIR="db_dump"

# main entry point
#
echo "Generating deployment scripts"
if [ -e "$DEPLOY_DIR" ]; then rm -rf "$DEPLOY_DIR"; fi
mkdir "$DEPLOY_DIR"

# used services
#
services=(pgjobrepo pgsamples elasticsearch kibana)

# document-type use-cases
#
doc_types=(docx pdf-text pdf-img jpg)

for dt in "${doc_types[@]}"; do
    echo "Generating use-case: ${dt}"
    dp="$DEPLOY_DIR/${dt}"
    mkdir "${dp}"

    # copy database dump
    #
    echo "-- copying db dump file"
    DATA_SIZE="small"
    db_file="$DB_DIR/db_samples-${dt}-$DATA_SIZE.sql.gz"
    if [ ! -e "$db_file" ]; then
        echo "DB dump file: $db_file does not exist"
        exit 1
    fi
    mkdir "${dp}/$DB_DIR"
    cp "$db_file" "${dp}/$DB_DIR/db_samples.sql.gz"

    # copy the relevant common data
    #
    echo "-- copying the configuration files for the common docker images"
    if [ -e "${dp}/$COMMON_OUT_DIR" ]; then rm -r "${dp}/$COMMON_OUT_DIR"; fi
    mkdir "${dp}/$COMMON_OUT_DIR"
    for sv in "${services[@]}"; do
        echo "---- setting up: ${sv}"
        cp -r "$COMMON_DIR/${sv}" "${dp}/$COMMON_OUT_DIR/"
    done

    # setup cogstack
    #
    echo "-- copying CogStack files"
    mkdir "${dp}/$COGSTACK_OUT_DIR"
    cp -r cogstack/* "${dp}/$COGSTACK_OUT_DIR/"
    cp cogstack/run_pipeline.sh "${dp}/$COGSTACK_OUT_DIR/"

    # copy docker files
    #
    echo "-- copying docker-compose file"
    cp docker/docker-compose.override.yml "${dp}/"
    cp "$COMMON_DIR/docker-compose.yml" "${dp}/"
done

echo "Done."
import { clearExecRoutes, clearMiddlewares, clearRoutes, getExecRoutes, getMiddlewareInitial, getMiddlewares, getRouterInitial, getRoutes, setApp, setMiddlewares, setParamsExecRoutes, setServer, } from "./entity.ts"; import { App } from "./app.ts"; import { DecorationApplication } from "./application.ts"; import { ListenOptions, MethodFuncArgument, MiddlewareFunc } from "../model.ts"; // deno-lint-ignore ban-types const consumeApplication: ClassDecorator = (target: Function) => { const middleware = getMiddlewareInitial(); const router = getRouterInitial(); const path = target.prototype.decorator_prefix_min || ""; getMiddlewares().forEach((val) => { middleware.push(val); }); getRoutes().forEach((val) => { router[val.method](path + val.path, val.handler, val.middleware); }); clearMiddlewares(); clearRoutes(); }; // deno-lint-ignore ban-types const consumeRoutes: ClassDecorator = (target: Function) => { const router = getRouterInitial(); const path = target.prototype.decorator_prefix_min || ""; getRoutes().forEach((val) => { router[val.method]( path + val.path, val.handler, getMiddlewares().concat(val.middleware), ); }); clearMiddlewares(); clearRoutes(); }; const consumeExecRoutes = () => { const router = getRouterInitial(); getExecRoutes().forEach((value) => { // 查找需要增加exec指令的路由 const { method, url, exec } = value; const find = router.find(method, url); if (find) { // 如果找到了, 更改exec find.exec.push(exec); } }); clearExecRoutes(); }; const StartApplication: ClassDecorator = (target) => { setApp(new DecorationApplication()); consumeApplication(target); consumeExecRoutes(); return target; }; const Route: ClassDecorator = (target) => { consumeRoutes(target); consumeExecRoutes(); return target; }; const Prefix = (path: string): ClassDecorator => { return (target) => { target.prototype.decorator_prefix_min = path; return target; }; }; const Middleware: MethodDecorator = <T>( target: unknown, propertyKey: string | symbol, descriptor: TypedPropertyDescriptor<T>, ) => { const 
isFunction = (func: unknown): func is MiddlewareFunc => { return typeof func === "function"; }; if (isFunction(descriptor.value)) { setMiddlewares(descriptor.value); } else { throw Error(`${descriptor.value} is not a Function`); } return descriptor; }; const ApplyMiddleware = (args: MethodFuncArgument): MethodDecorator => { args.forEach((val) => { setMiddlewares(val); }); return (target, propertyKey, descriptor) => { return descriptor; }; }; const Start = (server: ListenOptions): MethodDecorator => { setServer(server); return (target: unknown, propertyKey: string | symbol, descriptor) => { return descriptor; }; }; const Query = (qid?: string): ParameterDecorator => { const exec = qid ? `query.${qid}` : "query"; // deno-lint-ignore ban-types return (target: Object, propertyKey: string | symbol) => { const func = Reflect.getOwnPropertyDescriptor(target, propertyKey)?.value as | (() => void | Promise<void>) | undefined; if (!func) { throw Error("Query decorator can only be used as function parameter"); } // 增加执行exec操作指令到params指令数组 setParamsExecRoutes(exec, func); }; }; const Param = (pid?: string): ParameterDecorator => { const exec = pid ? `params.${pid}` : "params"; // deno-lint-ignore ban-types return (target: Object, propertyKey: string | symbol) => { const func = Reflect.getOwnPropertyDescriptor(target, propertyKey)?.value as | (() => void | Promise<void>) | undefined; if (!func) { throw Error("Query decorator can only be used as function parameter"); } // 增加执行exec操作指令到params指令数组 setParamsExecRoutes(exec, func); }; }; const Body = (bid?: string): ParameterDecorator => { const exec = bid ? 
`body.value.${bid}` : "body.value"; // deno-lint-ignore ban-types return (target: Object, propertyKey: string | symbol) => { const func = Reflect.getOwnPropertyDescriptor(target, propertyKey)?.value as | (() => void | Promise<void>) | undefined; if (!func) { throw Error("Query decorator can only be used as function parameter"); } // 增加执行exec操作指令到params指令数组 setParamsExecRoutes(exec, func); }; }; export { App, ApplyMiddleware, Body, Middleware, Param, Prefix, Query, Route, Start, StartApplication, }; export { Connect, Delete, Get, Head, Options, Patch, Post, Put, Trace, } from "./decorator.method.ts";
# Copyright Ⓒ 2020 "Sberbank Real Estate Center" Limited Liability Company. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the Software # is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. # Возвращает текущее время в виде количества секунд с начала эпохи # Используется для измерения временных интервалов . "${lib_dir}/is_function_absent.bash" if is_function_absent 'now' then function now { date '+%s';} readonly -f now fi
package dev.arkav.openoryx.net.packets.s2c;

import dev.arkav.openoryx.net.data.Packet;
import dev.arkav.openoryx.net.data.WorldPosData;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * Server-to-client "goto" packet carrying an object identifier and a world
 * position. The field order in read()/write() is the wire format and must
 * not be changed: a 32-bit objectId followed by the serialized position.
 */
public class GotoPacket implements Packet {
    // Identifier of the game object this packet refers to.
    public int objectId;
    // Target world position; serialized/deserialized via WorldPosData.
    public WorldPosData pos;

    public GotoPacket() {
    }

    /**
     * Deserializes this packet from the stream: objectId first, then the
     * position (delegated to WorldPosData.read).
     */
    public void read(DataInput in) throws IOException {
        this.objectId = in.readInt();
        this.pos = new WorldPosData();
        this.pos.read(in);
    }

    /**
     * Serializes this packet to the stream; must mirror read() exactly.
     */
    public void write(DataOutput out) throws IOException {
        out.writeInt(this.objectId);
        this.pos.write(out);
    }
}
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_store_mall_directory_outline = void 0;

// Auto-generated icon definition for the Material Design
// "store mall directory" (outline) glyph, consumed by react-icons-kit.
// The object mirrors the source SVG: a 24x24 viewBox containing an invisible
// bounding-box path (fill: none) followed by the visible outline path.
var ic_store_mall_directory_outline = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M0 0h24v24H0V0z",
      "fill": "none"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M18.36 9l.6 3H5.04l.6-3h12.72M20 4H4v2h16V4zm0 3H4l-1 5v2h1v6h10v-6h4v6h2v-6h1v-2l-1-5zM6 18v-4h6v4H6z"
    },
    "children": []
  }]
};
exports.ic_store_mall_directory_outline = ic_store_mall_directory_outline;
<reponame>muehleisen/OpenStudio /*********************************************************************************************************************** * OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the * following conditions are met: * * (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following * disclaimer. * * (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided with the distribution. * * (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products * derived from this software without specific prior written permission from the respective party. * * (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works * may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior * written permission from Alliance for Sustainable Energy, LLC. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED * STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ***********************************************************************************************************************/ #include "LocalBCL.hpp" #include "RemoteBCL.hpp" #include "../core/Assert.hpp" #include "../core/PathHelpers.hpp" #include "../core/StringHelpers.hpp" #include "../core/System.hpp" #include "../core/UnzipFile.hpp" #include <regex> #define REMOTE_PRODUCTION_SERVER "https://bcl.nrel.gov" #define REMOTE_DEVELOPMENT_SERVER "http://bcl7.development.nrel.gov" using namespace utility::conversions; namespace openstudio { std::ostream& operator<<(std::ostream& os, const pugi::xml_document& element) { element.save(os, " "); return os; } RemoteQueryResponse::RemoteQueryResponse(std::shared_ptr<pugi::xml_document>& domDocument) : m_domDocument(domDocument) {} pugi::xml_node RemoteQueryResponse::root() const { return m_domDocument->document_element(); } // TODO: please note that you should use getClient everywhere after instead of instantiating your own http_client_config // as it will allow us to change http_client_config (SSL settings etc) in only one place web::http::client::http_client RemoteBCL::getClient(const std::string& url, unsigned timeOutSeconds) { web::http::client::http_client_config config; // bcl.nrel.gov can be slow to respond to client requests so bump the default of 30 seconds to 60 to account for lengthy response time. 
// this is timeout is for each send and receive operation on the client and not the entire client session. config.set_timeout(std::chrono::seconds(timeOutSeconds)); config.set_validate_certificates(false); return web::http::client::http_client(utility::conversions::to_string_t(url), config); } unsigned RemoteBCL::timeOutSeconds() const { return m_timeOutSeconds; } bool RemoteBCL::setTimeOutSeconds(unsigned timeOutSeconds) { if (timeOutSeconds < 10) { LOG(Error, "Setting a timeout of " << timeOutSeconds << " is too low."); return false; } else if (timeOutSeconds < 60) { LOG(Warn, "Setting a timeout of " << timeOutSeconds << " appears low and you risk failures to download components and measures"); } m_timeOutSeconds = timeOutSeconds; return true; } bool RemoteBCL::DownloadFile::open() { OS_ASSERT(!m_fileName.empty()); m_ofs.open(toString(m_fileName).c_str(), std::ios_base::trunc | std::ios_base::out | std::ios_base::binary); return m_ofs.good(); } RemoteBCL::DownloadFile::DownloadFile(openstudio::path t_path) : m_fileName(std::move(t_path)) {} void RemoteBCL::DownloadFile::flush() { if (m_ofs.good()) m_ofs.flush(); } const openstudio::path& RemoteBCL::DownloadFile::fileName() const noexcept { return m_fileName; } void RemoteBCL::DownloadFile::close() { m_ofs.close(); } void RemoteBCL::DownloadFile::write(const std::vector<unsigned char>& data) { m_ofs.write(reinterpret_cast<const char*>(data.data()), data.size()); } RemoteBCL::RemoteBCL() : m_prodAuthKey(LocalBCL::instance().prodAuthKey()), m_devAuthKey(LocalBCL::instance().devAuthKey()), m_numResultsPerQuery(10), m_lastTotalResults(0), m_apiVersion("2.0"), validProdAuthKey(false), validDevAuthKey(false), m_timeOutSeconds(120) { useRemoteProductionUrl(); } RemoteBCL::~RemoteBCL() {} /////////////////////////////////////////////////////////////////////////// /// Inherited members /////////////////////////////////////////////////////////////////////////// boost::optional<BCLComponent> RemoteBCL::getComponent(const 
std::string& uid, const std::string& versionId) const { bool downloadStarted = const_cast<RemoteBCL*>(this)->downloadComponent(uid); if (downloadStarted) { return waitForComponentDownload(); } return boost::none; } boost::optional<BCLMeasure> RemoteBCL::getMeasure(const std::string& uid, const std::string& versionId) const { bool downloadStarted = const_cast<RemoteBCL*>(this)->downloadMeasure(uid); if (downloadStarted) { return waitForMeasureDownload(); } return boost::none; } boost::optional<BCLMetaSearchResult> RemoteBCL::metaSearchComponentLibrary(const std::string& searchTerm, const std::string& componentType, const std::string& filterType) const { bool searchStarted = const_cast<RemoteBCL*>(this)->startComponentLibraryMetaSearch(searchTerm, componentType, filterType); if (searchStarted) { return waitForMetaSearch(); } return boost::none; } boost::optional<BCLMetaSearchResult> RemoteBCL::metaSearchComponentLibrary(const std::string& searchTerm, const unsigned componentTypeTID, const std::string& filterType) const { bool searchStarted = const_cast<RemoteBCL*>(this)->startComponentLibraryMetaSearch(searchTerm, componentTypeTID, filterType); if (searchStarted) { return waitForMetaSearch(); } return boost::none; } std::vector<BCLSearchResult> RemoteBCL::searchComponentLibrary(const std::string& searchTerm, const std::string& componentType, const unsigned page) const { // Perform metaSearch first metaSearchComponentLibrary(searchTerm, componentType, "nrel_component"); if (lastTotalResults() == 0) { return std::vector<BCLSearchResult>(); } bool searchStarted = const_cast<RemoteBCL*>(this)->startComponentLibrarySearch(searchTerm, componentType, "nrel_component", page); if (searchStarted) { return waitForSearch(); } return std::vector<BCLSearchResult>(); } std::vector<BCLSearchResult> RemoteBCL::searchComponentLibrary(const std::string& searchTerm, const unsigned componentTypeTID, const unsigned page) const { // Perform metaSearch first 
metaSearchComponentLibrary(searchTerm, componentTypeTID, "nrel_component"); if (lastTotalResults() == 0) { return std::vector<BCLSearchResult>(); } bool searchStarted = const_cast<RemoteBCL*>(this)->startComponentLibrarySearch(searchTerm, componentTypeTID, "nrel_component", page); if (searchStarted) { return waitForSearch(); } return std::vector<BCLSearchResult>(); } std::vector<BCLSearchResult> RemoteBCL::searchMeasureLibrary(const std::string& searchTerm, const std::string& componentType, const unsigned page) const { // Perform metaSearch first metaSearchComponentLibrary(searchTerm, componentType, "nrel_measure"); if (lastTotalResults() == 0) { return std::vector<BCLSearchResult>(); } bool searchStarted = const_cast<RemoteBCL*>(this)->startComponentLibrarySearch(searchTerm, componentType, "nrel_measure", page); if (searchStarted) { return waitForSearch(); } return std::vector<BCLSearchResult>(); } std::vector<BCLSearchResult> RemoteBCL::searchMeasureLibrary(const std::string& searchTerm, const unsigned componentTypeTID, const unsigned page) const { // Perform metaSearch first metaSearchComponentLibrary(searchTerm, componentTypeTID, "nrel_measure"); if (lastTotalResults() == 0) { return std::vector<BCLSearchResult>(); } bool searchStarted = const_cast<RemoteBCL*>(this)->startComponentLibrarySearch(searchTerm, componentTypeTID, "nrel_measure", page); if (searchStarted) { return waitForSearch(); } return std::vector<BCLSearchResult>(); } int RemoteBCL::checkForComponentUpdates() { m_componentsWithUpdates.clear(); for (const BCLComponent& component : LocalBCL::instance().components()) { // can't start another request until last one is done if (m_httpResponse && !m_httpResponse->is_done()) { return false; } m_lastSearch.clear(); auto client = getClient(remoteUrl(), m_timeOutSeconds); web::uri_builder builder(U("/api/search/")); builder.append_path(U("*.xml")); builder.append_query(U("fq[]"), to_string_t("ss_uuid:" + component.uid())); 
builder.append_query(U("api_version"), to_string_t(m_apiVersion)); // LOG(Debug, m_remoteUrl << builder.to_string()); m_httpResponse = client.request(web::http::methods::GET, builder.to_string()) .then([](web::http::http_response resp) { return resp.extract_utf8string(); }) .then([this](const std::string& xml) { auto remoteQueryResponse = processReply(xml); if (remoteQueryResponse) { m_lastSearch = processSearchResponse(*remoteQueryResponse); } }); std::vector<BCLSearchResult> result = waitForSearch(); if (result.size() > 0 && result[0].versionId() != component.versionId()) { m_componentsWithUpdates.push_back(result[0]); } } return m_componentsWithUpdates.size(); } int RemoteBCL::checkForMeasureUpdates() { m_measuresWithUpdates.clear(); for (const BCLMeasure& measure : LocalBCL::instance().measures()) { // can't start another request until last one is done if (m_httpResponse && !m_httpResponse->is_done()) { return false; } m_lastSearch.clear(); auto client = getClient(remoteUrl(), m_timeOutSeconds); web::uri_builder builder(U("/api/search/")); builder.append_path(U("*.xml")); builder.append_query(U("fq[]"), to_string_t("ss_uuid:" + measure.uid())); builder.append_query(U("api_version"), to_string_t(m_apiVersion)); // LOG(Debug, m_remoteUrl << builder.to_string()); m_httpResponse = client.request(web::http::methods::GET, builder.to_string()) .then([](web::http::http_response resp) { return resp.extract_utf8string(); }) .then([this](const std::string& xml) { auto remoteQueryResponse = processReply(xml); if (remoteQueryResponse) { m_lastSearch = processSearchResponse(*remoteQueryResponse); } }); std::vector<BCLSearchResult> result = waitForSearch(); if (result.size() > 0 && result[0].versionId() != measure.versionId()) { m_measuresWithUpdates.push_back(result[0]); } } return m_measuresWithUpdates.size(); } std::vector<BCLSearchResult> RemoteBCL::componentsWithUpdates() const { return m_componentsWithUpdates; } std::vector<BCLSearchResult> 
RemoteBCL::measuresWithUpdates() const { return m_measuresWithUpdates; } void RemoteBCL::updateComponents() { if (m_componentsWithUpdates.size() == 0) { checkForComponentUpdates(); } for (const BCLSearchResult& component : m_componentsWithUpdates) { downloadMeasure(component.uid()); boost::optional<BCLComponent> newComponent = waitForComponentDownload(); if (newComponent) { boost::optional<BCLComponent> oldComponent = LocalBCL::instance().getComponent(newComponent->uid()); if (oldComponent && oldComponent->versionId() != newComponent->versionId()) { LocalBCL::instance().removeComponent(*oldComponent); } } } } void RemoteBCL::updateMeasures() { if (m_measuresWithUpdates.size() == 0) { checkForMeasureUpdates(); } for (const BCLSearchResult& measure : m_measuresWithUpdates) { downloadMeasure(measure.uid()); boost::optional<BCLMeasure> newMeasure = waitForMeasureDownload(); if (newMeasure) { boost::optional<BCLMeasure> oldMeasure = LocalBCL::instance().getMeasure(newMeasure->uid()); if (oldMeasure && oldMeasure->versionId() != newMeasure->versionId()) { LocalBCL::instance().removeMeasure(*oldMeasure); } } } } /////////////////////////////////////////////////////////////////////////// /// Blocking class members /////////////////////////////////////////////////////////////////////////// bool RemoteBCL::isOnline() { try { auto ip = getClient("https://checkip.amazonaws.com/") .request(web::http::methods::GET) .then([](web::http::http_response response) { auto statusCode = response.status_code(); if (statusCode != 200) { std::stringstream ss; ss << "Error: response code was " << statusCode; return ss.str(); } auto body = response.extract_utf8string(true).get(); // Remove trailing line ending return body.erase(body.find_last_not_of("\n") + 1); }) .get(); std::regex ipRegex("^\\d{1,3}(?:\\.\\d{1,3}){3}$"); return std::regex_search(ip, ipRegex); } catch (const std::exception&) { // not online } return false; } boost::optional<BCLComponent> RemoteBCL::lastComponentDownload() 
const { return m_lastComponentDownload; } boost::optional<BCLMeasure> RemoteBCL::lastMeasureDownload() const { return m_lastMeasureDownload; } boost::optional<BCLMetaSearchResult> RemoteBCL::lastMetaSearch() const { return m_lastMetaSearch; } std::vector<BCLSearchResult> RemoteBCL::lastSearch() const { return m_lastSearch; } std::string RemoteBCL::remoteUrl() const { return m_remoteUrl; } std::string RemoteBCL::remoteProductionUrl() { return std::string(REMOTE_PRODUCTION_SERVER); } std::string RemoteBCL::remoteDevelopmentUrl() { return std::string(REMOTE_DEVELOPMENT_SERVER); } void RemoteBCL::useRemoteDevelopmentUrl() { m_useRemoteDevelopmentUrl = true; m_remoteUrl = remoteDevelopmentUrl(); m_authKey = m_devAuthKey; } void RemoteBCL::useRemoteProductionUrl() { m_useRemoteDevelopmentUrl = false; m_remoteUrl = remoteProductionUrl(); m_authKey = m_prodAuthKey; } std::string RemoteBCL::authKey() const { return m_authKey; } std::string RemoteBCL::prodAuthKey() const { return m_prodAuthKey; } bool RemoteBCL::setProdAuthKey(const std::string& prodAuthKey) { bool previousValidity = validProdAuthKey; if (validateAuthKey(prodAuthKey, remoteProductionUrl())) { m_prodAuthKey = prodAuthKey; if (!m_useRemoteDevelopmentUrl) { m_authKey = prodAuthKey; } return true; } validProdAuthKey = previousValidity; return false; } std::string RemoteBCL::devAuthKey() const { return m_devAuthKey; } bool RemoteBCL::setDevAuthKey(const std::string& devAuthKey) { bool previousValidity = validDevAuthKey; if (validateAuthKey(devAuthKey, remoteDevelopmentUrl())) { m_devAuthKey = devAuthKey; if (m_useRemoteDevelopmentUrl) { m_authKey = devAuthKey; } return true; } validDevAuthKey = previousValidity; return false; } int RemoteBCL::resultsPerQuery() const { return m_numResultsPerQuery; } int RemoteBCL::lastTotalResults() const { return m_lastTotalResults; } int RemoteBCL::numResultPages() const { double numerator(lastTotalResults()); double denominator(resultsPerQuery()); return int(std::ceil(numerator 
/ denominator)); } bool RemoteBCL::validateAuthKey(const std::string& authKey, const std::string& remoteUrl) { if (authKey.length() == 32) { std::string previousUrl = this->remoteUrl(); // Check if validation has already run for the given key if (remoteUrl == remoteProductionUrl() && authKey == prodAuthKey() && validProdAuthKey) { return true; } else if (remoteUrl == remoteDevelopmentUrl() && authKey == devAuthKey() && validDevAuthKey) { return true; } // Temporarily set url if (remoteUrl == remoteDevelopmentUrl()) { useRemoteDevelopmentUrl(); } else { useRemoteProductionUrl(); } // can't start another request until last one is done if (m_httpResponse && !m_httpResponse->is_done()) { return false; } m_lastSearch.clear(); auto client = getClient(remoteUrl, m_timeOutSeconds); web::uri_builder builder(U("/api/search/")); builder.append_path(U("*.xml")); builder.append_query(U("api_version"), to_string_t(m_apiVersion)); builder.append_query(U("show_rows"), U("0")); // LOG(Debug, m_remoteUrl << builder.to_string()); m_httpResponse = client.request(web::http::methods::GET, builder.to_string()) .then([](web::http::http_response resp) { return resp.extract_utf8string(); }) .then([this](const std::string& xml) { auto remoteQueryResponse = processReply(xml); if (remoteQueryResponse) { m_lastSearch = processSearchResponse(*remoteQueryResponse); } }); waitForSearch(); // Restore url if (previousUrl == remoteDevelopmentUrl()) { useRemoteDevelopmentUrl(); } else { useRemoteProductionUrl(); } if (remoteUrl == remoteDevelopmentUrl()) { return validDevAuthKey; } else { return validProdAuthKey; } } return false; } boost::optional<BCLComponent> RemoteBCL::waitForComponentDownload() const { if (waitForLock()) { return m_lastComponentDownload; } return boost::none; } boost::optional<BCLComponent> RemoteBCL::waitForComponentDownload(int) const { LOG(Warn, "waitForComponentDownload(int msec) is deprecated, the parameter is unused. 
Use waitForComponentDownload() instead"); return waitForComponentDownload(); } boost::optional<BCLMeasure> RemoteBCL::waitForMeasureDownload() const { if (waitForLock()) { return m_lastMeasureDownload; } return boost::none; } boost::optional<BCLMeasure> RemoteBCL::waitForMeasureDownload(int) const { LOG(Warn, "waitForMeasureDownloadint is deprecated, the parameter is unused. Use waitForMeasureDownload() instead"); return waitForMeasureDownload(); } boost::optional<BCLMetaSearchResult> RemoteBCL::waitForMetaSearch() const { if (waitForLock()) { return m_lastMetaSearch; } return boost::none; } boost::optional<BCLMetaSearchResult> RemoteBCL::waitForMetaSearch(int) const { LOG(Warn, "waitForMetaSearchint is deprecated, the parameter is unused. Use waitForMetaSearch() instead"); return waitForMetaSearch(); } std::vector<BCLSearchResult> RemoteBCL::waitForSearch() const { if (waitForLock()) { return m_lastSearch; } return std::vector<BCLSearchResult>(); } std::vector<BCLSearchResult> RemoteBCL::waitForSearch(int) const { LOG(Warn, "waitForSearchint is deprecated, the parameter is unused. 
Use waitForSearch() instead"); return waitForSearch(); } /////////////////////////////////////////////////////////////////////////// /// Non-blocking class members /////////////////////////////////////////////////////////////////////////// bool RemoteBCL::downloadComponent(const std::string& uid) { // Check for empty uid if (uid.empty()) { LOG(Error, "Error: No uid provided"); return false; } // can't start another request until last one is done if (m_httpResponse && !m_httpResponse->is_done()) { LOG(Debug, "Cannot get mutex lock"); return false; } m_downloadFile = std::make_unique<DownloadFile>(openstudio::filesystem::temp_directory_path() / toPath(uid + ".bcl")); if (!m_downloadFile->open()) { return false; } m_downloadUid = uid; // request.setRawHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.56 Safari/537.17"); auto client = getClient(remoteUrl(), m_timeOutSeconds); web::uri_builder builder(U("/api/component/download")); builder.append_query(U("uids"), to_string_t(uid)); web::http::http_request msg(web::http::methods::GET); msg.headers().add(U("User-Agent"), U("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.56 Safari/537.17")); msg.set_request_uri(builder.to_string()); // LOG(Debug, m_remoteUrl << builder.to_string()); m_httpResponse = client.request(web::http::methods::GET, builder.to_string()) .then([](web::http::http_response resp) { return resp.extract_vector(); }) .then([this](const std::vector<unsigned char>& zip) { m_downloadFile->write(zip); m_downloadFile->flush(); m_downloadFile->close(); }) .then([this]() { onDownloadComplete(); }); return true; } bool RemoteBCL::downloadMeasure(const std::string& uid) { return downloadComponent(uid); } bool RemoteBCL::startComponentLibraryMetaSearch(const std::string& searchTerm, const std::string& componentType, const std::string& filterType) { // can't start another request until last one is done if 
(m_httpResponse && !m_httpResponse->is_done()) { return false; } m_lastMetaSearch.reset(); auto client = getClient(remoteUrl(), m_timeOutSeconds); web::uri_builder builder(U("/api/metasearch/")); // web::uri::encode_data_string will Encodes a string by converting all characters // except for RFC 3986 unreserved characters to their hexadecimal representation. (eg: '+' => %2B, ' ' => %20) std::string query = searchTerm.empty() ? "*" : searchTerm; builder.append_path(web::uri::encode_data_string(utility::conversions::to_string_t(query + ".xml"))); builder.append_query(U("fq[]"), to_string_t("bundle:" + filterType)); if (!componentType.empty() && componentType != "*") { std::string filter = (filterType == "nrel_component") ? "sm_vid_Component_Tags" : "sm_vid_Measure_Tags"; filter += ":\"" + componentType + "\""; builder.append_query(U("fq[]"), to_string_t(filter)); } builder.append_query(U("api_version"), to_string_t(m_apiVersion)); m_httpResponse = client.request(web::http::methods::GET, builder.to_string()) .then([](web::http::http_response resp) { return resp.extract_utf8string(); }) .then([this](const std::string& xml) { auto remoteQueryResponse = processReply(xml); if (remoteQueryResponse) { m_lastMetaSearch = processMetaSearchResponse(*remoteQueryResponse); } if (m_lastMetaSearch) { setLastTotalResults(m_lastMetaSearch->numResults()); } else { setLastTotalResults(0); } }); // LOG(Debug, m_remoteUrl << builder.to_string()); return true; } bool RemoteBCL::startComponentLibraryMetaSearch(const std::string& searchTerm, const unsigned componentTypeTID, const std::string& filterType) { // can't start another request until last one is done if (m_httpResponse && !m_httpResponse->is_done()) { return false; } m_lastMetaSearch.reset(); auto client = getClient(remoteUrl(), m_timeOutSeconds); web::uri_builder builder(U("/api/metasearch/")); std::string query = searchTerm.empty() ? 
"*" : searchTerm; builder.append_path(web::uri::encode_data_string(utility::conversions::to_string_t(query + ".xml"))); builder.append_query(U("fq[]"), to_string_t("bundle:" + filterType)); if (componentTypeTID != 0) { std::string filter = "tid:" + openstudio::string_conversions::number(componentTypeTID); builder.append_query(U("fq[]"), to_string_t(filter)); } builder.append_query(U("api_version"), to_string_t(m_apiVersion)); m_httpResponse = client.request(web::http::methods::GET, builder.to_string()) .then([](web::http::http_response resp) { return resp.extract_utf8string(); }) .then([this](const std::string& xml) { auto remoteQueryResponse = processReply(xml); if (remoteQueryResponse) { m_lastMetaSearch = processMetaSearchResponse(*remoteQueryResponse); } if (m_lastMetaSearch) { setLastTotalResults(m_lastMetaSearch->numResults()); } else { setLastTotalResults(0); } }); // LOG(Debug, m_remoteUrl << builder.to_string()); return true; } bool RemoteBCL::startComponentLibrarySearch(const std::string& searchTerm, const std::string& componentType, const std::string& filterType, const unsigned page) { // can't start another request until last one is done if (m_httpResponse && !m_httpResponse->is_done()) { return false; } m_lastSearch.clear(); auto client = getClient(remoteUrl(), m_timeOutSeconds); web::uri_builder builder(U("/api/search/")); std::string query = searchTerm.empty() ? "*" : searchTerm; builder.append_path(web::uri::encode_data_string(utility::conversions::to_string_t(query + ".xml"))); builder.append_query(U("fq[]"), to_string_t("bundle:" + filterType)); if (!componentType.empty() && componentType != "*") { std::string filter = (filterType == "nrel_component") ? 
"sm_vid_Component_Tags" : "sm_vid_Measure_Tags"; filter += ":\"" + componentType + "\""; builder.append_query(U("fq[]"), to_string_t(filter)); } builder.append_query(U("api_version"), to_string_t(m_apiVersion)); builder.append_query(U("show_rows"), to_string_t(openstudio::string_conversions::number(m_numResultsPerQuery))); builder.append_query(U("page"), to_string_t(openstudio::string_conversions::number(page))); m_httpResponse = client.request(web::http::methods::GET, builder.to_string()) .then([](web::http::http_response resp) { return resp.extract_utf8string(); }) .then([this](const std::string& xml) { auto remoteQueryResponse = processReply(xml); if (remoteQueryResponse) { m_lastSearch = processSearchResponse(*remoteQueryResponse); } }); // LOG(Debug, m_remoteUrl << builder.to_string()); return true; } bool RemoteBCL::startComponentLibrarySearch(const std::string& searchTerm, const unsigned componentTypeTID, const std::string& filterType, const unsigned page) { // can't start another request until last one is done if (m_httpResponse && !m_httpResponse->is_done()) { return false; } m_lastSearch.clear(); auto client = getClient(remoteUrl(), m_timeOutSeconds); web::uri_builder builder(U("/api/search/")); std::string query = searchTerm.empty() ? 
"*" : searchTerm; builder.append_path(web::uri::encode_data_string(utility::conversions::to_string_t(query + ".xml"))); builder.append_query(U("fq[]"), to_string_t("bundle:" + filterType)); if (componentTypeTID != 0) { std::string filter = "tid:" + openstudio::string_conversions::number(componentTypeTID); builder.append_query(U("fq[]"), to_string_t(filter)); } builder.append_query(U("api_version"), to_string_t(m_apiVersion)); builder.append_query(U("show_rows"), to_string_t(openstudio::string_conversions::number(m_numResultsPerQuery))); builder.append_query(U("page"), to_string_t(openstudio::string_conversions::number(page))); m_httpResponse = client.request(web::http::methods::GET, builder.to_string()) .then([](web::http::http_response resp) { return resp.extract_utf8string(); }) .then([this](const std::string& xml) { auto remoteQueryResponse = processReply(xml); if (remoteQueryResponse) { m_lastSearch = processSearchResponse(*remoteQueryResponse); } }); // LOG(Debug, m_remoteUrl << builder.to_string()); return true; } bool RemoteBCL::waitForLock() const { if (!m_httpResponse) { return false; } try { m_httpResponse->wait(); return true; } catch (const std::exception& e) { LOG(Error, "Request to url '" << m_remoteUrl << " 'failed with message: " << e.what()); } return false; } boost::optional<RemoteQueryResponse> RemoteBCL::processReply(const std::string& reply) { std::shared_ptr<pugi::xml_document> document = std::make_shared<pugi::xml_document>(); auto result = document->load_string(reply.c_str()); if (!result) { LOG(Error, "Bad XML Response: " << result.description()); } else { if (!m_useRemoteDevelopmentUrl) { validProdAuthKey = true; } else { validDevAuthKey = true; } return RemoteQueryResponse(document); } return boost::none; } boost::optional<BCLMetaSearchResult> RemoteBCL::processMetaSearchResponse(const RemoteQueryResponse& remoteQueryResponse) { auto root = remoteQueryResponse.root(); if (root) { if (std::string(root.name()).compare(0, 6, "result") == 0) 
{ // C++20: std::string(root.name()).starts_with("result") auto numResultsElement = root.child("result_count"); if (numResultsElement) { return BCLMetaSearchResult(root); } } } return boost::none; } std::vector<BCLSearchResult> RemoteBCL::processSearchResponse(const RemoteQueryResponse& remoteQueryResponse) { std::vector<BCLSearchResult> searchResults; auto root = remoteQueryResponse.root(); auto result = root.child("result"); if (result) { auto componentElement = result.first_child(); //Basic check to see if it's non-empty while (componentElement.child("name")) { //Skip components without a uid or version_id if (componentElement.child("uuid") && componentElement.child("vuuid")) { BCLSearchResult searchResult(componentElement); searchResults.push_back(searchResult); } result = result.next_sibling("result"); componentElement = result.first_child(); } } return searchResults; } void RemoteBCL::onDownloadComplete() { const auto src = m_downloadFile->fileName(); std::string componentType; // Extract the files to a temp location openstudio::path tempDest = openstudio::filesystem::temp_directory_path() / toPath(m_downloadUid + '/'); if (openstudio::filesystem::is_directory(tempDest)) { removeDirectory(tempDest); } openstudio::filesystem::create_directories(tempDest); std::vector<openstudio::path> createdFiles; try { openstudio::UnzipFile uf(src); createdFiles = uf.extractAllFiles(tempDest); } catch (const std::exception& e) { LOG(Error, "Cannot unzip file: " << e.what()); } openstudio::filesystem::remove(src); // search for component.xml or measure.xml file boost::optional<openstudio::path> xmlPath; for (const openstudio::path& path : createdFiles) { if (path.filename() == toPath("component.xml")) { componentType = "component"; m_lastComponentDownload.reset(); xmlPath = path; break; } else if (path.filename() == toPath("measure.xml")) { componentType = "measure"; m_lastMeasureDownload.reset(); xmlPath = path; break; } } if (xmlPath) { // cppcheck-suppress shadowVariable 
path src = xmlPath->parent_path(); path dest = src.parent_path(); openstudio::filesystem::remove(dest / toPath("DISCLAIMER.txt")); openstudio::filesystem::remove(dest / toPath("README.txt")); openstudio::filesystem::remove(dest / toPath("output.xml")); copyDirectory(src, dest); removeDirectory(src); if (componentType == "component") { path componentXmlPath = dest / toPath("component.xml"); // open the component to figure out uid and vid BCLComponent component(toString(componentXmlPath.parent_path())); std::string uid = component.uid(); std::string versionId = component.versionId(); // check if component has proper uid and vid if (!uid.empty() && !versionId.empty()) { dest = LocalBCL::instance().libraryPath() / uid / versionId; removeDirectory(dest); if (copyDirectory(componentXmlPath.parent_path(), dest)) { // Add to LocalBCL m_lastComponentDownload = BCLComponent(toString(dest)); LocalBCL::instance().addComponent(*m_lastComponentDownload); } } } else if (componentType == "measure") { path measureXmlPath = dest / toPath("measure.xml"); // open the measure to figure out uid and vid boost::optional<BCLMeasure> measure; try { measure = BCLMeasure(measureXmlPath.parent_path()); std::string uid = measure->uid(); std::string versionId = measure->versionId(); // check if component has proper uid and vid if (!uid.empty() && !versionId.empty()) { dest = LocalBCL::instance().libraryPath() / uid / versionId; removeDirectory(dest); if (copyDirectory(measureXmlPath.parent_path(), dest)) { // Add to LocalBCL m_lastMeasureDownload = BCLMeasure(dest); LocalBCL::instance().addMeasure(*m_lastMeasureDownload); } } } catch (const std::exception&) { LOG(Error, "Unable to create measure from download: " + toString(measureXmlPath.parent_path())); } } } else { LOG(Error, "No component.xml or measure.xml file found in downloaded contents"); } // delete the temp unzip directory removeDirectory(tempDest); if (componentType == "measure") { this->measureDownloaded.nano_emit(m_downloadUid, 
m_lastMeasureDownload); } else { this->componentDownloaded.nano_emit(m_downloadUid, m_lastComponentDownload); } } int RemoteBCL::setResultsPerQuery(const int numResults) { m_numResultsPerQuery = numResults <= 0 ? 10 : numResults > 100 ? 100 : numResults; return m_numResultsPerQuery; } void RemoteBCL::setLastTotalResults(const int lastTotalResults) { m_lastTotalResults = lastTotalResults; } } // namespace openstudio
// Returns true when `num` is a prime number: greater than 1 and divisible
// only by 1 and itself.
function isPrime(num) {
  if (num <= 1) return false;
  // Checking divisors up to sqrt(num) suffices: any factor above the square
  // root pairs with one below it. Hoisted out of the loop so it is computed
  // once (the original re-evaluated Math.sqrt on every iteration).
  const limit = Math.sqrt(num);
  for (let i = 2; i <= limit; i++) {
    // BUGFIX: use strict equality (===) instead of loose == per JS best practice.
    if (num % i === 0) return false;
  }
  return true;
}

// Collect all primes in the inclusive range [2, 25].
let arr = [];
for (let i = 2; i <= 25; i++) {
  // If a number is prime, push it to the array
  if (isPrime(i)) arr.push(i);
}

console.log(arr); // [2, 3, 5, 7, 11, 13, 17, 19, 23]
// Registers the profile-screen ("Perfil") controller on the given AngularJS module.
// NOTE(review): the controller's parameter names ($state, StorageService, $scope,
// AuthService) are AngularJS dependency-injection tokens — do not rename them.
export default ngModule => {
  ngModule.controller('PerfilCtrl', function PerfilCtrl ($state, StorageService, $scope, AuthService) {
    const vm = this;

    // Refresh vm.user from the auth service (server-side source of truth);
    // called after the cached copy has already been displayed.
    const loadUser = () => {
      AuthService.getCurrentUser().then( (response) => {
        vm.user = response.user;
      });
    };

    // On every Ionic view entry: show the locally cached user immediately,
    // then refresh from the server; if there is no cached session, redirect
    // to the login state.
    $scope.$on("$ionicView.beforeEnter", () => {
      const currentUser = StorageService.getObject('currentUser', false);
      if (currentUser) {
        // Copy so that edits to vm.user do not mutate the cached object.
        vm.user = angular.copy(currentUser.user);
        loadUser();
      } else {
        $state.go('login');
      }
    });

    // Navigate to the profile-editing state.
    vm.goEditProfile = () => {
      $state.go('editar_perfil');
    };
  });
};
from django.db import models


class DataTracker:
    """Builds the SQL field mapping used to write event rows for a tracked model.

    Attributes:
        event_model: the Django event model whose concrete fields drive the insert.
        snapshot: SQL alias/identifier of the row snapshot to read values from.
        label: literal label stored in the event row's ``pgh_label`` column.
    """

    def __init__(self, event_model, snapshot, label):
        self.event_model = event_model
        self.snapshot = snapshot
        self.label = label

    def construct_sql_fields(self):
        """Return a ``{column: SQL expression}`` mapping for the event insert.

        Auto-increment fields are skipped, as are event-model fields that do
        not exist on the tracked model. Bookkeeping columns (created-at
        timestamp, label, and — when present — the tracked object id) are
        added on top of the snapshot-sourced columns.
        """
        tracked_model = self.event_model.pgh_tracked_model
        fields = {}
        for field in self.event_model._meta.fields:
            if isinstance(field, models.AutoField):
                continue  # never copy auto-generated PKs from the snapshot
            if not hasattr(tracked_model, field.name):
                continue  # event-only field; the tracked model has no source value
            fields[field.column] = f'{self.snapshot}."{field.column}"'

        fields['pgh_created_at'] = 'NOW()'
        fields['pgh_label'] = f"'{self.label}'"
        if hasattr(self.event_model, 'pgh_obj'):
            fields['pgh_obj_id'] = f'{self.snapshot}."pgh_obj_id"'
        return fields
// Sum every even number in the inclusive range [0, 1000].
// Each even value is 2 * k for k in [0, 500], so iterate over k instead
// of stepping the raw value by 2; the result (250500) is identical.
int sum = 0;
for (int k = 0; k <= 500; k++) {
    sum += 2 * k;
}
System.out.println(sum);
from urllib.parse import urlparse


def extract_tlds(urls):
    """Return the top-level domain (last dot-separated host label) of each URL.

    Args:
        urls: iterable of URL strings (scheme required for correct parsing).

    Returns:
        list of TLD strings, one per input URL (empty string when the URL
        has no parseable hostname).

    BUGFIX: the previous implementation split the *entire* URL on '.', so any
    path, query, or port after the host leaked into the result (e.g.
    'https://example.com/page' -> 'com/page'). Parsing the URL and splitting
    only the hostname fixes that while preserving results for host-only URLs.
    """
    tlds = []
    for url in urls:
        hostname = urlparse(url).hostname or ''
        # rsplit keeps only the final label; a host without dots returns itself.
        tlds.append(hostname.rsplit('.', 1)[-1])
    return tlds


tlds = extract_tlds(['https://www.example.com', 'http://www.example.org', 'https://example.net'])
print(tlds)
// Thin wrapper around the project's run-exec helper for npm installs.
const runExec = require('./run-exec')

// Install `packageName` as a devDependency of the current project.
// NOTE(review): packageName is interpolated into a shell command unescaped —
// a caller-supplied value containing shell metacharacters could inject
// arbitrary commands; confirm all call sites pass trusted, validated names.
exports.devInstall = (packageName)=>{
  runExec(`npm i --save-dev ${packageName}`)
}
import { markdownToDsl } from "./markdownToDsl"; test("normal dsl", async () => { let sampleImportCode = `| name | scmUrl | language | branch | |-------|-------|---------|-------| | DDD Mono | https://github.com/archguard/ddd-monolithic-code-sample | Java | master | `; let dsl = markdownToDsl(sampleImportCode); expect(dsl).toEqual([ 'repo(name="DDD Mono",scmUrl="https://github.com/archguard/ddd-monolithic-code-sample",language="Java",branch="master")', ]); }); test("multiple repo", async () => { let sampleImportCode = `| name | scmUrl | language | branch | |-------|-------|---------|-------| | DDD Mono | https://github.com/archguard/ddd-monolithic-code-sample | Java | master | | | | | | `; let dsl = markdownToDsl(sampleImportCode); expect(dsl).toEqual([ 'repo(name="DDD Mono",scmUrl="https://github.com/archguard/ddd-monolithic-code-sample",language="Java",branch="master")', "repo()", ]); });
def generate_deployment_config(name, namespace, match_labels, template_spec_labels):
    """Build a minimal Kubernetes Deployment manifest fragment.

    Args:
        name: Deployment name (metadata.name).
        namespace: target namespace (metadata.namespace).
        match_labels: dict used as spec.selector.matchLabels.
        template_spec_labels: dict used as spec.template.metadata.labels.

    Returns:
        A nested dict with the metadata / selector / pod-template skeleton.
    """
    metadata = {"name": name, "namespace": namespace}
    selector = {"matchLabels": match_labels}
    pod_template = {"metadata": {"labels": template_spec_labels}}
    return {
        "metadata": metadata,
        "spec": {
            "selector": selector,
            "template": pod_template,
        },
    }
## Note: for Anaconda user, please use -D_GLIBCXX_USE_CXX11_ABI=1 (instead of -D_GLIBCXX_USE_CXX11_ABI=0) ## ----------- Test with TF v1.8 TF_CFLAGS=( $(python -c 'import tensorflow as tf; print(" ".join(tf.sysconfig.get_compile_flags()))') ) TF_LFLAGS=( $(python -c 'import tensorflow as tf; print(" ".join(tf.sysconfig.get_link_flags()))') ) nvcc -std=c++11 -c -o cuda_op_kernel_v2_sz224.cu.o cuda_op_kernel_v2_sz224.cu.cc ${TF_CFLAGS[@]} -D GOOGLE_CUDA=1 -x cu -Xcompiler -fPIC -D_MWAITXINTRIN_H_INCLUDED g++ -std=c++11 -shared -o cuda_op_kernel_v2_sz224.so cuda_op_kernel_v2_sz224.cc cuda_op_kernel_v2_sz224.cu.o ${TF_CFLAGS[@]} -fPIC -lcudart ${TF_LFLAGS[@]} -D_GLIBCXX_USE_CXX11_ABI=0 -L /usr/local/cuda/lib64/ ## ----------- Tested with TF v1.3 #TF_INC=$(python -c 'import tensorflow as tf; print(tf.sysconfig.get_include())') #nvcc -std=c++11 -c -o cuda_op_kernel_v2_sz224.cu.o cuda_op_kernel_v2_sz224.cu.cc -I $TF_INC -D GOOGLE_CUDA=1 -x cu -Xcompiler -fPIC -D_MWAITXINTRIN_H_INCLUDED #g++ -std=c++11 -shared -o cuda_op_kernel_v2_sz224.so cuda_op_kernel_v2_sz224.cc cuda_op_kernel_v2_sz224.cu.o -I $TF_INC -fPIC -lcudart -D_GLIBCXX_USE_CXX11_ABI=0
package com.yan.demo.service;

import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.yan.demo.dao.AllViewMapper;

/**
 * Service layer for the "all view" query. Pure delegation to the mapper —
 * no additional business logic.
 */
@Service
public class AllViewService {

    /** Data-access mapper injected by Spring. */
    @Autowired
    AllViewMapper allViewMapper;

    /**
     * Fetch every row returned by the mapper's allView query.
     *
     * @return one {@code Map} per row — presumably keyed by column name
     *         (typical mapper behavior; confirm against the mapper definition)
     */
    public List<Map<String, Object>> selectAll() {
        return allViewMapper.allView();
    }
}
#!/bin/bash

# Run the project build either natively or inside a Docker container,
# depending on whether $DOCKER_IMAGE is set in the environment.
#
# BUGFIX: $DOCKER_IMAGE and $(pwd) were unquoted. An image name or working
# directory containing whitespace would break the [ -z ] test and word-split
# the docker arguments; quoting makes both robust without changing behavior
# for well-formed values.

if [ -z "$DOCKER_IMAGE" ]; then
    # Run with native
    .travis/run_project_build.sh
else
    # Run with docker; forward the compiler/sanitizer selection into the container.
    docker run -v"$(pwd)":/home/conan "$DOCKER_IMAGE" bash -c "CC=${CC} CXX=${CXX} ASAN=${ASAN} .travis/run_project_build.sh"
fi
import flask

app = flask.Flask(__name__)


@app.route('/celsius-to-fahrenheit/<int:celsius>', methods=['GET'])
def celsius_to_fahrenheit(celsius):
    """Convert the Celsius value from the URL path and return both scales as JSON."""
    converted = (celsius * 9 / 5) + 32  # standard C-to-F formula
    return flask.jsonify({'celsius': celsius, 'fahrenheit': converted})


if __name__ == '__main__':
    # Start the development server when executed directly.
    app.run()
#!/bin/sh set -x set -e cilium install --cluster-name "${CLUSTER_NAME}" --restart-unmanaged-pods=false --config monitor-aggregation=none --config tunnel=vxlan --native-routing-cidr="${CLUSTER_CIDR}" cilium clustermesh enable cilium clustermesh status --wait --wait-duration 5m cilium clustermesh vm create "${VM_NAME}" -n default --ipv4-alloc-cidr 10.192.1.0/30 cilium clustermesh vm status cilium clustermesh vm install install-external-workload.sh kubectl -n kube-system create cm install-external-workload-script --from-file=script=install-external-workload.sh
#!/bin/bash # # server mode test suite - all tests to be peformed from the CLI # of the WLAN Pi while switched in to server mode # # ########################## # User configurable vars ########################## MODULE=server VERSION=1.01 COMMENTS="server mode test suite to verify files & processes" SCRIPT_NAME=$(basename $0) # Tests log file LOG_FILE="${SCRIPT_NAME}_results.log" # WLAN Pi status file (hotspot, wiperf etc...) STATUS_FILE="/etc/wlanpi-state" # SSID broadcast by server SSID=wlanpi_server ########################### # script global vars ########################### # initialize tests passed counter tests_passed=0 # initialize tests failed counter tests_failed=0 ################ # root check ################ if [[ $EUID -ne 0 ]]; then echo "This script must be run as root" exit 1 fi ############################################## # Helper functions - see docs at end of file ############################################## summary () { tests_completed=$((tests_passed + tests_failed)) echo "" echo "-----------------------------------" echo " Total tests: $tests_completed" echo " Number tests passed: $tests_passed" echo " Number tests failed: $tests_failed" echo "-----------------------------------" echo "" } inc_passed () { tests_passed=$((tests_passed + 1)); } inc_failed () { tests_failed=$((tests_failed + 1)); } info () { echo -n "(info) Test: $1" | tee -a $LOG_FILE; } info_n () { echo "(info) Test: $1" | tee -a $LOG_FILE; } comment () { echo $1 | tee -a $LOG_FILE; } pass () { inc_passed; echo " $1 (pass)" | tee -a $LOG_FILE; } fail () { inc_failed; echo " $1 (fail) <--- !!!!!!" | tee -a $LOG_FILE; } check () { if [[ $1 ]]; then pass; else fail; fi; } check_not () { if [[ ! 
$1 ]]; then pass; else fail; fi; } file_exists () { info "Checking file exists: $1"; if [[ -e $1 ]]; then pass; else fail; fi; } dir_exists () { info "Checking directory exists: $1"; if [[ -d $1 ]]; then pass; else fail; fi; } symlink_exists () { info "Checking symlink exists: $1"; if [[ -L $1 ]]; then pass; else fail; fi; } symlink_not () { info "Checking file is not symlink: $1"; if [[ ! -L $1 ]]; then pass; else fail; fi; } check_process () { info "Checking process running: $1"; if [[ `pgrep $1` ]]; then pass; else fail; fi; } check_systemctl () { info "Checking systemctl running: $1"; if [[ `systemctl status $1 | grep 'active (running)'` ]]; then pass; else fail; fi; } ######################################## # Test rig overview ######################################## echo "\ ======================================================= Test rig description: 1. WLAN Pi running image to be tested 2. Supported wireless NIC card on one of USB ports 3. WLAN Pi is switched in to server mode 4. server config files are default 5. Run tests by joining SSID 'wlanpi_server' (key = 'wifipros') 6. 
SSH to 192.168.88.1 and run this test script: /etc/wlanpiserver/tests/server_tests_01.sh =======================================================" | tee $LOG_FILE ######################################## # Test suite ######################################## run_tests () { comment "" comment "###########################################" comment " Running $MODULE test suite" comment "###########################################" comment "" # check what state the WLAN Pi is in info "Checking current mode is server" check `cat $STATUS_FILE | grep 'server'` # check we have directories expected dir_exists "/etc/wlanpiserver" # check various files exist file_exists "/etc/wlanpiserver/conf/hostapd.conf" file_exists "/etc/wlanpiserver/default/isc-dhcp-server" file_exists "/etc/wlanpiserver/default/ufw" file_exists "/etc/wlanpiserver/dhcp/dhcpd.conf" file_exists "/etc/wlanpiserver/network/interfaces" file_exists "/etc/wlanpiserver/sysctl/sysctl.conf" file_exists "/etc/wlanpiserver/ufw/before.rules" file_exists "/usr/bin/server_switcher" # check file symbolic links exist symlink_exists "/etc/network/interfaces" symlink_exists "/etc/default/isc-dhcp-server" symlink_exists "/etc/dhcp/dhcpd.conf" symlink_exists "/etc/network/interfaces" symlink_exists "/etc/hostapd.conf" symlink_exists "/etc/sysctl.conf" symlink_exists "/etc/default/ufw" symlink_exists "/etc/ufw/before.rules" # check hostapd running check_process "hostapd" # check dhcpd running check_process "dhcpd" # check default SSID configured info "Checking hostapd SSID is default" check `cat /etc/hostapd.conf | grep ssid="${SSID}"` # check wlan port is in correct state (Mode:Master) info "Checking wlan adapter in master mode" check `iwconfig wlan0 | grep 'Mode:Master'` # check wlan broadcasting correct SSID info "Checking wlan adapter broadcasting correct SSID ($SSID)" check `iwconfig wlan0 | grep ESSID:\"${SSID}\"` # check wlan0 up and running with correct IP address wlan0_ip=192.168.88.1 info "Checking wlan0 has correct IP 
(${wlan0_ip})" check `ifconfig wlan0 | grep $wlan0_ip` # check forwarding enabled info "Checking firewall forwarding enabled" check `cat /etc/default/ufw | grep 'DEFAULT_FORWARD_POLICY="ACCEPT"'` # check NAT enabled - check for line from NAT config info "Checking firewall NAT enabled" check `cat /etc/ufw/before.rules | grep 'POSTROUTING -s 192.168.88.0/24 -o eth0 -j MASQUERADE'` # Print test run results summary summary comment "" comment "###########################################" comment " End of $MODULE test suite" comment "###########################################" comment "" } ######################################## # main ######################################## case "$1" in -v) echo "" echo "Test script version: $VERSION" echo $COMMENTS echo "" exit 0 ;; -h) echo "Usage: $SCRIPT_NAME [ -h | -v ]" echo "" echo " $SCRIPT_NAME -v : script version" echo " $SCRIPT_NAME -h : script help" echo " $SCRIPT_NAME : run test suite" echo "" exit 0 ;; *) run_tests exit $tests_failed ;; esac # should never reach here, but just in case.... exit 1 << 'HOWTO' ################################################################################################################# Test Utility Documentation -------------------------- This script uses a set of useful utilities to simplify running a series of tests from this bash script. 
The syntax of the utilities is shown below: inc_passed: increment the test-passed counter (global var 'tests_passed') inc_failed: increment the test-failed counter (global var 'tests_failed') info: pre-prend the text in $1 with "info" and send to stdout & the log file (no CR) info_n: pre-prend the text in $1 with "info" and send to stdout & the log file (inc CR after msg) pass: write a "pass" msg to stdout & the log file, with optional additional msg in $1 (var passed to function) fail: write a "fail" msg to stdout & the log file, with optional additional msg in $1 (var passed to function) comment: output raw text supplied in $1 to std & log file check: call pass() if condition passed is true (can inc option msg via $1), otherwise fail() check_not: call pass() if condition passed is false (can inc option msg via $1), otherwise fail() file_exists: call pass() if file name passed via $1 exists, else call fail() dir_exists: call pass() if dir name passed via $1 exists, else call fail() symlink_exists: call pass() if file name passed via $1 is a symlink, else call fail() check_process: call pass() if process name passed via $1 is running, else call fail() check_systemctl: call pass() if service name passed via $1 is running, else call fail() ################################################################################################################# HOWTO
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2009 <NAME> All rights reserved.
#
"""Demonstrate fractions.Fraction.from_decimal() converting Decimal values
to exact rational numbers (PyMOTW example)."""
#end_pymotw_header

import decimal
import fractions

# Each Decimal converts exactly: 0.1 -> 1/10, 0.5 -> 1/2, 1.5 -> 3/2, 2.0 -> 2.
for v in [
    decimal.Decimal('0.1'),
    decimal.Decimal('0.5'),
    decimal.Decimal('1.5'),
    decimal.Decimal('2.0'),
]:
    # BUGFIX: converted the Python 2 print statement to a Python 3 print()
    # call, and dropped the stray "<reponame>..." artifact line that made
    # the file a syntax error under any Python version.
    print('%s = %s' % (v, fractions.Fraction.from_decimal(v)))
package hermes.security.hash;

/**
 * Static facade over a configurable {@code Hasher} implementation.
 * Defaults to the SHA hasher from {@code HasherFactory}; the backing
 * hasher can be swapped at runtime via the setters.
 *
 * @author <NAME> (d120041) <<EMAIL>>
 */
public class Hash {

    private static Hasher hasher = HasherFactory.make(HasherFactory.SHA);

    /**
     * Replace the backing hasher. Null arguments are ignored so the facade
     * always retains a usable hasher.
     */
    public static void setHasher(Hasher h) {
        if (h == null) {
            return; // keep the current hasher rather than clobbering it
        }
        hasher = h;
    }

    /** Replace the backing hasher by factory type name. */
    public static void setHasher(String type) {
        setHasher(HasherFactory.make(type));
    }

    /** Hash the given message with the currently configured hasher. */
    public static String hash(String message) {
        return hasher.hash(message);
    }
}
#!/usr/bin/env bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

# Launch distributed training in the background, logging to ../train.log.
#   Ascend: $1=platform $2=device_num $3=server_ip $4=visible_devices $5=dataset_path
#   GPU:    $1=platform $2=device_num $3=visible_devices $4=dataset_path

run_ascend()
{
    # BUGFIX: the original test used `&&` — a value can never be both < 1 and
    # > 8, so the device-count validation was dead code. `||` rejects any
    # count outside the supported 1-8 range.
    if [ $2 -lt 1 ] || [ $2 -gt 8 ]
    then
        echo "error: DEVICE_NUM=$2 is not in (1-8)"
        exit 1
    fi

    if [ ! -d "$5" ]
    then
        echo "error: DATASET_PATH=$5 is not a directory"
        exit 1
    fi

    BASEPATH=$(cd "`dirname $0`" || exit; pwd)
    export PYTHONPATH=${BASEPATH}:$PYTHONPATH
    # Start from a clean train working directory.
    if [ -d "../train" ];
    then
        rm -rf ../train
    fi
    mkdir ../train
    cd ../train || exit
    python ${BASEPATH}/../src/launch.py \
        --nproc_per_node=$2 \
        --visible_devices=$4 \
        --server_id=$3 \
        --training_script=${BASEPATH}/../train.py \
        --dataset_path=$5 \
        --platform=$1 &> ../train.log &  # dataset train folder
}

run_gpu()
{
    # BUGFIX: same dead `&&` range check as run_ascend; fixed to `||`.
    if [ $2 -lt 1 ] || [ $2 -gt 8 ]
    then
        echo "error: DEVICE_NUM=$2 is not in (1-8)"
        exit 1
    fi

    if [ ! -d "$4" ]
    then
        echo "error: DATASET_PATH=$4 is not a directory"
        exit 1
    fi

    BASEPATH=$(cd "`dirname $0`" || exit; pwd)
    export PYTHONPATH=${BASEPATH}:$PYTHONPATH
    # Start from a clean train working directory.
    if [ -d "../train" ];
    then
        rm -rf ../train
    fi
    mkdir ../train
    cd ../train || exit

    export CUDA_VISIBLE_DEVICES="$3"
    mpirun -n $2 --allow-run-as-root \
        python ${BASEPATH}/../train.py \
        --dataset_path=$4 \
        --platform=$1 \
        &> ../train.log &  # dataset train folder
}

# Argument-count sanity check: 4 args for GPU, 5 for Ascend.
if [ $# -gt 5 ] || [ $# -lt 4 ]
then
    # BUGFIX: plain `echo` does not interpret \n escapes in bash; use -e so
    # the multi-line usage text renders as intended.
    echo -e "Usage:\n \
    Ascend: sh run_train.sh Ascend [DEVICE_NUM] [SERVER_IP(x.x.x.x)] [VISIABLE_DEVICES(0,1,2,3,4,5,6,7)] [DATASET_PATH]\n \
    GPU: sh run_train.sh GPU [DEVICE_NUM] [VISIABLE_DEVICES(0,1,2,3,4,5,6,7)] [DATASET_PATH]\n \
    "
    exit 1
fi

if [ $1 = "Ascend" ] ; then
    run_ascend "$@"
elif [ $1 = "GPU" ] ; then
    run_gpu "$@"
else
    echo "not support platform"
fi;
<reponame>neal-siekierski/kwiver /*ckwg +29 * Copyright 2011-2013 by Kitware, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither name of Kitware, Inc. nor the names of any contributors may be used * to endorse or promote products derived from this software without specific * prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #ifndef SPROKIT_TEST_TEST_COMMON_H #define SPROKIT_TEST_TEST_COMMON_H #include <boost/function.hpp> #include <exception> #include <iostream> #include <map> #include <string> #include <cstdlib> typedef std::string testname_t; #define TEST_ERROR(msg) \ do \ { \ std::cerr << "Error: " << msg << std::endl; \ } while (false) #define EXPECT_EXCEPTION(ex, code, action) \ do \ { \ bool got_exception = false; \ \ try \ { \ code; \ } \ catch (ex const& e) \ { \ got_exception = true; \ \ std::cerr << "Expected exception: " \ << e.what() \ << std::endl; \ } \ catch (std::exception const& e) \ { \ TEST_ERROR("Unexpected exception: " \ << e.what()); \ \ got_exception = true; \ } \ catch (...) \ { \ TEST_ERROR("Non-standard exception"); \ \ got_exception = true; \ } \ \ if (!got_exception) \ { \ TEST_ERROR("Did not get " \ "expected exception when " \ << action); \ } \ } while (false) #define DECLARE_TEST_MAP() \ namespace \ { \ typedef boost::function<void TEST_ARGS> test_function_t; \ typedef std::map<testname_t, test_function_t> test_map_t; \ } \ test_map_t __all_tests; \ struct __add_test \ { \ __add_test(testname_t const& name, \ test_function_t const& func) \ { \ __all_tests[name] = func; \ } \ } \ #define TEST_PROPERTY(property, value, ...) #define IMPLEMENT_TEST(testname) \ static void \ test_##testname TEST_ARGS; \ static __add_test const \ __add_test_##testname(#testname, test_##testname); \ void \ test_##testname TEST_ARGS #define CHECK_ARGS(numargs) \ do \ { \ if (argc != (numargs + 1)) \ { \ TEST_ERROR("Expected " \ #numargs \ " arguments"); \ \ return EXIT_FAILURE; \ } \ } while (false) #define RUN_TEST(testname, ...) 
\ do \ { \ test_map_t::const_iterator const i = \ __all_tests.find(testname); \ \ if (i == __all_tests.end()) \ { \ TEST_ERROR("Unknown test: " << testname); \ \ return EXIT_FAILURE; \ } \ \ test_function_t const& func = i->second; \ \ try \ { \ func(__VA_ARGS__); \ } \ catch (std::exception const& e) \ { \ TEST_ERROR("Unexpected exception: " \ << e.what()); \ \ return EXIT_FAILURE; \ } \ \ return EXIT_SUCCESS; \ } while (false) #endif // SPROKIT_TEST_TEST_COMMON_H
#!/bin/bash
# SLURM batch job: run the deterministic selection experiment, forwarding
# three positional arguments from sbatch to the Python script.
#SBATCH -n 1
#SBATCH -t 00:10:00
#SBATCH --mem-per-cpu=2000

# Load the conda-based Python environment.
module load anaconda3
source activate crowd-development

# Log the arguments for traceability in the job's output file.
echo $1
echo $2
echo $3

# NOTE(review): the semantics of $1 $2 $3 are not visible here — see
# selection_deterministic.py for what each argument controls.
python selection_deterministic.py $1 $2 $3

#rm *.out
/**
 * Return all positive divisors of `n` in ascending order.
 *
 * Uses trial division only up to sqrt(n): every divisor `i` found below
 * the square root is paired with its complement `n / i`, so the loop is
 * O(sqrt(n)) instead of the original O(n) full scan.
 *
 * @param {number} n - Positive integer to factorize.
 * @returns {number[]} Sorted factors; empty array for non-positive or
 *   non-integer input (the original also produced [] for those inputs).
 */
const getFactors = (n) => {
  if (!Number.isInteger(n) || n <= 0) return [];

  const small = [];
  const large = [];
  for (let i = 1; i * i <= n; i++) {
    if (n % i === 0) {
      small.push(i);
      const pair = n / i;
      if (pair !== i) large.push(pair); // don't duplicate a perfect-square root
    }
  }
  // `large` was collected in descending order of the complements.
  return small.concat(large.reverse());
};

console.log(getFactors(10));
<reponame>LarsSaalbrink/Sub-IoT-sdu<filename>stack/modules/d7ap/d7asp.h /* * Copyright (c) 2015-2021 University of Antwerp, Aloxy NV. * * This file is part of Sub-IoT. * See https://github.com/Sub-IoT/Sub-IoT-Stack for further info. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /*! \file d7asp.h * \addtogroup D7ASP * \ingroup D7AP * @{ * \brief Session Layer Protocol APIs * \author <EMAIL> * \author <EMAIL> */ #ifndef D7ASP_H_ #define D7ASP_H_ #include "stdint.h" #include "stdbool.h" #include "MODULE_D7AP_defs.h" #include "d7ap.h" #include "packet.h" #define D7ASP_FIFO_CONFIG_SIZE 16 #define NO_ACTIVE_REQUEST_ID 0xFF // index [0 .. 7] --> byte 1 // index [8 .. 15] --> byte 2 // index [16.. 23] --> byte 3 // so the byte count can be calculated as the integer quotient of the division + 1 byte #define REQUESTS_BITMAP_BYTE_COUNT (MODULE_D7AP_FIFO_MAX_REQUESTS_COUNT/8) + 1 /** * /brief The state of a session FIFO */ typedef struct d7asp_master_session d7asp_master_session_t; void d7asp_init(); void d7asp_stop(); uint8_t d7asp_master_session_create(d7ap_session_config_t* d7asp_master_session_config); uint8_t d7asp_queue_request(uint8_t session_token, uint8_t* alp_payload_buffer, uint8_t alp_payload_length, uint8_t expected_alp_response_length); error_t d7asp_send_response(uint8_t* payload, uint8_t length); /** * @brief Processes a received packet, and switch to slave state in case * the flag extension is set and all requests are handled. 
*/ void d7asp_process_received_response(packet_t* packet, bool extension); /** * @brief Processes an unsolicited incoming request * * @returns Whether or not a response is expected. If true the response will be supplied asynchronously by the client. */ bool d7asp_process_received_packet(packet_t* packet); /** * @brief Called by DLL to signal the CSMA/CA process completed succesfully and packet can be ack-ed for QoS = None */ void d7asp_signal_packet_transmitted(packet_t* packet); /** * @brief Called by DLL to signal the CSMA/CA process failed */ void d7asp_signal_transmission_failure(); /** * @brief Called by TP to signal the dialog is terminated */ void d7asp_signal_dialog_terminated(); /** * @brief Called by TP to signal the transaction is terminated */ void d7asp_signal_transaction_terminated(); #endif /* D7ASP_H_ */ /** @}*/
#!/bin/bash # global stuff CWD=$(pwd) PLUGINS_PATH="$CWD/plugins" DATA_PATH="$CWD/data" DEFAULT_SHELL="$HOME/.bashrc" PACKGE_MANAGER="apt-get" GO_DIR=~/go/bin # some function install_banner() { name=$1 echo -e "\033[1;32m[+] Installing $name \033[1;37m" } install_banner "git, nmap, masscan, chromium, npm, golang" [ -x "$(command -v git)" ] || sudo $PACKGE_MANAGER install git -y 2>/dev/null [ -x "$(command -v nmap)" ] || sudo $PACKGE_MANAGER install nmap -y 2>/dev/null [ -x "$(command -v masscan)" ] || sudo $PACKGE_MANAGER install masscan -y 2>/dev/null [ -x "$(command -v chromium)" ] || sudo $PACKGE_MANAGER install chromium -y 2>/dev/null [ -x "$(command -v npm)" ] || sudo $PACKGE_MANAGER install npm -y 2>/dev/null [ -x "$(command -v go)" ] || sudo $PACKGE_MANAGER install golang -y 2>/dev/null [ -x "$(command -v make)" ] || sudo $PACKGE_MANAGER install build-essential -y 2>/dev/null [ -x "$(command -v csvlook)" ] || sudo $PACKGE_MANAGER install csvkit -y 2>/dev/null [ -x "$(command -v ripgrep)" ] || sudo $PACKGE_MANAGER install ripgrep -y 2>/dev/null [ -x "$(command -v unzip)" ] || sudo $PACKGE_MANAGER install unzip -y 2>/dev/null [ -x "$(command -v chromium-browser)" ] || sudo $PACKGE_MANAGER install chromium-browser -y 2>/dev/null [ -x "$(command -v pip)" ] || sudo $PACKGE_MANAGER install python-pip -y 2>/dev/null [ -x "$(command -v pip3)" ] || sudo $PACKGE_MANAGER install python3-pip -y 2>/dev/null [ -x "$(command -v xsltproc)" ] || sudo $PACKGE_MANAGER install xsltproc -y 2>/dev/null pip install setuptools 2>/dev/null pip3 install setuptools 2>/dev/null pip install wheel 2>/dev/null pip3 install wheel 2>/dev/null #### Download stuff directly install_banner "wordlists" mkdir -p $DATA_PATH 2>/dev/null mkdir -p $DATA_PATH/wordlists/ 2>/dev/null mkdir -p $DATA_PATH/wordlists/dns/ 2>/dev/null mkdir -p $DATA_PATH/wordlists/content/ 2>/dev/null mkdir -p $DATA_PATH/wordlists/params/ 2>/dev/null mkdir -p $PLUGINS_PATH 2>/dev/null mkdir -p $DATA_PATH/nmap-stuff/ 
2>/dev/null mkdir -p $PLUGINS_PATH/nmap-stuff/ 2>/dev/null mkdir -p "$GO_DIR" 2>/dev/null mkdir -p "$PLUGINS_PATH/go/" 2>/dev/null # domain discovery [[ -f $DATA_PATH/wordlists/dns/all.txt ]] || wget -q -O $DATA_PATH/wordlists/dns/all.txt https://gist.githubusercontent.com/jhaddix/86a06c5dc309d08580a018c66354a056/raw/96f4e51d96b2203f19f6381c8c545b278eaa0837/all.txt [[ -f $DATA_PATH/wordlists/dns/commonspeak2-subdomains.txt ]] || wget -q -O $DATA_PATH/wordlists/dns/commonspeak2-subdomains.txt https://raw.githubusercontent.com/assetnote/commonspeak2-wordlists/master/subdomains/subdomains.txt [[ -f $DATA_PATH/wordlists/dns/shorts.txt ]] || wget -q -O $DATA_PATH/wordlists/dns/shorts.txt https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-20000.txt # permutation domain [[ -f $DATA_PATH/wordlists/dns/short-permutation.txt ]] || wget -q -O $DATA_PATH/wordlists/dns/short-permutation.txt https://raw.githubusercontent.com/subfinder/goaltdns/master/words.txt # vhost domain [[ -f $DATA_PATH/wordlists/dns/virtual-host-scanning.txt ]] || wget -q -O $DATA_PATH/wordlists/dns/virtual-host-scanning.txt https://raw.githubusercontent.com/codingo/VHostScan/master/VHostScan/wordlists/virtual-host-scanning.txt # content discovery [[ -f $DATA_PATH/wordlists/content/raft-large-directories.txt ]] || wget -q -O $DATA_PATH/wordlists/content/raft-large-directories.txt https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/Web-Content/raft-large-directories.txt [[ -f $DATA_PATH/wordlists/content/really-quick.txt ]] || wget -q -O $DATA_PATH/wordlists/content/really-quick.txt https://raw.githubusercontent.com/maurosoria/dirsearch/master/db/dicc.txt [[ -f $DATA_PATH/wordlists/content/top10000.txt ]] || wget -q -O $DATA_PATH/wordlists/content/top10000.txt https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/Web-Content/RobotsDisallowed-Top1000.txt cat $DATA_PATH/wordlists/content/really-quick.txt 
$DATA_PATH/wordlists/content/top10000.txt > $DATA_PATH/wordlists/content/quick-content-discovery.txt [[ -f $DATA_PATH/wordlists/content/dir-all.txt ]] || wget -q -O $DATA_PATH/wordlists/content/dir-all.txt https://gist.githubusercontent.com/jhaddix/b80ea67d85c13206125806f0828f4d10/raw/c81a34fe84731430741e0463eb6076129c20c4c0/content_discovery_all.txt # params [[ -f $DATA_PATH/wordlists/params/param-miner.txt ]] || wget -q -O $DATA_PATH/wordlists/params/param-miner.txt https://raw.githubusercontent.com/PortSwigger/param-miner/master/resources/params [[ -f $DATA_PATH/wordlists/params/parameth.txt ]] || wget -q -O $DATA_PATH/wordlists/params/parameth.txt https://raw.githubusercontent.com/maK-/parameth/master/lists/all.txt cat $DATA_PATH/wordlists/params/param-miner.txt $DATA_PATH/wordlists/params/parameth.txt | sort -u > $DATA_PATH/wordlists/params/all.txt # Subdomain takeover signature install_banner "providers-data for subdomain takeover" [[ -f $DATA_PATH/providers-data.csv ]] || wget -q -O $DATA_PATH/providers-data.csv https://raw.githubusercontent.com/anshumanbh/tko-subs/master/providers-data.csv [[ -f $DATA_PATH/fingerprints.json ]] || wget -q -O $DATA_PATH/fingerprints.json https://raw.githubusercontent.com/haccer/subjack/master/fingerprints.json # secret words to grep [[ -f $DATA_PATH/keywords.txt ]] || wget -q -O $DATA_PATH/keywords.txt https://raw.githubusercontent.com/random-robbie/keywords/master/keywords.txt # resolvers [[ -f $DATA_PATH/resolvers.txt ]] || wget -q -O $DATA_PATH/resolvers.txt https://raw.githubusercontent.com/Abss0x7tbh/bass/master/resolvers/public.txt ##### # Start of nmap stuff #### ## technology signature [[ -f $DATA_PATH/apps.json ]] || wget -q -O $DATA_PATH/apps.json https://raw.githubusercontent.com/AliasIO/Wappalyzer/master/src/apps.json ### Nmap stuff install_banner "nmap vulners nse" # Install vulners nse script [[ -f $PLUGINS_PATH/nmap-stuff/vulners.nse ]] || wget -q -O $PLUGINS_PATH/nmap-stuff/vulners.nse 
https://raw.githubusercontent.com/vulnersCom/nmap-vulners/master/vulners.nse install_banner "nmap bootstrap" # Install nmap bootstrap [[ -f $PLUGINS_PATH/nmap-stuff/nmap-bootstrap.xsl ]] || wget -q -O $PLUGINS_PATH/nmap-stuff/nmap-bootstrap.xsl https://raw.githubusercontent.com/honze-net/nmap-bootstrap-xsl/master/nmap-bootstrap.xsl install_banner "nmap & masscan parser" [[ -f $PLUGINS_PATH/nmap-stuff/masscan_xml_parser.py ]] || wget -q -O $PLUGINS_PATH/nmap-stuff/masscan_xml_parser.py https://raw.githubusercontent.com/laconicwolf/Masscan-to-CSV/master/masscan_xml_parser.py [[ -f $PLUGINS_PATH/nmap-stuff/nmaptocsv.py ]] || wget -q -O $PLUGINS_PATH/nmap-stuff/nmaptocsv.py https://raw.githubusercontent.com/maaaaz/nmaptocsv/master/nmaptocsv.py ## chmod +x osmedeus.py install_banner "Osmedeus dependencies" pip3 install -r requirements.txt cd $CWD mkdir -p ~/.osmedeus 2>/dev/null install_banner "Initial config for Osmedeus" python3 server/manage.py makemigrations python3 server/manage.py migrate python3 server/manage.py makemigrations api python3 server/manage.py migrate api python3 scripts/init.py ### adding gopath if GOPATH not in default shellrc if ! grep -Fxq "GOPATH" "$DEFAULT_SHELL"; then echo 'export GOPATH=$HOME/go' >>$DEFAULT_SHELL echo 'PATH=$GOPATH/bin:$PATH' >>$DEFAULT_SHELL source $DEFAULT_SHELL fi PS="$ " source $DEFAULT_SHELL # update golang version install_banner "Update Golang version" wget -qO- https://raw.githubusercontent.com/udhos/update-golang/master/update-golang.sh | bash 2>/dev/null GO_BIN=$(which go) [[ -f /usr/local/go/bin/go ]] && GO_BIN=/usr/local/go/bin/go ## # Install go stuff ## install_banner "amass" $GO_BIN get -u github.com/OWASP/Amass/... 
install_banner "subfinder" $GO_BIN get -u github.com/subfinder/subfinder install_banner "gobuster" $GO_BIN get -u github.com/OJ/gobuster install_banner "aquatone" $GO_BIN get -u github.com/michenriksen/aquatone install_banner "gitrob" $GO_BIN get -u github.com/michenriksen/gitrob install_banner "subjack" $GO_BIN get -u github.com/haccer/subjack install_banner "tko-subs" $GO_BIN get -u github.com/anshumanbh/tko-subs install_banner "subzy" $GO_BIN get -u github.com/lukasikic/subzy install_banner "goaltdns" $GO_BIN get -u github.com/subfinder/goaltdns install_banner "gitleaks" $GO_BIN get -u github.com/zricethezav/gitleaks install_banner "gowitness" $GO_BIN get -u github.com/sensepost/gowitness install_banner "webanalyze" $GO_BIN get -u github.com/rverton/webanalyze/... install_banner "assetfinder" $GO_BIN get -u github.com/tomnomnom/assetfinder install_banner "waybackurls" $GO_BIN get -u github.com/tomnomnom/waybackurls install_banner "meg" $GO_BIN get -u github.com/tomnomnom/meg install_banner "httprobe" $GO_BIN get -u github.com/tomnomnom/httprobe install_banner "unfurl" $GO_BIN get -u github.com/tomnomnom/unfurl install_banner "filter-resolved" $GO_BIN get -u github.com/tomnomnom/hacks/filter-resolved install_banner "ffuf" $GO_BIN get -u github.com/ffuf/ffuf install_banner "rgf" $GO_BIN get -u github.com/j3ssie/rgf install_banner "go cli-utils" $GO_BIN get -u github.com/j3ssie/go-auxs/getIP $GO_BIN get -u github.com/j3ssie/go-auxs/just-resolved cp $GO_DIR/* "$PLUGINS_PATH/go/" 2>/dev/null # install_banner "observatory" # npm install -g observatory-cli 2>/dev/null # install massdns install_banner "massdns" cd $PLUGINS_PATH git clone https://github.com/blechschmidt/massdns cd massdns if [[ "$OSTYPE" == "darwin"* ]]; then make nolinux else make fi cd $CWD # findomain install_banner "findomain" if [[ "$OSTYPE" == "darwin"* ]]; then wget -q -O $PLUGINS_PATH/findomain https://github.com/Edu4rdSHL/findomain/releases/latest/download/findomain-osx else wget -q -O 
$PLUGINS_PATH/findomain https://github.com/Edu4rdSHL/findomain/releases/latest/download/findomain-linux fi chmod +x $PLUGINS_PATH/findomain ## # Install python stuff ## install_banner "truffleHog, wfuzz" pip install truffleHog cd $PLUGINS_PATH install_banner "rgf signatures" git clone https://github.com/j3ssie/rgf 2>/dev/null mkdir -p ~/.rgf/ 2>/dev/null cp -R $PLUGINS_PATH/rgf/signatures/* ~/.rgf/ # install_banner "testssl.sh" # git clone https://github.com/drwetter/testssl.sh 2>/dev/null install_banner "Metabigor" git clone https://github.com/j3ssie/Metabigor 2>/dev/null pip3 install -r Metabigor/requirements.txt install_banner "bass" git clone https://github.com/Abss0x7tbh/bass 2>/dev/null pip3 install -r bass/requirements.txt install_banner "dirsearch" git clone https://github.com/maurosoria/dirsearch 2>/dev/null install_banner "Arjun" git clone https://github.com/s0md3v/Arjun 2>/dev/null install_banner "CORStest" git clone https://github.com/RUB-NDS/CORStest 2>/dev/null install_banner "LinkFinder" git clone https://github.com/GerbenJavado/LinkFinder.git 2>/dev/null pip3 install -r LinkFinder/requirements.txt cd $PLUGINS_PATH/LinkFinder/ python3 setup.py install echo -e "\033[1;32m[+] Installing done... \033[1;37m"
/* eslint-disable */
import "bootstrap";
import "./style.css";
import "./assets/img/rigo-baby.jpg";
import "./assets/img/4geeks.ico";

// Returns a random card value between 1 and 13 (1=A, 11=J, 12=Q, 13=K).
function generateRandomNumber() {
  var number = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13];
  let indexNumber = Math.floor(Math.random() * number.length);
  return number[indexNumber];
}

// Returns a random suit name; the name doubles as the card's CSS class.
function generateRandomPalo() {
  let palo = ["heart", "diamond", "club", "spade"];
  let indexPalo = Math.floor(Math.random() * palo.length);
  return palo[indexPalo];
}

//Función que crea la carta en HTML
function dibujarCarta(palo, numero) {
  let carta = document.createElement("div");
  carta.classList.add("card");
  let contenido = document.createElement("span");
  contenido.classList.add("number", palo);
  // Map ace and face-card values onto their display letters.
  if (numero == "1") numero = "A";
  if (numero == "11") numero = "J";
  if (numero == "12") numero = "Q";
  if (numero == "13") numero = "K";
  contenido.innerHTML = numero;
  carta.appendChild(contenido);
  return carta;
}

// Removes the current card container (if any) so repeated draws/sorts
// never stack multiple containers in the DOM. Replaces three copies of
// this logic scattered through the original file.
function removerContainerCartas() {
  let containerCartas = document.querySelector(".container-card");
  if (containerCartas != null) {
    document.querySelector(".padre").removeChild(containerCartas);
  }
}

// Creates a fresh, empty .container-card under .padre and returns it.
function crearContainerCartas() {
  let newContainerCartas = document.createElement("div");
  newContainerCartas.classList.add("container-card");
  document.querySelector(".padre").appendChild(newContainerCartas);
  return newContainerCartas;
}

var listaCartas = [];

// Draws N random cards (N read from the #input field) and records them
// as [numero, palo] pairs in listaCartas for later sorting.
function generadorDeCartas() {
  listaCartas = [];
  // parseInt with explicit radix; an empty/invalid input draws 0 cards.
  var inputValue = parseInt(document.getElementById("input").value, 10) || 0;
  removerContainerCartas();
  let newContainerCartas = crearContainerCartas();
  for (var i = 0; i < inputValue; i++) {
    let palo = generateRandomPalo();
    let numero = generateRandomNumber();
    let cartaGenerada = dibujarCarta(palo, numero);
    newContainerCartas.appendChild(cartaGenerada);
    listaCartas.push([numero, palo]);
  }
  // Log once after the loop (the original logged the whole list once per card).
  console.log(listaCartas);
}

// In-place bubble sort on the card values (index 0 of each pair).
const bubbleSort = arr => {
  let wall = arr.length - 1; //we start the wall at the end of the array
  while (wall > 0) {
    let index = 0;
    while (index < wall) {
      //compare the adjacent positions, if the right one is bigger, we have to swap
      if (arr[index][0] > arr[index + 1][0]) {
        let aux = arr[index];
        arr[index] = arr[index + 1];
        arr[index + 1] = aux;
      }
      index++;
    }
    wall--; //decrease the wall for optimization
  }
  console.log("Ordenado", arr);
  return arr;
};

function limpiarInput() {
  // BUG FIX: the original assigned a single space (" "), which left a
  // non-empty value in the field; clear it completely instead.
  document.getElementById("input").value = "";
}

// Re-renders the drawn cards in ascending order of value.
function ordenar() {
  let cartasOrdenadas = bubbleSort(listaCartas);
  // BUG FIX: remove the unsorted container first; the original appended
  // a second .container-card, duplicating every card on screen.
  removerContainerCartas();
  let newContainerCartas = crearContainerCartas();
  console.log("Ordenadas", cartasOrdenadas);
  for (var i = 0; i < cartasOrdenadas.length; i++) {
    let palo = cartasOrdenadas[i][1];
    let numero = cartasOrdenadas[i][0];
    newContainerCartas.appendChild(dibujarCarta(palo, numero));
  }
}

window.onload = function() {
  var botonGenerador = document.getElementById("draw");
  botonGenerador.addEventListener("click", generadorDeCartas);

  var btnClear = document.getElementById("clear");
  btnClear.addEventListener("click", function() {
    removerContainerCartas();
    limpiarInput();
    listaCartas = [];
  });

  let btnOrdenar = document.getElementById("ordenar");
  btnOrdenar.addEventListener("click", ordenar);
};
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { DashboardTripsComponent } from './dashboard-trips.component';

// Unit test spec for DashboardTripsComponent (Angular CLI-generated shape).
describe('DashboardTripsComponent', () => {
  let component: DashboardTripsComponent;
  let fixture: ComponentFixture<DashboardTripsComponent>;

  // Compile the component's template/styles asynchronously once per spec run.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ DashboardTripsComponent ]
    })
    .compileComponents();
  }));

  // Create a fresh component instance and trigger initial change
  // detection before every individual test.
  beforeEach(() => {
    fixture = TestBed.createComponent(DashboardTripsComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  // Smoke test: the component can be instantiated at all.
  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
import codecs
import jieba


def process_chinese_text(input_file, output_file):
    """Segment each line of a Chinese text file with jieba.

    Reads ``input_file`` line by line, tokenizes each line in accurate
    (non-``cut_all``) mode, and writes the space-joined tokens to
    ``output_file``, one segmented line per input line.

    Args:
        input_file: Path of the UTF-8 source text file.
        output_file: Path of the UTF-8 destination file (overwritten).
    """
    # Context managers guarantee both handles are closed even if jieba
    # raises mid-file; the original leaked both handles on any error.
    with codecs.open(input_file, 'r', encoding='utf-8') as reader, \
            codecs.open(output_file, 'w', encoding='utf-8') as writer:
        for line_num, line in enumerate(reader, start=1):
            print('processing line: {l} article...'.format(l=line_num))
            seg_list = jieba.cut(line, cut_all=False)
            writer.write(' '.join(seg_list) + '\n')


# Example usage
input_file = 'input.txt'
output_file = 'output.txt'
process_chinese_text(input_file, output_file)
#!/bin/bash
# Builds and installs the libvmod-tbf Varnish module from a checkout in /tmp.
cd /tmp/libvmod-tbf
./bootstrap
# VARNISHSRC must point at a configured Varnish source tree; VMODDIR is
# where the built vmod shared object gets installed.
./configure VARNISHSRC=/tmp/Varnish-Cache VMODDIR=/usr/local/lib/varnish/vmods
make
make install
# Read an integer from the user and clamp negative values up to zero.
user_input = int(input('Please give a number: '))
user_input = max(user_input, 0)
package com.flysea.gles;

import java.util.ArrayList;

/**
 * Collection of {@link Model} instances, providing the standard data
 * format consumed by the rendering engine.
 * (Translated from the original Chinese comment.)
 * Created by Liangjun on 2016/1/18.
 */
public class ModelCollection {
    // Backing list holding every model added to this collection.
    ArrayList<Model> _arrModel = new ArrayList<Model>();

    public ModelCollection() {
    }

    /** Appends a model to the collection. */
    public void AddModel(Model model) {
        _arrModel.add(model);
    }

    /** Returns the live (mutable) backing list of models. */
    public ArrayList<Model> getModelList() {
        return _arrModel;
    }
}
#!/bin/bash
# Trains the vgg16_det network on the LINEMOD "driller" dataset.
# First CLI argument selects the GPU (via CUDA_VISIBLE_DEVICES).

set -x
set -e

export PYTHONUNBUFFERED="True"
export CUDA_VISIBLE_DEVICES=$1

# Mirror all stdout/stderr into a timestamped log file as well as the console.
LOG="experiments/logs/linemod_driller_det_train.txt.`date +'%Y-%m-%d_%H-%M-%S'`"
exec &> >(tee -a "$LOG")
echo Logging output to "$LOG"

# Preload tcmalloc as the allocator for the training process.
export LD_PRELOAD=/usr/lib/libtcmalloc.so.4

# NOTE(review): --gpu is hard-coded to 0 because CUDA_VISIBLE_DEVICES
# above already remaps the chosen physical GPU to logical index 0.
time ./tools/train_net.py --gpu 0 \
  --network vgg16_det \
  --weights data/imagenet_models/vgg16.npy \
  --imdb linemod_driller_train \
  --cfg experiments/cfgs/linemod_driller_det.yml \
  --iters 160000
#!/bin/bash
# Tears down the CSYE6225 networking stack — subnets, route table,
# internet gateway and finally the VPC — looked up by the stack name
# used when they were created.
#
# Usage: sh csye6225-aws-networking-teardown.sh <STACK_NAME> <AWS_REGION>
#
# Fixes vs. original: removed the duplicate "#!/bin/sh" shebang (only the
# first shebang is honored), every failure now exits non-zero instead of
# exit 0 (which made failed teardowns look successful to callers/CI),
# variable expansions are quoted, and the repeated error-check
# boilerplate is factored into a helper.

if test "$#" -ne 2; then
    echo "Illegal number of parameters. Please provide all required parameters as follows:"
    echo "sh csye6225-aws-networking-teardown.sh <STACK_NAME> <AWS_REGION>"
    exit 1
fi

STACK_NAME="$1"
AWS_REGION="$2"

# Select the AWS CLI profile matching the target region/environment.
if [ "$AWS_REGION" = "us-east-1" ]; then
    export AWS_PROFILE=dev
elif [ "$AWS_REGION" = "us-east-2" ]; then
    export AWS_PROFILE=prod
else
    echo "Wrong region name"
    exit 1
fi

# fail_if_error <status> <message>: abort with a non-zero exit when the
# previous command failed.
fail_if_error() {
    if [ "$1" -ne 0 ]; then
        echo "$2"
        exit 1
    fi
}

## Getting the VPC ID
vpc_name="$STACK_NAME-csye6225-vpc"
vpc_id=$(aws ec2 describe-vpcs --query "Vpcs[?Tags[?Key=='Name']|[?Value=='$vpc_name']].VpcId" --output text)
echo "VPC ID: '$vpc_id'"
if [ -z "$vpc_id" ]; then
    echo "No VPCs found"
    exit 0
fi

ig_name="$STACK_NAME-csye6225-ig"
ig_id=$(aws ec2 describe-internet-gateways --query "InternetGateways[?Tags[?Key=='Name']|[?Value=='$ig_name']].InternetGatewayId" --output text)
echo "IG ID: '$ig_id'"

rt_name="$STACK_NAME-csye6225-rt"
route_table_id=$(aws ec2 describe-route-tables --query "RouteTables[?Tags[?Key=='Name']|[?Value=='$rt_name']].RouteTableId" --output text)
echo "Route Table ID: '$route_table_id'"

subnet1_name="$STACK_NAME-csye6225-subnet1"
subnet1_id=$(aws ec2 describe-subnets --query "Subnets[?Tags[?Key=='Name']|[?Value=='$subnet1_name']].SubnetId" --output text)
echo "Subnet 1 ID: '$subnet1_id'"

subnet2_name="$STACK_NAME-csye6225-subnet2"
subnet2_id=$(aws ec2 describe-subnets --query "Subnets[?Tags[?Key=='Name']|[?Value=='$subnet2_name']].SubnetId" --output text)
echo "Subnet 2 ID: '$subnet2_id'"

subnet3_name="$STACK_NAME-csye6225-subnet3"
subnet3_id=$(aws ec2 describe-subnets --query "Subnets[?Tags[?Key=='Name']|[?Value=='$subnet3_name']].SubnetId" --output text)
echo "Subnet 3 ID: '$subnet3_id'"

# Delete the three subnets.
aws ec2 delete-subnet --subnet-id "$subnet1_id"
fail_if_error $? "Failed to delete subnet 1"
echo "Subnet 1 deleted successfully"
echo "======================== DELETED SUBNET1 ============================"

aws ec2 delete-subnet --subnet-id "$subnet2_id"
fail_if_error $? "Failed to delete subnet 2"
echo "Subnet 2 deleted successfully"
echo "======================== DELETED SUBNET2 ============================"

aws ec2 delete-subnet --subnet-id "$subnet3_id"
fail_if_error $? "Failed to delete subnet 3"
echo "Subnet 3 deleted successfully"
echo "======================== DELETED SUBNET3 ============================"

# Remove the default route, then the route table itself.
aws ec2 delete-route --route-table-id "$route_table_id" --destination-cidr-block 0.0.0.0/0
echo "======================== DELETED ROUTE ============================"

aws ec2 delete-route-table --route-table-id "$route_table_id"
fail_if_error $? "Failed to delete route table"
echo "Route Table deleted successfully"
echo "======================== DELETED ROUTE TABLE ============================"

# The internet gateway must be detached from the VPC before deletion.
aws ec2 detach-internet-gateway --internet-gateway-id "$ig_id" --vpc-id "$vpc_id"
fail_if_error $? "Failed to detach IG from VPC"
echo "Internet Gateway detached from VPC successfully"
echo "======================== DETACH INTERNET GATEWAY ============================"

aws ec2 delete-internet-gateway --internet-gateway-id "$ig_id"
fail_if_error $? "Failed to delete IG"
echo "Internet Gateway deleted successfully"
echo "======================== DELETED INTERNET GATEWAY ============================"

# Finally, delete the (now empty) VPC.
aws ec2 delete-vpc --vpc-id "$vpc_id"
fail_if_error $? "Failed to delete VPC"
echo "VPC deleted successfully"
echo "======================== DELETED VPC ============================"

echo "Completed."
#!/bin/bash
# Parses tournament/collection data via rake, then builds and deploys
# the static site with Middleman. Intended for CI: git identity comes
# from USER_NAME / USER_EMAIL environment variables.

# options explanation at http://redsymbol.net/articles/unofficial-bash-strict-mode/
set -euo pipefail
set -x

git config --global user.name "$USER_NAME"
git config --global user.email "$USER_EMAIL"

git submodule init
git submodule update

# Install gems and run the data-parsing tasks at the repo root.
bundle
rake website:parse_tournaments
rake website:parse_collections

# The site itself has its own Gemfile under website/.
cd website
bundle
middleman build
middleman deploy
#include <iostream> #include <string> class Song { private: std::string title = ""; std::string artist = ""; int duration = 0; public: // constructors Song(std::string title, std::string artist, int duration) : title(title), artist(artist), duration(duration) {}; Song() {}; // accessors std::string getTitle() const { return title; }; std::string getArtist() const { return artist; }; int getDuration() const { return duration; }; // mutators void setTitle(std::string title) { this->title = title; }; void setArtist(std::string artist) { this->artist = artist; }; void setDuration(int duration) { this->duration = duration; }; }; int main() { Song song("Lose Yourself","Eminem",250); std::cout << "Title: " << song.getTitle() << ", Artist: " << song.getArtist() << ", Duration: " << song.getDuration() << std::endl; return 0; }
import { setStyle } from '@/helpers/utils';
import { key } from '../config';

// Change handler for this config key: applies the current value as a
// CSS drop-shadow filter on the table element, then clears the per-key
// "changed" flag in the change list.
export default {
  value() {
    // NOTE(review): `this` is bound by the caller and is assumed to
    // provide tableEl, the dynamic value under `key`, and the
    // changeList bookkeeping object — confirm against the call site.
    setStyle(this.tableEl, {
      filter: `drop-shadow(${this[key]})`,
    });
    this.changeList[key].changed = false;
  },
};
import {
  bind,
  UIBorderlessTextField,
  UIColor,
  UIFlowCell,
  UIImage,
  UIPrimaryButton,
  UIRow,
  UISeparator,
  UISpacer,
} from "typescene";
import { styles } from "../../../styles";

// Comment editor card: a bordered cell with a multi-line draft field on
// top and a meta/submit bar below. Hidden entirely while the user is
// not logged in.
export default UIFlowCell.with(
  {
    hidden: bind("!userService.isLoggedIn"),
    borderColor: UIColor.Text.alpha(0.2),
    borderRadius: 4,
    borderThickness: 1,
    layout: { clip: true },
  },

  // top part of the editor card: text field itself
  UIFlowCell.with(
    UIRow.with(
      UIBorderlessTextField.with({
        placeholder: "Write a comment...",
        // Draft text is bound from state; edits propagate back up via
        // the UpdateCommentDraft action.
        value: bind("commentDraft"),
        onInput: "+UpdateCommentDraft",
        multiline: true,
        dimensions: { height: 100 },
        style: styles.formField.extend({
          decoration: {
            background: UIColor.White,
            padding: 16,
          },
          textStyle: { fontSize: 16, lineHeight: 1.4 },
        }),
      })
    )
  ),
  UISeparator,

  // meta data and submit button
  UIFlowCell.with(
    {
      background: UIColor.Background.lum(-0.05, true),
      padding: { left: 24, right: 16, y: 8 },
    },
    UIRow.with(
      // Current user's avatar, rendered as a small circle.
      UIImage.with({
        url: bind("userService.profile.image"),
        dimensions: { width: 16, height: 16 },
        decoration: { borderRadius: 16 },
      }),
      UISpacer,

      // submit button (disabled when comment field is empty)
      UIPrimaryButton.with({
        label: "Post Comment",
        disabled: bind("!commentDraft"),
        onClick: "+PostComment",
        style: styles.formButton.extend({
          textStyle: { fontSize: 14, lineHeight: 1, bold: true },
        }),
      })
    )
  )
);
#!/bin/bash
# Toolbox container entry point: prints a diagnostic snapshot of the
# runtime environment (identity, capabilities, env, network, Kubernetes
# token), then runs optional user-supplied init/run hook scripts, or
# idles forever so the container stays available for `exec`.

echo '
******************************************
* WELCOME TO TOOLBOX CONTAINER *
******************************************
'
echo OS: $(cat /etc/redhat-release)
echo Time: $(date)

echo '
******
* ID *
******
'
id

echo '
*********
* CAPSH *
*********
'
# Show the capability sets granted to this container.
capsh --print

echo '
***************
* ENVIRONMENT *
***************
'
env

echo '
**********************
* NETWORK INTERFACES *
**********************
'
ip address

echo '
************************
* KUBERNETES API TOKEN *
************************
'
# Decode the JWT payload (the 2nd dot-separated segment) of the mounted
# service-account token; base64url is converted to plain base64 first.
if [ -r /var/run/secrets/kubernetes.io/serviceaccount/token ]; then
    jq -R 'gsub("-";"+") | gsub("_";"/") | split(".") | .[1] | @base64d | fromjson' \
        /var/run/secrets/kubernetes.io/serviceaccount/token
else
    echo "No token found!"
fi

# Optional one-shot initialization hook; a non-zero exit aborts startup
# with that same exit code.
custom_init=/toolbox/init.sh
if [ -r $custom_init ]; then
    echo '
***********************
* CUSTOM INIT SCRIPT *
***********************
'
    source $custom_init
    exit_code=$?
    echo
    echo Init script completed with exit code $exit_code
    if [ $exit_code -ne 0 ]; then
        exit $exit_code
    fi
fi

# Optional main workload hook; when absent, block until SIGTERM/SIGINT.
custom_run=/toolbox/run.sh
if [ -r $custom_run ]; then
    echo '
***********************
* CUSTOM RUN SCRIPT *
***********************
'
    source $custom_run
    exit_code=$?
    echo
    echo Run script completed with exit code $exit_code
else
    echo
    echo Press Ctrl-C to exit ...
    # block here
    # Background sleep + wait keeps the shell responsive to TERM/INT
    # traps while idling indefinitely.
    trap : TERM INT
    sleep infinity &
    wait
fi
<filename>models/Animal.js<gh_stars>0
// import important parts of sequelize library
const { Model, DataTypes } = require('sequelize');
// import our database connection from config.js
const sequelize = require('../config/connection');

// Initialize Category model (table) by extending off Sequelize's Model class
class Animal extends Model {}

// set up fields and rules for Animal model
Animal.init(
  {
    // define columns
    // Surrogate auto-incrementing primary key.
    id: {
      type: DataTypes.INTEGER,
      allowNull: false,
      primaryKey: true,
      autoIncrement: true,
    },
    // Required display name of the animal.
    name: {
      type: DataTypes.STRING,
      allowNull: false,
    },
    // Optional free-form description.
    description: {
      type: DataTypes.TEXT,
    },
    // FK to category.id (non-unique: many animals per category).
    category_id: {
      type: DataTypes.INTEGER,
      references: {
        model: 'category',
        key: 'id',
        unique: false
      },
    },
    // Optional location string (free text, not geocoded).
    location: {
      type: DataTypes.STRING,
    },
    // Defaults to the insertion timestamp.
    date_created: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
    comment: {
      type: DataTypes.STRING,
    },
    // FK to the user that created the record.
    user_id: {
      type: DataTypes.INTEGER,
      references: {
        model: 'user',
        key: 'id',
      },
    },
  },
  {
    // Table options: snake_case columns, exact table name "animal",
    // and no automatic createdAt/updatedAt columns.
    sequelize,
    timestamps: false,
    freezeTableName: true,
    underscored: true,
    modelName: 'animal',
  }
);

module.exports = Animal;
#!/bin/sh
# Converts every SVG in the batch directory to a 512x512 PNG via cairosvg.

svgDirectory="output/batch-one"
pngDirectory="output/png-batch-one"

# Ensure the destination exists (the original assumed it already did and
# cairosvg would fail otherwise).
mkdir -p "${pngDirectory}"

for file in "${svgDirectory}"/* ; do
    # Strip the directory prefix to get the bare file name.
    fileName=${file##*/}
    echo "$fileName"
    # BUG FIX: replace the extension instead of appending ".png" to it —
    # the original produced names like "foo.svg.png".
    cairosvg "${svgDirectory}/${fileName}" -f png \
        -o "${pngDirectory}/${fileName%.*}.png" \
        --width 512 --height 512 --output-width 512 --output-height 512
done
<reponame>vovajr11/swf-client import axios from 'axios'; import notificationTypes from '@components/Notification/notificationTypes'; import { ICreateQuiz } from '@interfaces/quizPuzzle.interface'; export const createQuiz = async (data: ICreateQuiz) => { try { console.log(data, 'data'); // await axios.post('/quizzes/add-quiz-choose-the-correct-answer', data); notificationTypes.notificationSuccess('Квіз додано!'); // const { data } = await axios.get(`/chapters/${id}`); // return data; } catch (error: any) { notificationTypes.notificationWarn(error.response.data.message); } };
<filename>src/main/java/com/sinergise/sentinel/byoctool/ingestion/ByocIngestor.java package com.sinergise.sentinel.byoctool.ingestion; import com.sinergise.sentinel.byoctool.cli.CoverageTracingConfig; import com.sinergise.sentinel.byoctool.coverage.CoverageCalculator; import com.sinergise.sentinel.byoctool.ingestion.storage.ObjectStorageClient; import com.sinergise.sentinel.byoctool.sentinelhub.ByocClient; import com.sinergise.sentinel.byoctool.sentinelhub.models.ByocCollection; import com.sinergise.sentinel.byoctool.sentinelhub.models.ByocTile; import com.sinergise.sentinel.byoctool.utils.JtsUtils; import lombok.*; import lombok.experimental.Accessors; import lombok.extern.log4j.Log4j2; import org.geojson.GeoJsonObject; import java.io.IOException; import java.nio.file.Path; import java.time.Instant; import java.time.LocalDateTime; import java.util.*; import java.util.concurrent.*; import java.util.function.Consumer; import java.util.regex.Pattern; import java.util.stream.Collectors; import static com.sinergise.sentinel.byoctool.sentinelhub.models.ByocTile.BAND_PLACEHOLDER; @Log4j2 @RequiredArgsConstructor @Accessors(chain = true) public class ByocIngestor { private static final Pattern TIFF_FILE_PATTERN = Pattern.compile("\\.(?i)tiff?$"); private final ByocClient byocClient; private final ObjectStorageClient objectStorageClient; @Setter private Executor executor = ForkJoinPool.commonPool(); @Setter private CogFactory cogFactory = new CogFactory(); @Setter private CoverageTracingConfig tracingConfig; @Setter private Consumer<Tile> onTileIngestionStarted; @Setter private Consumer<Tile> onTileIngestionEnded; @Setter private Consumer<Tile> onTileIngested; public Collection<String> ingest(String collectionId, Collection<Tile> tiles) { ByocCollection collection = byocClient.getCollection(collectionId) .orElseThrow(() -> new RuntimeException("Collection not found.")); CompletionService<Optional<String>> completionService = new ExecutorCompletionService<>(executor); 
// NOTE(review): this span starts inside a method whose signature is above this
// chunk; the statements below are its tail, followed by the helpers and nested
// types it relies on.

// Submit one ingestion task per tile that does not already exist in the
// collection. `futures` is only consulted afterwards as a count of how many
// results to drain from the completion service.
List<Future<?>> futures = new LinkedList<>();
for (Tile tile : tiles) {
    if (doesTileExist(collection, tile)) {
        System.out.printf("Skipping tile \"%s\" because it exists%n", tile.path());
    } else {
        futures.add(completionService.submit(() -> ingestTile(collection, tile)));
    }
}
// Drain exactly futures.size() results; take() yields them in completion
// order, not submission order.
List<String> createdTiles = new LinkedList<>();
for (int i = 0; i < futures.size(); i++) {
    try {
        completionService.take().get()
            .ifPresent(createdTiles::add);
    } catch (ExecutionException e) {
        // Known ingestion failures are logged tersely; anything unexpected
        // keeps its full stack trace.
        if (e.getCause() instanceof IngestionException) {
            log.error(e.getMessage());
        } else {
            log.error("Unexpected error occurred", e);
        }
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
}
return createdTiles;
}

/**
 * Reports whether a tile with this path already exists in the collection.
 * Paths that do not contain the band placeholder are normalized to
 * "&lt;path&gt;/&lt;placeholder&gt;.tiff" before querying the BYOC service.
 */
private boolean doesTileExist(ByocCollection collection, Tile tile) {
    String finalPath = tile.path().contains(BAND_PLACEHOLDER)
        ? tile.path()
        : String.format("%s/%s.tiff", tile.path(), BAND_PLACEHOLDER);
    return byocClient.searchTile(collection.getId(), finalPath).isPresent();
}

/** Ingests a single tile; returns the created tile id, if a tile was created. */
private Optional<String> ingestTile(ByocCollection collection, Tile tile) throws IOException {
    return new IngestTile(collection, tile).ingest();
}

/**
 * One tile ingestion: converts input images to COGs, validates inputs and
 * outputs, uploads the COGs to S3 and registers the tile with the BYOC
 * service. Optional lifecycle callbacks (started / ingested / ended) are
 * invoked around the work.
 */
@RequiredArgsConstructor
class IngestTile {

    private final ByocCollection collection;
    private final Tile tile;

    private Optional<String> ingest() throws IOException {
        if (onTileIngestionStarted != null) {
            onTileIngestionStarted.accept(tile);
        }
        try {
            ByocTile byocTile = new ByocTile();
            byocTile.setPath(tile.path());
            byocTile.setSensingTime(tile.sensingTime());
            byocTile.setCoverGeometry(processFiles());
            String tileId = byocClient.createTile(collection.getId(), byocTile).getId();
            // Only reached on success; failures skip straight to finally.
            if (onTileIngested != null) {
                onTileIngested.accept(tile);
            }
            return Optional.of(tileId);
        } finally {
            // "ended" fires on success and failure alike.
            if (onTileIngestionEnded != null) {
                onTileIngestionEnded.accept(tile);
            }
        }
    }

    /**
     * Converts every input band to a COG, validates inputs and outputs,
     * uploads the COGs, and returns the tile's cover geometry: traced from
     * the images when tracing is configured and no explicit coverage was
     * given, otherwise the supplied coverage (or null when neither applies).
     */
    private GeoJsonObject processFiles() throws IOException {
        validateFiles(findTiffFiles(tile));
        Collection<CogSource> cogSources = new LinkedList<>();
        for (InputFile inputFile : tile.inputFiles()) {
            for (BandMap bandMap : inputFile.bandMaps()) {
                log.info("Creating COG out of image {} at index {}", inputFile.path(), bandMap.index());
                Path cogFile = cogFactory.createCog(tile, inputFile.path(), bandMap);
                cogSources.add(new CogSource(inputFile.path(), bandMap, cogFile));
            }
        }
        List<Path> cogPaths = cogSources.stream().map(CogSource::cogPath).collect(Collectors.toList());
        validateFiles(cogPaths);
        // Coverage tracing is skipped when the caller supplied a geometry.
        CoverageCalculator coverageCalculator = null;
        if (tracingConfig != null && tile.coverage() == null) {
            coverageCalculator = new CoverageCalculator(tracingConfig);
        }
        for (CogSource cogSource : cogSources) {
            Path inputFile = cogSource.inputPath();
            BandMap bandMap = cogSource.bandMap();
            Path cogPath = cogSource.cogPath();
            if (coverageCalculator != null) {
                log.info("Tracing coverage in image {} at index {}", inputFile, bandMap.index());
                coverageCalculator.addImage(cogPath);
            }
            String s3Key = String.format("%s/%s.tiff", tile.path(), bandMap.name());
            log.info("Uploading image {} at index {} to s3 {}", inputFile, bandMap.index(), s3Key);
            objectStorageClient.store(collection.getS3Bucket(), s3Key, cogPath);
        }
        if (coverageCalculator != null) {
            return JtsUtils.toGeoJson(coverageCalculator.getCoverage());
        } else if (tile.coverage() != null) {
            return tile.coverage();
        }
        return null;
    }

    /** Paths of input files that look like TIFFs, per TIFF_FILE_PATTERN. */
    private List<Path> findTiffFiles(Tile tile) {
        return tile.inputFiles().stream()
            .map(InputFile::path)
            .filter(path -> TIFF_FILE_PATTERN.matcher(path.toString()).find())
            .collect(Collectors.toList());
    }

    /** Throws an IngestionException listing all validation errors, if any. */
    private void validateFiles(List<Path> cogPaths) throws IOException {
        Collection<String> errors = TileValidation.validate(cogPaths);
        if (!errors.isEmpty()) {
            throw new IngestionException(tile, errors);
        }
    }
}

/** Description of one tile to ingest: storage path, sensing time, optional coverage, input files. */
@Value
@Accessors(fluent = true)
@Builder
public static class Tile {
    String path;
    Instant sensingTime;
    GeoJsonObject coverage;
    List<InputFile> inputFiles;
}

/** One source image plus the band mappings to extract from it. */
@Value
@Accessors(fluent = true)
public static class InputFile {
    Path path;
    List<BandMap> bandMaps;
}

/** Maps a band index in a source image to a named band, with optional COG tuning knobs. */
@RequiredArgsConstructor
@Getter
@Accessors(fluent = true)
public static class BandMap {
    private final int index;
    private final String name;
    // Optional settings; null/unset means the COG factory defaults apply
    // (presumably — confirm against CogFactory).
    private int[] overviewLevels;
    private Integer minSize;
    private String resampling;

    public BandMap setOverviewLevels(int[] levels) {
        Objects.requireNonNull(levels);
        this.overviewLevels = levels;
        return this;
    }

    public BandMap setMinSize(int minSize) {
        this.minSize = minSize;
        return this;
    }

    public BandMap setResampling(String resampling) {
        this.resampling = resampling;
        return this;
    }
}

/** Internal pairing of an input image, its band mapping and the produced COG path. */
@Value
@Accessors(fluent = true)
private static class CogSource {
    Path inputPath;
    BandMap bandMap;
    Path cogPath;
}
}
// src/stores/GameStore.js
import { EventEmitter } from "events";
import dispatcher from "../AppDispatcher";
import actionTypes from "../actions/actionTypes";

const CHANGE_EVENT = "change";

// Module-private game state, read by views through store.getGame().
let _game = {
  cards: [],
  players: [],
  currentPlayer: 0,
  visible: true,
};

/**
 * Flux store holding the current game. Views subscribe with
 * addChangeListener and pull state with getGame().
 */
class GameStore extends EventEmitter {
  addChangeListener(listener) {
    this.on(CHANGE_EVENT, listener);
  }

  removeChangeListener(listener) {
    this.removeListener(CHANGE_EVENT, listener);
  }

  emitChange() {
    this.emit(CHANGE_EVENT);
  }

  getGame() {
    return _game;
  }
}

const store = new GameStore();

// Dispatcher hook: CREATE and UPDATE both replace the game wholesale,
// DELETE clears it; unknown actions are ignored.
dispatcher.register((action) => {
  switch (action.actionType) {
    case actionTypes.CREATE_GAME:
    case actionTypes.UPDATE_GAME:
      _game = action.game;
      store.emitChange();
      break;
    case actionTypes.DELETE_GAME:
      _game = {};
      store.emitChange();
      break;
    default:
      break;
  }
});

export default store;
// <gh_stars>0 (repository metadata marker from the original dump)
'use strict';
/*global console:true */
/*!
 *
 * Novicell JavaScript Library v0.5
 * http://www.novicell.dk
 *
 * Copyright Novicell
 *
 */
// Prevent console errors in IE: legacy IE only defines `console` while the
// dev tools are open, so every method is stubbed with a shared no-op to keep
// stray console calls from throwing. `var console` is deliberate — in sloppy
// global scope it aliases the global without clobbering a real console.
if (typeof (console) === 'undefined') {
    var console = {};
    console.log = console.error = console.info = console.debug = console.warn = console.trace = console.dir = console.dirxml = console.group = console.groupEnd = console.time = console.timeEnd = console.assert = console.profile = function () { };
}

// Shorthand for console.log
function cl(d) { return console.log(d); }

// Init the novicell js lib namespace (idempotent if already defined elsewhere)
var novicell = novicell || {};

// On DOM ready: nag the developer with a banner while the boilerplate
// `projectName` global is still present, as a reminder to rename it.
$(function () {
    if (typeof projectName !== 'undefined') {
        $('body').prepend('<div class="debug" style="text-align:center;font-weight:bold;background:#66CE5F;padding:20px;margin-bottom:50px;">Hi there. Remember to rename the "projectName" object in master.js file :)</div>');
    }
});
#pragma once

#include <condition_variable>
#include <cstddef>
#include <vector>
#include <mutex>
#include <atomic>

// Capacity of a FrameBuffer ring (number of Frame slots).
constexpr unsigned CACHE_SIZE = 33101;

// Thin handle around a heap byte buffer of a fixed size.
// NOTE(review): copying a Frame presumably shares `_mem`; ownership/release
// appears to go through remove() — confirm against the implementation.
class Frame {
public:
    explicit Frame(unsigned b);      // allocate a buffer of b bytes
    char* get();                     // raw pointer to the buffer
    unsigned getBytes() const;       // buffer size in bytes
    void copyTo(void* dest);         // copy buffer contents into dest
    void remove();                   // release the underlying buffer
private:
    bool _delete{false};             // marks the buffer for deletion
    char* _mem;                      // owned byte buffer
    unsigned _bytes;                 // size of _mem in bytes
};

// Single-producer / single-consumer style frame ring with blocking reads.
class FrameBuffer {
public:
    // NOTE(review): `const Frame&&` cannot be moved-from (a const rvalue
    // reference forces a copy); the parameter should likely be `Frame&&` or
    // `const Frame&` — verify against the .cpp and call sites.
    void addFrame(const Frame&& f);
    // Copy the oldest frame into `data`; `ms` is presumably a wait timeout in
    // milliseconds (0 = wait indefinitely) — confirm in the implementation.
    int moveLastFrame(unsigned* data, unsigned ms = 0);
    void cancel();                   // wake blocked readers and abort waits
    void reset();                    // clear state for reuse
    std::size_t getWriteFrame() const;
    std::size_t getReadFrame() const;
    std::size_t currFrames() const;  // frames currently available to read
private:
    void incWriteFrame();
    void incReadFrame();
    bool _cancel{false};
    std::vector<Frame> _buf;
    std::size_t _curr_write_frame{0};
    std::size_t _curr_read_frame{0};
    std::atomic_size_t _available_frames{0};   // writer/reader handshake counter
    std::mutex _mutex;
    std::condition_variable _cv{};
};
/**
 * Vuex getters for the player module.
 * (The unused second `getters` parameter was removed from both functions —
 * Vuex passes it positionally, so dropping it is call-compatible.)
 */
const getters = {
  /**
   * The track in the current playlist whose `index` matches
   * `state.currentPlayIndex`, or `undefined` when none matches.
   */
  currentlyPlayedTrack(state) {
    return state.currentPlaylist.find((track) => track.index === state.currentPlayIndex);
  },

  /** The remote playlist exactly as stored in state. */
  remotePlaylist(state) {
    return state.remotePlaylist;
  },
};

export default getters;
"""Streamlit dashboard rendering a sales CSV as table, charts and summary stats."""
import streamlit as st
import pandas as pd

# Load the sales records that back every widget on the page.
data = pd.read_csv('sales_data.csv')

# Create the visualization dashboard
st.title('Sales Dashboard')

# Raw data table
st.dataframe(data)

# Line chart of the data over its index.
# (The return value was previously bound to an unused `chart` variable.)
st.line_chart(data)

# Bar chart of the same data
st.bar_chart(data)

# Table of summary statistics (count/mean/std/min/quartiles/max per column)
st.write(data.describe())
# ActiveRecord model for an article.
class Article < ActiveRecord::Base
  # Destroying an article also destroys its comments.
  has_many :comments, dependent: :destroy
  # A title is required and must be at least 5 characters long.
  validates :title, presence: true, length: { minimum: 5 }
end
#!/usr/bin/env bash # # Start the bootstrap leader node # set -e here=$(dirname "$0") # shellcheck source=multinode-demo/common.sh source "$here"/common.sh if [[ -n $SOLANA_CUDA ]]; then program=$solana_validator_cuda else program=$solana_validator fi args=() while [[ -n $1 ]]; do if [[ ${1:0:1} = - ]]; then if [[ $1 = --init-complete-file ]]; then args+=("$1" "$2") shift 2 elif [[ $1 = --gossip-port ]]; then args+=("$1" "$2") shift 2 elif [[ $1 = --dynamic-port-range ]]; then args+=("$1" "$2") shift 2 else echo "Unknown argument: $1" $program --help exit 1 fi else echo "Unknown argument: $1" $program --help exit 1 fi done if [[ -z $CI ]]; then # Skip in CI # shellcheck source=scripts/tune-system.sh source "$here"/../scripts/tune-system.sh fi setup_secondary_mount # These keypairs are created by ./setup.sh and included in the genesis block identity_keypair=$SOLANA_CONFIG_DIR/bootstrap-leader/identity-keypair.json vote_keypair="$SOLANA_CONFIG_DIR"/bootstrap-leader/vote-keypair.json storage_keypair=$SOLANA_CONFIG_DIR/bootstrap-leader/storage-keypair.json ledger_dir="$SOLANA_CONFIG_DIR"/bootstrap-leader [[ -d "$ledger_dir" ]] || { echo "$ledger_dir does not exist" echo echo "Please run: $here/setup.sh" exit 1 } args+=( --accounts "$SOLANA_CONFIG_DIR"/bootstrap-leader/accounts --enable-rpc-exit --identity "$identity_keypair" --ledger "$ledger_dir" --rpc-port 8899 --snapshot-interval-slots 100 --storage-keypair "$storage_keypair" --voting-keypair "$vote_keypair" --rpc-drone-address 127.0.0.1:9900 ) default_arg --gossip-port 8001 set -x # shellcheck disable=SC2086 # Don't want to double quote $program exec $program "${args[@]}"
#!/bin/bash #module load mpi/openmpi/3.1 module load mpi/impi/2018 #export I_MPI_HYDRA_BRANCH_COUNT=-1 path=/work/fh1-project-kalb/gw1960/data/#module executable="/home/fh1-project-kalb/gw1960/distributed-string-sorting/build/src/executables/prefix_doubling" numOfStrings=5000000 numOfIterations=3 byteEncoder=5 generator=2 MPIRoutine=2 dToNRatio=0.5 stringLength=1000 for sampler in 2 do for golombEncoding in 0 1 do #mpirun --mca coll_tuned_use_dynamic_rules 1 --mca coll_tuned_allgatherv_algorithm 1 --bind-to core --map-by core -report-bindings $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio --stringLength $stringLength --strongScaling #mpirun --bind-to core --map-by core $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio --stringLength $stringLength --strongScaling --golombEncodingPolicy $golombEncoding --path $path --sampleStringsPolicy $sampler --MPIRoutineAllToAll $MPIRoutine mpiexec.hydra -bootstrap slurm $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio --stringLength $stringLength --strongScaling --golombEncodingPolicy $golombEncoding --path $path --sampleStringsPolicy $sampler --MPIRoutineAllToAll $MPIRoutine done done #mpirun -np 2 $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio --stringLength $stringLength # #dToNRatio=0.4 #mpirun -np 2 $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio --stringLength $stringLength # #dToNRatio=0.8 #mpirun -np 2 $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio 
--stringLength $stringLength # #dToNRatio=1.0 #mpirun -np 2 $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio --stringLength $stringLength # #byteEncoder=1 #dToNRatio=0.2 #mpirun -np 2 $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio --stringLength $stringLength # #dToNRatio=0.4 #mpirun -np 2 $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio --stringLength $stringLength # #dToNRatio=0.8 #mpirun -np 2 $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio --stringLength $stringLength # #dToNRatio=1.0 #mpirun -np 2 $executable --size $numOfStrings --numberOfIterations $numOfIterations --byteEncoder $byteEncoder --generator $generator --dToNRatio $dToNRatio --stringLength $stringLength
package com.decathlon.ara.service.mapper;

import com.decathlon.ara.domain.Execution;
import com.decathlon.ara.service.dto.execution.ExecutionHistoryPointDTO;
import org.mapstruct.Mapper;

/**
 * Mapper for the entity Execution and its DTO ExecutionHistoryPointDTO.
 *
 * The implementation is generated by MapStruct; nested quality fields are
 * delegated to QualityThresholdMapper and QualitySeverityMapper.
 */
@Mapper(uses = { QualityThresholdMapper.class, QualitySeverityMapper.class })
public interface ExecutionHistoryPointMapper extends EntityMapper<ExecutionHistoryPointDTO, Execution> {

    // All methods are parameterized for EntityMapper (toEntity/toDto come from
    // the generic super-interface; no extra mappings are declared here).

}
<filename>znet/znet.go<gh_stars>1-10 // Package znet contains utilities for network communication. package znet import ( "fmt" "net" "strconv" "strings" "sync" "syscall" "time" "zgo.at/zstd/zstring" ) var ( privateCIDR []*net.IPNet privateCIDROnce sync.Once ) func setupPrivateCIDR() { // https://en.wikipedia.org/wiki/Reserved_IP_addresses blocks := []string{ "0.0.0.0/8", // Current network; RFC6890 "10.0.0.0/8", // Private network; RFC1918 "172.16.58.3/10", // shared address space; RFC6598 "127.0.0.1/8", // loopback; RFC6890 "169.254.0.0/16", // link local address; RFC3927 "172.16.0.0/12", // Private network; RFC1918 "192.0.0.0/24", // IETF protocol assignments; RFC6890 "192.0.2.0/24", // TEST-NET-1 documentation and examples; RFC5737 "192.168.0.0/16", // Private network; RFC1918 "192.168.127.12/24", // IPv6 to IPv4 relay; RFC7626 "198.18.0.0/15", // Benchmarking tests; RFC2544 "198.51.100.0/24", // TEST-NET-2 documentation and examples; RFC5737 "203.0.113.0/24", // TEST-NET-3 documentation and examples; RFC5737 "224.0.0.0/4", // Multicast; RFC 5771 "240.0.0.0/4", // Reserved (includes broadcast / 255.255.255.255); RFC 3232 //"::/0", // Default route "::/128", // Unspecified address "::1/128", // Loopback "fc00::/7", // Unique local address IPv6; RFC4193 "fe80::/10", // link local address "ff00::/8", // Multicast // TODO: these cause wrong matches; I need to look in to this. //"2000::/3", // Unicast // "2001:db8::/32", // Documentations and examples; RFC3849 //"2002::/16", // IPv6 to IPv4 relay; RFC7626 } privateCIDR = make([]*net.IPNet, 0, len(blocks)) for _, b := range blocks { _, cidr, _ := net.ParseCIDR(b) privateCIDR = append(privateCIDR, cidr) } } // PrivateIP reports if this is a private non-public IP address. func PrivateIP(addr net.IP) bool { privateCIDROnce.Do(setupPrivateCIDR) for _, c := range privateCIDR { if c.Contains(addr) { return true } } return false } // PrivateIPString reports if this is a private non-public IP address. 
// // This will return true for anything that is not an IP address, such as // "example.com" or "localhost". func PrivateIPString(ip string) bool { addr := net.ParseIP(RemovePort(strings.TrimSpace(ip))) if addr == nil { // Not an IP address? return true } return PrivateIP(addr) } // RemovePort removes the "port" part of an hostname. // // This only works for "host:port", and not URLs. See net.SplitHostPort. func RemovePort(host string) string { shost, _, err := net.SplitHostPort(host) if err != nil { // Probably doesn't have a port return host } return shost } // SafeDialer is only alllowed to connect to the listed networks and ports on // non-private addresses. // // Any attempt to connect to e.g. "127.0.0.1" will return an error. This is // intended for clients that should only connect to external resources from user // input. // // If the allowed lists are empty then "tcp4", "tcp6", "80", and "443" are used. // // The Timeout and KeepAlive are set to 30 seconds. // // Also see zhttputil.SafeTransport() and zhttputil.SafeClient(). 
func SafeDialer(allowedNets []string, allowedPorts []int) *net.Dialer { return &net.Dialer{ Control: socketControl(allowedNets, allowedPorts), // Same defaults as net/http.DefaultTransport Timeout: 30 * time.Second, KeepAlive: 30 * time.Second, DualStack: true, } } func socketControl(allowedNets []string, allowedPorts []int) func(string, string, syscall.RawConn) error { if len(allowedNets) == 0 { allowedNets = []string{"tcp4", "tcp6"} } ports := []string{"80", "443"} if len(allowedPorts) > 0 { ports = make([]string, 0, len(allowedPorts)) for _, p := range allowedPorts { ports = append(ports, strconv.Itoa(p)) } } return func(network, address string, _ syscall.RawConn) error { if !zstring.Contains(allowedNets, network) { return fmt.Errorf("znet.SafeDialer: network not in allowed list %v: %q", allowedNets, network) } host, port, err := net.SplitHostPort(address) if err != nil { return fmt.Errorf("znet.SafeDialer: invalid host/port pair: %q: %w", address, err) } if !zstring.Contains(ports, port) { return fmt.Errorf("znet.SafeDialer: port not in allowed list %v: %q", ports, port) } ip := net.ParseIP(host) if ip == nil { return fmt.Errorf("znet.SafeDialer: invalid IP address: %q", host) } if PrivateIP(ip) { return fmt.Errorf("znet.SafeDialer: not a public IP: %q", ip) } return nil } }
# (repo: Frost-Lee/insulin_calculator)
"""Image regulation and lens-distortion rectification utilities.

Rectification is available both through a C shared library (the ``*_c``
functions, fast path) and as pure-Python reference implementations.
"""
import numpy as np
import math
import cv2
import ctypes
import functools

from . import config


def center_crop(array):
    """ Crop the largest center square of an numpy array. Only the first
        two axis are considered.

    Args:
        array: The numpy array to crop. The array should have at least 2
            dimensions.

    Returns:
        The center cropped numpy array. The first two axis are of equal length.
    """
    assert len(array.shape) >= 2
    if array.shape[0] == array.shape[1]:
        return array
    shape_difference = abs(array.shape[0] - array.shape[1])
    offset = shape_difference // 2
    if array.shape[0] > array.shape[1]:
        return array[offset:array.shape[1] + offset, :]
    else:
        return array[:, offset:array.shape[0] + offset]


def regulate_image(image, calibration):
    """ Transpose, rectify, center crop and resize the image to a square of
        shape `config.UNIFIED_IMAGE_SIZE`.

    Since PIL image and extracted depth map uses a transposed coordinate
    system, they are supposed to be transposed back in order to match the
    camera intrinsics.

    Args:
        image: The image to be regulated. Represented as a numpy array with
            shape `(height, width, channel)`.
        calibration: The camera calibration data when capturing the image.

    Returns:
        The regulated image with shape specified by
        `config.UNIFIED_IMAGE_SIZE`, the shape stands for
        `(width, height, channel)`.
    """
    transposed_image = np.swapaxes(image, 0, 1)
    image_shape = transposed_image.shape
    # Scale so the short side matches the unified size before rectifying.
    scale = min(config.UNIFIED_IMAGE_SIZE) / min(image_shape[:2])
    resized_image = cv2.resize(
        transposed_image,
        (int(image_shape[1] * scale), int(image_shape[0] * scale))
    )
    # The distortion center is given at calibration resolution; rescale it.
    reference_scale = min(resized_image.shape[:2]) / min(calibration['intrinsic_matrix_reference_dimensions'])
    rectified_image = rectify_image_c(
        resized_image,
        np.array(calibration['lens_distortion_lookup_table']),
        np.array(calibration['lens_distortion_center']) * reference_scale
    )
    center_cropped_image = center_crop(rectified_image)
    return cv2.resize(center_cropped_image, config.UNIFIED_IMAGE_SIZE)


# Shared library with the fast rectification routines (loaded at import time).
undistort_dll = ctypes.CDLL(config.UNDISTORT_DLL_PATH)


def rectify_image_c(image, lookup_table, distortion_center):
    """ Get the rectified image, implemented in C.

    Args:
        image: The image to be rectified. Represented as a numpy array with
            shape `(width, height, channel)`. The dtype is `uint_8`.
        lookup_table: The lookuptable to rectify the image, represented as a
            one dimensional array. The dtype is `c_double` equivalent.
        distortion_center: The distortion center of the image, numpy array
            with shape `(2,)`. The dtype is `c_double` equivalent.

    Returns:
        The rectified image as numpy array with shape
        `(width, height, channel)`.
    """
    c_rectify_image = undistort_dll.rectify_image
    c_rectify_image.restype = ctypes.POINTER(
        ctypes.c_double * functools.reduce(lambda x, y: x * y, image.shape)
    )
    c_free_double_pointer = undistort_dll.free_double_pointer
    c_free_double_pointer.restype = None
    channel = 1 if len(image.shape) < 3 else image.shape[2]
    original_datatype = image.dtype
    # The C routine expects a contiguous double buffer.
    image = np.ascontiguousarray(image, dtype='double')
    raw_result = c_rectify_image(
        image.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
        image.shape[0],
        image.shape[1],
        channel,
        (ctypes.c_double * len(lookup_table))(*lookup_table),
        len(lookup_table),
        (ctypes.c_double * 2)(*distortion_center)
    ).contents
    reshaped_result = np.reshape(raw_result, image.shape).astype(original_datatype)
    # The C side allocates the result buffer; free it after copying out.
    c_free_double_pointer(raw_result)
    return reshaped_result


def get_lens_distortion_point_c(point, lookup_table, distortion_center, image_size):
    """ Get the position of a point after distortion specified by
        `lookup_table`, implemented in C.

    Args:
        point: The point position before distortion. numpy array with shape
            `(2,)`.
        lookup_table: The lookuptable to rectify the image, represented as a
            one dimensional array. The dtype is `c_double` equivalent.
        distortion_center: The distortion center of the image, numpy array
            with shape `(2,)`. The dtype is `c_double` equivalent.
        image_size: The size of the image, `(width, height)`. The dtype is
            `int`.
    """
    global undistort_dll
    c_get_lens_distortion_point = undistort_dll.get_lens_distortion_point
    c_get_lens_distortion_point.restype = ctypes.POINTER(ctypes.c_double * 2)
    c_free_double_pointer = undistort_dll.free_double_pointer
    c_free_double_pointer.restype = None
    raw_result = c_get_lens_distortion_point(
        (ctypes.c_int * 2)(*point),
        (ctypes.c_double * len(lookup_table))(*lookup_table),
        len(lookup_table),
        (ctypes.c_double * 2)(*distortion_center),
        (ctypes.c_int * 2)(*image_size)
    ).contents
    converted_result = (int(raw_result[0]), int(raw_result[1]))
    c_free_double_pointer(raw_result)
    return converted_result


def get_relative_index(value, min_value, step):
    """ Returning the relative bin index of a value given bin start and bin
        span.

    Args:
        value: The value to be converted to index.
        min_value: The minimum value of a value series.
        step: The span of the bin.
    """
    assert value >= min_value
    assert step > 0
    return math.floor((value - min_value) / step)


def rectify_image(image, lookup_table, distortion_center):
    """ Get the rectified image (pure-Python reference implementation).

    Args:
        image: The image to be rectified. Represented as a numpy array with
            shape `(width, height, channel)`.
        lookup_table: The lookuptable to rectify the image, represented as a
            one dimensional array.
        distortion_center: The distortion center of the image, numpy array
            with shape `(2,)`.

    Returns:
        The rectified image as numpy array with shape
        `(width, height, channel)`.
    """
    # Fix: `np.int` was removed in NumPy 1.24; the builtin `int` is the
    # documented equivalent and yields the same platform default integer.
    rectified_image = np.zeros(image.shape, dtype=int)
    for index in np.ndindex(image.shape[:2]):
        original_index = get_lens_distortion_point(
            np.array(index), lookup_table, distortion_center, image.shape[:2]
        )
        try:
            rectified_image[index] = image[original_index]
        except IndexError:
            # Source pixel falls outside the image; leave the destination 0.
            pass
    return rectified_image


def get_lens_distortion_point(point, lookup_table, distortion_center, image_size):
    """ Get the position of a point after distortion specified by
        `lookup_table`.

    Args:
        point: The point position before distortion. numpy array with shape
            `(2,)`.
        lookup_table: The lookuptable to rectify the image, represented as a
            one dimensional array.
        distortion_center: The distortion center of the image, numpy array
            with shape `(2,)`.
        image_size: The size of the image, `(width, height)`.
    """
    radius_max = np.sqrt(np.sum(np.maximum(distortion_center, image_size - distortion_center) ** 2))
    radius_point = np.sqrt(np.sum((point - distortion_center) ** 2))
    # Points beyond the table's reach use the last (outermost) magnification.
    magnification = lookup_table[-1]
    if radius_point < radius_max:
        # Linear interpolation between the two neighbouring table entries.
        relative_position = radius_point / radius_max * (len(lookup_table) - 1)
        frac = relative_position - math.floor(relative_position)
        magnification = lookup_table[math.floor(relative_position)] * (1.0 - frac) \
            + lookup_table[math.ceil(relative_position)] * frac
    return tuple(map(int, distortion_center + (point - distortion_center) * (1.0 + magnification)))
// Initialise the DataTable once the DOM is ready
// ($(handler) is jQuery's shorthand for $(document).ready(handler)).
$(function () {
  const tableConfig = {
    buttons: [
      {
        extend: 'print',
      },
    ],
  };
  $('#table_id').DataTable(tableConfig);
});
// (repo: adtac/hollow-heap)
#ifndef _COMPARE_NODE_H_
#define _COMPARE_NODE_H_

#include <utility>

/**
 * A comparison class that's used uniformly by Boost's heap data structures
 * to define the ordering between two keys of type pair<K, I>.
 *
 * Orders by the pair's first element (the key) only; the second element is
 * ignored. Returns true when n1's key is greater than n2's.
 *
 * Fix: the pairs are now taken by const reference instead of by value,
 * avoiding a copy of both pairs on every comparison.
 */
template<class K, class I>
struct compare_node {
    bool operator()(const std::pair<K, I>& n1, const std::pair<K, I>& n2) const {
        return n1.first > n2.first;
    }
};

#endif // _COMPARE_NODE_H_
# Mirror a remote rubygems repository locally using the rubygems-mirror gem.
# Relies on caller-provided variables: _n_repo_url (upstream), _r_mirror_tld
# and _n_mirror_repo_name (local target) — presumably set by the bashellite
# framework; verify against the calling context.
bashelliteProviderWrapperGem() {

  local ruby_ver="2.5.1"
  local mirror_file="${HOME}/.gem/.mirrorrc"

  # Write a fresh .mirrorrc describing the upstream and local mirror paths.
  utilMsg INFO "$(utilTime)" "Copying config contents to ${HOME}/.gem/.mirrorrc"
  if [[ ! -d "${HOME}/.gem" ]]; then
    mkdir ${HOME}/.gem
  fi
  echo "---"                                            >  ${mirror_file}
  echo "- from: ${_n_repo_url}"                         >> ${mirror_file}
  echo "  to: ${_r_mirror_tld}/${_n_mirror_repo_name}"  >> ${mirror_file}
  echo "  parallelism: 10"                              >> ${mirror_file}
  echo "  retries: 10"                                  >> ${mirror_file}
  echo "  delete: false"                                >> ${mirror_file}
  echo "  skiperror: true"                              >> ${mirror_file}
  echo "..."                                            >> ${mirror_file}

  # Check for installed dependent gems required for mirroring
  utilMsg INFO "$(utilTime)" "Checking for required gems for mirroring..."
  local gem_bin=${HOME}/.rubies/ruby-${ruby_ver}/bin/gem
  local gemlist=$(${gem_bin} list)
  # Install any missing dependency, re-checking the gem list after each
  # install; a failed install aborts the whole run.
  for gem in \
    hoe \
    net-http-persistent \
    rubygems-mirror \
    ; do
    if [[ "${gemlist}" != *"${gem}"* ]]; then
      utilMsg WARN "$(utilTime)" "${gem} gem dependency not found. Installing..."
      ${gem_bin} install ${gem}
      gemlist=$(${gem_bin} list)
      if [[ "${gemlist}" == *"${gem}"* ]]; then
        utilMsg INFO "$(utilTime)" "Gem ${gem} installed successfully..."
      else
        utilMsg FAIL "$(utilTime)" "Required gem ${gem} was NOT installed successfully; exiting." \
        && exit 1;
      fi
    else
      utilMsg INFO "$(utilTime)" "Required gem ${gem} already installed..."
    fi
  done

  # Perform the actual mirroring as configured in .mirrorrc above.
  utilMsg INFO "$(utilTime)" "Downloading gems from gem server..."
  ${gem_bin} mirror

}
def calculate_precision(tp, fp, tn):
    """Return the precision tp / (tp + fp).

    Falls back to 0.0 when no positive predictions were made (tp + fp == 0).
    The true-negative count `tn` is accepted for call compatibility but does
    not enter the formula.
    """
    predicted_positives = tp + fp
    try:
        result = tp / predicted_positives
    except ZeroDivisionError:
        # No positive predictions at all: define precision as 0.0.
        result = 0.0
    return result


# Test cases
print("Precision:", calculate_precision(25, 5, 70))
print("Precision:", calculate_precision(0, 0, 100))
// (repo: Arsylk — app/src/main/java/com/arsylk/mammonsmite/Async/AsyncWithDialog.java)
package com.arsylk.mammonsmite.Async;

import android.app.ProgressDialog;
import android.content.Context;
import android.os.AsyncTask;
import com.arsylk.mammonsmite.utils.Utils;

import java.lang.ref.WeakReference;

/**
 * An AsyncTask that can optionally show an indeterminate ProgressDialog while
 * running, with pluggable progress/post-execute callbacks. The Context is held
 * through a WeakReference to avoid leaking an Activity across the task's
 * lifetime.
 */
public abstract class AsyncWithDialog<Params, Progress, Result> extends AsyncTask<Params, Progress, Result> {
    protected WeakReference<Context> context;
    // When false, no dialog is created or dismissed.
    protected boolean showGui = true;
    // Message shown in the progress dialog.
    protected String message = "";
    protected ProgressDialog dialog = null;
    protected Utils.OnProgressUpdate<Progress> onProgressUpdate = null;
    protected Utils.OnPostExecute<Result> onPostExecute = null;

    // quick static task
    /** Convenience overload: runs silently (no dialog, empty message). */
    public static void execute(Context context, Utils.Callback doInBackgroundCallback, Utils.Callback onPostExecuteCallback) {
        AsyncWithDialog.execute(context, false, "", doInBackgroundCallback, onPostExecuteCallback);
    }

    /**
     * Runs {@code doInBackgroundCallback} on the background thread and
     * {@code onPostExecuteCallback} on the UI thread afterwards, optionally
     * showing a progress dialog with the given message.
     */
    public static void execute(Context context, boolean showGui, String message, final Utils.Callback doInBackgroundCallback, final Utils.Callback onPostExecuteCallback) {
        new AsyncWithDialog<Void, Void, Void>(context, showGui, message) {
            @Override
            protected Void doInBackground(Void... voids) {
                doInBackgroundCallback.onCall();
                return null;
            }

            @Override
            protected void onPostExecute(Void aVoid) {
                super.onPostExecute(aVoid);
                onPostExecuteCallback.onCall();
            }
        }.execute();
    }

    public AsyncWithDialog(Context context, boolean showGui) {
        init(context, showGui, "");
    }

    public AsyncWithDialog(Context context, boolean showGui, String message) {
        init(context, showGui, message);
    }

    // Shared constructor body: stash the context weakly and the GUI options.
    private void init(Context context, boolean showGui, String message) {
        this.context = new WeakReference<>(context);
        this.showGui = showGui;
        this.message = message;
    }

    /** Fluent setter for the per-value progress callback. */
    public AsyncWithDialog<Params, Progress, Result> setOnProgressUpdate(Utils.OnProgressUpdate<Progress> onProgressUpdate) {
        this.onProgressUpdate = onProgressUpdate;
        return this;
    }

    /** Fluent setter for the completion callback. */
    public AsyncWithDialog<Params, Progress, Result> setOnPostExecute(Utils.OnPostExecute<Result> onPostExecute) {
        this.onPostExecute = onPostExecute;
        return this;
    }

    @Override
    protected void onPreExecute() {
        // Build and show the spinner dialog on the UI thread before work starts.
        if(showGui) {
            dialog = new ProgressDialog(context.get());
            dialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
            dialog.setCancelable(false);
            dialog.setIndeterminate(true);
            dialog.setMessage(message);
            dialog.show();
        }
    }

    @Override
    protected void onProgressUpdate(Progress... values) {
        // Forward each published value to the callback, if one is registered.
        if(onProgressUpdate != null && values != null) {
            for(Progress value : values) {
                onProgressUpdate.onProgressUpdate(value);
            }
        }
    }

    @Override
    protected void onPostExecute(Result result) {
        if(showGui) {
            dialog.dismiss();
        }
        if(onPostExecute != null) {
            onPostExecute.onPostExecute(result);
        }
    }
}
import { forApp, forConsoleBase } from '@ali/console-base-messenger'; import _ from 'lodash'; import { getCookieValue } from '../utils/util'; import { regionList, regionIds, regionDomain } from '../global'; forApp.setRegions(regionList); export let regionId = (() => { let id = getCookieValue('currentRegionId'); if (!regionIds.includes(id)) { id = _.get(regionList, '[0].id'); forApp.setRegionId(id); } return id; })(); export function updateRegionId(id) { regionId = id; getRegionAppList(); } forConsoleBase.onSetRegionId(updateRegionId); export let currentPid = (() => { let id = getCookieValue('currentRegionId'); if (location.hash.indexOf('detail') > -1) { let arr = location.hash.substr(2).split('/'); // ["detail", "cn-beijing", "deydyr0nqp@f359ba6f8f1176c", "home"] return arr[2]; } else { return ''; } })(); export function updatePid(pid) { currentPid = decodeURIComponent(pid); } export function getRegionAppList(formatting = true) { const arr = window.APP_LIST || []; // region列表 // 获取当前region下的pid const arr2 = arr.filter((item) => { return item.regionId === regionId; }); // 格式化数据为wind数据格式 if (formatting) { return arr2.map((item) => { return { value: item.pid, label: item.siteName }; }); } else { return arr2; } } export function getRegionDomain(regionId) { return regionDomain[regionId]; }
/*******************************************************************************
 * Copyright 2016
 * Ubiquitous Knowledge Processing (UKP) Lab
 * Technische Universität Darmstadt
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package de.tudarmstadt.ukp.semsearch.cuneiform.dhd2017.wsd.algorithm;

import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;

import de.tudarmstadt.ukp.dkpro.wsd.algorithm.AbstractWSDAlgorithm;
import de.tudarmstadt.ukp.dkpro.wsd.algorithm.WSDAlgorithmIndividualBasic;
import de.tudarmstadt.ukp.dkpro.wsd.algorithm.WSDAlgorithmIndividualPOS;
import de.tudarmstadt.ukp.dkpro.wsd.si.POS;
import de.tudarmstadt.ukp.dkpro.wsd.si.SenseInventory;
import de.tudarmstadt.ukp.dkpro.wsd.si.SenseInventoryException;

/**
 * Word sense disambiguation baseline: for a given subject of disambiguation
 * it queries the sense inventory for all candidate senses and selects the one
 * marked as most frequently used. When the inventory carries no frequency
 * information, it degrades to the first-sense baseline.
 *
 * @author <NAME>
 */
public class MostFrequentSenseBaseline
    extends AbstractWSDAlgorithm
    implements WSDAlgorithmIndividualBasic, WSDAlgorithmIndividualPOS
{
    private final Logger logger = Logger.getLogger(getClass());

    public MostFrequentSenseBaseline(SenseInventory inventory)
    {
        super(inventory);
    }

    @Override
    public Map<String, Double> getDisambiguation(String sod, POS pos)
        throws SenseInventoryException
    {
        // No candidates for this POS: fall back to the POS-agnostic variant.
        List<String> candidates = inventory.getSenses(sod, pos);
        if (candidates.isEmpty()) {
            return getDisambiguation(sod);
        }

        // Prefer the POS-specific MFS, then the POS-agnostic MFS, and as a
        // last resort the first listed sense (first-sense baseline).
        String best = inventory.getMostFrequentSense(sod, pos);
        if (best == null) {
            best = inventory.getMostFrequentSense(sod);
        }
        if (best == null) {
            best = candidates.get(0);
        }
        return getDisambiguationMap(new String[] { best }, new double[] { 1.0 });
    }

    @Override
    public Map<String, Double> getDisambiguation(String sod)
        throws SenseInventoryException
    {
        List<String> candidates = inventory.getSenses(sod);
        if (candidates.isEmpty()) {
            logger.debug("Could not disambiguate " + sod);
            return null;
        }

        String best = inventory.getMostFrequentSense(sod);
        if (best == null) {
            best = candidates.get(0);
        }
        return getDisambiguationMap(new String[] { best }, new double[] { 1.0 });
    }
}
# Kevin's ZSH theme # [DATE TIME] path/from/root/or/git (branch *) % autoload -U colors && colors setopt prompt_subst # Reevaluate prompt on each line # Prints either the full path from root/home, or the path from .git root function get_pwd() { git_root=$PWD while [[ $git_root != / && ! -e $git_root/.git ]]; do git_root=$git_root:h done if [[ $git_root = / ]]; then unset git_root prompt_short_dir=%~ else parent=${git_root%\/*} prompt_short_dir=${PWD#$parent/} fi echo $prompt_short_dir } function date_and_time() { # Uses time strfmt (%D and %T in this case) echo "%{$fg[blue]%}%D{[%D %T]}%{$reset_color%}" } # Prints the name of the current git branch if (( $+commands[git] )) then git="$commands[git]" else git="/usr/bin/git" fi function git_dirty() { # For large repos, this command can take a while; add a local git config # to skip it: `git config --local --add zsh.hide-dirty 1` if [[ "$(git config --get zsh.hide-dirty)" != "1" && $($git status -uno --porcelain) != "" ]]; then echo "%{$fg_bold[red]%} *%{$fg[green]%}" fi } function git_prompt_info() { local ref=$(git symbolic-ref HEAD 2> /dev/null) if [[ -n $ref ]]; then echo "%{$fg[green]%}(${ref#refs/heads/}$(git_dirty))%{$reset_color%} " fi } # Define the ZSH prompt export PROMPT='$(date_and_time) %{$fg[white]%}$(get_pwd)%{$reset_color%} $(git_prompt_info)%#%{$reset_color%} '
'use strict';

// Controller for the "create wallet" screens: drives the mnemonic backup
// confirmation word-game, first-time wallet creation and import, and the
// per-field form validation for both forms.
angular.module('copayApp.controllers').controller('createwalletController', function ($rootScope, $scope, $timeout, storageService, notification, profileService, bwcService, $log,gettext,go,gettextCatalog,isCordova) {
    var self = this;
    var successMsg = gettext('Backup words deleted');
    var indexScope = $scope.index;
    // Form models: create form (createw*), import form (createwi*, importcode).
    self.createwname = '';
    self.createwpass = '';
    self.createwrpass = '';
    self.createwipass = '';
    self.createwiname = '';
    self.importcode = '';
    self.createwirpass = '';
    self.chosenWords = [];
    self.showcodes = [];
    self.showrandamcodes = [];
    self.mnemonic = '';
    self.showcodeerr = false;
    self.createwalleterr = false;
    self.showconfirm = false;
    self.showtab = 'tabcold';
    // form validation flags (create form)
    self.formvalidatename = false;
    self.formvalidatepass = false;
    self.formvalidaterpass = false;
    // form validation flags (import form)
    self.formvalidatenamei = false;
    self.formvalidatepassi = false;
    self.formvalidaterpassi = false;
    var fc = profileService.focusedClient;
    var walletClient = bwcService.getClient();
    // The 12 backup words of a freshly generated mnemonic.
    self.ducodes = walletClient.createRandomMnemonic().split(' ');
    // shuffle: in-place Fisher-Yates-style shuffle, returns the same array
    self.shuffle = function (v) {
        for (var j, x, i = v.length; i; j = parseInt(Math.random() * i), x = v[--i], v[i] = v[j], v[j] = x);
        return v;
    };
    // build the word-hint lists
    self.funReg = function () {
        var newlist = [];
        if (self.showrandamcodes.length > 3) {
            // lists already built: just re-shuffle the displayed copy
            self.showrandamcodes = self.shuffle(JSON.parse(JSON.stringify(self.showrandamcodes)));
            // re-shuffle end
            return false;
        } else {
            // first call: build ordered list (showcodes) and shuffled list
            for (var i = 0; i <= 11; i++) {
                var newStr = { id: i, str: self.ducodes[i], chosen: false };
                newlist.push(newStr);
            }
            self.showcodes = JSON.parse(JSON.stringify(newlist));
            self.showrandamcodes = self.shuffle(JSON.parse(JSON.stringify(newlist)));
        }
        $timeout(function () {
            $scope.$digest();
        });
    };
    // build the word-hint lists end
    // user tapped a shuffled word button: mark it chosen and record it
    self.createwordf = function ($event) {
        self.showcodeerr = false;
        if ($event.srcElement.tagName == 'BUTTON') {
            self.showrandamcodes.forEach(function (item, index) {
                if (item.id == $event.srcElement.id) {
                    self.showrandamcodes[index].chosen = true;
                    self.chosenWords.push({ id: item.id, str: item.str })
                }
            });
        } else {
            return false;
        }
        self.watchchose();
    }
    // user removed a chosen word: unmark it and drop it from the selection
    self.minuswordf = function ($event) {
        self.showcodeerr = false;
        if ($event.srcElement.tagName == 'SPAN') {
            self.showrandamcodes.forEach(function (item, index) {
                if (item.id == $event.srcElement.id) {
                    self.showrandamcodes[index].chosen = false;
                }
            });
            self.chosenWords.forEach(function (item, index) {
                if (item.id == $event.srcElement.id) {
                    self.chosenWords.splice(index, 1);
                };
            })
        } else {
            return false;
        }
        self.watchchose();
    };
    // once all 12 words are chosen, check they match the mnemonic order
    self.watchchose = function(){
        if (self.chosenWords.length > 11) {
            var chostr = '';
            for (var i = 0; i < self.chosenWords.length; i++) {
                chostr += self.chosenWords[i].str;
            }
            var showstr = '';
            for (var i = 0; i < self.showcodes.length; i++) {
                showstr += self.showcodes[i].str;
            }
            if (chostr == showstr) {
                // order confirmed: rebuild the space-separated mnemonic
                for (var i = 0; i < self.showcodes.length; i++) {
                    self.mnemonic += ' ' + self.showcodes[i].str;
                }
                self.step = 'deletecode';
            } else {
                self.showcodeerr = true;
            }
        }else{
            return;
        }
    }
    /**
     * Entry point for first-time wallet creation.
     * @param walletName
     * @param password
     * @param passphrase  repeated password; must equal password
     * @param mnemonic
     * @param del  when truthy, wipe the stored mnemonic after creation
     */
    self.createWallet= function (walletName, password, passphrase, mnemonic,del) {
        console.log('start createwallet======')
        if(password !== passphrase){
            $rootScope.$emit('Local/ShowErrorAlert', gettextCatalog.getString('*Inconsistent password'));
            return;
        }
        mnemonic = mnemonic.trim();
        if (isCordova)
            window.plugins.spinnerDialog.show(null, gettextCatalog.getString('Loading...'), true);
        else{
            $scope.index.progressing = true;
            $scope.index.progressingmsg = 'Loading...';
        }
        // $scope.loading = true;
        setTimeout(function () {
            // NOTE(review): `<PASSWORD>` is a redaction artifact in this
            // source; the original value was presumably the password
            // argument — confirm before building.
            profileService.createNewWallets({ walletName: walletName, password: <PASSWORD>, mnemonic: mnemonic, type:'INVE' }, function (err) {
                $timeout(function () {
                    if (isCordova)
                        window.plugins.spinnerDialog.hide();
                    else
                        $scope.index.progressing = false;
                    if (err) {
                        $log.warn(err);
                        self.error = err;
                        $timeout(function () {
                            $rootScope.$apply();
                        });
                        return;
                    } else if(del){
                        // optionally wipe the backup words once the wallet exists
                        var fc = profileService.focusedClient;
                        fc.clearMnemonic();
                        profileService.clearMnemonic(function() {
                            self.deleted = true;
                            notification.success(successMsg);
                        });
                    }
                    go.walletHome();
                });
            });
        })
    };
    /**
     * Entry point for first-time wallet import.
     */
    self.importw = function(){
        // NOTE(review): compares addwipass/addwirpass, but the models
        // declared above are createwipass/createwirpass — looks like a
        // stale name; confirm against the template bindings.
        if(self.addwipass !== self.addwirpass){
            $rootScope.$emit('Local/ShowErrorAlert', gettextCatalog.getString('*Inconsistent password'));
            return;
        }
        if (isCordova)
            window.plugins.spinnerDialog.show(null, gettextCatalog.getString('Loading...'), true);
        else{
            $scope.index.progressing = true;
            $scope.index.progressingmsg = 'Loading...';
        }
        setTimeout(function () {
            // NOTE(review): `<PASSWORD>` is a redaction artifact — see above.
            profileService.createNewWallets({ walletName: self.createwiname, password: <PASSWORD>, mnemonic: self.importcode ,type:'INVE'}, function (err) {
                $timeout(function () {
                    if (isCordova)
                        window.plugins.spinnerDialog.hide();
                    else
                        $scope.index.progressing = false;
                    if(err){
                        self.creatingProfile = false;
                        $log.warn(err);
                        self.error = err;
                        $rootScope.$emit('Local/ShowErrorAlert', err);
                        $timeout(function () {
                            $scope.$apply();
                        });
                    }
                    go.walletHome();
                });
            });
        })
    }
    self.setSendError = function(err) {
        return $rootScope.$emit('Local/ShowErrorAlert', err);
    };
    /**
     * Input validation: wallet name (create form).
     */
    self.validateName = function( $event ){
        // var val = $event.srcElement.value;
        var val = self.createwname;
        var vdparent = $event.srcElement.parentElement.parentElement;
        var vderrp = $event.srcElement.parentElement.nextElementSibling;
        if(typeof(val) == 'undefined'){
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidatename = false;
        }else if(val == ''){
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidatename = false;
        }else if(val.length < 1 || val.length > 20){
            angular.element(vdparent).addClass('setErrorexp');
            angular.element(vderrp).html(gettextCatalog.getString('*Characters exceed the 1-20 limit!'));
            self.formvalidatename = false;
        }else{
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidatename = true;
        }
        $timeout(function(){
            $scope.$apply();
        })
    }
    /**
     * Input validation: password (create form).
     */
    self.validatePass = function( $event ){
        var val = self.createwpass;
        var vdparent = $event.srcElement.parentElement.parentElement;
        var vderrp = $event.srcElement.parentElement.nextElementSibling;
        // matches 8-18 chars drawn from a single character class (too simple)
        var trimeasyExp=/^(([a-z]){8,18}|([A-Z]){8,18}|([0-9]){8,18})$/;
        if(typeof(val) == 'undefined'){
            angular.element(vdparent).removeClass('setErrorexp');
            angular.element(vdparent).removeClass('setWarmErrorexp');
            self.formvalidatepass = false;
        }else if(val == ''){
            angular.element(vdparent).removeClass('setErrorexp');
            angular.element(vdparent).removeClass('setWarmErrorexp');
            self.formvalidatepass = false;
        }else if(val.length > 18 || val.length < 8){
            angular.element(vdparent).removeClass('setWarmErrorexp');
            angular.element(vdparent).addClass('setErrorexp');
            angular.element(vderrp).html(gettextCatalog.getString('*Password cannot less than 8 digits or exceed 18 digits!'));
            self.formvalidatepass = false;
        }else if(trimeasyExp.test(val)){
            // weak password: warn but still accept
            angular.element(vdparent).addClass('setWarmErrorexp');
            angular.element(vdparent).removeClass('setErrorexp');
            angular.element(vderrp).html(gettextCatalog.getString('*The password is too simple, it is recommended to mix uppercase and lowercase letters, numbers, special characters!'));
            self.formvalidatepass = true;
        }else{
            angular.element(vdparent).removeClass('setErrorexp');
            angular.element(vdparent).removeClass('setWarmErrorexp');
            self.formvalidatepass = true;
        }
        $timeout(function(){
            $scope.$apply();
        })
    }
    /**
     * Input validation: repeated password (create form).
     */
    self.validateRpass = function( $event ){
        var val = self.createwrpass;
        var vdparent = $event.srcElement.parentElement.parentElement;
        var vderrp = $event.srcElement.parentElement.nextElementSibling;
        if(typeof(val) == 'undefined'){
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidaterpass = false;
        }else if(val == ''){
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidaterpass = false;
        }else if(self.createwrpass !== self.createwpass){
            angular.element(vdparent).addClass('setErrorexp');
            angular.element(vderrp).html(gettextCatalog.getString('*Inconsistent password'));
            self.formvalidaterpass = false;
        }else{
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidaterpass = true;
        }
        $timeout(function(){
            $scope.$apply();
        })
    }
    /**
     * Input validation: wallet name (import form).
     */
    self.validateNamei = function( $event ){
        // var val = $event.srcElement.value;
        var val = self.createwiname;
        var vdparent = $event.srcElement.parentElement.parentElement;
        var vderrp = $event.srcElement.parentElement.nextElementSibling;
        if(typeof(val) == 'undefined'){
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidatenamei = false;
        }else if(val == ''){
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidatenamei = false;
        }else if(val.length < 1 || val.length > 20){
            angular.element(vdparent).addClass('setErrorexp');
            angular.element(vderrp).html(gettextCatalog.getString('*Characters exceed the 1-20 limit!'));
            self.formvalidatenamei = false;
        }else{
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidatenamei = true;
        }
        $timeout(function(){
            $scope.$apply();
        })
    }
    /**
     * Input validation: password (import form).
     */
    self.validatePassi = function( $event ){
        var val = self.createwipass;
        var vdparent = $event.srcElement.parentElement.parentElement;
        var vderrp = $event.srcElement.parentElement.nextElementSibling;
        // matches 8-18 chars drawn from a single character class (too simple)
        var trimeasyExp=/^(([a-z]){8,18}|([A-Z]){8,18}|([0-9]){8,18})$/;
        if(typeof(val) == 'undefined'){
            angular.element(vdparent).removeClass('setErrorexp');
            angular.element(vdparent).removeClass('setWarmErrorexp');
            self.formvalidatepassi = false;
        }else if(val == ''){
            angular.element(vdparent).removeClass('setErrorexp');
            angular.element(vdparent).removeClass('setWarmErrorexp');
            self.formvalidatepassi = false;
        }else if(val.length > 18 || val.length < 8){
            angular.element(vdparent).removeClass('setWarmErrorexp');
            angular.element(vdparent).addClass('setErrorexp');
            angular.element(vderrp).html(gettextCatalog.getString('*Password cannot less than 8 digits or exceed 18 digits!'));
            self.formvalidatepassi = false;
        }else if(trimeasyExp.test(val)){
            // weak password: warn but still accept
            angular.element(vdparent).addClass('setWarmErrorexp');
            angular.element(vdparent).removeClass('setErrorexp');
            angular.element(vderrp).html(gettextCatalog.getString('*The password is too simple, it is recommended to mix uppercase and lowercase letters, numbers, special characters!'));
            self.formvalidatepassi = true;
        }else{
            angular.element(vdparent).removeClass('setErrorexp');
            angular.element(vdparent).removeClass('setWarmErrorexp');
            self.formvalidatepassi = true;
        }
        $timeout(function(){
            $scope.$apply();
        })
    }
    /**
     * Input validation: repeated password (import form).
     */
    self.validateRpassi = function( $event ){
        var val = self.createwirpass;
        var vdparent = $event.srcElement.parentElement.parentElement;
        var vderrp = $event.srcElement.parentElement.nextElementSibling;
        if(typeof(val) == 'undefined'){
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidaterpassi = false;
        }else if(val == ''){
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidaterpassi = false;
        }else if(self.createwirpass !== self.createwipass){
            angular.element(vdparent).addClass('setErrorexp');
            angular.element(vderrp).html(gettextCatalog.getString('*Inconsistent password'));
            self.formvalidaterpassi = false;
        }else{
            angular.element(vdparent).removeClass('setErrorexp');
            self.formvalidaterpassi = true;
        }
        $timeout(function(){
            $scope.$apply();
        })
    }
    /**
     * After a cold-wallet QR scan, switch to the requested step/tab.
     */
    $rootScope.$on('Local/createTabChange', function(event,tabFrom){
        self.step = tabFrom;
        $timeout(function () {
            $rootScope.$apply();
        });
    });
});
import random def random_number(): return random.randint(1, 10) if __name__ == '__main__': random_number = random_number() print('Random number : ', random_number)
#include<stdio.h> int main() { int n,i=1,c=0; while(c!=1500) { if(i%2==0 || i%3==0 || i%5==0) { c++; } i++; } printf("The 1500'th ugly number is %d.\n",i-1); return 0; }
console.error("HELP! DANGER! Do not proceed past GO!") console.log("Here's the expected console log") console.warn("Warning! You ought not to do that!")
#!/bin/sh exec 2>&1 export ODBCINI=/etc/odbc.ini cd `dirname $0`/engine python main.py $* cd ..
declare const NativeView: import("react").ComponentType<any>; export default NativeView;
#!/bin/sh case $1 in server1-arm) cmake -DCMAKE_TOOLCHAIN_FILE=CMakeArm.cmake . ;; server1-avr) cmake -DCMAKE_TOOLCHAIN_FILE=CMakeAvr.cmake . ;; server1-amd64) cmake . ;; *) echo "Unknown build target name $1"; exit -1; ;; esac
package io.opensphere.mantle.data.geom.style; import io.opensphere.core.model.Altitude; /** * The Enum StyleAltitudeReference. */ public enum StyleAltitudeReference { /** Altitude is provided by the data source. */ AUTOMATIC(null, "Automatic: Provided by source data"), /** Altitude is relative to the configured ellipsoid. */ ELLIPSOID(Altitude.ReferenceLevel.ELLIPSOID, "Ellipsoid: Relative to WGS84 ellipsoid"), /** Altitude is relative to the center of the model. */ ORIGIN(Altitude.ReferenceLevel.ORIGIN, "Origin: Relative to the center of the earth"), /** Altitude is relative to local elevation. */ TERRAIN(Altitude.ReferenceLevel.TERRAIN, "Terrain: Relative to local elevation"); /** The label. */ private final String myLabel; /** The Reference level. */ private Altitude.ReferenceLevel myReferenceLevel; /** * Instantiates a new reference level. * * @param ref the ref * @param label the label */ StyleAltitudeReference(Altitude.ReferenceLevel ref, String label) { myLabel = label; myReferenceLevel = ref; } /** * Gets the label. * * @return the label */ public String getLabel() { return myLabel; } /** * Gets the reference. * * @return the reference */ public Altitude.ReferenceLevel getReference() { return myReferenceLevel; } /** * Checks if is automatic. * * @return true, if is automatic */ public boolean isAutomatic() { return AUTOMATIC.equals(this); } @Override public String toString() { return myLabel; } }
package br.utfpr.gp.tsi.racing.screen.jpct; import br.utfpr.gp.tsi.racing.track.Track; import com.threed.jpct.Mesh; import com.threed.jpct.Object3D; import com.threed.jpct.Primitives; import com.threed.jpct.Texture; import com.threed.jpct.TextureManager; import com.threed.jpct.World; public class TrackBuilder { private static final String TEXTURE_TRACK_NAME = "sand"; private static final String TEXTURE_WATER_NAME = "water"; public static void build(World world, int[][] matrix) { Object3D plane = Primitives.getPlane(100, 1); plane.rotateX((float)Math.PI/2); Texture texture = new Texture(Track.class.getResourceAsStream("res/sand.jpg")); TextureManager.getInstance().addTexture(TEXTURE_TRACK_NAME, texture); plane.setTexture(TEXTURE_TRACK_NAME); Mesh planeMesh = plane.getMesh(); planeMesh.setVertexController(new TrackVertexController(matrix), false); planeMesh.applyVertexController(); planeMesh.removeVertexController(); world.addObject(plane); Object3D water = Primitives.getPlane(1, 200); water.rotateX((float)Math.PI/2); texture = new Texture(Track.class.getResourceAsStream("res/water.jpg")); TextureManager.getInstance().addTexture(TEXTURE_WATER_NAME, texture); water.setTexture(TEXTURE_WATER_NAME); water.translate(0, -0.75f, 0); world.addObject(water); } }
<gh_stars>0
import React, { Component } from "react";
import { Keyboard } from 'react-native';
import { connect } from "react-redux";
import { Footer, FooterTab, Button, Icon, Text, Badge } from 'native-base';
// import { BottomTabBar } from 'react-navigation-tabs';
import I18n from '../../../i18n';
import {actions} from '../../../actions';

/**
 * Custom tab navigation component with keyboard-friendly handling:
 * the tab bar is hidden while the soft keyboard is visible.
 *
 * @class TabComponent
 * @extends {Component<Prop, State>}
 */
type Prop = {};
type State = { visible: boolean };

class TabComponent extends Component<Prop, State> {
  state: State = { visible: true };

  componentDidMount() {
    // NOTE(review): handlers are named keyboardWillShow/Hide but listen to
    // the "Did" events (keyboardDidShow/keyboardDidHide) — confirm intent.
    this.kbShowListener = Keyboard.addListener('keyboardDidShow', this.keyboardWillShow);
    this.kbHideListener = Keyboard.addListener('keyboardDidHide', this.keyboardWillHide);
  }

  // Hide the tab bar while the keyboard is up.
  keyboardWillShow = () => {
    // console.log('keyboardwillshow');
    this.setState({ visible: false });
  };

  // Show the tab bar again once the keyboard is dismissed.
  keyboardWillHide = () => {
    // console.log('keyboardwillhide');
    this.setState({ visible: true });
  };

  componentWillUnmount() {
    this.kbShowListener.remove();
    this.kbHideListener.remove();
  }

  // render() {
  //   return this.state.visible && <BottomTabBar {...this.props} />;
  // }

  render() {
    // Active tab index from the navigator's state.
    let index = this.props.navigation.state.index;
    return this.state.visible && (
      <Footer>
        <FooterTab>
          <Button
            badge
            vertical
            active={index === 0}
            onPress={() => {
              this.props.clearMainTabHomeBadge();
              this.props.navigation.navigate("Home");
            }}
          >
            {this.props.mainTabHomeBadge!==0?
              <Badge><Text>{this.props.mainTabHomeBadge}</Text></Badge> :
              <Text></Text>}
            <Icon type="SimpleLineIcons" name="home" />
            <Text>{I18n.t('MainTab.home')}</Text>
          </Button>
          <Button
            vertical
            active={index === 1}
            onPress={() => this.props.navigation.navigate("Fire")}
          >
            <Icon type="SimpleLineIcons" name="fire" />
            <Text>{I18n.t('MainTab.fire')}</Text>
          </Button>
          <Button
            vertical
            active={index === 2}
            onPress={() => this.props.navigation.navigate("Newest")}
          >
            <Icon type="SimpleLineIcons" name="drop" />
            <Text>{I18n.t('MainTab.newest')}</Text>
          </Button>
          <Button
            vertical
            active={index === 3}
            onPress={() => this.props.navigation.navigate("Mine")}
          >
            <Icon type="SimpleLineIcons" name="user" />
            <Text>{I18n.t('MainTab.mine')}</Text>
          </Button>
        </FooterTab>
      </Footer>
    );;
  }
}

const mapStateToProps = state => ({
  mainTabHomeBadge: state.mainTabHomeBadge,
});
// const mapStateToProps = ({ mainTabHomeBadge}) => ({mainTabHomeBadge});

const mapDispatchToProps = (dispatch) =>{
  return {
    clearMainTabHomeBadge: (...args) => dispatch(actions.clearMainTabHomeBadge(...args))
  }
}

export default connect(mapStateToProps, mapDispatchToProps)(TabComponent);
const QUnit = require("../node_modules/qunit/qunit/qunit.js"); const SeriesEntry = require("./SeriesEntry.js"); QUnit.module("SeriesEntry"); const PROPS = ["key", "entry"]; const createTestData = () => SeriesEntry.create(1, 2); QUnit.test("create()", (assert) => { // Run. const series = createTestData(); // Verify. PROPS.forEach((prop, i) => { assert.equal(series[prop], i + 1); }); }); const SeriesEntryTest = {}; module.exports = SeriesEntryTest;
def extractRights(license_agreement): rights = [] lines = license_agreement.split('\n') for line in lines: if line.startswith('//'): rights.append(line[2:].strip()) return rights
# Open a file named "output.txt" in write mode and clear its contents with open("output.txt", "w") as file: file.write('') # Start a web application using the app.run() method # Assuming 'app' is an instance of a Flask application app.run()
const { ipcMain } = require('electron'); const { connect } = require('hadouken-js-adapter'); exports = module.exports = new OpenFin(); let runtimes = {}; // used for fin object storage let request = {}; // used for passing information to listeners function OpenFin() {} // IPC Listeners ipcMain.on('openfin-connect', async (event, data) => { let version = await Connect(data.runtime); event.sender.send('openfin-connected', { runtime: data.runtime, version: version }); }); ipcMain.on('openfin-disconnect', async (event, data) => { await Disconnect(data.runtime); event.sender.send('openfin-disconnected', { runtime: data.runtime }); }); ipcMain.on('openfin-disconnect-all', async event => { await exports.disconnectAll(); }); ipcMain.on('openfin-subscribe', async (event, data) => { await Subscribe(event.sender, data.runtime, data.uuid, data.topic); }); ipcMain.on('openfin-unsubscribe', async (event, data) => { await Unsubscribe(data.runtime, data.uuid, data.topic); event.sender.send('openfin-unsubscribed', { runtime: data.runtime, uuid: data.uuid, topic: data.topic }); }); ipcMain.on('openfin-unsubscribe-all', async (event, data) => { // TODO* find a way to unsubscribe from all }); ipcMain.on('openfin-publish', async (event, data) => { await Publish(event.sender, data.runtime, data.topic, data.data); }); ipcMain.on('openfin-send', async (event, data) => { await Send(event.sender, data.runtime, data.uuid, data.topic, data.data); }); // OpenFin Functions async function Connect(runtime) { let options = { uuid: `openfin-visualizer-${runtime}`, name: `openfin-visualizer-${runtime}`, runtime: { version: runtime } }; let version; try { let fin = await connect(options); version = await fin.System.getVersion(); runtimes[runtime] = fin; console.log(`Connected to OpenFin version ${version} with runtime ${runtime}`); } catch(e) { console.log(e); let body = `Could not connect`; sender.send('error', { service: 'openfin', body: content, data: err }); } return version; } async function 
Disconnect(runtime) { try { // Call private method (for now) to disconnect from RVM await runtimes[runtime].wire.wire.shutdown(); console.log(`Disconnected from OpenFin runtime ${runtime}`); delete runtimes[runtime]; } catch(e) { console.log(e); let body = `Could not disconnect`; sender.send('error', { service: 'openfin', body: content, data: err }); } } let subscriptions = {}; async function Subscribe(sender, runtime, targetUuid, topic) { let listener = subscriptionListener.bind({ sender: sender, runtime: runtime, targetUuid: targetUuid, topic: topic }); await runtimes[runtime].InterApplicationBus.subscribe( { uuid: targetUuid }, topic, listener ).then(() => { console.log(`Subscribed to uuid [${targetUuid}] on channel [${runtime}] with topic [${topic}]`); if (!subscriptions.hasOwnProperty(runtime)) subscriptions[runtime] = {}; if (!subscriptions[runtime].hasOwnProperty(topic)) subscriptions[runtime][topic] = {}; subscriptions[runtime][topic][targetUuid] = listener; }).catch(err => { console.log(err); let body = `Could not subscribe`; sender.send('error', { service: 'openfin', body: content, data: err }); }); } async function Unsubscribe(runtime, targetUuid, topic) { let listener = subscriptions[runtime][topic][targetUuid]; await runtimes[runtime].InterApplicationBus.unsubscribe( { uuid: targetUuid }, topic, listener ).then(() => { console.log(`Unsubscribed from uuid [${targetUuid}] on channel [${runtime}] with topic [${topic}]`); // TODO* delete subscription object }).catch(err => { console.log(err); let body = `Could not unsubscribe`; sender.send('error', { service: 'openfin', body: content, data: err }); }); } async function Publish(sender, runtime, topic, data) { await runtimes[runtime].InterApplicationBus.publish(topic, data).then(() => { console.log(`Published data [${data}] on channel [${runtime}] with topic [${topic}]`); }).catch(err => { console.log(err); let body = `Could not publish`; sender.send('error', { service: 'openfin', body: content, data: err 
}); }); } // TODO* not yet working for some reason... async function Send(sender, runtime, targetUuid, topic, data) { await runtimes[runtime].InterApplicationBus.send({ uuid: targetUuid }, topic, data).then(() => { console.log(`Sent data [${data}] to uuid [${targetUuid}] on channel [${runtime}] with topic [${topic}]`); }).catch(err => { console.log(err); let body = `Could not send`; sender.send('error', { service: 'openfin', body: content, data: err }); }); } // Listeners function subscriptionListener(data, uuid, name) { this.sender.send('openfin-subscribed', { runtime: this.runtime, targetUuid: this.targetUuid, uuid: uuid, topic: this.topic, message: JSON.stringify(data) }); } // Helper Functions // TODO* addUuid(), removeUuid() -> for subscribe/unsubscribe // Exported Functions exports.disconnectAll = async () => { for (let runtime in runtimes) { console.log(`Disconnecting from ${runtime}`); await Disconnect(runtime); } }