text
stringlengths
1
1.05M
#!/bin/sh set -e export HOST=`hostname -i` cmd=`eval echo "$@"` echo "Host: $HOST" echo "Running CMD: $cmd" { exec $cmd 2>&1 1>&3 3>&- | /usr/local/bin/logger -st=tcp -ost=false; } 3>&1 1>&2 | /usr/local/bin/logger -st=tcp
function setImageDataToCanvas(imageData, canvasId) { let canvas = document.getElementById(canvasId); canvas.width = imageData.width; canvas.height = imageData.height; let context = canvas.getContext("2d"); context.putImageData(imageData, 0, 0); } async function bng(imageFileName) { // TODO(#4): keep bng.wasm as a byte array of bng.js let bngFile = await fetch(imageFileName); let fileData = await bngFile.arrayBuffer(); // TODO(#5): allocate WebAssembly memory based on the file size let memory = new WebAssembly.Memory({initial: 300, maximum: 1000}); new Uint8Array(memory.buffer).set(new Uint8Array(fileData)); let bngProgram = await WebAssembly.instantiateStreaming( fetch("bng.wasm"), { js: { mem: memory, print: arg => console.log(arg) } }); let offset = bngProgram.instance.exports.bng_offset(); let size = bngProgram.instance.exports.bng_size(); let width = bngProgram.instance.exports.bng_width(); let height = bngProgram.instance.exports.bng_height(); let unprocessedImageData = new ImageData( new Uint8ClampedArray(memory.buffer).slice(offset, offset + size), width, height); setImageDataToCanvas(unprocessedImageData, "bng-unprocessed"); bngProgram.instance.exports.bng_process(); let processedImageData = new ImageData( new Uint8ClampedArray(memory.buffer).slice(offset, offset + size), width, height); setImageDataToCanvas(processedImageData, "bng-processed"); } bng("tsodinw.bng").catch((e) => console.log(e));
<reponame>boranseckin/ts-init declare class Readme { static fileContent(name: string): string; static generateFile(name: string): Promise<void>; } export default Readme;
#! /bin/sh timestamp_postfix=$(date +%Y_%m_%d_%H%M%S) n_iterations=100 #server='http://rhohub-mzverev-c7ca8de3.rhosync.com/api/application' server='default' # setup the benchmark directory structure ruby prepare_bench 'Rhoconnect SOURCE API benchmark' source_bench_$timestamp_postfix "1 2 5 10 15 20" # simulate various number of simultaneous clients for n_threads in 1 2 5 10 15 20 do result_filename=./bench_results/source_bench_$timestamp_postfix/raw_data/source_bench_result.test ruby bench start 'scripts/test_source_script.rb' 'rhoadmin' '' $server $result_filename $n_threads $n_iterations done # once benchmark is finished - process the results ruby ./lib/bench/bench_result_processor.rb ./bench_results/source_bench_$timestamp_postfix/raw_data ./bench_results/source_bench_$timestamp_postfix/images
<reponame>singleware/ui-listview "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); /** * Copyright (C) 2018 <NAME> * This source code is licensed under the MIT License as described in the file LICENSE. * * The proposal of this example is to show how to use the basic listview template. */ const Listview = require("../source"); const DOM = require("@singleware/jsx"); const listview = (DOM.create(Listview.Table.Template, null, DOM.create(Listview.Column.Template, { name: "a" }, DOM.create("b", { slot: "title" }, "Column A")), DOM.create(Listview.Column.Template, { name: "b" }, DOM.create("b", { slot: "title" }, "Column B")))); listview.addRow({ a: 'Value A', b: 'Value B' });
const production = { DSD: [ { id: 1, ds_name: '<NAME> North', ds_id: 'kp_north', }, { id: 2, ds_name: 'K<NAME> West', ds_id: 'kp_west', }, { id: 3, ds_name: '<NAME>', ds_id: 'kp', }, { id: 4, ds_name: '<NAME>', ds_id: 'ep', }, { id: 5, ds_name: '<NAME>', ds_id: 'et', }, { id: 6, ds_name: 'Man<NAME>', ds_id: 'mm_north', }, { id: 7, ds_name: 'Kattankudy', ds_id: 'kkdy', }, { id: 8, ds_name: '<NAME>', ds_id: 'mm_pattru', }, { id: 9, ds_name: 'Manmunai South‐West', ds_id: 'mm_sw', }, { id: 10, ds_name: '<NAME>', ds_id: 'pt_pattru', }, { id: 11, ds_name: 'Manmunai South & Eru<NAME>', ds_id: 'mm_south_ep', }, { id: 12, ds_name: 'Koralai Pattu Central', ds_id: 'kp_central', }, { id: 13, ds_name: 'Koralai Pattu South', ds_id: 'kp_south', }, { id: 14, ds_name: 'Manmunai West', ds_id: 'mm_west', } ], GND: [{ id: 1, name: 'Mankerny Central', gn_id_readable: '211', gn_id: '211', ds_name: 'Koralai Pattu North', ds_id: 'kp_north', }, { id: 2, name: 'Kayankerni', gn_id_readable: '211 A', gn_id: '211_A', ds_name: 'Koralai Pattu North', ds_id: 'kp_north', }, { id: 3, name: '<NAME>', gn_id_readable: '211 B', gn_id: '211_B', ds_name: 'Koralai Pattu North', ds_id: 'kp_north', }, { id: 4, name: 'Panichankerni', gn_id_readable: '211 C', gn_id: '211_C', ds_name: 'Koralai Pattu North', ds_id: 'kp_north', }, { id: 5, name: 'Vattavan', gn_id_readable: '211 D', gn_id: '211_D', ds_name: 'Koralai Pattu North', ds_id: 'kp_north', }, { id: 6, name: '<NAME>', gn_id_readable: '207', gn_id: '207', ds_name: 'Koralai Pattu West', ds_id: 'kp_west', }, { id: 7, name: 'Mancholai', gn_id_readable: '207 A', gn_id: '207_A', ds_name: 'Koralai Pattu West', ds_id: 'kp_west', }, { id: 8, name: '<NAME>', gn_id_readable: '207 B', gn_id: '207_B', ds_name: 'Koralai Pattu West', ds_id: 'kp_west', }, { id: 9, name: 'Oddamavady', gn_id_readable: '208', gn_id: '208', ds_name: 'Koralai Pattu West', ds_id: 'kp_west', }, { id: 10, name: '<NAME>', gn_id_readable: '210', gn_id: '210', ds_name: 'Koralai Pattu West', ds_id: 
'kp_west', }, { id: 11, name: '<NAME>', gn_id_readable: '202', gn_id: '202', ds_name: 'Koralai Pattu', ds_id: 'kp', }, { id: 12, name: 'Karuwakerny', gn_id_readable: '202 A', gn_id: '202_A', ds_name: 'Koralai Pattu', ds_id: 'kp', }, { id: 13, name: 'Karuwakerny', gn_id_readable: '202 A', gn_id: '202_A', ds_name: 'Koralai Pattu', ds_id: 'kp', }, { id: 14, name: 'Kinnayadi', gn_id_readable: '202 B', gn_id: '202_B', ds_name: 'Koralai Pattu', ds_id: 'kp', }, { id: 15, name: 'Sungankerni', gn_id_readable: '202 C', gn_id: '202_C', ds_name: 'Koralai Pattu', ds_id: 'kp', }, { id: 16, name: 'Periyapullumalai', gn_id_readable: '146', gn_id: '146', ds_name: '<NAME>', ds_id: 'ep', }, { id: 17, name: 'koppaveli', gn_id_readable: '146 A', gn_id: '146_A', ds_name: '<NAME>', ds_id: 'ep', }, { id: 18, name: 'Rugam', gn_id_readable: '185', gn_id: '185', ds_name: '<NAME>', ds_id: 'ep', }, { id: 19, name: 'Karadiyanaru', gn_id_readable: '185 A', gn_id: '185_A', ds_name: '<NAME>', ds_id: 'ep', }, { id: 20, name: 'Kithulwewa', gn_id_readable: '185 B', gn_id: '185_B', ds_name: '<NAME>', ds_id: 'ep', }, { id: 21, name: '<NAME>', gn_id_readable: '190', gn_id: '190', ds_name: '<NAME>', ds_id: 'et', }, { id: 22, name: '<NAME>', gn_id_readable: '190 A', gn_id: '190 A', ds_name: '<NAME>', ds_id: 'et', }, { id: 23, name: '<NAME>', gn_id_readable: '190 B', gn_id: '190 B', ds_name: '<NAME>', ds_id: 'et', }, { id: 24, name: '<NAME>', gn_id_readable: '191', gn_id: '191', ds_name: 'Eravur Town', ds_id: 'et', }, { id: 25, name: '<NAME>', gn_id_readable: '191 A', gn_id: '191_A', ds_name: '<NAME>', ds_id: 'et', }, { id: 26, name: '<NAME>', gn_id_readable: '168', gn_id: '168', ds_name: 'Manmunai North', ds_id: 'mm_north', }, { id: 27, name: 'Manchanthoduwai South', gn_id_readable: '168 A', gn_id: '168_A', ds_name: 'Manmunai North', ds_id: 'mm_north', }, { id: 28, name: 'Navatkudah', gn_id_readable: '168 B', gn_id: '168_B', ds_name: 'Manmunai North', ds_id: 'mm_north', }, { id: 29, name: '<NAME>', 
gn_id_readable: '168 C', gn_id: '168_C', ds_name: 'Manmunai North', ds_id: 'mm_north', }, { id: 29, name: '<NAME>', gn_id_readable: '168 D', gn_id: '168_D', ds_name: 'Manmunai North', ds_id: 'mm_north', }, { id: 30, name: 'Nochimunai', gn_id_readable: '171 A', gn_id: '171_A', ds_name: 'Manmunai North', ds_id: 'mm_north', }, { id: 31, name: '<NAME>', gn_id_readable: '164 A', gn_id: '164_A', ds_name: 'Kattankudy', ds_id: 'kkdy', }, { id: 32, name: '<NAME>', gn_id_readable: '165', gn_id: '165', ds_name: 'Kattankudy', ds_id: 'kkdy', }, { id: 33, name: '<NAME>', gn_id_readable: '167 C', gn_id: '167_C', ds_name: 'Kattankudy', ds_id: 'kkdy', }, { id: 34, name: '<NAME>', gn_id_readable: '162', gn_id: '162', ds_name: 'Kattankudy', ds_id: 'kkdy', }, { id: 35, name: '<NAME>', gn_id_readable: '166 A', gn_id: '166_A', ds_name: 'Kattankudy', ds_id: 'kkdy', }, { id: 36, name: '<NAME>', gn_id_readable: '156 D', gn_id: '156_D', ds_name: '<NAME>', ds_id: 'mm_pattru', }, { id: 37, name: '<NAME>', gn_id_readable: '155 B', gn_id: '155_B', ds_name: '<NAME>', ds_id: 'mm_pattru', }, { id: 38, name: 'Thalankuda', gn_id_readable: '153', gn_id: '153', ds_name: '<NAME>u', ds_id: 'mm_pattru', }, { id: 39, name: 'Mavilankanthurai', gn_id_readable: '154', gn_id: '154', ds_name: '<NAME>', ds_id: 'mm_pattru', }, { id: 40, name: 'Kovilkulam', gn_id_readable: '156 C', gn_id: '156_C', ds_name: '<NAME>', ds_id: 'mm_pattru', }, { id: 41, name: 'Ampilanthurai', gn_id_readable: '128', gn_id: '128', ds_name: 'Manmunai South‐West', ds_id: 'mm_sw', }, { id: 42, name: '<NAME>', gn_id_readable: '128 A', gn_id: '128_A', ds_name: 'Manmunai South‐West', ds_id: 'mm_sw', }, { id: 43, name: '<NAME>', gn_id_readable: '128 B', gn_id: '128_B', ds_name: 'Manmunai South‐West', ds_id: 'mm_sw', }, { id: 44, name: 'Katchenai', gn_id_readable: '128 C', gn_id: '128_C', ds_name: 'Manmunai South‐West', ds_id: 'mm_sw', }, { id: 45, name: 'Arasadithivu', gn_id_readable: '129', gn_id: '129', ds_name: 'Manmunai South‐West', ds_id: 
'mm_sw', }, { id: 46, name: '<NAME>', gn_id_readable: '96', gn_id: '96', ds_name: 'Poratheevu Pattu', ds_id: 'pp', }, { id: 47, name: '<NAME>', gn_id_readable: '96 A', gn_id: '96_A', ds_name: 'Por<NAME>', ds_id: 'pp', }, { id: 48, name: 'Palamunai', gn_id_readable: '97 A', gn_id: '97_A', ds_name: 'Porat<NAME>u', ds_id: 'pp', }, { id: 49, name: '<NAME>', gn_id_readable: '97 B', gn_id: '97_B', ds_name: 'Por<NAME>', ds_id: 'pp', }, { id: 50, name: 'Thambalawattha', gn_id_readable: '98', gn_id: '98', ds_name: 'Poratheevu Pattu', ds_id: 'pp', }, { id: 51, name: '<NAME>', gn_id_readable: '126 C', gn_id: '126_C', ds_name: 'Manmunai South & Eruvil Pattu', ds_id: 'ms_ep', }, { id: 52, name: '<NAME>', gn_id_readable: '126', gn_id: '126', ds_name: 'Manmunai South & Eruvil Pattu', ds_id: 'ms_ep', }, { id: 53, name: 'Mankadu', gn_id_readable: '123', gn_id: '123', ds_name: 'Manmunai South & Eruvil Pattu', ds_id: 'ms_ep', }, { id: 54, name: '<NAME>', gn_id_readable: '122', gn_id: '122', ds_name: 'Manmunai South & Eruvil Pattu', ds_id: 'ms_ep', }, { id: 55, name: '<NAME>', gn_id_readable: '119', gn_id: '119', ds_name: 'Manmunai South & Eruvil Pattu', ds_id: 'ms_ep', }, { id: 56, name: '<NAME>', gn_id_readable: '206', gn_id: '206', ds_name: 'Koralai Pattu Central', ds_id: 'kp_central', }, { id: 57, name: '<NAME>', gn_id_readable: '206 A', gn_id: '206_A', ds_name: 'Koralai Pattu Central', ds_id: 'kp_central', }, { id: 58, name: '<NAME>', gn_id_readable: '206 B', gn_id: '206_B', ds_name: 'Koralai Pattu Central', ds_id: 'kp_central', }, { id: 59, name: '<NAME>', gn_id_readable: '206 C', gn_id: '206_C', ds_name: 'Koralai Pattu Central', ds_id: 'kp_central', }, { id: 60, name: 'Mavadichenai', gn_id_readable: '208 A', gn_id: '208_A', ds_name: 'Koralai Pattu Central', ds_id: 'kp_central', }, { id: 61, name: 'Murakkottanchenai', gn_id_readable: '199', gn_id: '199', ds_name: 'Koralai Pattu South', ds_id: 'kp_south', }, { id: 62, name: 'Thevapuram', gn_id_readable: '199 A', gn_id: '199_A', 
ds_name: 'Koralai Pattu South', ds_id: 'kp_south', }, { id: 63, name: 'Santhiveli', gn_id_readable: '200', gn_id: '200', ds_name: 'Koralai Pattu South', ds_id: 'kp_south', }, { id: 64, name: 'Thihilivaddai', gn_id_readable: '200 A', gn_id: '200_A', ds_name: 'Koralai Pattu South', ds_id: 'kp_south', }, { id: 65, name: 'Palayadithona', gn_id_readable: '201', gn_id: '201', ds_name: 'Koralai Pattu South', ds_id: 'kp_south', }, { id: 66, name: 'Iluppadichenai', gn_id_readable: '148', gn_id: '148', ds_name: 'Manmunai West', ds_id: 'mm_west', }, { id: 67, name: 'Pavatkodichenai', gn_id_readable: '148 A', gn_id: '148_A', ds_name: 'Manmunai West', ds_id: 'mm_west', }, { id: 68, name: 'Panchenai', gn_id_readable: '148 B', gn_id: '148_B', ds_name: 'Manmunai West', ds_id: 'mm_west', }, { id: 69, name: 'Kanthinagar', gn_id_readable: '148 C', gn_id: '148_C', ds_name: 'Manmunai West', ds_id: 'mm_west', }, { id: 70, name: 'Kanchirankudah', gn_id_readable: '148 D', gn_id: '148_D', ds_name: 'Manmunai West', ds_id: 'mm_west', } ] }; export const config = production;
#!/usr/bin/env bash # Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # # This script is intended to be run inside a docker container to provide a # hermetic process. See release.sh for the expected invocation. RELEASE_URL_PREFIX="https://storage.googleapis.com/tensorflow/libtensorflow" TF_ECOSYSTEM_URL="https://github.com/tensorflow/ecosystem.git" # By default we deploy to both ossrh and bintray. These two # environment variables can be set to skip either repository. 
DEPLOY_BINTRAY="${DEPLOY_BINTRAY:-true}" DEPLOY_OSSRH="${DEPLOY_OSSRH:-true}" DEPLOY_LOCAL="${DEPLOY_LOCAL:-false}" PROTOC_RELEASE_URL="https://github.com/google/protobuf/releases/download/v3.5.1/protoc-3.5.1-linux-x86_64.zip" if [[ "${DEPLOY_BINTRAY}" != "true" && "${DEPLOY_OSSRH}" != "true" && "${DEPLOY_LOCAL}" != "true" ]]; then echo "Must deploy to at least one of Bintray, OSSRH or local" >&2 exit 2 fi set -ex clean() { # Clean up any existing artifacts # (though if run inside a clean docker container, there won't be any dirty # artifacts lying around) mvn -q clean rm -rf libtensorflow_jni/src libtensorflow_jni/target libtensorflow_jni_gpu/src libtensorflow_jni_gpu/target \ libtensorflow/src libtensorflow/target proto/src proto/target \ tensorflow-hadoop/src tensorflow-hadoop/target spark-tensorflow-connector/src spark-tensorflow-connector/target } update_version_in_pom() { mvn versions:set -DnewVersion="${TF_VERSION}" } # Fetch a property from pom files for a given profile. # Arguments: # profile - name of the selected profile. # property - name of the property to be retrieved. 
# Output: # Echo property value to stdout mvn_property() { local profile="$1" local prop="$2" mvn -q --non-recursive exec:exec -P "${profile}" \ -Dexec.executable='echo' \ -Dexec.args="\${${prop}}" } download_libtensorflow() { URL="${RELEASE_URL_PREFIX}/libtensorflow-src-${TF_VERSION}.jar" curl -L "${URL}" -o /tmp/src.jar cd "${DIR}/libtensorflow" jar -xvf /tmp/src.jar rm -rf META-INF cd "${DIR}" } download_libtensorflow_jni() { NATIVE_DIR="${DIR}/libtensorflow_jni/src/main/resources/org/tensorflow/native" mkdir -p "${NATIVE_DIR}" cd "${NATIVE_DIR}" mkdir linux-x86_64 mkdir windows-x86_64 mkdir darwin-x86_64 curl -L "${RELEASE_URL_PREFIX}/libtensorflow_jni-cpu-linux-x86_64-${TF_VERSION}.tar.gz" | tar -xvz -C linux-x86_64 curl -L "${RELEASE_URL_PREFIX}/libtensorflow_jni-cpu-darwin-x86_64-${TF_VERSION}.tar.gz" | tar -xvz -C darwin-x86_64 curl -L "${RELEASE_URL_PREFIX}/libtensorflow_jni-cpu-windows-x86_64-${TF_VERSION}.zip" -o /tmp/windows.zip # Get rid of symlinks, those are not supported by jar. As of tensorflow 1.14, # libtensorflow_jni.so expects to find # libtensorflow_framework.so.<majorVersion>. MAJOR_VERSION="${TF_VERSION/\.*/}" FRAMEWORK_SO="$(readlink -f linux-x86_64/libtensorflow_framework.so)" rm linux-x86_64/libtensorflow_framework.so rm "linux-x86_64/libtensorflow_framework.so.${MAJOR_VERSION}" mv "${FRAMEWORK_SO}" "linux-x86_64/libtensorflow_framework.so.${MAJOR_VERSION}" FRAMEWORK_DYLIB="$(readlink -f darwin-x86_64/libtensorflow_framework.dylib)" rm darwin-x86_64/libtensorflow_framework.dylib rm "darwin-x86_64/libtensorflow_framework.${MAJOR_VERSION}.dylib" mv "${FRAMEWORK_DYLIB}" "darwin-x86_64/libtensorflow_framework.${MAJOR_VERSION}.dylib" unzip /tmp/windows.zip -d windows-x86_64 rm -f /tmp/windows.zip # Updated timestamps seem to be required to get Maven to pick up the file. 
touch linux-x86_64/* touch darwin-x86_64/* touch windows-x86_64/* cd "${DIR}" } download_libtensorflow_jni_gpu() { NATIVE_DIR="${DIR}/libtensorflow_jni_gpu/src/main/resources/org/tensorflow/native" mkdir -p "${NATIVE_DIR}" cd "${NATIVE_DIR}" mkdir linux-x86_64 mkdir windows-x86_64 curl -L "${RELEASE_URL_PREFIX}/libtensorflow_jni-gpu-linux-x86_64-${TF_VERSION}.tar.gz" | tar -xvz -C linux-x86_64 curl -L "${RELEASE_URL_PREFIX}/libtensorflow_jni-gpu-windows-x86_64-${TF_VERSION}.zip" -o /tmp/windows.zip FRAMEWORK_SO="$(readlink -f linux-x86_64/libtensorflow_framework.so)" rm linux-x86_64/libtensorflow_framework.so rm "linux-x86_64/libtensorflow_framework.so.${MAJOR_VERSION}" mv "${FRAMEWORK_SO}" "linux-x86_64/libtensorflow_framework.so.${MAJOR_VERSION}" unzip /tmp/windows.zip -d windows-x86_64 rm -f /tmp/windows.zip # Updated timestamps seem to be required to get Maven to pick up the file. touch linux-x86_64/* touch windows-x86_64/* cd "${DIR}" } # Ideally, the .jar for generated Java code for TensorFlow protocol buffer files # would have been produced by bazel rules. However, protocol buffer library # support in bazel is in flux. Once # https://github.com/bazelbuild/bazel/issues/2626 has been resolved, perhaps # TensorFlow can move to something like # https://bazel.build/blog/2017/02/27/protocol-buffers.html # for generating C++, Java and Python code for protocol buffers. # # At that point, perhaps the libtensorflow build scripts # (tensorflow/tools/ci_build/builds/libtensorflow.sh) can build .jars for # generated code and this function would not need to download protoc to generate # code. generate_java_protos() { # Clean any previous attempts rm -rf "${DIR}/proto/tmp" # Download protoc curl -L "${PROTOC_RELEASE_URL}" -o "/tmp/protoc.zip" mkdir -p "${DIR}/proto/tmp/protoc" unzip -d "${DIR}/proto/tmp/protoc" "/tmp/protoc.zip" rm -f "/tmp/protoc.zip" # Download the release archive of TensorFlow protos. 
URL="${RELEASE_URL_PREFIX}/libtensorflow_proto-${TF_VERSION}.zip" curl -L "${URL}" -o /tmp/libtensorflow_proto.zip mkdir -p "${DIR}/proto/tmp/src" unzip -d "${DIR}/proto/tmp/src" "/tmp/libtensorflow_proto.zip" rm -f "/tmp/libtensorflow_proto.zip" # Generate Java code mkdir -p "${DIR}/proto/src/main/java" find "${DIR}/proto/tmp/src" -name "*.proto" | xargs \ ${DIR}/proto/tmp/protoc/bin/protoc \ --proto_path="${DIR}/proto/tmp/src" \ --java_out="${DIR}/proto/src/main/java" # Cleanup rm -rf "${DIR}/proto/tmp" } # Download the TensorFlow ecosystem source from git. # The pom files from this repo do not inherit from the parent pom so the maven version # is updated for each module. download_tf_ecosystem() { ECOSYSTEM_DIR="/tmp/tensorflow-ecosystem" HADOOP_DIR="${DIR}/tensorflow-hadoop" SPARK_DIR="${DIR}/spark-tensorflow-connector" # Clean any previous attempts rm -rf "${ECOSYSTEM_DIR}" # Clone the TensorFlow ecosystem project mkdir -p "${ECOSYSTEM_DIR}" cd "${ECOSYSTEM_DIR}" git clone "${TF_ECOSYSTEM_URL}" cd ecosystem # TF_VERSION is a semver string (<major>.<minor>.<patch>[-suffix]) # but the branch is just (r<major>.<minor>). RELEASE_BRANCH=$(echo "${TF_VERSION}" | sed -e 's/\([0-9]\+\.[0-9]\+\)\.[0-9]\+.*/\1/') git checkout r${RELEASE_BRANCH} # Copy the TensorFlow Hadoop source cp -r "${ECOSYSTEM_DIR}/ecosystem/hadoop/src" "${HADOOP_DIR}" cp "${ECOSYSTEM_DIR}/ecosystem/hadoop/pom.xml" "${HADOOP_DIR}" cd "${HADOOP_DIR}" update_version_in_pom # Copy the TensorFlow Spark connector source cp -r "${ECOSYSTEM_DIR}/ecosystem/spark/spark-tensorflow-connector/src" "${SPARK_DIR}" cp "${ECOSYSTEM_DIR}/ecosystem/spark/spark-tensorflow-connector/pom.xml" "${SPARK_DIR}" cd "${SPARK_DIR}" update_version_in_pom # Cleanup rm -rf "${ECOSYSTEM_DIR}" cd "${DIR}" } # Deploy artifacts using a specific profile. # Arguments: # profile - name of selected profile. 
# Outputs: # n/a deploy_profile() { local profile="$1" if [[ ${profile} == "local" ]]; then mvn install else mvn deploy -P"${profile}" fi } # If successfully built, try to deploy. # If successfully deployed, clean. # If deployment fails, debug with # ./release.sh ${TF_VERSION} ${SETTINGS_XML} bash # To get a shell to poke around the maven artifacts with. deploy_artifacts() { # Deploy artifacts to local maven repository if requested if [[ "${DEPLOY_LOCAL}" == "true" ]]; then deploy_profile 'local' fi # Deploy artifacts to ossrh if requested. if [[ "${DEPLOY_OSSRH}" == "true" ]]; then deploy_profile 'ossrh' fi # Deploy artifacts to bintray if requested. if [[ "${DEPLOY_BINTRAY}" == "true" ]]; then deploy_profile 'bintray' fi # Clean up when everything works clean } if [ -z "${TF_VERSION}" ] then echo "Must set the TF_VERSION environment variable" exit 1 fi DIR="$(realpath $(dirname $0))" cd "${DIR}" # The meat of the script. # Comment lines out appropriately if debugging/tinkering with the release # process. # gnupg2 is required for signing apt-get -qq update && apt-get -qqq install -y gnupg2 git clean update_version_in_pom download_libtensorflow download_libtensorflow_jni download_libtensorflow_jni_gpu generate_java_protos download_tf_ecosystem # Build the release artifacts mvn verify # Push artifacts to repository deploy_artifacts set +ex echo "Uploaded to the staging repository" echo "After validating the release: " if [[ "${DEPLOY_OSSRH}" == "true" ]]; then echo "* Login to https://oss.sonatype.org/#stagingRepositories" echo "* Find the 'org.tensorflow' staging release and click either 'Release' to release or 'Drop' to abort" fi if [[ "${DEPLOY_BINTRAY}" == "true" ]]; then echo "* Login to https://bintray.com/google/tensorflow/tensorflow" echo "* Either 'Publish' unpublished items to release, or 'Discard' to abort" fi
def search(array, value) array.each_with_index do |x, i| return i if x == value end end arr = [1, 3, 5, 4, 6] puts search(arr, 6)
#!/bin/sh against=HEAD # Redirect output to stderr. exec 1>&2 # Check changed files for an AWS keys KEY_ID=$(git diff --cached --name-only -z $against | xargs -0 cat | perl -nle'print $& if m{(?<![A-Z0-9])[A-Z0-9]{20}(?![A-Z0-9])}') KEY=$(git diff --cached --name-only -z $against | xargs -0 cat | perl -nle'print $& if m{(?<![A-Za-z0-9/+=])[A-Za-z0-9/+=]{40}(?![A-Za-z0-9/+=])}') # Regex for secret check can be found here: https://aws.amazon.com/blogs/security/a-safer-way-to-distribute-aws-credentials-to-ec2/ if [ "$KEY_ID" != "" -o "$KEY" != "" ]; then echo "Found patterns for AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY" echo "Please check your code and remove API keys." exit 1 fi # Normal exit exit 0
<reponame>picsolab/TRIBAL-analytics-interpreter import React, { useEffect, useRef } from 'react'; import * as d3 from 'd3'; import _ from 'lodash'; import styled, { css } from 'styled-components'; import { Grommet, Button, Tabs, Tab, Box } from 'grommet'; import index from '../index.css'; import { globalColors, SectionTitle, SubTitle, SubsectionTitle, SectionWrapper } from '../GlobalStyles'; const layout = { margin: { top: 20, right: 20, bottom: 20, left: 30 }, width: 300, height: 200, leftMargin: 30, innerHeight: 340 - 2, featureGroupPlot: { width: 300, height: 150, leftMargin: 30, groupPlot: { width: 90, height: 150, leftMargin: 30 }, diffPlot: { width: 120, height: 150, leftMargin: 30 } } }; const ResultViewWrapper = styled(SectionWrapper)` grid-area: r; background-color: whitesmoke; padding: 10px; margin: 0 10px; `; const OutputInstance = ({ model, idx }) => { return ( <div style={{ display: 'flex', fontWeight: 600, marginBottom: '5px' }}> <div style={{ height: '20px', marginRight: '5px' }}>{idx + 1}</div> <div style={{ display: 'flex', justifyContent: 'space-between', width: '100%' }} > <div>{'VFDC'}</div> <div>{'mode-' + model.mode}</div> <div>{Math.ceil(model.performance * 10000) / 100 + '%'}</div> </div> {/* <div>{'Output prob plot...'}</div> */} </div> ); }; const ResultView = ({ models }) => { return ( <ResultViewWrapper> <SubsectionTitle>Models</SubsectionTitle> {models.map((model, idx) => ( <OutputInstance model={model} idx={idx} /> ))} </ResultViewWrapper> ); }; export default ResultView;
<filename>src/components/utility/loaders/pool/poolLoader.stories.js<gh_stars>10-100 import React from 'react'; import { storiesOf } from '@storybook/react'; import PoolLoader from '.'; storiesOf('Utility/loaders/PoolList', module).add('default', () => { return ( <div style={{ width: '1000px', }} > <PoolLoader /> </div> ); });
<gh_stars>10-100 import { NextFunction, Response } from 'express'; import * as Joi from 'joi'; import BadRequest from '../exceptions/bad-request'; export const createValidator = (schema: Joi.Schema, key: string = 'body') => ( req: any, _: Response, next: NextFunction ) => { const { error } = schema.validate(req[key]); if (error) { throw new BadRequest(error.details[0].message); } next(); };
#!/usr/bin/env bash # Check if all required arguments are provided if [ "$#" -ne 3 ]; then echo "Usage: $0 <URL> <KEY> <LIMIT>" exit 1 fi URL=$1 KEY=$2 LIMIT=$3 # Fetch the JSON data from the specified URL using curl json_data=$(curl -s "${URL}/${KEY}.json") # Check if the curl command was successful if [ $? -ne 0 ]; then echo "Failed to fetch JSON data from the provided URL" exit 1 fi # Extract the values associated with the provided key using jq and limit the results result=$(echo "$json_data" | jq --arg values_key "$KEY" ".[$values_key] | .[:$LIMIT]") # Check if the jq command was successful if [ $? -ne 0 ]; then echo "Failed to process JSON data using jq" exit 1 fi # Output the processed results echo "$result"
#ifndef RSS_ITEM_H #define RSS_ITEM_H #include <stdlib.h> typedef struct { char* title; char* description; char* link; char* author; char* guid; char* pub_date; char** categories; size_t num_categories; } rss_item_t; rss_item_t* create_rss_item(); void rss_item_set_title(rss_item_t*, const char*); void rss_item_set_description(rss_item_t*, const char*); void rss_item_set_link(rss_item_t*, const char*); void rss_item_set_author(rss_item_t*, const char*); void rss_item_set_guid(rss_item_t*, const char*); void rss_item_set_pub_date(rss_item_t*, const char*); void rss_item_add_category(rss_item_t*, char*); void delete_rss_item(rss_item_t*); #endif
#ifndef SLIST_CONTEXT_H #define SLIST_CONTEXT_H #include <unordered_set> #include "slist_types.h" namespace slist { struct context { context(); void register_native(const std::string& name, procedure::callback func); node_ptr lookup_symbol(const std::string& name); void insert_symbol(const node_ptr& node); environment_ptr global_env; environment_ptr active_env; typedef std::unordered_map<std::string, node_ptr> symbols_map; symbols_map symbols; struct callstack_item { node_ptr node; procedure_ptr delayed_proc; node_ptr delayed_args; }; typedef std::vector<callstack_item> callstack_vector; callstack_vector callstack; void debug_dump_callstack(); }; } #endif
import { ApiProperty, ApiPropertyOptional, PickType, IntersectionType } from '@nestjs/swagger' import { IsNotEmpty, IsNumber, Min } from 'class-validator' import { IsOptional } from '@/decorator/common.decorator' import { Type, Transform } from 'class-transformer' import { toArrayNumber } from '@/utils/validate' import { NodeSourceResponse } from '@/module/source/source.interface' import { NodeUserResponse } from '@/module/user/user.interface' export class MinuteResponse { @ApiProperty({ description: '收录id', example: 1 }) id: number @ApiProperty({ description: '收录名称', example: 'Git常用命令' }) name: number @ApiProperty({ description: '跳转链接' }) url: string @ApiProperty({ description: 'npm链接' }) npm: string @ApiProperty({ description: 'github链接' }) github: string @ApiProperty({ description: '收录描述' }) description: string @ApiProperty({ description: '封面', example: 'https://oss.lisfes.cn/cloud/cover/2021-08/1628335320412.jpg' }) cover: number @ApiProperty({ description: '收录状态: 0.禁用 1.启用 2.删除', enum: [0, 1, 2], example: 1 }) status: number @ApiProperty({ description: '排序号', example: 1 }) order: number @ApiProperty({ description: '收录用户', type: [NodeUserResponse] }) user: NodeUserResponse @ApiProperty({ description: '标签列表', type: [NodeSourceResponse], example: [] }) source: NodeSourceResponse[] @ApiProperty({ description: '总数', example: 0 }) total: number @ApiProperty({ description: '分页', example: 1 }) page: number @ApiProperty({ description: '分页数量', example: 10 }) size: number } export class MinuteParameter { @ApiProperty({ description: '收录id', example: 1 }) @IsNotEmpty({ message: '收录id 必填' }) @Type(type => Number) id: number @ApiProperty({ description: '收录名称', example: 'ali-oss' }) @IsNotEmpty({ message: '收录名称 必填' }) name: string @ApiPropertyOptional({ description: '跳转链接' }) @IsOptional({}, { string: true, number: true }) url: string @ApiPropertyOptional({ description: 'npm链接' }) @IsOptional({}, { string: true, number: true }) npm: string @ApiPropertyOptional({ description: 
'github链接' }) @IsOptional({}, { string: true, number: true }) github: string @ApiPropertyOptional({ description: '收录描述' }) @IsOptional({}, { string: true, number: true }) description: string @ApiProperty({ description: '封面', example: 'https://oss.lisfes.cn/cloud/cover/2021-08/1628335320412.jpg' }) @IsNotEmpty({ message: '封面 必填' }) cover: string @ApiPropertyOptional({ description: '收录状态: 0.禁用 1.启用 2.删除', enum: [0, 1, 2], example: 1 }) @IsOptional({}, { string: true }) @Type(type => Number) status: number @ApiPropertyOptional({ description: '排序号', example: 1 }) @IsOptional({}, { string: true }) @Type(type => Number) order: number @ApiPropertyOptional({ description: '分类标签id', type: [Number], example: [] }) @IsOptional({}, { string: true, number: true }) @Transform(type => toArrayNumber(type), { toClassOnly: true }) @IsNumber({}, { each: true, message: '分类标签id 必须为Array<number>' }) source: number[] @ApiProperty({ description: '分页', example: 1 }) @IsNotEmpty({ message: 'page 必填' }) @IsNumber({}, { message: 'page必须是数字' }) @Min(1, { message: 'page不能小于1' }) @Type(type => Number) page: number @ApiProperty({ description: '分页数量', example: 10 }) @IsNotEmpty({ message: 'size 必填' }) @IsNumber({}, { message: 'size必须是数字' }) @Min(1, { message: 'size不能小于1' }) @Type(type => Number) size: number } /** * * * 创建收录-Parameter *************************************************************************************************/ export class NodeCreateMinuteParameter extends IntersectionType( PickType(MinuteParameter, ['name', 'cover', 'description', 'url', 'status']), PickType(MinuteParameter, ['order', 'source', 'npm', 'github']) ) {} /**创建收录-Response**/ export class NodeCreateMinuteResponse { @ApiProperty({ description: 'message', example: '创建成功' }) message: string } /** * * * 修改收录-Parameter *************************************************************************************************/ export class NodeUpdateMinuteParameter extends IntersectionType( PickType(MinuteParameter, ['id', 'name', 
'cover', 'description', 'url', 'status']), PickType(MinuteParameter, ['order', 'source', 'npm', 'github']) ) {} /**修改收录-Response**/ export class NodeUpdateMinuteResponse { @ApiProperty({ description: 'message', example: '修改成功' }) message: string } /** * * * 切换收录状态-Parameter *************************************************************************************************/ export class NodeMinuteCutoverParameter extends PickType(MinuteParameter, ['id']) {} /**切换收录状态-Response**/ export class NodeMinuteCutoverResponse { @ApiProperty({ description: 'message', example: '修改成功' }) message: string } /** * * * 收录信息-Parameter *************************************************************************************************/ export class NodeMinuteParameter extends PickType(MinuteParameter, ['id']) {} /**收录信息-Response**/ export class NodeMinuteResponse extends IntersectionType( PickType(MinuteResponse, ['id', 'name', 'cover', 'description', 'npm', 'github']), PickType(MinuteResponse, ['order', 'user', 'source', 'url', 'status']) ) {} /** * * * 收录列表-Parameter *************************************************************************************************/ export class NodeClientMinutesParameter extends PickType(MinuteParameter, ['page', 'size']) { @ApiPropertyOptional({ description: '收录名称' }) @IsOptional({}, { string: true, number: true }) name: string @ApiPropertyOptional({ description: '分类标签id' }) @IsOptional({}, { string: true, number: true }) @Type(type => Number) source: number } export class NodeMinutesParameter extends PickType(MinuteParameter, ['page', 'size', 'status']) { @ApiPropertyOptional({ description: '收录名称' }) @IsOptional({}, { string: true, number: true }) name: string @ApiPropertyOptional({ description: '分类标签id' }) @IsOptional({}, { string: true, number: true }) @Type(type => Number) source: number } /**收录列表-Response**/ export class NodeMinutesResponse extends PickType(MinuteResponse, ['page', 'size', 'total']) { @ApiProperty({ description: '收录列表', type: 
[MinuteResponse], example: [] }) list: MinuteResponse[] } /** * * * 删除收录-Parameter *************************************************************************************************/ export class NodeDeleteMinuteParameter extends PickType(MinuteParameter, ['id']) {} /**删除收录-Response**/ export class NodeDeleteMinuteResponse { @ApiProperty({ description: 'message', example: '删除成功' }) message: string }
from django.conf import settings
from django.db import migrations


def load_data(apps, schema_editor):
    """Seed initial permissions, site preference and a demo project/user.

    Each section is guarded by an existence check so re-running the
    migration against a populated database is a no-op.
    """
    # Imported lazily so the migration module loads without app registry issues.
    from accounts.models import User
    from apps.project.models import Project
    from apps.preference.models import Preference
    from apps.permission.models import Permission

    # Administrator permissions for every configured admin account.
    if not Permission.objects.exists():
        print("init netdisk permission")
        for admin_name in settings.INIT_ADMINS:
            Permission.objects.create(username=admin_name, role='ADMINISTRATOR')

    # A single global preference record with registration disabled.
    if not Preference.objects.exists():
        print("init netdisk preference")
        Preference.objects.create(name='netdisk', allow_register=False)

    # A demo project plus one regular user with access to it.
    if not Project.objects.exists():
        print("init netdisk project and users")
        demo_project = Project.objects.create(name='Demo', description='demo空间')
        demo_user = User.objects.create_user(
            'user0', settings.DEFAULT_PASSWORD, role='USER'
        )
        Permission.objects.create(
            username=demo_user.username,
            project_uuid=demo_project.uuid,
            role='USER',
        )


class Migration(migrations.Migration):
    dependencies = [("sites", "0004_init_superuser")]

    operations = [migrations.RunPython(load_data)]
require 'rails_helper'

# Specs for SlotAvailabilityValidation: checks requested visit slots against
# the NOMIS bookable-slots API and records per-slot errors.
RSpec.describe SlotAvailabilityValidation, type: :model do
  subject do
    described_class.new(prison: prison, requested_slots: requested_slots)
  end

  let(:prison) { build_stubbed(:prison) }

  # Three future slots, newest first (slot1 is the furthest in the future).
  let(:slot1) do
    date = 6.days.from_now.to_date
    ConcreteSlot.new(date.year, date.month, date.day, 14, 30, 15, 30)
  end
  let(:slot2) do
    date = 5.days.from_now.to_date
    ConcreteSlot.new(date.year, date.month, date.day, 14, 30, 15, 30)
  end
  let(:slot3) do
    date = 4.days.from_now.to_date
    ConcreteSlot.new(date.year, date.month, date.day, 14, 30, 15, 30)
  end
  let(:requested_slots) { [slot1, slot2, slot3] }

  # Fake API payload built from whichever slots a context declares available.
  let(:api_slots) do
    available_slots.map do |slot|
      { 'time' => slot.to_s, 'max_adults' => anything }
    end
  end

  describe 'when the NOMIS API is disabled' do
    before do
      allow(Nomis::Api).to receive(:enabled?).and_return(false)
      subject.valid?
    end

    it "doesn't add an error to the slots" do
      requested_slots.each do |slot|
        expect(subject.errors[slot.to_s]).to be_empty
      end
    end

    it 'is unknown result' do
      expect(subject).to be_unknown_result
    end
  end

  describe 'when the NOMIS API is enabled' do
    context 'when the api returns an error' do
      before do
        expect_any_instance_of(Nomis::Client).
          to receive(:get).and_raise(Nomis::APIError)
      end

      # API failures must not block the booking: no slot errors are added.
      it 'adds no errors for any slot' do
        expect(subject).to be_valid
        requested_slots.each do |slot|
          expect(subject.errors[slot.to_s]).to be_empty
        end
      end

      it 'is unknown result' do
        subject.valid?
        expect(subject).to be_unknown_result
      end
    end

    context 'when working correctly with valid slots' do
      before do
        allow(Nomis::Api).to receive(:enabled?).and_return(true)
        # One fetch spanning earliest (slot3) to latest (slot1) requested date.
        expect(Nomis::Api.instance).
          to receive(:fetch_bookable_slots).
          with(prison: prison,
               start_date: slot3.to_date,
               end_date: slot1.to_date).
          and_return(Nomis::SlotAvailability.new(slots: api_slots))
        subject.valid?
      end

      context 'with dates that are available' do
        let(:available_slots) { [slot1] }

        it 'does not add an error to the slot' do
          expect(subject.errors[slot1.to_s]).to be_blank
        end

        context 'with a #slot_error' do
          it 'returns nothing' do
            expect(subject.slot_error(slot1)).to be_nil
          end
        end

        it { is_expected.not_to be_unknown_result }
      end

      context 'when the slots that are unavailable' do
        let(:available_slots) { [slot1, slot3] }

        it 'adds an error to the missing slot' do
          expect(subject.errors[slot2.to_s]).
            to eq([described_class::SLOT_NOT_AVAILABLE])
        end

        context 'with a #slot_error' do
          it 'returns the slot not available message' do
            expect(subject.slot_error(slot2)).
              to eq(described_class::SLOT_NOT_AVAILABLE)
          end
        end

        it { is_expected.not_to be_unknown_result }
      end
    end

    context 'when the API enabled and with invalid dates' do
      before do
        allow(Nomis::Api).to receive(:enabled?).and_return(true)
      end

      context 'with all the dates in the past' do
        let(:slot1) do
          date = 1.day.ago.to_date
          ConcreteSlot.new(date.year, date.month, date.day, 14, 30, 15, 30)
        end
        let(:slot2) do
          date = 2.days.ago.to_date
          ConcreteSlot.new(date.year, date.month, date.day, 14, 30, 15, 30)
        end
        let(:slot3) do
          date = 3.days.ago.to_date
          ConcreteSlot.new(date.year, date.month, date.day, 14, 30, 15, 30)
        end

        # We return the dates as valid because it doesn't make sense to
        # communicate that the prisoner is unavailable just because the date is
        # in the past. Another validator will be responsible for that.
        it 'returns all the slots' do
          # All dates past => no API call should be made at all.
          expect_any_instance_of(Nomis::Api).not_to receive(:fetch_bookable_slots)
          subject.valid?
          requested_slots.each do |slot|
            expect(subject.slot_error(slot)).to be_nil
          end
        end

        it { is_expected.not_to be_unknown_result }
      end

      context 'with dates in the past or too far in the future' do
        # API only allows dates for following day the earliest
        let(:slot1) do
          date = Date.current
          ConcreteSlot.new(date.year, date.month, date.day, 14, 30, 15, 30)
        end
        let(:slot2) do
          date = 61.days.from_now.to_date
          ConcreteSlot.new(date.year, date.month, date.day, 14, 30, 15, 30)
        end

        before do
          # Only slot3 survives the filter, so the fetch window collapses to it.
          expect_any_instance_of(Nomis::Api).
            to receive(:fetch_bookable_slots).
            with(prison: prison,
                 start_date: slot3.to_date,
                 end_date: slot3.to_date).
            and_return(Nomis::SlotAvailability.new(slots: []))
        end

        it 'filters out invalid dates' do
          subject.valid?
          expect(subject.slot_error(slot1)).to be_nil
          expect(subject.slot_error(slot2)).to be_nil
          expect(subject.slot_error(slot3)).
            to eq(described_class::SLOT_NOT_AVAILABLE)
        end

        it { is_expected.not_to be_unknown_result }
      end
    end
  end
end
var assign = require('lodash.assign');
var pick = require('lodash.pick');
var utilities = require('../utilities');

/**
 * Hyperlink value object.
 * Validates the given attrs and copies only the whitelisted attributes
 * (this.attrList, defined below via defineProps) onto the instance.
 * @param {Object} parent - owning object; presumably a worksheet with a
 *   `rels` relationship collection (see the `id` getter) — confirm at callers.
 * @param {Object} attrs - must contain `ref` and at least one of `url`/`location`.
 */
function Hyperlink(parent, attrs) {
  this.parent = parent;
  this.validate(attrs);
  assign(this, pick(attrs, this.attrList));
}

module.exports = Hyperlink;

assign(Hyperlink.prototype, {
  /**
   * Throws a TypeError listing every missing/invalid attr at once,
   * rather than failing on the first problem.
   */
  validate: function (attrs) {
    if (!utilities.isObject(attrs))
      throw new TypeError('Hyperlink must be instantiated with an attrs object');

    var msgList = [];
    if (!attrs.ref) msgList.push('Missing attrs.ref.');
    if (!attrs.url && !attrs.location)
      msgList.push('Missing attrs.url and attrs.location. One must be specified.');

    if (msgList.length) {
      // Prefix a header line, then number each individual error.
      msgList.unshift('Hyperlink instantiation attrs errors:');
      throw new TypeError(msgList.map(function (msg, i) {
        return i ? (i + ': ' + msg) : msg;
      }).join('\n'));
    }
  }
});

// Property semantics (constant / unsettable / writeOnce) are implemented by
// the project-local utilities.defineProps helper — see ../utilities.
Object.defineProperties(Hyperlink.prototype, utilities.defineProps({
  constant: {
    type: 'hyperlink',
    targetMode: 'External',
    attrList: ['location', 'ref', 'tooltip', 'url']
  },
  unsettable: {
    // Relationship id resolved through the parent's rels collection; 0 when detached.
    id: {
      getter: function() {
        return this.parent && this.parent.rels ? this.parent.rels.getId(this) : 0;
      }
    },
    path: {
      getter: function () { return this.url; }
    }
  },
  writeOnce: { location: 1, ref: 1, tooltip: 1, url: 1 }
}));
from pathlib import Path
import os, sys

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<KEY>'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['*']

# Make the apps/ directory importable without a package prefix.
sys.path.append( os.path.join(BASE_DIR, "apps") )

# Default Django apps
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

# Third-party apps
INSTALLED_APPS += [
    'automated_logging',
    'debug_toolbar',
    'bootstrapform',
]

# Project (internal) apps
INSTALLED_APPS += [
    'home',
    'usuarios',
    'receitas',
    'vinculos'
]

# Custom user model lives in the 'usuarios' app.
AUTH_USER_MODEL = "usuarios.Usuario"

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    "debug_toolbar.middleware.DebugToolbarMiddleware",
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'automated_logging.middleware.AutomatedLoggingMiddleware'
]

ROOT_URLCONF = 'project.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                # Project-wide context processor (location: home app's base view)
                'home.views.base'
            ],
            'libraries':{
                'templatetag': 'usuarios.templatetags.usuario_extras',
            }
        },
    },
]

WSGI_APPLICATION = 'project.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Hosts allowed to see the debug toolbar.
INTERNAL_IPS = [
    "127.0.0.1",
]

# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'America/Sao_Paulo'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'static')
]
# STATIC_ROOT = os.path.join(BASE_DIR, 'static')

LOGIN_URL = "login"
LOGIN_REDIRECT_URL = "ViewHome"

DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '{levelname} {asctime} {module} {process:d} {thread:d} {message}',
            'style': '{',
        },
        'simple': {
            'format': '{levelname} {message}',
            'style': '{',
        },
    },
    'handlers': {
        'file': {
            'level': 'WARNING',
            'class': 'logging.FileHandler',
            'filename': BASE_DIR / 'warning.log',
        },
        'db': {
            'level': 'INFO',
            'class': 'automated_logging.handlers.DatabaseHandler',
        },
        'console': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
            'formatter': 'simple'
        },
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
        }
    },
    'loggers': {
        'automated_logging': {
            'level': 'INFO',
            'handlers': ['db'],
            'propagate': True,
        },
        'django': {
            'level': 'INFO',
            'handlers': ['console', 'db'],
            'propagate': True,
        },
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': False,
        },
        # notice the blank '': this is the root logger — usually you would put
        # built-in loggers like django or root here based on your needs
        '': {
            # 'file' handler defined above: writes WARNING+ to warning.log
            'handlers': ['file'],
            'level': 'WARNING',
            'propagate': True,
        },
    }
}

# Configuration for the django-automated-logging package.
AUTOMATED_LOGGING = {
    "globals": {
        "exclude": {
            "applications": [
                "plain:contenttypes",
                "plain:admin",
                "plain:basehttp",
                "glob:session*",
                "plain:migrations",
            ]
        }
    },
    "model": {
        "detailed_message": True,
        "exclude": {"applications": [], "fields": [], "models": [], "unknown": False},
        "loglevel": 20,
        "mask": [],
        "max_age": None,
        "performance": True,
        "snapshot": True,
        "user_mirror": True,
    },
    "modules": ["request", "unspecified", "model"],
    "request": {
        "data": {
            "content_types": ["application/json"],
            "enabled": [],
            "ignore": [],
            "mask": ["password"],
            "query": False,
        },
        "exclude": {
            "applications": [],
            "methods": ["GET"],
            "status": [200],
            "unknown": False,
        },
        "ip": True,
        "loglevel": 20,
        "max_age": None,
    },
    "unspecified": {
        "exclude": {"applications": [], "files": [], "unknown": False},
        "loglevel": 20,
        "max_age": None,
    },
}

# Capitalize each whitespace-separated word in a name
# (e.g. 'john doe' -> 'John Doe').
def capitalize_name(text):
    return ' '.join(letter.capitalize() for letter in text.split())

# Group names used for role-based access control.
GPMedico = "Medico"
GPPaciente = "Paciente"
from bs4 import BeautifulSoup
from typing import List


def extract_breadcrumbs(html: str) -> List[str]:
    """Return the text of every ``<li class="breadcrumb-item">`` element
    in ``html``, in document order (whitespace is preserved as-is)."""
    parsed = BeautifulSoup(html, 'html.parser')
    texts: List[str] = []
    for node in parsed.find_all('li', class_='breadcrumb-item'):
        texts.append(node.get_text())
    return texts
# # RumoText module version. # module RumoText VERSION = '0.5.0' end
'use strict'

const Policy = require('trails-policy')
const Joi = require('joi')
const _ = require('lodash')

/**
 * @module JoiPolicy
 * @description joi policy
 *
 * Looks up the Joi schema configured for the current route in
 * config/validators.js ("Controller.method" -> "SchemaModule.schemaName")
 * and validates req.body against it. On failure either responds 400, or —
 * when config.byPassError is set — strips the invalid fields into
 * req.newBody and records them on req.validationErrors, then continues.
 */
module.exports = class JoiPolicy extends Policy {
  /**
   * A policy to validate joi schema
   * @param req
   * @param res
   * @param next
   * @returns {*|void|JSON|Promise<any>}
   */
  validate(req, res, next) {
    const Schemas = this.app.api.schemas
    let { routes, validators } = this.app.config
    try {
      if(!validators || !Object.keys(validators).length)
        throw new Error('The config/validators.js can\'t be empty')
      // Resolve the route's "Controller.method" handler from path + HTTP verb.
      let reqMethod = Object.keys(req.route.methods)[0].toUpperCase()
      let { handler } = routes.find(r=>r.path==req.route.path && r.method==reqMethod)
      let [ controller, method ] = handler.split('.')
      let validation = validators[controller][method]
      let config = validators.config || {}
      if(!validation)
        throw new Error('please provide validation in config/validators.js')
      // validation is "SchemaModule.schemaName"; the schema is a factory method.
      let [module, schema] = validation.split('.')
      let Validator = new Schemas[module];
      if (!Validator || !Validator[schema]) {
        throw new Error("Validator not found for this route, Please check your routeId (id)")
      }
      Joi.validate(req.body, Validator[schema](), (err, value) => {
        if(err) {
          if (config.byPassError) {
            // Bypass mode: don't reject. Collect the failing paths, group
            // them by parent object, and delete the offending properties
            // from a copy of the body (req.newBody).
            if (!_.isEmpty(err) && !_.isEmpty(err.details)) {
              // Parent path of each error, as an array of keys
              // (last segment popped off below).
              let allPaths = err.details.map(e => e.path.split('.'))
              let uniquePaths = [], validationErrors = []
              // Deduplicate the parent paths (deep array equality).
              allPaths.map((p, pindex) => {
                let pArr = p
                pArr.pop()
                if (!pindex) {
                  uniquePaths.push(pArr)
                }
                else {
                  let diff = uniquePaths.findIndex(u => {
                    return _.isEmpty(_.differenceWith(pArr, u, _.isEqual))
                  })
                  if (diff === -1)
                    uniquePaths.push(pArr)
                }
              })
              // One entry per unique parent path; keys filled in next.
              uniquePaths.map(u => {
                validationErrors.push({ path: u.join('.'), paths: u, keys: [] })
              })
              // Attach each error's failing key to its parent-path entry.
              err.details.map(d => {
                d.paths = d.path.split('.')
                d.paths.pop()
                let index = validationErrors.findIndex(u => {
                  return _.isEmpty(_.differenceWith(d.paths, u.paths, _.isEqual))
                })
                if (index > -1) {
                  validationErrors[index].keys = validationErrors[index].keys.concat([d.context.key])
                }
              })
              req.validationErrors = validationErrors
              // Shallow copy of the body, then delete each failing property
              // by walking down to its parent object.
              // NOTE(review): Object.assign is shallow, so the deletes below
              // mutate nested objects shared with req.body — confirm intended.
              let newBody = Object.assign({}, req.body)
              validationErrors.map(d => {
                let keys = d.paths
                let prop = keys.pop();
                let parent = keys.reduce((n, key) => n[key], newBody);
                delete parent[prop];
              })
              req.newBody = newBody
            }
          }
          else {
            // Strict mode: reject the request with the first useful message.
            if (err.details && err.details[0].type == 'object.allowUnknown') {
              return res.status(400).json({flag: false, data: {}, message: err.details[0].message, code: 400});
            }
            else {
              return res.json({flag: false, data: {}, message: err.message, code: 400})
            }
          }
        }
        // Valid body, or bypassed errors: continue the middleware chain.
        next();
      })
    }
    catch (e) {
      // Configuration/lookup errors end the request with a generic 500 payload.
      return res.json({ flag: false, data: {}, message: e.message, code: 500 })
    }
  }
}
import { AxiosPromise } from '../types/response' import { AxiosRequestConfig } from '../types/request' import { processURL } from '../helpers/url' import { processRequestData } from '../helpers/data' import { processHeaders } from '../helpers/headers' import xhr from '../xhr' /** * 发送请求前处理请求配置 * @param config 请求配置 */ function processConfig(config: AxiosRequestConfig): void { let { url, data, params, headers = {} } = config config.url = processURL(url!, params) config.headers = processHeaders(headers, data) config.data = processRequestData(data) } function dispatchFetch(config: AxiosRequestConfig): AxiosPromise { // 发送请求前处理请求配置 processConfig(config) // 发送请求 return xhr(config) } export default dispatchFetch
# End-to-end LTR pipeline: split clicks, build impressions, train an XGB
# model, upload it to the LTR store, and evaluate on test queries.

usage()
{
  # FIX: the -t default printed here said 100000, but the script actually
  # defaults SPLIT_TEST_ROWS to 1000000 (below); the text now matches the code.
  echo "Usage: $0 [-s /workspace/search_with_machine_learning_course] [-c {ctr, heuristic, binary}] [ -w week2 ] [ -d ] [ -a /path/to/bbuy/products/train.csv ] [-t num rows for the test split, default 1000000] [-e num test queries to run. Default 200] [-r num rows for the training split, default 1000000] [-y] [-o output dir]"
  exit 2
}

# Defaults (overridable via flags below).
SOURCE_DIR="/workspace/search_with_machine_learning_course"
WEEK="week2"
OUTPUT_DIR="/workspace/ltr_output"
ALL_CLICKS_FILE="/workspace/datasets/train.csv"
SPLIT_TRAIN_ROWS=1000000
SPLIT_TEST_ROWS=1000000
NUM_TEST_QUERIES=200 # the number of test queries to run
CLICK_MODEL="heuristic"
SYNTHESIZE=""
DOWNSAMPLE=""

while getopts ':s:c:e:w:o:a:r:t:ydh' c
do
  case $c in
    a) ALL_CLICKS_FILE=$OPTARG ;;
    c) CLICK_MODEL=$OPTARG ;;
    d) DOWNSAMPLE="--downsample" ;;
    e) NUM_TEST_QUERIES=$OPTARG ;;
    o) OUTPUT_DIR=$OPTARG ;;
    r) SPLIT_TRAIN_ROWS=$OPTARG ;;
    t) SPLIT_TEST_ROWS=$OPTARG ;;
    s) SOURCE_DIR=$OPTARG ;;
    w) WEEK=$OPTARG ;;
    y) SYNTHESIZE="--synthesize" ;;
    h) usage ;;
    [?]) usage ;;
  esac
done
shift $((OPTIND -1))

# NOTE: You must run this with the appropriate Pyenv VENV active.
# $SOURCE_DIR -- path to base of source code, eg. search_with_machine_learning/src/main/python
# $WEEK -- what week to look into
# $OUTPUT_DIR -- directory where to write output to
# $ALL_CLICKS_FILE -- The set of all clicks
cd $SOURCE_DIR
mkdir -p $OUTPUT_DIR

# Record the run parameters for reproducibility.
echo "ltr-end-to-end.sh $SOURCE_DIR $WEEK $OUTPUT_DIR $ALL_CLICKS_FILE $SYNTHESIZE $CLICK_MODEL run at " `date` > $OUTPUT_DIR/meta.txt

set -x

# Create the LTR feature store in the search engine.
python3 $WEEK/utilities/build_ltr.py --create_ltr_store
if [ $? -ne 0 ] ; then
  exit 2
fi

# Upload the feature set definition.
python3 $WEEK/utilities/build_ltr.py -f $WEEK/conf/ltr_featureset.json --upload_featureset
if [ $? -ne 0 ] ; then
  exit 2
fi

echo "Creating training and test data sets from impressions by splitting on dates"
# Split the impressions into training and test
python3 $WEEK/utilities/build_ltr.py --output_dir "$OUTPUT_DIR" --split_input "$ALL_CLICKS_FILE" --split_train_rows $SPLIT_TRAIN_ROWS --split_test_rows $SPLIT_TEST_ROWS
if [ $? -ne 0 ] ; then
  exit 2
fi

# Create our impressions (positive/negative) data set, e.g. all sessions (with LTR features added in already)
echo "Creating impressions data set"
# outputs to $OUTPUT_DIR/impressions.csv by default
python3 $WEEK/utilities/build_ltr.py --generate_impressions --output_dir "$OUTPUT_DIR" --train_file "$OUTPUT_DIR/train.csv" $SYNTHESIZE
if [ $? -ne 0 ] ; then
  exit 2
fi

# Create the actual training set from the impressions set
python3 $WEEK/utilities/build_ltr.py --ltr_terms_field sku --output_dir "$OUTPUT_DIR" --create_xgb_training -f $WEEK/conf/ltr_featureset.json --click_model $CLICK_MODEL $DOWNSAMPLE
if [ $? -ne 0 ] ; then
  exit 2
fi

# Given a training set in SVMRank format, train an XGB model
python3 $WEEK/utilities/build_ltr.py --output_dir "$OUTPUT_DIR" -x "$OUTPUT_DIR/training.xgb" --xgb_conf $WEEK/conf/xgb-conf.json
if [ $? -ne 0 ] ; then
  exit 2
fi

# Given an XGB model, upload it to the LTR store
python3 $WEEK/utilities/build_ltr.py --upload_ltr_model --xgb_model "$OUTPUT_DIR/xgb_model.model"
if [ $? -ne 0 ] ; then
  exit 2
fi

# Dump out some useful plots for visualizing our model
python3 $WEEK/utilities/build_ltr.py --xgb_plot --output_dir "$OUTPUT_DIR"
if [ $? -ne 0 ] ; then
  exit 2
fi

# Run our test queries through
python3 $WEEK/utilities/build_ltr.py --xgb_test "$OUTPUT_DIR/test.csv" --train_file "$OUTPUT_DIR/train.csv" --output_dir "$OUTPUT_DIR" --xgb_test_num_queries $NUM_TEST_QUERIES
if [ $? -ne 0 ] ; then
  exit 2
fi

# Analyze the results
python3 $WEEK/utilities/build_ltr.py --analyze --output_dir "$OUTPUT_DIR"
# A simple Python program to add two numbers

a = 1.5
b = 6.3

# Use more descriptive variable names
num1 = a
num2 = b

# BUG FIX: the original did `sum = sum([a, b])`, which shadowed the
# built-in sum() for the rest of the module. Add directly and store the
# result under a non-builtin name instead.
total = num1 + num2

# Use f-strings to reduce the amount of code
print(f'The sum of {a} and {b} is {total}')
/* * Copyright (c) 2015, EURECOM (www.eurecom.fr) * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * The views and conclusions contained in the software and documentation are those * of the authors and should not be interpreted as representing official policies, * either expressed or implied, of the FreeBSD Project. 
*/

/***** ANSI includes *****/
/* TRD : _GNU_SOURCE is required by sched.h for pthread_attr_setaffinity_np, CPU_ZERO and CPU_SET
         however it has to be defined very early as even the ANSI headers pull in stuff
         which uses _GNU_SOURCE and which I think must be protected against multiple inclusion,
         which basically means if you set it too late, it's not seen, because the headers
         have already been parsed with _GNU_SOURCE unset
*/
#define _GNU_SOURCE

#include <assert.h>
#include <math.h>
#include <stdio.h>
#include <stdarg.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

/***** internal includes *****/
#include "abstraction.h"

/***** external includes *****/
#include "liblfds611.h"

/***** defines *****/
/* NOTE(review): `and`/`or` duplicate the <iso646.h> alternative tokens;
   kept as-is to avoid touching every use site. */
#define and &&
#define or  ||

#define RAISED  1
#define LOWERED 0

#define NO_FLAGS 0x0

/***** enums *****/
/* top-level mode selected from the command line */
enum lfds611_test_operation
{
  UNKNOWN,
  HELP,
  TEST,
  BENCHMARK
};

/***** structs *****/
#include "structures.h"

/***** prototypes *****/
int main( int argc, char **argv );

/* result-reporting helpers */
void internal_display_test_name( char *test_name );
void internal_display_test_result( unsigned int number_name_dvs_pairs, ... );
void internal_display_lfds611_data_structure_validity( enum lfds611_data_structure_validity dvs );

/* benchmarks: one driver plus its worker-thread entry point per structure */
void benchmark_lfds611_freelist( void );
thread_return_t CALLING_CONVENTION benchmark_lfds611_freelist_thread_pop_and_push( void *freelist_benchmark );

void benchmark_lfds611_queue( void );
thread_return_t CALLING_CONVENTION benchmark_lfds611_queue_thread_delfds611_queue_and_enqueue( void *queue_benchmark );

void benchmark_lfds611_ringbuffer( void );
thread_return_t CALLING_CONVENTION benchmark_lfds611_ringbuffer_thread_write_and_read( void *ringbuffer_benchmark );

void benchmark_lfds611_stack( void );
thread_return_t CALLING_CONVENTION benchmark_lfds611_stack_thread_push_and_pop( void *stack_benchmark );

/* abstraction-layer tests (increment, CAS, DCAS) */
void test_lfds611_abstraction( void );

void abstraction_test_increment( void );
thread_return_t CALLING_CONVENTION abstraction_test_internal_thread_increment( void *shared_counter );
thread_return_t CALLING_CONVENTION abstraction_test_internal_thread_atomic_increment( void *shared_counter );

void abstraction_test_cas( void );
thread_return_t CALLING_CONVENTION abstraction_test_internal_thread_cas( void *abstraction_test_cas_state );

void abstraction_test_dcas( void );
thread_return_t CALLING_CONVENTION abstraction_test_internal_thread_dcas( void *abstraction_test_dcas_state );

/* freelist tests */
void test_lfds611_freelist( void );

void freelist_test_internal_popping( void );
int freelist_test_internal_popping_init( void **user_data, void *user_state );
thread_return_t CALLING_CONVENTION freelist_test_internal_thread_popping( void *freelist_test_popping_state );

void freelist_test_internal_pushing( void );
int freelist_test_internal_pushing_init( void **user_data, void *user_state );
void freelist_test_internal_pushing_delete( void *user_data, void *user_state );
thread_return_t CALLING_CONVENTION freelist_test_internal_thread_pushing( void *freelist_test_pushing_state );

void freelist_test_internal_popping_and_pushing( void );
thread_return_t CALLING_CONVENTION freelist_test_internal_thread_popping_and_pushing_start_popping( void *freelist_test_popping_and_pushing_state );
thread_return_t CALLING_CONVENTION freelist_test_internal_thread_popping_and_pushing_start_pushing( void *freelist_test_popping_and_pushing_state );

void freelist_test_internal_rapid_popping_and_pushing( void );
thread_return_t CALLING_CONVENTION freelist_test_internal_thread_rapid_popping_and_pushing( void *lfds611_freelist_state );

/* queue tests */
void test_lfds611_queue( void );

void queue_test_enqueuing( void );
thread_return_t CALLING_CONVENTION queue_test_internal_thread_simple_enqueuer( void *queue_test_enqueuing_state );

void queue_test_dequeuing( void );
thread_return_t CALLING_CONVENTION queue_test_internal_thread_simple_dequeuer( void *queue_test_dequeuing_state );

void queue_test_enqueuing_and_dequeuing( void );
thread_return_t CALLING_CONVENTION queue_test_internal_thread_enqueuer_and_dequeuer( void *queue_test_rapid_enqueuing_and_dequeuing_state );

void queue_test_rapid_enqueuing_and_dequeuing( void );
thread_return_t CALLING_CONVENTION queue_test_internal_thread_rapid_enqueuer_and_dequeuer( void *queue_test_rapid_enqueuing_and_dequeuing_state );

/* ringbuffer tests */
void test_lfds611_ringbuffer( void );

void ringbuffer_test_reading( void );
thread_return_t CALLING_CONVENTION ringbuffer_test_thread_simple_reader( void *ringbuffer_test_reading_state );

void ringbuffer_test_writing( void );
thread_return_t CALLING_CONVENTION ringbuffer_test_thread_simple_writer( void *ringbuffer_test_writing_state );

void ringbuffer_test_reading_and_writing( void );
thread_return_t CALLING_CONVENTION ringbuffer_test_thread_reader_writer( void *ringbuffer_test_reading_and_writing_state );

/* slist tests */
void test_lfds611_slist( void );

void test_slist_new_delete_get( void );
thread_return_t CALLING_CONVENTION slist_test_internal_thread_new_delete_get_new_head_and_next( void *slist_test_state );
thread_return_t CALLING_CONVENTION slist_test_internal_thread_new_delete_get_delete_and_get( void *slist_test_state );

void test_slist_get_set_user_data( void );
thread_return_t CALLING_CONVENTION slist_test_internal_thread_get_set_user_data( void *slist_test_state );

void test_slist_delete_all_elements( void );

/* stack tests */
void test_lfds611_stack( void );

void stack_test_internal_popping( void );
thread_return_t CALLING_CONVENTION stack_test_internal_thread_popping( void *stack_test_popping_state );

void stack_test_internal_pushing( void );
thread_return_t CALLING_CONVENTION stack_test_internal_thread_pushing( void *stack_test_pushing_state );

void stack_test_internal_popping_and_pushing( void );
thread_return_t CALLING_CONVENTION stack_test_internal_thread_popping_and_pushing_start_popping( void *stack_test_popping_and_pushing_state );
thread_return_t CALLING_CONVENTION stack_test_internal_thread_popping_and_pushing_start_pushing( void *stack_test_popping_and_pushing_state );

void stack_test_internal_rapid_popping_and_pushing( void );
thread_return_t CALLING_CONVENTION stack_test_internal_thread_rapid_popping_and_pushing( void *stack_state );
# Install the iMazing device-management app via Homebrew and launch it.
# Propagates the failing command's exit status if either step fails.
function install_imazing() {
    # `brew cask install` was removed in Homebrew 2.6; the supported form
    # is `brew install --cask`.
    brew install --cask imazing || return $?
    open /Applications/iMazing.app || return $?
}
// import { gaWrapper } from 'helpers/analytics';
import Cookie from '../../utils/cookies';

/**
 * Returns the Facebook Audience Network (FAN) MREC placement for an
 * article-show page. Slot index 1 has a dedicated placement id; every
 * other index shares a common fallback placement.
 */
export function getArticleShowMrecFanAdCode(index) {
  switch (index) {
    case 1: {
      return {
        placementId: '327541940615355_2416390551730473',
        size: '300x250',
      };
    }
    default: {
      return {
        placementId: '327541940615355_2416392701730258',
        size: '300x250',
      };
    }
  }
}

/** FAN placement for the above-the-fold (ATF) slot. */
export function getAtfFanAdCode() {
  return {
    size: '300x250',
    placementId: '327541940615355_2433755636660631',
  };
}

/** FAN placement for the footer banner (FBN) slot. */
export function getFbnFanAdCode() {
  return {
    size: '320x50',
    placementId: '327541940615355_2433757486660446',
  };
}

/**
 * Publishes the page's ad configuration on the global `window.Times`
 * namespace so ad scripts can read it. No-op outside the browser or for
 * non-object input.
 */
export function setAdsConfig(data) {
  if (data && typeof data === 'object' && typeof window === 'object') {
    window.Times = window.Times || {};
    window.Times.adsConfig = window.Times.adsConfig || {};
    window.Times.adsConfig.SubSCN = data.subSection || '';
  }
}

/**
 * Invokes `cb` with the visitor's geo/consent information once the
 * TimesGDPR consent module reports it; the `geo_country` cookie value is
 * merged in as `userCountry`.
 */
export function checkGeoHandling(cb) {
  if (window.TimesGDPR && window.TimesGDPR.common.consentModule.gdprCallback) {
    window.TimesGDPR.common.consentModule.gdprCallback(dataObj => {
      if (dataObj) {
        const geoinfo = { ...dataObj, userCountry: Cookie.get('geo_country') };
        if (typeof cb === 'function') {
          cb(geoinfo);
        }
      }
    });
  }
}

/**
 * True when the ad is a DFP (or non-CTN) slot whose ad code names a
 * medium-rectangle unit, i.e. the code ends with "mrec" (case-insensitive).
 * `endsWith` replaces the original hand-rolled `substring(length - 4, …)`
 * comparison, which is equivalent (shorter strings compare unequal either
 * way) but harder to read.
 */
export function isMrecAds(adData = {}) {
  return (
    (adData.adType === 'dfp' || !adData.isCTN) &&
    typeof adData.adCode === 'string' &&
    adData.adCode.toLowerCase().endsWith('mrec')
    // || adData.adCode
    //      .substring(adData.adCode.length - 6, adData.adCode.length)
    //      .toLowerCase() === 'mrec_1'
  );
}

/*
export function attachAdDifferenceCalculationAnalytics(pageType) {
  const templateName = typeof pageType === 'string' ? pageType.toLowerCase() : 'undefined';
  window.googletag = window.googletag || {};
  window.googletag.cmd = window.googletag.cmd || [];
  function checkAndFireAdDifferenceGa(eventAction, dfpEvent) {
    const slotPath = dfpEvent.slot.getAdUnitPath();
    const isAtf = slotPath.toLowerCase().indexOf('atf') >= 0;
    const isFbn = slotPath.toLowerCase().indexOf('fbn') >= 0;
    if (isAtf || isFbn) {
      gaWrapper(
        'send',
        'event',
        'ADS_Difference',
        eventAction,
        `${templateName}_${slotPath}`,
      );
    }
  }
  window.googletag.cmd.push(() => {
    window.googletag.pubads().addEventListener('slotRequested', event => {
      checkAndFireAdDifferenceGa('ad_requested', event);
    });
  });
  window.googletag.cmd.push(() => {
    window.googletag.pubads().addEventListener('slotRenderEnded', event => {
      checkAndFireAdDifferenceGa('ad_displayed', event);
    });
  });
}
*/
<gh_stars>0 import FeaturesWeAdd from "./FeaturesWeAdd"; export default FeaturesWeAdd;
"use strict"; var app = angular.module("app", []); app.controller('chatCtrl', ['$scope','$location', function ($scope,$location) { console.log('chatCtrl is running...'); var room = { room : 1 }; // $location.search(); $scope.chatMessages = []; $scope.roomList = []; $scope.userList = []; $scope.thisRoom = room.room; $scope.userId = 'xxxxxxxx'.replace(/[xy]/g, function(c) { var r = Math.random()*16|0, v = c == 'x' ? r : (r&0x3|0x8); return r.toString(16); }); // var socket = io.connect('http://localhost:4080'); var socket = io.connect('https://node-chat-multirooms.herokuapp.com'); socket.on('connect', function(){ socket.emit('newuser', $scope.userId , room.room); }); socket.on('msgFromServer', function (user, msg) { $scope.chatMessages.push({userId : user , message : msg}); $scope.$digest(); }); socket.on('roomInfoFromServer', function(rooms, currentRoom) { $scope.roomList = []; for(var key in rooms){ $scope.roomList.push(key); } $scope.$digest(); }); socket.on('usersFromServer', function(users) { $scope.userList = users; $scope.$digest(); }); $scope.sendMsg = function(){ socket.emit('msgFromClient', $scope.textMsg); $scope.textMsg = ''; }; }]);
def binary_search(arr, item):
    """Return the index of ``item`` in sorted ``arr``, or -1 if absent."""
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        value = arr[mid]
        if value == item:
            return mid
        if value < item:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1


print(binary_search([1, 4, 6, 8, 12], 8))
Quantum computing is a new form of computing that uses the principles of quantum mechanics to store and process data. It is based on the idea of using quantum bits (or “qubits”) to represent information, instead of the classical bits used in traditional computing. Unlike traditional computing, which uses bits that are either 0 or 1, qubits can also exist in a “superposition” of 0 and 1. This means that a qubit is not limited to one of two definite states, and a collection of qubits can encode and process many possible states at once. This property allows quantum computers to store and process information in ways classical computers cannot, enabling dramatic speed-ups for certain classes of problems. Furthermore, quantum computers are also able to use entanglement – a phenomenon in which two particles become so strongly correlated that the state of one cannot be described independently of the other, even over large distances. Entanglement allows quantum computers to perform some calculations that would be infeasible for classical computers in any practical amount of time. Quantum computing is an exciting new technology that holds great promise for the future. It may well revolutionise the way we use computers, making certain computations faster and more efficient than ever before.
package ehttp import ( "errors" "fmt" "io" "net" "net/http" "testing" ) func TestError(t *testing.T) { err := Error{} assertString(t, "<nil>", err.Error()) err = Error{code: http.StatusTeapot, error: fmt.Errorf("fail")} assertString(t, "fail", err.Error()) } func TestNewError(t *testing.T) { err := NewError(http.StatusTeapot, io.EOF) assertString(t, "EOF", err.Error()) err = NewErrorf(http.StatusTeapot, "hello %s", "world") assertString(t, "hello world", err.Error()) } func TestGetError(t *testing.T) { var opError error = &net.OpError{Op: "op", Err: errors.New("fail")} e1 := NewError(http.StatusTeapot, opError) e2 := e1.(*Error).GetError() if expect, got := fmt.Sprintf("%v (%T)", opError, opError), fmt.Sprintf("%v (%T)", e2, e2); expect != got { t.Fatalf("Unexpected error returned by GetError().\nExpect:\t%s\nGot:\t%s\n", expect, got) } }
#!/bin/sh
# Container entrypoint: fix ownership of the mounted workspace, then hand
# control to the requested command.

# Change ownership of all directories and files in the mounted volume, i.e.
# what has been mapped from the host:
chown -R repro:repro /workspace

# Finally invoke what has been specified as CMD in Dockerfile or command in
# docker-compose. `exec` replaces this shell so the command runs as PID 1
# and receives signals (e.g. SIGTERM from `docker stop`) directly.
exec "$@"
'use strict';

// Register the root Angular module with its routing, REST-resource,
// validation-message and cookie dependencies.
angular.module('app', ['ngRoute', 'ngResource', 'ngMessages', 'ngCookies'])
package io.opensphere.mapbox.envoy;

import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import io.opensphere.core.Toolbox;
import io.opensphere.core.api.adapter.AbstractEnvoy;
import io.opensphere.core.cache.CacheException;
import io.opensphere.core.cache.SimpleSessionOnlyCacheDeposit;
import io.opensphere.core.cache.SingleSatisfaction;
import io.opensphere.core.cache.matcher.PropertyMatcher;
import io.opensphere.core.cache.util.IntervalPropertyValueSet;
import io.opensphere.core.cache.util.PropertyDescriptor;
import io.opensphere.core.data.CacheDepositReceiver;
import io.opensphere.core.data.DataRegistryDataProvider;
import io.opensphere.core.data.QueryException;
import io.opensphere.core.data.util.DataModelCategory;
import io.opensphere.core.data.util.OrderSpecifier;
import io.opensphere.core.data.util.Satisfaction;
import io.opensphere.core.server.HttpServer;
import io.opensphere.core.server.ServerProvider;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.io.CancellableInputStream;
import io.opensphere.core.util.io.StreamReader;
import io.opensphere.core.util.lang.StringUtilities;
import io.opensphere.core.util.net.HttpUtilities;
import io.opensphere.core.util.net.UrlUtilities;
import io.opensphere.core.util.taskactivity.CancellableTaskActivity;
import io.opensphere.mapbox.util.MapboxUtil;
import io.opensphere.xyztile.model.Projection;
import io.opensphere.xyztile.model.XYZServerInfo;
import io.opensphere.xyztile.model.XYZTileLayerInfo;
import io.opensphere.xyztile.util.XYZTileUtils;

/**
 * Mapbox layers envoy. Discovers the tile layers a Mapbox-style server
 * offers by downloading and scraping its leaflet embed script, then
 * deposits the layer list into the data registry for the session.
 */
public class MapboxLayersEnvoy extends AbstractEnvoy implements DataRegistryDataProvider
{
    /**
     * The set of available servers that we will add to every layers query.
     */
    private final Set<String> myAvailableServers;

    /** The server provider. */
    private final ServerProvider<HttpServer> myServerProvider;

    /**
     * Constructor.
     *
     * @param toolbox the toolbox
     * @param availableServers The set of available servers that we will add to
     *            every layers query.
     */
    public MapboxLayersEnvoy(Toolbox toolbox, Set<String> availableServers)
    {
        super(toolbox);
        myServerProvider = toolbox.getServerProviderRegistry().getProvider(HttpServer.class);
        myAvailableServers = availableServers;
    }

    @Override
    public Collection<? extends Satisfaction> getSatisfaction(DataModelCategory dataModelCategory,
            Collection<? extends IntervalPropertyValueSet> intervalSets)
    {
        return SingleSatisfaction.generateSatisfactions(intervalSets);
    }

    @Override
    public String getThreadPoolName()
    {
        return getClass().getSimpleName();
    }

    @Override
    public void open()
    {
    }

    /** Answers only queries for the XYZ layers family in the Mapbox category. */
    @Override
    public boolean providesDataFor(DataModelCategory category)
    {
        return XYZTileUtils.LAYERS_FAMILY.equals(category.getFamily()) && MapboxUtil.PROVIDER.equals(category.getCategory());
    }

    /**
     * Downloads the server's layer list and deposits it (session-only) via
     * the receiver. Wraps I/O and cache failures in QueryException.
     */
    @Override
    public void query(DataModelCategory category, Collection<? extends Satisfaction> satisfactions,
            List<? extends PropertyMatcher<?>> parameters, List<? extends OrderSpecifier> orderSpecifiers, int limit,
            Collection<? extends PropertyDescriptor<?>> propertyDescriptors, CacheDepositReceiver queryReceiver)
        throws QueryException
    {
        try (CancellableTaskActivity ta = CancellableTaskActivity.createActive("Querying Mapbox layers"))
        {
            getToolbox().getUIRegistry().getMenuBarRegistry().addTaskActivity(ta);

            String baseUrl = category.getSource();
            // Strip the first ":<digits>" (port) occurrence so layer URLs
            // are built against the bare host.
            baseUrl = baseUrl.replaceFirst(":\\d+", "");
            // The layer list is scraped from the server's leaflet embed script.
            String url = StringUtilities.concat(baseUrl, "/js/leaflet-embed.js");
            List<XYZTileLayerInfo> layers = queryLayers(baseUrl, url);
            myAvailableServers.add(baseUrl);
            queryReceiver.receive(new SimpleSessionOnlyCacheDeposit<>(category, XYZTileUtils.LAYERS_DESCRIPTOR, layers));
        }
        catch (IOException | CacheException e)
        {
            throw new QueryException(e);
        }
    }

    /**
     * Query for the list of available layers by scraping the embed script in
     * three regex passes.
     *
     * @param baseUrl The base url to the server.
     * @param url the URL of the leaflet embed script
     * @return the layers
     * @throws IOException If something went wrong
     */
    private List<XYZTileLayerInfo> queryLayers(String baseUrl, String url) throws IOException
    {
        List<XYZTileLayerInfo> layers = New.list();
        try (CancellableInputStream stream = HttpUtilities.sendGet(UrlUtilities.toURL(url), myServerProvider))
        {
            String javaScript = new StreamReader(stream).readStreamIntoString(StringUtilities.DEFAULT_CHARSET);

            // Pass 1: layer id -> JS variable holding its v4 tile URL, from
            // statements like "var foo=...v4/<layerId>/...".
            Map<String, String> layerToVar1Map = New.map();
            Matcher matcher = Pattern.compile("var (.+?)=.+?v4/(.+?)/").matcher(javaScript);
            while (matcher.find())
            {
                layerToVar1Map.put(matcher.group(2), matcher.group(1));
            }

            // Pass 2: URL variable -> the L.TileLayer variable constructed
            // from it, from "var bar = new L.TileLayer(foo,".
            Map<String, String> var1ToVar2Map = New.map();
            matcher = Pattern.compile("var (.+?) = new L.TileLayer\\((.+?),").matcher(javaScript);
            while (matcher.find())
            {
                var1ToVar2Map.put(matcher.group(2), matcher.group(1));
            }

            // Pass 3: TileLayer variable -> display name, from object-literal
            // entries like "\"Street Map\": bar".
            Map<String, String> var2ToDisplayNameMap = New.map();
            matcher = Pattern.compile("\"(.+?)\"\\s*:\\s*(\\w+)").matcher(javaScript);
            while (matcher.find())
            {
                var2ToDisplayNameMap.put(matcher.group(2), matcher.group(1));
            }

            XYZServerInfo serverInfo = new XYZServerInfo("Mapbox", baseUrl);
            for (Map.Entry<String, String> entry : layerToVar1Map.entrySet())
            {
                boolean added = false;
                String layerName = entry.getKey();
                String var1 = entry.getValue();
                String var2 = var1ToVar2Map.get(var1);
                if (var2 != null)
                {
                    String displayName = var2ToDisplayNameMap.get(var2);
                    if (displayName != null)
                    {
                        // NOTE(review): the 1/false/5 arguments are kept
                        // as-is — confirm their meaning (zoom levels?)
                        // against XYZTileLayerInfo's constructor.
                        XYZTileLayerInfo layerInfo = new XYZTileLayerInfo(layerName, displayName, Projection.EPSG_3857, 1,
                                false, 5, serverInfo);
                        layers.add(layerInfo);
                        added = true;
                    }
                }

                if (!added)
                {
                    // No display name resolved: fall back to the raw layer id.
                    XYZTileLayerInfo layerInfo = new XYZTileLayerInfo(layerName, layerName, Projection.EPSG_3857, 1, false, 4,
                            serverInfo);
                    layers.add(layerInfo);
                }
            }
        }

        return layers;
    }
}
// Copyright 2021 University of Nottingham Ningbo China
// Author: <NAME> <<EMAIL>>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "Backend/Dependency_resolver.h"

// Construct a resolver that walks the dependency tree rooted at top-level
// module `tl`, looking resources up in the shared data store.
Dependency_resolver::Dependency_resolver(std::string tl, std::shared_ptr<data_store> store) {
    top_level = std::move(tl);
    d_store = std::move(store);
}

// Return the set of file paths for every HDL resource and memory
// initialization file the top level (transitively) depends on, including
// the top-level module itself. Using a set de-duplicates resources reached
// through multiple paths.
std::set<std::string> Dependency_resolver::get_dependencies() {
    resolve_dependencies(top_level);
    hdl_dependencies.push_back(d_store->get_HDL_resource(top_level));

    std::set<std::string> ret_val;
    for(auto& item: hdl_dependencies){
        ret_val.insert(item.get_path());
    }
    for(auto& item: mem_init_dependencies){
        ret_val.insert(item.get_path());
    }
    return ret_val;
}

// Recursively accumulate the dependencies of `module_name` into
// hdl_dependencies / mem_init_dependencies. Exits the process (exit(1))
// when a referenced module or memory-initialization file cannot be found.
void Dependency_resolver::resolve_dependencies(const std::string& module_name) {
    // Excluded modules and primitives are not defined and thus get a reference to a null pointer, we must exit early from the function to avoid dereferencing it
    bool is_excluded = std::find(excluded_modules.begin(), excluded_modules.end(), module_name) != excluded_modules.end();
    bool is_primitive = d_store->is_primitive(module_name);
    if(is_excluded || is_primitive)
        return;

    // interfaces never have dependencies so we can exit
    HDL_Resource resource = d_store->get_HDL_resource(module_name);
    if(resource == HDL_Resource()){
        std::cerr << "ERROR: module or interface " << module_name << " not found"<<std::endl;
        exit(1);
    }
    bool is_interface = resource.is_interface();
    if(is_interface)
        return;

    hdl_deps_t deps = d_store->get_HDL_resource(module_name).get_dependencies();
    for(auto &item : deps){
        auto res = d_store->get_HDL_resource(item.first);
        bool dep_excluded = std::find(excluded_modules.begin(), excluded_modules.end(), item.first) != excluded_modules.end();
        if(res != HDL_Resource() && !dep_excluded)
            hdl_dependencies.push_back(res);
        if(item.second != memory_init){
            // Ordinary HDL dependency: recurse into its own dependencies.
            resolve_dependencies(item.first);
        } else {
            // Memory-initialization dependency: resolve the data file instead.
            DataFile dep = d_store->get_data_file(item.first);
            if(dep == DataFile()){
                std::cerr << "ERROR: memory initialization file " << item.first << " not found"<<std::endl;
                exit(1);
            } else{
                mem_init_dependencies.push_back(dep);
            }
        }
    }
}

// Replace the exclusion list; excluded modules are skipped entirely during
// resolution (see the early return above).
void Dependency_resolver::set_excluded_modules(std::vector<std::string> exclusion_list) {
    excluded_modules = std::move(exclusion_list);
}

// Add dependencies that cannot be discovered from the HDL itself, then
// resolve each one's own transitive dependencies.
void Dependency_resolver::add_explicit_dependencies(const std::vector<std::string>& dep_list) {
    for(const auto& item: dep_list){
        hdl_dependencies.push_back(d_store->get_HDL_resource(item));
        resolve_dependencies(item);
    }
}
# For annotation lines that lack a gene_id attribute, copy the transcript_id
# value into a new trailing `gene_id <id>;` attribute.
# `/gene_id/b` branches (skips the substitution) on lines that already
# contain gene_id; the substitution captures the whole line and the
# transcript_id value, then appends " gene_id <value>;".
# NOTE(review): no input file is given, so with `-i` this fails as written —
# presumably a filename is appended where this snippet is used; confirm.
sed -i -e '/gene_id/b; s|\(.*transcript_id \([^;]*\);.*\)|\1 gene_id \2;|g '
Inversions :3 // implementation public int countInversions(int[] arr) { int N = arr.length; int inversions = 0; for (int i = 0; i < N-1; i++) for (int j = i+1; j < N; j++) if (arr[i] > arr[j]) inversions++; return inversions; }
import static org.junit.Assert.*;

import org.junit.*;

// Because SoundManager manages multiple MySound objects, if SoundManager
// works, so too should MySound.
public class TestSoundManager {

    // Register every fixture sound before each test. The name "meh" is
    // deliberately never registered and is used throughout as the
    // unknown-sound case.
    @Before
    public void initialize() {
        SoundManager.addSound("HiBoss", "assets/snd/BossIntro.wav");
        SoundManager.addSound("Level", "assets/snd/music.wav");
        SoundManager.addSound("Boss", "assets/snd/BossMain.wav");
        SoundManager.addSound("Shoot", "assets/snd/bullet.wav");
        SoundManager.addSound("Hi", "assets/snd/introjingle.wav");
    }

    // get() must hand back a SoundManager instance.
    @Test
    public void testGet() {
        assertTrue(SoundManager.get() instanceof SoundManager);
    }

    // Lookup returns the MySound for a registered name and null otherwise.
    @Test
    public void testGetMySound() {
        assertTrue(SoundManager.getMySound("HiBoss") instanceof MySound);
        assertTrue(SoundManager.getMySound("meh") == null);
    }

    // playSound is expected to return 1 for an unknown name and 0 on
    // success; playback starts asynchronously, hence the one-second sleep
    // before asserting isPlaying.
    @Test
    public void testPlaySound() {
        assertFalse(SoundManager.isPlaying("meh"));
        assertFalse(SoundManager.isPlaying("Level"));
        assertEquals(1, SoundManager.playSound("meh"));
        assertEquals(0, SoundManager.playSound("Level"));
        assertFalse(SoundManager.isPlaying("meh"));
        try {
            Thread.sleep(1000); // 1000 milliseconds is one second.
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
        }
        assertTrue(SoundManager.isPlaying("Level"));
    }

    // pauseSound: 1 for unknown names, 0 for a playing sound, which must
    // then stop reporting as playing.
    @Test
    public void testPauseSound() {
        SoundManager.playSound("meh");
        SoundManager.playSound("HiBoss");
        try {
            Thread.sleep(1000); // 1000 milliseconds is one second.
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
        }
        assertFalse(SoundManager.isPlaying("meh"));
        assertTrue(SoundManager.isPlaying("HiBoss"));
        assertEquals(1, SoundManager.pauseSound("meh"));
        assertEquals(0, SoundManager.pauseSound("HiBoss"));
        assertFalse(SoundManager.isPlaying("meh"));
        try {
            Thread.sleep(1000); // 1000 milliseconds is one second.
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
        }
        assertFalse(SoundManager.isPlaying("HiBoss"));
    }

    // stopSound mirrors pauseSound: 1 for unknown names, 0 for a playing
    // sound, which must then no longer be playing.
    @Test
    public void testStopSound() {
        SoundManager.playSound("meh");
        SoundManager.playSound("Boss");
        try {
            Thread.sleep(1000); // 1000 milliseconds is one second.
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
        }
        assertFalse(SoundManager.isPlaying("meh"));
        assertTrue(SoundManager.isPlaying("Boss"));
        assertEquals(1, SoundManager.stopSound("meh"));
        assertEquals(0, SoundManager.stopSound("Boss"));
        assertFalse(SoundManager.isPlaying("meh"));
        try {
            Thread.sleep(1000); // 1000 milliseconds is one second.
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
        }
        assertFalse(SoundManager.isPlaying("Boss"));
    }

    // setLooping: 1 for unknown names, 0 for registered ones; a looping
    // sound should still be playing after a second.
    @Test
    public void testSetLooping() {
        assertEquals(1, SoundManager.setLooping("meh", true));
        assertEquals(0, SoundManager.setLooping("Shoot", true));
        SoundManager.playSound("Shoot");
        try {
            Thread.sleep(1000); // 1000 milliseconds is one second.
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
        }
        assertTrue(SoundManager.isPlaying("Shoot"));
        assertFalse(SoundManager.isPlaying("meh"));
    }

    // setVolume: 1 for unknown names, 0 for registered ones, both before
    // and during playback.
    @Test
    public void testSetVolume() {
        assertEquals(1, SoundManager.setVolume("meh", 50.0f));
        assertEquals(0, SoundManager.setVolume("Hi", 50.0f));
        SoundManager.playSound("Hi");
        try {
            Thread.sleep(1000); // 1000 milliseconds is one second.
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
        }
        assertEquals(0, SoundManager.setVolume("Hi", 100.0f));
        SoundManager.playSound("Hi");
    }

    // setPan: 0 for registered names, 1 for unknown ones.
    @Test
    public void testSetPan() {
        assertEquals(0, SoundManager.setPan("Hi", 50.0f));
        assertEquals(1, SoundManager.setPan("meh", 50.0f));
    }
}
import * as React from 'react';
import block from 'bem-cn';
import { connect } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';

import { IAppReduxState } from 'shared/types/app';
import {
  IOperation, IOperationHistoryColumns, IOperationHistoryColumnData, IOperationHistoryNonColumnData, IAssetsInfoMap,
} from 'shared/types/models';
import { ISortInfo } from 'shared/types/ui';
import { Table } from 'shared/view/components';
import { actions as socketsActions } from 'services/sockets';
import { ITranslateProps, i18nConnect } from 'services/i18n';
import { selectors as configSelectors } from 'services/config';
import { getTableRowHoverColor } from 'shared/view/styles/themes';
import { formatAsset } from 'shared/helpers/number';
import { transformAssetName } from 'shared/helpers/converters';

import { actions, selectors } from '../../../redux';
import StatusCell from './StatusCell/StatusCell';
import { renderConfirmationCell, renderDateCell } from './Cells';
import OperationCell from './OperationCell/OperationCell';
import './OperationHistory.scss';

/** Props supplied by the parent component. */
interface IOwnProps {
  sortInfo?: ISortInfo<IOperation>;
  onSortInfoChange?(sortInfo: ISortInfo<IOperation>): void;
}

/** Props mapped in from the redux store. */
interface IStateProps {
  operations: IOperation[];
  assetsInfo: IAssetsInfoMap;
}

/** Bound action creators for socket-channel management and subscriptions. */
interface IActionProps {
  openChannel: typeof socketsActions.openChannel;
  closeChannel: typeof socketsActions.closeChannel;
  subscribe: typeof actions.subscribe;
  unsubscribe: typeof actions.unsubscribe;
}

type IProps = IStateProps & IActionProps & IOwnProps & ITranslateProps;

function mapState(state: IAppReduxState): IStateProps {
  return {
    operations: selectors.selectOperations(state),
    assetsInfo: configSelectors.selectAssetsInfo(state),
  };
}

function mapDispatch(dispatch: Dispatch<IAppReduxState>): IActionProps {
  return bindActionCreators({
    subscribe: actions.subscribe,
    unsubscribe: actions.unsubscribe,
    openChannel: socketsActions.openChannel,
    closeChannel: socketsActions.closeChannel,
  }, dispatch);
}

// BEM block name used to generate CSS class names.
const b = block('operation-history');

// Table constructor specialised to the operation-history column/row types.
const OperationHistoryTable = Table as new () => Table<IOperationHistoryColumnData, IOperationHistoryNonColumnData, ''>;

/**
 * Table of the user's transfer operations, kept live through the
 * 'Transfers' socket channel for the lifetime of the component.
 */
class OperationHistory extends React.PureComponent<IProps> {
  // Column definitions: each renderCell maps an IOperation to its cell markup.
  private columns: IOperationHistoryColumns = {
    asset: {
      title: () => 'Instrument',
      renderCell: ({ asset }: IOperation) => {
        return <span>{transformAssetName(asset)}</span>;
      },
    },
    amount: {
      title: () => 'Amount',
      // Amount formatting depends on per-asset precision info from config.
      renderCell: ({ asset, amount }: IOperation) => {
        return <span>{formatAsset(asset, amount, this.props.assetsInfo)}</span>;
      },
    },
    type: {
      title: () => 'Operation',
      renderCell: (record: IOperation) => <OperationCell record={record} />,
    },
    creationDate: {
      title: () => 'Date',
      renderCell: ({ creationDate }: IOperation) => renderDateCell(creationDate),
    },
    confirmations: {
      title: () => 'Confirmations',
      renderCell: (record: IOperation) => renderConfirmationCell(record),
    },
    status: {
      title: () => 'Status',
      renderCell: ({ status, link }: IOperation) => {
        return <StatusCell status={status} link={link} translate={this.props.translate} />;
      },
    },
  };

  // Open the socket channel and subscribe to live updates on mount...
  public componentDidMount() {
    const { openChannel, subscribe } = this.props;
    openChannel('Transfers');
    subscribe('Transfers');
  }

  // ...and tear down in the reverse order on unmount.
  public componentWillUnmount() {
    const { closeChannel, unsubscribe } = this.props;
    unsubscribe('Transfers');
    closeChannel('Transfers');
  }

  public render() {
    const { sortInfo, onSortInfoChange, operations } = this.props;
    return (
      <div className={b()}>
        <OperationHistoryTable
          columns={this.columns}
          records={operations}
          onSortInfoChange={onSortInfoChange}
          sortInfo={sortInfo}
          minWidth={50}
          recordIDColumn="id"
          getRowHoverColor={getTableRowHoverColor}
        />
      </div>
    );
  }
}

export default connect(mapState, mapDispatch)(i18nConnect(OperationHistory));
import { Action } from '@ngrx/store';

/** Action type identifiers for the admin data-loading flow. */
export const LOAD_DATA = 'action.admin.load-data';
export const DATA_LOADED = 'action.admin.data-loaded';

/** Dispatched to request that the admin data be (re)loaded. */
export class LoadData implements Action {
  readonly type = LOAD_DATA;
}

/** Dispatched when loading completes; carries the loaded data. */
export class DataLoaded implements Action {
  readonly type = DATA_LOADED;

  constructor(
    // `object` instead of the boxed `Object` wrapper type (flagged by
    // @typescript-eslint/ban-types); payload objects are accepted as before.
    public readonly payload: object = {}
  ) {}
}

/** Union of every action this feature can emit. */
export type MainUiAction = LoadData | DataLoaded;
package me.hawkweisman.alexandria
package controllers

import slick.driver.H2Driver.api.Database

/**
 * Control for serving the Alexandria app.
 * This should only serve one html page.
 *
 * @author <NAME>
 * @since v0.1.0
 */
case class AppController(db: Database) extends AlexandriaStack {

  // Render the single-page app shell; all further navigation happens
  // client-side in the bundled script.
  get("/") {
    contentType = "text/html"
    ssp("/app",
      "title" -> "Alexandria", //TODO: placeholder, get this from config
      "appScript" -> "alexandria-app"
    )
  }
}
package net.dean.jraw.http; /** * This class represents the value of the User-Agent header. It attempts to adhere to Reddit's recommended value for * User-Agent headers. * * <p>The Reddit Wiki states:</p> * * <blockquote> * Change your client's User-Agent string to something unique and descriptive, * including the target platform, a unique application identifier, a version string, * and your username as contact information, in the following format: * {@code <platform>:<app ID>:<version string> (by /u/<reddit username>)} * * <ul> * <li>Example: {@code android:com.example.myredditapp:v1.2.3 (by /u/kemitche)} * <li>Many default User-Agents (like "Python/urllib" or "Java") are drastically limited to encourage unique and * descriptive user-agent strings. * <li>Including the version number and updating it as your build your application allows us to safely block old * buggy/broken versions of your app. * <li><strong>NEVER lie about your user-agent.</strong> This includes spoofing popular browsers and spoofing other * bots. We will ban liars with extreme prejudice. * </ul> * </blockquote> */ public final class UserAgent { private final String val; private UserAgent(String val) { checkPresent(val, "val"); this.val = val; } /** * Instantiates a new UserAgent in the format recommended by Reddit: * {@code <platform>:<app ID>:<version string> (by /u/<reddit username>)}. For example, * {@code android:com.example.myredditapp:v1.2.3 (by /u/kemitche)}. Each argument must not be null nor empty. * * @param platform What this app is running on. Usually something like "android" or "desktop." * @param appId The app's unique identifier. Commonly the package name. * @param version The app's version. 
* @param redditUsername The primary creator/owner of the app * @return A new UserAgent */ public static UserAgent of(String platform, String appId, String version, String redditUsername) { checkPresent(platform, "platform"); checkPresent(appId, "appId"); checkPresent(version, "version"); checkPresent(redditUsername, "redditUsername"); return new UserAgent(String.format("%s:%s:%s (by /u/%s)", platform, appId, version, redditUsername)); } /** * Instantiates a new UserAgent with a custom value. This is not recommended, but provided for flexibility. * @return A new UserAgent */ public static UserAgent of(String val) { checkPresent(val, "val"); return new UserAgent(val); } private static void checkPresent(String val, String varName) { if (val == null || val.trim().length() == 0) { throw new IllegalArgumentException(varName + " was null or empty"); } } @Override public String toString() { return val; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UserAgent userAgent = (UserAgent) o; return val.equals(userAgent.val); } @Override public int hashCode() { return val.hashCode(); } }
import React from 'react' import PropTypes from 'prop-types' import AlbumBox from './../AlbumBox' import style from './style.scss' /** * AlbumsList - React component. * Renders a list of AlbumBox components. * * Properties * - onButtonClick - The handler of the rendered button to View Comments. * - albums - The array of albums to be listed. {Required} */ export default class AlbumsList extends React.Component { static get propTypes() { return { onButtonClick: PropTypes.func, albums: PropTypes.array.isRequired } } render() { // Generate the AlbumBox components from the albums array. const albums = this.props.albums.map(album => ( <AlbumBox key={album.id} album={album} onButtonClick={this.props.onButtonClick} /> )) return <div className={style.albumList}>{albums}</div> } }
#!/usr/bin/bash
# Build Lab02 and benchmark its `switch` vs `if` dispatch implementations.

# Record the environment: CPU model and total memory.
# (Direct grep replaces the `echo $(cat … | grep …)` anti-pattern, which
# spawned extra processes and mangled whitespace.)
grep -m1 "model name" /proc/cpuinfo
grep "MemTotal" /proc/meminfo

# Lint, then build with full optimisation and warnings enabled.
cpplint.py Lab02.cpp
g++ -O3 -Wall --std=c++17 Lab02.cpp -o Lab02

# Eight timed runs per variant; %U=user, %S=sys, %E=elapsed.
echo "Switch"
for i in {1..8}
do
    /usr/bin/time -f"%U %S %E" ./Lab02 switch 2000000000
done

echo "if"
for i in {1..8}
do
    /usr/bin/time -f"%U %S %E" ./Lab02 if 2000000000
done
source source.rc
source custom.rc

# Recreate the key directories and generate a self-signed certificate in the
# server keystore. Variable expansions are quoted so paths/passwords with
# spaces don't word-split; `cmd || logfail` replaces `[ $? -ne 0 ] && logfail`,
# which made each function return a non-zero status even when the command
# succeeded.
initdir() {
    rm -rf "$CLIENT_KEY_LOCATION" "$SERVER_KEY_LOCATION"
    mkdir -p "$SERVER_KEY_LOCATION" "$CLIENT_KEY_LOCATION"

    log "Create self-signed certificate"
    keytool -genkey -alias "$ALIAS_NAME" -keyalg RSA -keysize 1024 -dname "CN=$HOST_NAME,$ORGDATA" -keystore "$KEYSTORE_FILE" -storepass "$SERVER_KEYPASS_PASSWORD" || logfail "Failed while creating self-signed certificate"
}

# Export the server certificate from the keystore and import it into the
# truststore.
exportcert() {
    log "Export certificate from keystore"
    keytool -export -alias "$ALIAS_NAME" -keystore "$KEYSTORE_FILE" -rfc -file "$CERTIFICATE_NAME" -storepass "$SERVER_KEYPASS_PASSWORD" || logfail "Failed while exporting certificate"

    log "Import certificate into truststore"
    keytool -import -noprompt -alias "$ALIAS_NAME" -file "$CERTIFICATE_NAME" -keystore "$TRUSTSTORE_FILE" -storepass "$SERVER_TRUSTSTORE_PASSWORD" || logfail "Failed while importing certificate"
}

# Generate a certificate signing request from the keystore key pair.
gencsr() {
    log "Generate CSR from keystore"
    keytool -keystore "$KEYSTORE_FILE" -certreq -alias "$ALIAS_NAME" -keyalg rsa -file "$CSR_NAME" -storepass "$SERVER_KEYPASS_PASSWORD" || logfail "Failed while generating CSR file"
}

# Mode argument: 0 = init + export, 1 = init + CSR, 2 = export only.
PAR=$1
case "$PAR" in
    0) initdir
       exportcert ;;
    1) initdir
       gencsr ;;
    2) exportcert ;;
    *) logfail "Incorrect parameter, should be 0,1 or 2" ;;
esac
import {
  ILoginSessionQueryRepository,
  SortBy,
  SortOrder,
} from "../../domain/repository/query/LoginSession"
import { ApplicationError } from "../ApplicationError"
import { ILoginCredentialCommandRepository } from "../../domain/repository/command/LoginCredential"
import { ILoginCredentialQueryRepository } from "../../domain/repository/query/LoginCredential"
import { ILoginSessionCommandRepository } from "../../domain/repository/command/LoginSession"
import { IUserCommandRepository } from "../../domain/repository/command/User"
import { IUserQueryRepository } from "../../domain/repository/query/User"
import { UserId } from "../../domain/types"

/** Error codes this application service can raise. */
export const ErrorCodes = {
  UserNotFound: "user_not_found",
  CredentialNotFound: "credential_not_found",
  InternalError: "internal_error",
} as const

/**
 * Application service that removes a user account together with every login
 * session and the login credential belonging to it.
 */
export class DeleteUserApplication {
  private userQueryRepository: IUserQueryRepository
  private userCommandRepository: IUserCommandRepository
  private loginCredentialsQueryRepository: ILoginCredentialQueryRepository
  private loginCredentialsCommandRepository: ILoginCredentialCommandRepository
  private loginSessionQueryRepository: ILoginSessionQueryRepository
  private loginSessionCommandRepository: ILoginSessionCommandRepository

  constructor(
    usersQueryRepository: IUserQueryRepository,
    usersCommandRepository: IUserCommandRepository,
    loginCredentialsQueryRepository: ILoginCredentialQueryRepository,
    loginCredentialsCommandRepository: ILoginCredentialCommandRepository,
    loginSessionsQueryRepository: ILoginSessionQueryRepository,
    loginSessionsCommandRepository: ILoginSessionCommandRepository
  ) {
    this.userCommandRepository = usersCommandRepository
    this.userQueryRepository = usersQueryRepository
    this.loginCredentialsQueryRepository = loginCredentialsQueryRepository
    this.loginCredentialsCommandRepository = loginCredentialsCommandRepository
    this.loginSessionQueryRepository = loginSessionsQueryRepository
    this.loginSessionCommandRepository = loginSessionsCommandRepository
  }

  /**
   * Deletes the user with `userId`, then every login session of that user,
   * then the user's login credential. Resolves to true on success.
   *
   * Throws ApplicationError with code `user_not_found` when the user does
   * not exist, or `credential_not_found` when no credential is stored.
   *
   * NOTE(review): the user row is deleted before its sessions and credential
   * and no transaction wraps the steps, so a failure partway through leaves
   * orphaned or partially-deleted rows — confirm the repositories provide
   * atomicity or cascading deletes.
   */
  async delete(userId: UserId) {
    const user = await this.userQueryRepository.findById(userId)
    if (user == null) {
      throw new ApplicationError(ErrorCodes.UserNotFound)
    }
    await this.userCommandRepository.delete(user)
    const sessions = await this.loginSessionQueryRepository.findByUserId(
      user.id,
      SortBy.CreatedAt,
      SortOrder.Descending
    )
    for (const session of sessions) {
      await this.loginSessionCommandRepository.delete(session)
    }
    const credential = await this.loginCredentialsQueryRepository.findByUserId(user.id)
    if (credential == null) {
      throw new ApplicationError(ErrorCodes.CredentialNotFound)
    }
    await this.loginCredentialsCommandRepository.delete(credential)
    return true
  }
}
package auth import ( "context" _ "fmt" "testing" ) func TestNewConfig(t *testing.T) { uris := []string{ "aws://us-east-1?credentials=anon:", } ctx := context.Background() for _, uri := range uris { _, err := NewConfig(ctx, uri) if err != nil { t.Fatalf("Unable to create config with URI '%s', %v", uri, err) } } } func TestNewConfigWithCredentials(t *testing.T) { creds := []string{ "anon:", "env:", "iam:", "static:key:secret:", "static:key:secret:token", "fixtures/credentials:default", "fixtures/credentials:example", "default", } ctx := context.Background() for _, str := range creds { _, err := NewConfigWithCredentialsString(ctx, str) if err != nil { t.Fatalf("Unable to create config with credentials '%s', %v", str, err) } } }
<reponame>cane4044/fast-dna import { Controller } from "../controller"; import { FastElement } from "../fast-element"; import { Notifier, PropertyChangeNotifier } from "./notifier"; import { Expression, ExpressionContext } from "../interfaces"; export interface GetterInspector { inspect(source: unknown, propertyName: string): void; } const notifierLookup = new WeakMap<any, Notifier>(); let currentInspector: GetterInspector | undefined = void 0; export const Observable = { setInspector(inspector: GetterInspector) { currentInspector = inspector; }, clearInspector() { currentInspector = void 0; }, createArrayObserver(array: any[]): Notifier { throw new Error("Must call enableArrayObservation before observing arrays."); }, getNotifier<T extends Notifier = Notifier>(source: any): T { let found = source.$controller || notifierLookup.get(source); if (found === void 0) { if (source instanceof FastElement) { found = Controller.forCustomElement(source); } else if (Array.isArray(source)) { found = Observable.createArrayObserver(source); } else { notifierLookup.set(source, (found = new PropertyChangeNotifier())); } } return found; }, track(source: unknown, propertyName: string) { if (currentInspector !== void 0) { currentInspector.inspect(source, propertyName); } }, notify(source: unknown, args: any) { Observable.getNotifier(source).notify(source, args); }, define(target: {}, propertyName: string) { const fieldName = `_${propertyName}`; const callbackName = `${propertyName}Changed`; const hasCallback = callbackName in target; Reflect.defineProperty(target, propertyName, { enumerable: true, get: function(this: any) { Observable.track(this, propertyName); return this[fieldName]; }, set: function(this: any, value) { const oldValue = this[fieldName]; if (oldValue !== value) { this[fieldName] = value; if (hasCallback) { this[callbackName](); } Observable.notify(this, propertyName); } }, }); }, }; export function observable($target: {}, $prop: string) { Observable.define($target, 
$prop); } export function inspectAndEvaluate<T = unknown>( expression: Expression, scope: unknown, context: ExpressionContext, inspector: GetterInspector ): T { Observable.setInspector(inspector); const value = expression(scope, context); Observable.clearInspector(); return value as T; }
import random def random_print(strings): num = random.randint(0, 2) return strings[num]
#!/bin/sh # Copyright 2005-2019 ECMWF. # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. # # In applying this licence, ECMWF does not waive the privileges and immunities granted to it by # virtue of its status as an intergovernmental organisation nor does it submit to any jurisdiction. # . ./include.sh #Define a common label for all the tmp files label="bufr_attributes_test_f" #Define tmp file fTmp=${label}.tmp.txt rm -f ${fTmp} | true #We check "syno_multi.bufr". The path is #hardcoded in the example REDIRECT=/dev/null #Write the values into a file and compare with reference ${examples_dir}/eccodes_f_bufr_attributes #2> $REDIRECT > $fTmp #TODO: check the results #cat $fTmp #Clean up rm -f ${fTmp} | true
import sqlite3 import tkinter # create the Tkinter window root = tkinter.Tk() # set up the sqlite connection conn = sqlite3.connect('movies.db') cursor = conn.cursor() # create the table cursor.execute("""CREATE TABLE IF NOT EXISTS movies ( title TEXT, genre TEXT )""") # create labels for the search boxes search_title = tkinter.Label(root, text="Title:") search_genre = tkinter.Label(root, text="Genre:") # create search boxes title_box = tkinter.Entry(root) genre_box = tkinter.Entry(root) # create a button to submit the search submit_button = tkinter.Button(root, text="Submit", command=search_movies) #function that handles the whole search def search_movies(): title = title_box.get() genre = genre_box.get() # build the sql query query = """SELECT * FROM movies WHERE""" if title != '': query+=f""" title="{title}" AND""" if genre != '': query+=f""" genre="{genre}""" query = f"""{query} ORDER BY title asc;""" # execute the query cursor.execute(query) §§ 1131 rows = cursor.fetchall() #render the data in the table for row in rows: row_text = f""" Title: {row[0]}, Genre: {row[1]}""" row_label = tkinter.Label(root, text=row_text) row_label.pack() #pack the widgets search_title.pack() title_box.pack() search_genre.pack() genre_box.pack() submit_button.pack() root.mainloop()
#!/bin/bash # # Copyright (c) 2021 - for information on the respective copyright owner # see the NOTICE file and/or the repository https://github.com/carbynestack/ephemeral. # # SPDX-License-Identifier: Apache-2.0 # # # check that required tools are installed # if ! command -v jq &> /dev/null then echo "jq could not be found! See https://stedolan.github.io/jq/download/" exit 1 fi if ! command -v license-detector &> /dev/null then echo "license-detector could not be found! See https://github.com/go-enry/go-license-detector" exit 1 fi if ! command -v sponge &> /dev/null then echo "sponge could not be found! See https://command-not-found.com/sponge" exit 1 fi # download go dependencies into vendor directory - so we have all required license files go mod vendor # Traverses the vendor folder and collects license information in the 3RD-PARTY-LICENSES/sbom.json file. License and # notice files are copied to the respective subfolder in the 3RD-PARTY-LICENSES folder. SBOM_FILE="3RD-PARTY-LICENSES/sbom.json" echo "[]" > "${SBOM_FILE}" COUNT=$(find vendor -type d | wc -l) POS=0 FOUND=0 RES_FILE=$(mktemp /tmp/result.XXXXXX) echo "Traversing ${COUNT} directories" find vendor -type d | while IFS= read -r d; do echo -ne "\r${POS}/${COUNT}: ${FOUND} licenses found" license-detector "$d" -f json > "${RES_FILE}" if ! grep -q "error" "${RES_FILE}"; then jq -s ".[0] + [.[1][] | { project: .project, license: .matches[0].license }]" "${SBOM_FILE}" "${RES_FILE}" | sponge "${SBOM_FILE}" ARTIFACT_FOLDER="3RD-PARTY-LICENSES/${d#*/}" mkdir -p "${ARTIFACT_FOLDER}" cp "${d}"/LICENSE* "${d}"/LICENCE* "${d}"/Licence* "${d}"/NOTICE* "${ARTIFACT_FOLDER}" 2>/dev/null || true ((FOUND++)) fi ((POS++)) done echo -ne "\nDONE"
#!/usr/bin/env bash # Add repository key apt-key adv --keyserver keyserver.ubuntu.com --recv 7F0CEB10 # Add repository echo "deb http://repo.mongodb.org/apt/debian wheezy/mongodb-org/3.0 main" | tee /etc/apt/sources.list.d/mongodb-org-3.0.list # Update repositories apt-get update # Install mongodb apt-get install -y mongodb-org # Update configuration curl -sL "http://git.io/vJrLq" | tee /etc/mongod.conf # Check directories test -d /srv/mongodb || install -d /srv/mongodb -o mongodb -g nogroup test -d /var/log/mongodb || install -d /var/log/mongodb -o mongodb -g nogroup test -d /var/lib/mongodb && rm -rf /var/lib/mongodb # Check permissions test "$(stat -c %U:%G /var/log/mongodb)" = "mongodb:nogroup" || chown -R mongodb:nogroup /var/log/mongodb test "$(stat -c %U:%G /srv/mongodb)" = "mongodb:nogroup" || chown -R mongodb:nogroup /srv/mongodb # Restart mongod service service mongod restart
<filename>app/controllers/api/v1/entities/user_base.rb module Api module V1 module Entities class UserBase < Grape::Entity expose :id expose :name expose :email expose :avatar_url expose :uid private def avatar_url object.avatar || gravatar_url(object.email) end def gravatar_url(email) Gravatar.new(email).image_url(secure: true) end end end end end
<reponame>platypii/sbt-typescript type LogLevel = "debug" | "info" | "warn" | "error" interface SbtTypescriptOptions { logLevel: LogLevel, tsconfig: any, tsconfigDir: string, assetsDirs: string[], tsCodesToIgnore: number[], extraFiles: string[], nodeModulesDirs: string[], resolveFromNodeModulesDir: boolean, assertCompilation: boolean }
async () => await 1
require 'set'
require 'tempfile'
require 'sunspot/solr/java'
require 'sunspot/solr/installer'

module Sunspot
  module Solr
    class Server #:nodoc:
      # Raised if #stop is called but the server is not running
      ServerError = Class.new(RuntimeError)
      AlreadyRunningError = Class.new(ServerError)
      NotRunningError = Class.new(ServerError)
      JavaMissing = Class.new(ServerError)

      # Name of the sunspot executable (shell script)
      SOLR_EXECUTABLE = File.expand_path(
        File.join(File.dirname(__FILE__), '..', '..', '..', 'solr', 'bin', 'solr')
      )

      LOG_LEVELS = Set['SEVERE', 'WARNING', 'INFO', 'CONFIG', 'FINE', 'FINER', 'FINEST']

      attr_accessor :memory, :bind_address, :port, :log_file, :cloud
      attr_writer :pid_dir, :pid_file, :solr_home, :solr_executable

      def initialize
        Sunspot::Solr::Java.ensure_install!
        @cloud = false
      end

      #
      # Bootstrap a new solr_home by creating all required
      # directories.
      #
      # ==== Returns
      #
      # Boolean:: success
      #
      def bootstrap
        unless @bootstrapped
          install_solr_home
          create_solr_directories
          @bootstrapped = true
        end
      end

      #
      # Start the sunspot-solr server. Bootstrap solr_home first,
      # if necessary.
      #
      # ==== Returns
      #
      # Boolean:: success
      #
      def start
        bootstrap

        if File.exist?(pid_path)
          existing_pid = IO.read(pid_path).to_i
          begin
            # Signal 0 only probes whether the process exists.
            Process.kill(0, existing_pid)
            raise(AlreadyRunningError, "Server is already running with PID #{existing_pid}")
          rescue Errno::ESRCH
            STDERR.puts("Removing stale PID file at #{pid_path}")
            FileUtils.rm(pid_path)
          end
        end

        # Double-fork + setsid detaches the solr process from this session;
        # the child's PID is recorded so #stop can find it later.
        fork do
          pid = fork do
            Process.setsid
            STDIN.reopen('/dev/null')
            STDOUT.reopen('/dev/null')
            STDERR.reopen(STDOUT)
            run
          end
          FileUtils.mkdir_p(pid_dir)
          File.open(pid_path, 'w') do |file|
            file << pid
          end
        end
      end

      #
      # Run the sunspot-solr server in the foreground. Bootstrap
      # solr_home first, if necessary.
      #
      # ==== Returns
      #
      # Boolean:: success
      #
      def run
        bootstrap

        command = %w[./solr start -f]
        command << '-cloud' if cloud
        command << '-p' << "#{port}" if port
        command << '-m' << "#{memory}" if memory
        command << '-h' << "#{bind_address}" if bind_address
        command << '-s' << "#{solr_home}" if solr_home && !cloud

        exec_in_solr_executable_directory(command)
      end

      #
      # Stop the sunspot-solr server.
      #
      # ==== Returns
      #
      # Boolean:: success
      #
      def stop
        if File.exist?(pid_path)
          pid = IO.read(pid_path).to_i
          begin
            Process.kill('TERM', pid)
            exec_in_solr_executable_directory(['./solr', 'stop', '-p', "#{port}"]) if port
          rescue Errno::ESRCH
            raise NotRunningError, "Process with PID #{pid} is no longer running"
          ensure
            FileUtils.rm(pid_path)
          end
        else
          raise NotRunningError, "No PID file at #{pid_path}"
        end
      end

      def log_level=(level)
        unless LOG_LEVELS.include?(level.to_s.upcase)
          raise(ArgumentError, "#{level} is not a valid log level: Use one of #{LOG_LEVELS.to_a.join(',')}")
        end
        @log_level = level.to_s.upcase
      end

      def log_level
        @log_level || 'WARNING'
      end

      def pid_path
        File.join(pid_dir, pid_file)
      end

      def pid_file
        @pid_file || 'sunspot-solr.pid'
      end

      def pid_dir
        File.expand_path(@pid_dir || FileUtils.pwd)
      end

      def solr_home
        File.expand_path(@solr_home || File.join(File.dirname(solr_executable), '..', 'solr'))
      end

      def solr_executable
        @solr_executable || SOLR_EXECUTABLE
      end

      def solr_executable_directory
        @solr_executable_directory ||= File.dirname(solr_executable)
      end

      def exec_in_solr_executable_directory(command)
        FileUtils.cd(solr_executable_directory) { system(*command) }
      end

      #
      # Copy default solr configuration files from sunspot
      # gem to the new solr_home/config directory
      #
      # ==== Returns
      #
      # Boolean:: success
      #
      def install_solr_home
        # Fix vs. original: File.exists? was deprecated and removed in
        # Ruby 3.2 — use File.exist? instead.
        unless File.exist?(solr_home)
          Sunspot::Solr::Installer.execute(
            solr_home,
            :force => true,
            :verbose => true
          )
        end
      end

      #
      # Create new solr_home, config, log and pid directories
      #
      # ==== Returns
      #
      # Boolean:: success
      #
      def create_solr_directories
        [pid_dir].each do |path|
          # Fix vs. original: File.exists? -> File.exist? (removed in Ruby 3.2).
          FileUtils.mkdir_p(path) unless File.exist?(path)
        end
      end

      private

      # Lazily builds a log4j.properties Tempfile when a log_file is
      # configured; memoized (including a memoized nil when no log_file).
      def logging_config
        return @logging_config if defined?(@logging_config)
        @logging_config =
          if log_file
            logging_config = Tempfile.new('log4j.properties')
            logging_config.puts("log4j.rootLogger=#{log_level.to_s.upcase}, file, CONSOLE")
            logging_config.puts("log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender")
            logging_config.puts("log4j.appender.CONSOLE.layout=org.apache.log4j.EnhancedPatternLayout")
            logging_config.puts("log4j.appender.CONSOLE.layout.ConversionPattern=%-4r %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%n")
            logging_config.puts("log4j.appender.file=org.apache.log4j.RollingFileAppender")
            logging_config.puts("log4j.appender.file.MaxFileSize=4MB")
            logging_config.puts("log4j.appender.file.MaxBackupIndex=9")
            logging_config.puts("log4j.appender.file.File=#{log_file}")
            logging_config.puts("log4j.appender.file.layout=org.apache.log4j.EnhancedPatternLayout")
            logging_config.puts("log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%n")
            logging_config.puts("log4j.logger.org.apache.solr.update.LoggingInfoStream=OFF")
            logging_config.flush
            logging_config.close
            logging_config
          end
      end
    end
  end
end
function sumArrayElements(arr) { let sum = 0; for (let num of arr) { sum += num; } return sum; } const result = sumArrayElements([2, 4, 6]); console.log(result);
import React, { Component } from 'react'
import { connect } from 'react-redux'
import { Button, Dropdown, Header, Icon, Input, Loader } from 'semantic-ui-react'
import axios from 'axios'

import { API_URL } from '../api'
import { prepareCoinsList, formatCryptosList } from '../lib'

// Crypto conversion form: amount + two coin dropdowns; results are pushed
// into the parentHistory array supplied via props and re-rendered below.
class MainCalculator extends Component {
    constructor(props) {
        super(props)
        this.state = {
            results: [],
            // NOTE(review): `converted` is toggled during requests but never
            // read in render() — confirm whether a spinner was intended.
            converted: true,
            // inputs = [amount, fromSymbol, toSymbol]
            inputs: [0, "BTC", "ETH"]
        }
        this.setInputValue = this.setInputValue.bind(this)
        this.convertValue = this.convertValue.bind(this)
    }

    // Store the typed amount into inputs[0].
    setInputValue(e) {
        const { inputs } = this.state
        let newValue = inputs
        newValue[0] = e.target.value
        this.setState({ inputs: newValue })
    }

    // Store the dropdown's selected symbol into inputs[position] (1=from, 2=to).
    // NOTE(review): reads e.target.children[0].innerHTML — fragile coupling to
    // the semantic-ui DOM structure; verify against the Dropdown markup.
    setDropdownValue(e, position) {
        const { inputs } = this.state
        let newValue = inputs
        newValue[position] = e.target.children[0].innerHTML
        this.setState({ inputs: newValue })
    }

    // Removes `item` from `array` in place and returns it.
    // NOTE(review): mutates its argument — cryptoAvailList below is changed
    // by this call, which affects firstDropOptions built from the same list.
    removeItemFromArray(array, item) {
        let newArray = array
        const index = newArray.indexOf(item)
        if (index > -1) {
            newArray.splice(index, 1)
        }
        return newArray
    }

    // Fetch the price and prepend a human-readable result line to parentHistory.
    // NOTE(review): both the coin id and vs_currency come from inputs[2], so
    // this asks for the target coin's price in itself (likely always 1) and
    // inputs[1] is never used in the request — confirm intended behavior.
    convertValue() {
        const { cryptos, parentHistory } = this.props
        const { inputs } = this.state
        let id = cryptos.data.find(x => x.symbol === inputs[2].toLowerCase()).id
        this.setState({ converted: false })
        axios.get(API_URL + `/simple/price?ids=${id}&vs_currencies=${inputs[2].toLowerCase()}`)
            .then(resp => {
                const { data } = resp
                Object.keys(data[id]).forEach(key => {
                    parentHistory.unshift(`${inputs[0]} ${inputs[1]} is worth ${inputs[0] * data[id][key].toFixed(3)} ${inputs[2]}`)
                })
                this.setState({ converted: true })
            })
            .catch(err => {
                this.setState({ converted: false })
                console.log(err)
            })
    }

    render() {
        const { cryptos, currencies, cryptosLoaded, currLoaded, parentHistory } = this.props
        const { inputs } = this.state
        let firstDropOptions = [], secondDropOptions = []
        if (currLoaded && cryptosLoaded) {
            let cryptoAvailList = formatCryptosList(cryptos.data, currencies.data)
            firstDropOptions = prepareCoinsList(cryptoAvailList)
            // Second dropdown excludes the currently selected "from" coin.
            secondDropOptions = prepareCoinsList(this.removeItemFromArray(cryptoAvailList, inputs[1]))
            return (
                <div id='main-body'>
                    <div id='container'>
                        <Header as='h1' color='blue'>Crypto Calculator</Header>
                        <br />
                        <table align='center'>
                            <tbody className="text-left">
                                <tr>
                                    <td colSpan='2'><Header as='h4' color='blue'>From:</Header></td>
                                    <td></td>
                                    <td colSpan='2'><Header as='h4' color='blue'>To:</Header></td>
                                </tr>
                                <tr>
                                    <td>
                                        <Input id="from" className='small-padding' size='huge' type='number' onChange={this.setInputValue} />
                                    </td>
                                    <td>
                                        <Dropdown className='small-padding resize-drop' size='huge' defaultValue={firstDropOptions[0].key} onChange={e => this.setDropdownValue(e, 1)} search selection options={firstDropOptions} />
                                    </td>
                                    <td>
                                        <Icon name='long arrow alternate right' size='big' />
                                    </td>
                                    <td>
                                        <Dropdown id="to" className='small-padding resize-drop' size='huge' defaultValue={secondDropOptions[0].key} onChange={e => this.setDropdownValue(e, 2)} search selection options={secondDropOptions} />
                                    </td>
                                    <td>
                                        <Button className='small-padding' primary size='huge' onClick={this.convertValue}>Convert</Button>
                                    </td>
                                </tr>
                            </tbody>
                        </table>
                        <br />
                        <Header as='h4' color='blue'>Result</Header>
                        {parentHistory.length > 0 &&
                            Object.keys(parentHistory).map(key => {
                                // Latest entry (index 0) is rendered larger.
                                if (key === '0') {
                                    return <Header key={key} as='h2' color='grey'>{parentHistory[key]}</Header>
                                } else {
                                    return <Header key={key} as='h3' color='grey'>{parentHistory[key]}</Header>
                                }
                            })
                        }
                    </div>
                </div>
            )
        } else {
            return (
                <Loader active inline='centered' />
            )
        }
    }
}

const mapStateToProps = (state) => {
    return {
        cryptos: state.getCryptosReduc.cryptos,
        cryptosLoaded: state.getCryptosReduc.loaded,
        cryptosError: state.getCryptosReduc.error,
        currencies: state.getSupportedCurrReduc.currencies,
        currLoaded: state.getSupportedCurrReduc.loaded,
        currError: state.getSupportedCurrReduc.error
    }
}

export default connect(mapStateToProps)(MainCalculator)
osqueryd --flagfile osquery.flags
/* This file is part of the JitCat library. Copyright (C) <NAME> 2018 Distributed under the MIT License (license terms are at http://opensource.org/licenses/MIT). */ #include "jitcat/SLRParseResult.h" #include "jitcat/ASTNode.h" using namespace jitcat::Parser; SLRParseResult::SLRParseResult(): success(false), astRootNode(nullptr) { } SLRParseResult::~SLRParseResult() { };
#!/usr/bin/env bats DOCKER_COMPOSE_FILE="${BATS_TEST_DIRNAME}/php-5.2_ini_upload_max_filesize.yml" container() { echo "$(docker-compose -f ${DOCKER_COMPOSE_FILE} ps php | grep php | awk '{ print $1 }')" } setup() { docker-compose -f "${DOCKER_COMPOSE_FILE}" up -d sleep 20 } teardown() { docker-compose -f "${DOCKER_COMPOSE_FILE}" kill docker-compose -f "${DOCKER_COMPOSE_FILE}" rm --force } @test "php-5.2: ini: upload_max_filesize" { run docker exec "$(container)" /bin/su - root -mc "cat /usr/local/src/phpfarm/inst/current/etc/conf.d/upload_max_filesize.ini | grep 'upload_max_filesize'" [ "${status}" -eq 0 ] [[ "${output}" == *"1024M"* ]] }
import unittest
from unittest.mock import patch

from tmc import points, reflect

from tmc.utils import load, load_module, reload_module, get_stdout, check_source
from functools import reduce
import os
import os.path
import textwrap
from random import choice, randint
from datetime import date, datetime, timedelta

exercise = 'src.hockey_statistics'


# Join a list of attributes into a comma-separated string.
def f(attr: list):
    return ",".join(attr)


# Join a list of strings with newlines (used to echo test inputs in messages).
def s(l: list):
    return "\n".join(l)


# Course-grader test class for part 1 of the hockey statistics exercise.
# Each test feeds scripted stdin (file name + menu commands) to the student
# program and checks the captured stdout.
# NOTE(review): the f-strings below contain "\Output" — "\O" is not a valid
# escape so it prints literally; kept byte-identical here since messages are
# runtime strings.
@points('12.hockey_statistics_part1')
class HockeyStatistics1Test(unittest.TestCase):

    # Load the student's module once with benign input so import succeeds.
    @classmethod
    def setUpClass(cls):
        with patch('builtins.input', side_effect=["partial.json", "0"]):
            cls.module = load_module(exercise, 'fi')

    # Program must terminate cleanly on command 0.
    def test_01_stops(self):
        input_values = ["partial.json", "0"]
        with patch('builtins.input', side_effect=input_values):
            try:
                reload_module(self.module)
            except:
                self.fail(f"Check that your program works with input\n{s(input_values)}")

    # partial.json: player count and the command menu must be printed.
    def test_02_amount_and_instructions_outpu(self):
        input_values = ["partial.json", "0"]
        with patch('builtins.input', side_effect=input_values):
            try:
                reload_module(self.module)
            except:
                self.fail(f"Check that your program works with input\n{s(input_values)}")

        output = get_stdout()
        self.assertFalse(len(output)==0,'Your code does not output anything. Check that it is not inside if __name__ == "__main__" block.')
        exp = """read the data of 14 players

commands:
0 quit
1 search for player
2 teams
3 countries
4 players in team
5 players from country
6 most points
7 most goals
"""

        for line in exp.split("\n"):
            if not line in output:
                self.fail(f"The program should output\n{line}\nwhen the program is executed as\n{s(input_values)}\nNow the output was\n{output}")

        eiodotettu = "added!"
        self.assertFalse(eiodotettu in output, f"The program output should not contain\n{eiodotettu}\nwith input\n{s(input_values)}\nNow the output was\n{output}")

    # all.json: player count must reflect the larger data set.
    def test_03_amount_and_instructions_output_2(self):
        input_values = ["all.json", "0"]
        with patch('builtins.input', side_effect=input_values):
            try:
                reload_module(self.module)
            except:
                self.fail(f"Check that your program works with input\n{s(input_values)}")

        output = get_stdout()
        exp = """read the data of 964 players

commands:
"""

        for line in exp.split("\n"):
            if not line in output:
                self.fail(f"The program should output\n{line}\nwhen the program is executed as\n{s(input_values)}\nNow the output was\n{output}")

        eiodotettu = "added!"
        self.assertFalse(eiodotettu in output, f"The program output should not contain\n{eiodotettu}\nwith input\n{s(input_values)}\nNow the output was\n{output}")

    # Command 1: searching a player by name prints his stats line.
    def test_04_search_playesr(self):
        input_values = ["partial.json", "1", "<NAME>" ,"0"]
        with patch('builtins.input', side_effect=input_values):
            try:
                reload_module(self.module)
            except:
                self.fail(f"Check that your program works with input\n{s(input_values)}")

        output = get_stdout()
        self.assertFalse(len(output)==0,'Your code does not output anything. Check that it is not inside if __name__ == "__main__" block.')
        exp = """<NAME> DAL 6 + 26 = 32"""

        for line in exp.split("\n"):
            if not line in output:
                self.fail(f"The program should output\n{line}\nwhen the program is executed as\n{s(input_values)}\nNow the output was\n{output}")

    # Command 2: team abbreviations, alphabetical, one per line.
    def test_05_search_teams(self):
        input_values = ["partial.json", "2" ,"0"]
        with patch('builtins.input', side_effect=input_values):
            try:
                reload_module(self.module)
            except:
                self.fail(f"Check that your program works with input\n{s(input_values)}")

        output = get_stdout()
        self.assertFalse(len(output)==0,'Your code does not output anything. Check that it is not inside if __name__ == "__main__" block.')
        exp = """BUF
CGY
DAL
NJD
NYI
OTT
PIT
WPG
WSH"""

        for line in exp.split("\n"):
            if not line in output:
                self.fail(f"The program should output\n{line}\nwhen the program is executed as\n{s(input_values)}\nNow the output was\n{output}")

        # Verify the lines also appear consecutively and in order.
        output_lines = output.split('\n')
        exp_lines = exp.split("\n")
        n = output_lines.index(exp_lines[0])
        for i in range(len(exp_lines)):
            try:
                oo = output_lines[n+i]
            except:
                self.fail(f"when the program is executed as\n{s(input_values)}\Output \n{output}\nis not in correct order, it should be\n{exp}")
            ee = exp_lines[i]
            self.assertEqual(oo, ee, f"when the program is executed as\n{s(input_values)}\Output \n{output}\nis not in correct order, it should be\n{exp}")

    # Command 3: country codes, alphabetical, one per line.
    def test_06_search_for_countries(self):
        input_values = ["partial.json", "3" ,"0"]
        with patch('builtins.input', side_effect=input_values):
            try:
                reload_module(self.module)
            except:
                self.fail(f"Check that your program works with input\n{s(input_values)}")

        output = get_stdout()
        self.assertFalse(len(output)==0,'Your code does not output anything. Check that it is not inside if __name__ == "__main__" block.')
        exp = """CAN
CHE
CZE
SWE
USA"""

        for line in exp.split("\n"):
            if not line in output:
                self.fail(f"The program should output\n{line}\nwhen the program is executed as\n{s(input_values)}\nNow the output was\n{output}")

        # Verify the lines also appear consecutively and in order.
        output_lines = output.split('\n')
        exp_lines = exp.split("\n")
        n = output_lines.index(exp_lines[0])
        for i in range(len(exp_lines)):
            try:
                oo = output_lines[n+i]
            except:
                self.fail(f"when the program is executed as\n{s(input_values)}\Output \n{output}\nis not in correct order, it should be\n{exp}")
            ee = exp_lines[i]
            self.assertEqual(oo, ee, f"when the program is executed as\n{s(input_values)}\Output \n{output}\nis not in correct order, it should be\n{exp}")

    # Command 1 against the full data set.
    def test_07_search_player_input_values_1(self):
        input_values = ["all.json", "1", "<NAME>" ,"0"]
        with patch('builtins.input', side_effect=input_values):
            try:
                reload_module(self.module)
            except:
                self.fail(f"Check that your program works with input\n{s(input_values)}")

        output = get_stdout()
        self.assertFalse(len(output)==0,'Your code does not output anything. Check that it is not inside if __name__ == "__main__" block.')
        exp = """<NAME> MIN 4 + 17 = 21"""

        for line in exp.split("\n"):
            if not line in output:
                self.fail(f"The program should output\n{line}\nwhen the program is executed as\n{s(input_values)}\nNow the output was\n{output}")

    # Command 1 against the full data set, second player.
    def test_08_search_player_input_values_2(self):
        input_values = ["all.json", "1", "<NAME>" ,"0"]
        with patch('builtins.input', side_effect=input_values):
            try:
                reload_module(self.module)
            except:
                self.fail(f"Check that your program works with input\n{s(input_values)}")

        output = get_stdout()
        self.assertFalse(len(output)==0,'Your code does not output anything. Check that it is not inside if __name__ == "__main__" block.')
        exp = """<NAME> WSH 48 + 19 = 67"""

        for line in exp.split("\n"):
            if not line in output:
                self.fail(f"The program should output\n{line}\nwhen the program is executed as\n{s(input_values)}\nNow the output was\n{output}")

    # Command 2 against the full data set (all 31 teams, in order).
    def test_09_search_teams_big_input(self):
        input_values = ["all.json", "2" ,"0"]
        with patch('builtins.input', side_effect=input_values):
            try:
                reload_module(self.module)
            except:
                self.fail(f"Check that your program works with input\n{s(input_values)}")

        output = get_stdout()
        self.assertFalse(len(output)==0,'Your code does not output anything. Check that it is not inside if __name__ == "__main__" block.')
        exp = """ANA
ARI
BOS
BUF
CAR
CBJ
CGY
CHI
COL
DAL
DET
EDM
FLA
LAK
MIN
MTL
NJD
NSH
NYI
NYR
OTT
PHI
PIT
SJS
STL
TBL
TOR
VAN
VGK
WPG
WSH"""

        for line in exp.split("\n"):
            if not line in output:
                self.fail(f"The program should output\n{line}\nwhen the program is executed as\n{s(input_values)}\nNow the output was\n{output}")

        # Verify the lines also appear consecutively and in order.
        output_lines = output.split('\n')
        exp_lines = exp.split("\n")
        n = output_lines.index(exp_lines[0])
        for i in range(len(exp_lines)):
            try:
                oo = output_lines[n+i]
            except:
                self.fail(f"when the program is executed as\n{s(input_values)}\Output \n{output}\nis not in correct order, it should be\n{exp}")
            ee = exp_lines[i]
            self.assertEqual(oo, ee, f"when the program is executed as\n{s(input_values)}\Output \n{output}\nis not in correct order, it should be\n{exp}")

    # Command 3 against the full data set (all ISO country codes, in order).
    def test_10_search_for_countries_iso_input_values(self):
        input_values = ["all.json", "3" ,"0"]
        with patch('builtins.input', side_effect=input_values):
            try:
                reload_module(self.module)
            except:
                self.fail(f"Check that your program works with input\n{s(input_values)}")

        output = get_stdout()
        self.assertFalse(len(output)==0,'Your code does not output anything. Check that it is not inside if __name__ == "__main__" block.')
        exp = """AUS
AUT
CAN
CHE
CZE
DEU
DNK
FIN
FRA
GBR
LVA
NLD
NOR
RUS
SVK
SVN
SWE
UKR
USA"""

        for line in exp.split("\n"):
            if not line in output:
                self.fail(f"The program should output\n{line}\nwhen the program is executed as\n{s(input_values)}\nNow the output was\n{output}")

        # Verify the lines also appear consecutively and in order.
        output_lines = output.split('\n')
        exp_lines = exp.split("\n")
        n = output_lines.index(exp_lines[0])
        for i in range(len(exp_lines)):
            try:
                oo = output_lines[n+i]
            except:
                self.fail(f"when the program is executed as\n{s(input_values)}\Output \n{output}\nis not in correct order, it should be\n{exp}")
            ee = exp_lines[i]
            self.assertEqual(oo, ee, f"when the program is executed as\n{s(input_values)}\Output \n{output}\nis not in correct order, it should be\n{exp}")


if __name__ == '__main__':
    unittest.main()
from typing import List def find_color_coded_lines(file_path: str, color_code: str) -> List[int]: matching_lines = [] with open(file_path, 'r') as file: lines = file.readlines() for i, line in enumerate(lines, start=1): if color_code in line: matching_lines.append(i) return matching_lines
import { createContext } from "react"; import { action, makeObservable, observable } from "mobx"; import api from "../services/api"; interface User { id: number; name: string; email: string; birthdate: Date; phone: string; website: string; address: string; status: string; } class UsersStore { users: User[] = []; constructor() { makeObservable(this, { users: observable, getUsers: action, getUser: action, }); } getUsers = () => { api.get("/users").then(async (response) => { const users = await response.data; console.log(users); }); }; getUser = (idUser: any) => { api.get(`/users/${idUser}`).then(async (response) => { const users = await response.data; console.log(users); }); }; } export default createContext(new UsersStore());
import threading from lock import Lock class Election: def __init__( self, name, is_master_callback, lost_master_callback): self.lock = Lock( name, lock_callback=self._lock, lock_lost_callback=self._lost_lock) self.master_callback = is_master_callback self.lost_master_callback = lost_master_callback self.running = False self.condition = threading.Condition() def shutdown(self): self.running = False self.condition.acquire() self.condition.notify() self.condition.release() def run(self): self.running = True while self.running: self.lock.acquire() self.condition.acquire() self.condition.wait() self.condition.release() self.lock.release() def _lock(self): self.master_callback() def _lost_lock(self): self.lost_master_callback()
UPDATE script_texts SET sound=0 WHERE entry IN (-1554022,-1999906);
# coding: utf-8 # # Testing out Iris with OPeNDAP # In[6]: get_ipython().magic(u'matplotlib inline') import matplotlib.pyplot as plt from IPython.core.display import HTML HTML('<iframe src=http://scitools.org.uk/iris/ width=800 height=350></iframe>') # In[7]: import numpy import matplotlib.pyplot as plt import iris import iris.quickplot as qplt # In[8]: # load up some Gulf of Maine DEM data bathy = iris.load_cube('http://geoport.whoi.edu/thredds/dodsC/bathy/gom15') # In[9]: # create a custom color map # from http://colorbrewer2.org/index.php?type=sequential&scheme=Greens&n=9 earth_colors = [(247, 252, 245), (229, 245, 224), (199, 233, 192), (161, 217, 155), (116, 196, 118), (65, 171, 93), (35, 139, 69), (0, 109, 44), (0, 68, 27)] # from http://colorbrewer2.org/index.php?type=sequential&scheme=Blues&n=7 sea_colors = [(239, 243, 255), (198, 219, 239), (158, 202, 225), (107, 174, 214), (66, 146, 198), (33, 113, 181), (8, 69, 148)] colors = numpy.array(sea_colors[:1:-1] + earth_colors[2:], dtype=numpy.float32) colors /= 256 # pick some contour levels levels = [-4000, -2500, -400, -145, -10, 0, 10, 145, 400, 800, 1200, 1600, 2000] # In[11]: # make the plot plt.figure(figsize=(10,10)) qplt.contourf(bathy, levels, colors=colors, extend='both'); # In[ ]:
import {Component, Input} from '@angular/core'; @Component ({ selector: 'comment-preview', templateUrl: './comment-preview.component.html', styleUrls: ['./comment-preview.component.scss'] }) export default class CommentPreviewComponent { @Input() comment: Object; constructor(){} createSlugFromTitle(title: string): string{ return title.toLowerCase().split(' ').join('-'); } }
import {matches, extract, extractAny} from 'f-matches';

// AST node types accepted as the assigned value on the right-hand side.
const valueMatchers = [
  'Literal',
  'ObjectExpression',
  'ArrayExpression',
  'NewExpression',
  'CallExpression',
].map((type) => matches({type}));

/**
 * Matches: <className>.<fieldName> = <value>
 *
 * When the node matches, returns the extracted fields:
 *
 * - classIdentifier
 * - fieldIdentifier
 * - fieldNode
 *
 * @param {Object} node
 * @return {Object}
 */
export default matches({
  type: 'ExpressionStatement',
  expression: {
    type: 'AssignmentExpression',
    left: {
      type: 'MemberExpression',
      computed: false,
      object: extractAny('classIdentifier'),
      property: extractAny('fieldIdentifier')
    },
    operator: '=',
    right: extract('fieldNode', (node) =>
      valueMatchers.some((isValueNode) => isValueNode(node))
    )
  }
});
#!/bin/bash
# Prints, for each of the past 7 days, the number of unique visitor IDs
# found in data.log (first whitespace-separated field of each line).

# Print header
echo "Unique Visitors Report"
echo

# Calculate current date
now=$(date +"%Y-%m-%d")

# Iterate through past 7 days
for i in {1..7}; do
    day=$(date -d "$now -$i days" +"%Y-%m-%d")
    # Bug fix: 'uniq' only collapses ADJACENT duplicate lines, so on an
    # unsorted log the count over-reports.  'sort -u' dedupes globally.
    # (Also drops the useless 'cat | grep'.)
    visitor_count=$(grep "$day" data.log | cut -d' ' -f1 | sort -u | wc -l)
    echo "$day: $visitor_count"
done
# Load the shared test helpers.
. test/common.sh

# Shadow the 'set' builtin so that sourcing nave.sh below cannot change
# shell options; the stub just echoes the arguments it would have set.
set () {
  echo "$@"
}

# Source nave.sh with its main entry point disabled and debug output on.
_TESTING_NAVE_NO_MAIN=1 NAVE_DEBUG=1 . nave.sh
<reponame>tx0c/logbook import type { ReactNode } from "react"; import { providers } from "ethers"; import { Provider } from "wagmi"; import type { AppLayoutProps } from "next/app"; import { ApolloClient, InMemoryCache, ApolloProvider } from "@apollo/client"; import "../styles/variables/breakpoints.css"; import "../styles/variables/colors.css"; import "../styles/variables/typography.css"; import "../styles/variables/spacing.css"; import "../styles/variables/z-index.css"; import "../styles/reset.css"; import "../styles/base.css"; import "../styles/display.css"; import "../styles/layout.css"; import "../styles/vendors/tippy.css"; import "../styles/vendors/reach.css"; // import "../components/RichMarkdownEditor/all.css"; import "remirror/styles/all.css"; import { Layout, Toast } from "~/components"; import { GlobalStyles } from "~/components/GlobalStyles"; import { injectedConnector, walletConnectConnector } from "~/utils"; const connectors = ({ chainId }: { chainId?: any }) => { return [injectedConnector, walletConnectConnector]; }; const provider = ({ chainId }: { chainId?: any }) => new providers.AlchemyProvider( chainId, process.env.NEXT_PUBLIC_ALCHEMY_API_KEY ); const webSocketProvider = ({ chainId }: { chainId?: any }) => new providers.AlchemyWebSocketProvider( chainId, process.env.NEXT_PUBLIC_ALCHEMY_API_KEY ); const client = new ApolloClient({ uri: process.env.NEXT_PUBLIC_THE_GRAPH_API_URL, cache: new InMemoryCache(), }); function LogbookApp({ Component, pageProps }: AppLayoutProps) { const defaultLayout = (page: ReactNode) => <Layout>{page}</Layout>; const getLayout = Component.getLayout ?? defaultLayout; return ( <Provider autoConnect connectors={connectors} provider={provider} webSocketProvider={webSocketProvider} > <ApolloProvider client={client}> <Toast.Container /> {getLayout(<Component {...pageProps} />)} <GlobalStyles /> </ApolloProvider> </Provider> ); } export default LogbookApp;
<gh_stars>1-10 import { noop } from "@plumier/reflect" import { api, authorize, bind, ControllerBuilder, genericController, JwtClaims, meta, preSave, val } from "plumier" import { Column, Entity, getRepository, OneToMany } from "typeorm" import { EntityBase } from "../../_shared/entity-base" import { Order } from "../orders/order-entity" import { Product } from "../products/products-entity" import { ShopUser } from "../shops-users/shops-users-entity" // /api/shops @genericController(c => { c.methods("Delete", "Put", "Patch").authorize("ShopOwner") }) @api.tag("Shop Management") @Entity() export class Shop extends EntityBase { @val.required() @Column() name: string // /api/shops/{pid}/users @genericController(c => { c.mutators().authorize("ShopOwner") c.accessors().authorize("ShopOwner", "ShopStaff") .transformer(ShopUserDto, transformer) }) @api.tag("Shop User Management") @authorize.none() @OneToMany(x => ShopUser, x => x.shop) users: ShopUser[] // /api/shops/{pid}/products @genericController(c => { c.mutators().authorize("ShopOwner", "ShopStaff") }) @api.tag("Shop Product Management") @authorize.none() @OneToMany(x => Product, x => x.shop) products: Product[] // /api/shops/{pid}/orders @genericController(c => { c.mutators().ignore() c.accessors().authorize("ShopOwner", "ShopStaff") }) @api.tag("Shop Order Management") @authorize.none() @OneToMany(x => Order, x => x.shop) orders: Order[] @preSave("post") async setShopOwner(@bind.user() user: JwtClaims) { const owner = await getRepository(ShopUser) .save({ user: { id: user.userId }, role: "ShopOwner" }) this.users = [owner] } } const transformer = (x: ShopUser) => ({ userId: x.user.id, name: x.user.name, role: x.role }) export class ShopUserDto { @meta.property() userId: number @meta.property() name: string @meta.property() role: "ShopOwner" | "ShopStaff" }
// The mock factory must be declared BEFORE the module under test is
// imported, so that compose-changelog resolves '../../services' to the
// mocked generateJson (jest.mock calls are hoisted, but the jest.fn()
// binding must already exist when the factory runs).
const generateJson = jest.fn()
jest.mock('../../services', () => ({ generateJson }))

import { composeChangelog } from '../compose-changelog'

// Minimal task context: a parsed version payload plus an output directory.
const factory = {
  version: {
    json: { mocked: true },
  },
  config: { outDir: 'DIR' },
} as any

describe('composeChangelog', () => {
  it('should compose log successfully', async () => {
    const result = await composeChangelog.task({ factory })

    expect(composeChangelog.title).toEqual('Composing Change Logs')
    // The task resolves with the version json and forwards it, together
    // with the configured output directory, to the changelog writer.
    expect(result).toEqual({ mocked: true })
    expect(generateJson).toHaveBeenCalledWith(
      'DIR',
      { mocked: true },
      'CHANGELOG',
    )
  })
})
<reponame>malet-pr/codoAcodo4.0 import React from 'react' import {TareasContador} from "./TareasContador" import {TareasBuscador} from "./TareasBuscador" import {TareasLista} from "./TareasLista" import {TareasItem} from "./TareasItem" import {CrearTareasBoton} from "./CrearTareasBoton" import './App.css'; const tareas = [ {id:0, nombre: "Estudiar React", completado: true}, {id:1, nombre: "Juntarme con mi Grupo", completado: false}, {id:2, nombre: "Pensar nuestro Proyecto", completado: false}, {id:3, nombre: "Realizar repositorio", completado: false}, ]; const App = () => { return ( <> <TareasContador/> <TareasBuscador/> <p></p> <TareasLista> {tareas.map(tarea =>( <TareasItem key= {tarea.id} nombre= {tarea.nombre} completado = {tarea.completado} /> ))} </TareasLista> <CrearTareasBoton/> </> ); } export {App}
<reponame>bonitasoft-labs/bpmn-js<gh_stars>10-100 /** * Copyright 2021 <NAME>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { ParsingMessageCollector } from '../../../../src/component/parser/parsing-messages'; import { ShapeBpmnElementKind } from '../../../../src/model/bpmn/internal/shape'; import { EdgeUnknownBpmnElementWarning, GroupUnknownCategoryValueWarning, LaneUnknownFlowNodeRefWarning, LabelStyleMissingFontWarning, ShapeUnknownBpmnElementWarning, BoundaryEventNotAttachedToActivityWarning, } from '../../../../src/component/parser/json/warnings'; describe('parsing message collector', () => { jest.spyOn(console, 'warn').mockImplementation(() => { // do not display actual console outputs during tests }); afterEach(() => { jest.clearAllMocks(); }); const parsingMessageCollector = new ParsingMessageCollector(); describe('console.warn when warning is registered', () => { it('unknown edge bpmn element', () => { parsingMessageCollector.warning(new EdgeUnknownBpmnElementWarning('edge-bpmnElement-unknown')); expect(console.warn).toHaveBeenCalledWith('Edge json deserialization: unable to find bpmn element with id %s', 'edge-bpmnElement-unknown'); }); it('unknown shape bpmn element', () => { parsingMessageCollector.warning(new ShapeUnknownBpmnElementWarning('shape-bpmnElement-unknown')); expect(console.warn).toHaveBeenCalledWith('Shape json deserialization: unable to find bpmn element with id %s', 'shape-bpmnElement-unknown'); }); it('missing font in label style', () => { 
parsingMessageCollector.warning(new LabelStyleMissingFontWarning('BPMNEdge_id_0', 'non-existing_style_id')); expect(console.warn).toHaveBeenCalledWith('Unable to assign font from style %s to shape/edge %s', 'non-existing_style_id', 'BPMNEdge_id_0'); }); it('unknown flow node ref in lane', () => { parsingMessageCollector.warning(new LaneUnknownFlowNodeRefWarning('lane_id', 'non-existing_flow_node_ref')); expect(console.warn).toHaveBeenCalledWith('Unable to assign lane %s as parent: flow node %s is not found', 'non-existing_flow_node_ref', 'lane_id'); }); it('unknown category value ref in group', () => { parsingMessageCollector.warning(new GroupUnknownCategoryValueWarning('Group_0', 'non-existing_category_value_ref')); expect(console.warn).toHaveBeenCalledWith( 'Group json deserialization: unable to find category value ref %s for bpmn element %s', 'non-existing_category_value_ref', 'Group_0', ); }); it('boundary event not attached to activity', () => { parsingMessageCollector.warning(new BoundaryEventNotAttachedToActivityWarning('boundary_event_0', 'lane_id_ref', ShapeBpmnElementKind.LANE)); expect(console.warn).toHaveBeenCalledWith('The boundary event %s must be attached to an activity, and not to %s of kind %s', 'boundary_event_0', 'lane_id_ref', 'lane'); }); }); });
#!/bin/bash # Copyright 2013-2014 Sebastian Kreft # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # First part return the files being commited, excluding deleted files. if [ "$NO_VERIFY" != "" ]; then exit 0 fi hg status --change $HG_NODE | cut -b 3- | tr '\n' '\0' | xargs --null --no-run-if-empty git-lint; if [ "$?" != "0" ]; then echo "There are some problems with the modified files."; echo "Fix them before committing or suggest a change to the rules defined in REPO_HOME/.gitlint.yaml."; echo "If it is not possible to fix them all commit with NO_VERIFY=1 hg commit ... ."; exit 1; fi
from vumi.application import ApplicationWorker


class SmitterApplication(ApplicationWorker):
    """Minimal vumi worker: sends a greeting on startup and replies to
    every inbound user message with a fixed acknowledgement."""

    def setup_application(self):
        # Runs once when the worker starts.
        # NOTE(review): "27....." looks like a placeholder MSISDN —
        # confirm the destination before deploying.
        return self.send_to("27.....", 'hi there!')

    def consume_user_message(self, message):
        # Every inbound message gets the same canned reply.
        return self.reply_to(message, 'thanks!')
def store_personal_data(person: tuple):
    """Overwrite people.csv with a single semicolon-separated record.

    ``person`` is ``(name, age, height)``; the height is coerced to
    float before formatting.  No trailing newline is written.
    """
    name, age, height = person[0], person[1], person[2]
    record = ";".join([str(name), str(age), str(float(height))])
    with open("people.csv", "w") as csv_file:
        csv_file.write(record)


if __name__ == "__main__":
    store_personal_data(("<NAME>", 37, 175.5))
################################################################################ # Github: https://github.com/MaxInGaussian/GomPlex # Author: <NAME> (<EMAIL>) ################################################################################ import numpy as np import pandas as pd import matplotlib.pyplot as plt from sklearn.metrics import roc_curve, auc from sklearn.linear_model import LogisticRegression from DecisionSystem import DecisionSystem from sys import path path.append("../../") from GomPlex import * DRAWING_RAW_DATA_PATH = 'drawing_raw_data.csv' model = DecisionSystem(sample_time=100, use_past=4, use_gender=True, use_age=True, use_edu_level=True, show_training_drawings=False, show_predicted_drawings=False) model.load_drawing_data(DRAWING_RAW_DATA_PATH) num_ci, num_nci = model.ci.sum(), len(model.ci)-model.ci.sum() # model.show_velocity_graph('MS0045') # model.show_direction_graph('MS0045') def get_eval_from_fpr_tpr(fpr, tpr): cfs_mat = np.array([[tpr*num_ci, num_ci-tpr*num_ci], [fpr*num_nci, num_nci-fpr*num_nci]]) accuracy = (cfs_mat[0, 0]+cfs_mat[1, 1])/np.sum(cfs_mat) precision = 0 if np.sum(cfs_mat[:, 0]) == 0 else\ cfs_mat[0, 0]/np.sum(cfs_mat[:, 0]) sensitivity = 0 if np.sum(cfs_mat[0]) == 0 else\ cfs_mat[0, 0]/np.sum(cfs_mat[0]) specificity = 0 if np.sum(cfs_mat[1]) == 0 else\ cfs_mat[1, 1]/np.sum(cfs_mat[1]) F1 = 0 if precision+sensitivity == 0 else\ 2*(precision*sensitivity)/(precision+sensitivity) print("Sensitivity =", sensitivity) print("Specificity =", specificity) print("Accuracy =", accuracy) print("Precision =", precision) print("F1 Score =", F1) AUC, F1, cfs_mat, cis, pred_cis = model.eval_features_for_subjects() fpr, tpr, thresholds = roc_curve(cis, pred_cis) AUC = auc(fpr, tpr) arg = np.argmax(tpr-fpr) plt.plot(fpr, tpr, color='black', lw=2, linestyle='-', label='GPMC (AUC = %0.3f)' % AUC) plt.scatter(fpr[arg], tpr[arg], s=50, color='k', marker='x') print('GPMC:') get_eval_from_fpr_tpr(fpr[arg], tpr[arg]) lr = ['Number of angles', 
'Intersection', 'Closure', 'Rotation', 'Closing-in'] lr_mat = model.df_drawing_data[lr].as_matrix().tolist() lr_y = model.ci.as_matrix().astype(np.int64) X, y = [], [] for i, lr_vec in enumerate(lr_mat): avg_V = model.avg_V[model.df_drawing_data.index[i]] std_V = model.std_V[model.df_drawing_data.index[i]] avg_T = model.avg_T[model.df_drawing_data.index[i]] std_T = model.std_T[model.df_drawing_data.index[i]] if(not np.any(np.isnan([avg_V, std_V, avg_T, std_T]))): lr_vec.extend([avg_V, std_V, avg_T, std_T]) X.append(lr_vec) y.append(lr_y[i]) lr_model = LogisticRegression().fit(X[:int(len(X)*0.65)], y[:int(len(X)*0.65)]) pred_cis_lr = lr_model.predict_proba(X[int(len(X)*0.65):]) fpr_lr, tpr_lr, thresholds_lr = roc_curve(y[int(len(X)*0.65):], pred_cis_lr[:, 1]) AUC_lr = auc(fpr_lr, tpr_lr) arg_lr = np.argmax(tpr_lr-fpr_lr) plt.plot(fpr_lr, tpr_lr, color='black', lw=2, linestyle='--', label='Logistic Regression (AUC = %0.3f)' % AUC_lr) plt.scatter(fpr_lr[arg_lr], tpr_lr[arg_lr], s=50, color='k', marker='x') print('Logistic Regression:') get_eval_from_fpr_tpr(fpr_lr[arg_lr], tpr_lr[arg_lr]) caffarra = ['Number of angles', 'Intersection', 'Closure', 'Rotation', 'Closing-in'] caffarra_score = model.df_drawing_data[caffarra].sum(axis=1) pred_cis_caff = np.array(caffarra_score).ravel()/13 fpr_caff, tpr_caff, thresholds_caff = roc_curve(cis, 1-pred_cis_caff) AUC_caff = auc(fpr_caff, tpr_caff) arg_caff = np.argmax(tpr_caff-fpr_caff) plt.plot(fpr_caff, tpr_caff, color='black', lw=2, linestyle='-.', label='Caffarra\'s Method (AUC = %0.3f)' % AUC_caff) plt.scatter(fpr_caff[arg_caff], tpr_caff[arg_caff], s=50, color='k', marker='x') print('Caffarra\'s Method:') get_eval_from_fpr_tpr(fpr_caff[arg_caff], tpr_caff[arg_caff]) mmse = ['Number of angles', 'Intersection'] mmse_score = np.array(model.df_drawing_data[mmse].sum(axis=1)==8)/1. 
# ROC curve for the MMSE copying criterion (score of 8 on
# 'Number of angles' + 'Intersection' counts as an intact copy).
fpr_mmse, tpr_mmse, thresholds_mmse = roc_curve(cis, 1-mmse_score)
AUC_mmse = auc(fpr_mmse, tpr_mmse)
# Operating point maximising Youden's J statistic (tpr - fpr).
arg_mmse = np.argmax(tpr_mmse-fpr_mmse)
plt.plot(fpr_mmse, tpr_mmse, color='black', lw=2, linestyle=':',
    label='MMSE Method (AUC = %0.3f)' % AUC_mmse)
plt.scatter(fpr_mmse[arg_mmse], tpr_mmse[arg_mmse], s=50, color='k', marker='x')
print('MMSE\'s Method:')
get_eval_from_fpr_tpr(fpr_mmse[arg_mmse], tpr_mmse[arg_mmse])
# Reference diagonal plus shared axis labelling for all curves above.
plt.plot([0, 1], [0, 1], 'k-', label='Random Guessing (AUC = 0.5)', alpha=0.3)
plt.xlim([0, 1])
plt.ylim([0, 1])
plt.xlabel('False Positive Rate (1 - Specificity)')
plt.ylabel('True Positive Rate (Sensitivity)')
plt.title('Receiver Operating Characteristic')
plt.legend(loc="lower right")
plt.tight_layout()
plt.show()

def plot_confusion_matrix(cm, classes):
    """Render the confusion matrix ``cm`` as a heat map.

    Parameters
    ----------
    cm : 2-D array of counts; rows = true labels, cols = predicted labels.
    classes : sequence of tick labels, one per row/column of ``cm``.

    Bug fixes: the body previously plotted the module-level ``cfs_mat``
    instead of the ``cm`` argument (so the parameter was silently
    ignored), used ``itertools`` without importing it, and carried an
    unused ``normalize`` local.
    """
    import itertools

    plt.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)
    plt.title('Confusion Matrix')
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)
    # Flip the text colour so counts stay readable on dark cells.
    thresh = cm.max() / 2.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, cm[i, j],
                 horizontalalignment="center",
                 color="white" if cm[i, j] > thresh else "black")
    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
#!/bin/bash set -e while [[ $# -gt 0 ]] do key="$1" case $key in -e|--engine) engine="$2" shift shift ;; -u|--ui) ui="$2" shift shift ;; *) echo Unknown parameter $1 exit 1 ;; esac done base="${GOPATH}/src/github.com/longhorn/longhorn-manager" files=`find ${base}/deploy/ |grep yaml |sort` project="longhornio\/longhorn-manager" latest=`cat ${base}/bin/latest_image` echo latest manager image ${latest} escaped_image=${latest//\//\\\/} for f in $files do sed -i "s/image\:\ ${project}:.*/image\:\ ${escaped_image}/g" $f sed -i "s/-\ ${project}:.*/-\ ${escaped_image}/g" $f done if [ -n "$engine" ]; then project="longhornio\/longhorn-engine" echo engine image $engine escaped_image=${engine//\//\\\/} for f in $files do sed -i "s/-\ ${project}:.*/-\ ${escaped_image}/g" $f done fi if [ -n "$ui" ]; then project="longhornio\/longhorn-ui" echo ui image $ui escaped_image=${ui//\//\\\/} for f in $files do sed -i "s/image\:\ ${project}:.*/image\:\ ${escaped_image}/g" $f done fi
import React from "react"; // Create a new component named "Math" // Render one Math component in the place of each "?" mark // Math should accept 3 props // num1, operator, and num2 // Math should return a span tag displaying the result e.g. 19 + 341 = 360 function Math(props) { console.log(props); let value; switch ( prop.operator ) { case "+": value = props.num1 + props.num2; break; default: value = NaN; } return <span>{value}</span> } function Calculator() { return ( <div> <p>19 + 341 = ?</p> <p>42 - 17 = ?</p> <p>100 * 3 = ?</p> <p>96 / 4 = ?</p> </div> ); } export default Calculator;
#! /bin/sh #Make sure Cassandra is installed and running on Port provided with the config in with_cassandra.conf cat with_cassandra.conf echo "Initializing the DB just in case" cat INIT.cql | cqlsh ./bin/hebserver-0.1/bin/hebserver -Dconfig.file=./with_cassandra.conf
/* Prints "false" when n is zero, "true" otherwise (no trailing newline). */
void f(int n)
{
    const char *label = (n == 0) ? "false" : "true";
    printf("%s", label);
}
<gh_stars>0 import { Injectable, NotFoundException, BadRequestException, } from '@nestjs/common'; import { InjectModel } from '@nestjs/mongoose'; import { Model } from 'mongoose'; import { Comment } from './comment.model'; import { CreateCommentDto } from './dto/create-comment.dto'; import { UpdateCommentDto } from './dto/update-comment.dto'; import { News } from 'src/news/news.model'; import { User } from 'src/users/user.model'; import { GetCommentsFilterDto } from './dto/get-comment-filter.dto'; import * as moment from 'moment'; @Injectable() export class CommentsService { constructor( @InjectModel('Comments') private readonly commentsModel: Model<Comment>, @InjectModel('News') private readonly newsModel: Model<News>, ) {} async getAllCommentsWithFilters( getCommentsFilterDto: GetCommentsFilterDto, ): Promise<Comment[]> { const { date, nconst, newsId, search, author, dateAfter, dateBefore, } = getCommentsFilterDto; let comments = await this.commentsModel.find().exec(); if (nconst) { comments = comments.filter(c => c.nconst === nconst); } if (newsId) { const foundNConst = await this.getConstFromNews(newsId); comments = comments.filter(c => c.nconst === foundNConst); } if (author) { comments = comments.filter(c => c.author === author); } if (search) { comments = comments.filter( c => c.details.includes(search), ); } if (date) { comments = comments.filter(c => moment(date).isSame(c.date, 'day')); } if (dateAfter && dateBefore) { comments = comments.filter(c => moment(c.date).isBetween(dateAfter, dateBefore, 'day'), ); } else if (dateAfter) { comments = comments.filter(c => moment(c.date).isAfter(dateAfter, 'day')); } else if (dateBefore) { comments = comments.filter(c => moment(c.date).isBefore(dateBefore, 'day'), ); } return comments; } async getAllComments(): Promise<Comment[]> { const result = await this.commentsModel.find().exec(); return result; } getCommentById(id: string): Promise<Comment> { return this.findComment(id); } async createComment( 
createCommentDto: CreateCommentDto, user: User, ): Promise<Comment> { const { details, nconst, newsId } = createCommentDto; let newComment; if (nconst) { newComment = new this.commentsModel({ details, nconst, author: user.username, date: new Date(), }); } else if (newsId) { const foundNconst = await this.getConstFromNews(newsId); newComment = new this.commentsModel({ details, author: user.username, nconst: foundNconst, date: new Date(), }); } else { throw new BadRequestException( 'Could not find field with nconst or newsId!', ); } const saved = await newComment.save(); return saved; } async deleteById(commentId: string) { const found = await this.findComment(commentId); const result = await this.commentsModel .deleteOne({ _id: found._id }) .exec(); } async updateComment(updateCommentDto: UpdateCommentDto): Promise<Comment> { const { id, details, nconst, newsId } = updateCommentDto; const updatedComment = await this.findComment(id); if (details) { updatedComment.details = details; } if (nconst) { updatedComment.nconst = nconst; } if (newsId) { const foundNconst = await this.getConstFromNews(newsId); updatedComment.nconst = foundNconst; } const saved = await updatedComment.save(); return saved; } private async findComment(id: string): Promise<Comment> { let found; try { found = await this.commentsModel.findById(id); } catch (error) { throw new NotFoundException('Could not find category with id: ' + id); } if (!found) { throw new NotFoundException('Could not find category with id: ' + id); } return found; } private async getConstFromNews(id: string): Promise<string> { let found; try { found = await this.newsModel.findById(id); } catch (error) { throw new NotFoundException('Could not find news with id: ' + id); } if (!found) { throw new NotFoundException('Could not find news with id: ' + id); } return found.nconst; } }
#!/bin/bash for file in $(ls *.fastq.gz) do #prefix=$(echo $file | sed 's/\(.*\)1\.clean\.fq/\1/g') #R1_001.fastq prefix=$(echo $file | sed 's/\(.*\)\.fastq\.gz/\1/g') #echo $prefix pbsfile=${prefix}.trim.sbatch file1=${prefix}.fastq.gz cleanfile=${prefix}.clean.fq.gz #echo $file1 file2=${prefix}R2_001.fastq #short=$(echo $prefix | sed 's/\(.*\)_S.*/\1/') short=${prefix}.trim #echo $short echo "#!/bin/bash" >$short.sbatch echo "#SBATCH --job-name=$short" >>$short.sbatch echo "#SBATCH --output=$short.out" >>$short.sbatch echo "#SBATCH --error=$short.err" >>$short.sbatch echo "#SBATCH --partition=broadwl" >>$short.sbatch echo "#SBATCH --ntasks=2" >>$short.sbatch echo "#SBATCH --mem=16G" >>$short.sbatch echo "module load java">>$short.sbatch echo "java -jar /home/kaininghu/xczhang/KnHu/biosoft/Trimmomatic-0.39/trimmomatic-0.39.jar SE -threads 2 -phred33 $file1 $cleanfile ILLUMINACLIP:/home/kaininghu/xczhang/KnHu/biosoft/Trimmomatic-0.39/adapters/TruSeq3-SE.fa:2:30:10 LEADING:3 TRAILING:3 SLIDINGWINDOW:4:15 MINLEN:36" >>$short.sbatch echo "sbatch $short.sbatch" done
#!/bin/bash
# Runs ../libclang_parse over every *.c test case in this directory and
# compares its output against the matching .out golden file.  Exits 1 on
# the first mismatch.

prog=../libclang_parse

cd "$(dirname "$0")"

for file in *.c; do
    out=${file%.c}.out
    tmp=$(mktemp /tmp/test.XXXXXX)
    $prog "$file" -Wall > "$tmp"
    if ! cmp --silent "$tmp" "$out"; then
        # Bug fix: the temp file used to be leaked on this failure path.
        rm "$tmp"
        echo "test failed: $file"
        exit 1
    fi
    rm "$tmp"
done
<gh_stars>1-10 import { BrowserModule } from '@angular/platform-browser'; import { NgModule } from '@angular/core'; import {HTTP_INTERCEPTORS} from '@angular/common/http'; import { FormsModule,ReactiveFormsModule } from '@angular/forms'; import { RouterModule } from '@angular/router'; import { AppComponent } from './app.component'; import { AppRoutes } from './app.routing'; import { SidebarModule } from './sidebar/sidebar.module'; import { FooterModule } from './shared/footer/footer.module'; import { NavbarModule} from './shared/navbar/navbar.module'; import { NguiMapModule} from '@ngui/map'; import { onBoardingComponent } from './layout/on-boarding/on-boarding.component'; import { AlertsComponent } from './alert/alerts.component'; import { IconsComponent } from './icons/icons.component'; import { NotificationsComponent } from './layout/notifications/notifications.component'; import { HttpClientModule } from '@angular/common/http'; import { LoginComponent } from './login/login.component'; import { LayoutComponent } from './layout/layout.component'; import { ConfigurationModule } from './layout/configuration/configuration.module'; import { DisplayResultsComponent } from './layout/display-results/display-results.component'; import { ReportsComponent } from './layout/reports/reports.component'; import { MsalService } from './services/msal.service'; import { AuthInterceptor } from './services/auth.interceptor'; import { ChartsModule } from 'ng2-charts'; import * as data from '../../config'; import { SocketIoModule, SocketIoConfig } from 'ng-socket-io'; import {ImageCropperComponent} from 'ng2-img-cropper'; import { HomepageComponent } from './layout/homepage/homepage.component'; import { DeviceManagementComponent } from './layout/device-management/device-management.component'; import { CameraManagementComponent } from './layout/device-management/camera-management/camera-management.component'; import { AggregatorManagementComponent } from 
'./layout/device-management/aggregator-management/aggregator-management.component'; import { ComputeManagementComponent } from './layout/device-management/compute-management/compute-management.component'; import { UserManagementComponent } from './layout/user-management/user-management.component'; import { FloorMapComponent } from './layout/floor-map/floor-map.component'; import { DashboardComponent } from './layout/dashboard/dashboard.component'; import { Slide1Component } from './slider/slide1/slide1.component'; import { Slide2Component } from './slider/slide2/slide2.component'; import { Slide3Component } from './slider/slide3/slide3.component'; import { HomeComponent } from './slider/home/home.component'; import { ConnectCameraSliderComponent } from './slider/connect-camera-slider/connect-camera-slider.component';import { CameraMappingSliderComponent } from './slider/camera-mapping-slider/camera-mapping-slider.component'; import { AreaMarkingSliderComponent } from './slider/area-marking-slider/area-marking-slider.component'; import { NavbarComponent } from './components/navbar/navbar.component'; import { ConnectCameraComponent } from './components/connect-camera/connect-camera.component'; import { CameraMappingComponent } from './components/camera-mapping/camera-mapping.component'; import { AreaMarkingComponent } from './components/area-marking/area-marking.component'; import { ConnectCameraOnboardingComponent } from './layout/on-boarding/connect-camera-onboarding/connect-camera-onboarding.component'; import { CameraMappingOnboardingComponent } from './layout/on-boarding/camera-mapping-onboarding/camera-mapping-onboarding.component'; import { AreaMarkingOnboardingComponent } from './layout/on-boarding/area-marking-onboarding/area-marking-onboarding.component'; import { AngularDraggableModule } from 'angular2-draggable'; import { CameraAddedComponent } from './slider/camera-added/camera-added.component'; import { NavbarsliderComponent } from 
'./slider/navbarslider/navbarslider.component'; import { AngularMultiSelectModule } from 'angular2-multiselect-dropdown/angular2-multiselect-dropdown'; import { LoadingModule } from 'ngx-loading'; import { ToastrService } from './services/toastr.service'; import { FacedetectionComponent } from './layout/facedetection/facedetection.component'; import { VideoIndexingComponent } from './layout/video-indexing/video-indexing.component'; import { MultiscreenComponent } from './layout/multiscreen/multiscreen.component'; import { VideoRetentionComponent } from './layout/video-retention/video-retention.component'; import { PagerService } from './layout/facedetection/_services/index'; const config: SocketIoConfig = { url: "", options: {} }; @NgModule({ declarations: [ AppComponent, onBoardingComponent, AlertsComponent, IconsComponent, NotificationsComponent, LoginComponent, LayoutComponent, DisplayResultsComponent, ReportsComponent, CameraMappingComponent, AreaMarkingComponent, HomepageComponent, DeviceManagementComponent, CameraManagementComponent, AggregatorManagementComponent, ComputeManagementComponent, UserManagementComponent, FloorMapComponent, DashboardComponent, Slide1Component, Slide2Component, Slide3Component, HomeComponent, ConnectCameraSliderComponent,NavbarComponent, ConnectCameraComponent, ConnectCameraOnboardingComponent, CameraMappingOnboardingComponent, AreaMarkingOnboardingComponent, CameraMappingSliderComponent, AreaMarkingSliderComponent, CameraAddedComponent, NavbarsliderComponent,FacedetectionComponent ,VideoIndexingComponent, MultiscreenComponent, VideoRetentionComponent ], imports: [ BrowserModule, RouterModule.forRoot(AppRoutes), SidebarModule, NavbarModule, FormsModule, FooterModule,ReactiveFormsModule, HttpClientModule, ConfigurationModule, SocketIoModule.forRoot(config), ChartsModule, AngularDraggableModule,LoadingModule, AngularMultiSelectModule ], providers: [ ToastrService,PagerService, MsalService, { provide: HTTP_INTERCEPTORS, useClass: 
AuthInterceptor, multi: true, } ], bootstrap: [AppComponent] }) export class AppModule { }
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build phase script.
# Copies each vendored/built framework into the app bundle, strips
# architectures not being built for, and (re)signs the result.
# NOTE(review): despite the /bin/sh shebang this uses bash features
# (arrays, [[ ]], pipefail, ERR trap); it relies on macOS /bin/sh being bash.
set -e
set -u
set -o pipefail

# Print a file:line-prefixed error message when any command fails (ERR trap).
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework.
# $1 - path (or basename resolvable under BUILT_PRODUCTS_DIR) of a .framework.
install_framework()
{
  # Resolve the framework source: built product, basename under the products
  # dir, or a literal path — whichever is readable first.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the framework's main binary inside the copied bundle.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM.
# $1 - path to a .framework.dSYM bundle.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into the target's temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity.
# $1 - path to the copied framework to sign. No-op unless signing is enabled.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # When parallel signing is requested, background the codesign command;
    # the trailing 'wait' at the bottom of the script joins them.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures.
# $1 - binary to strip in place. Sets STRIP_BINARY_RETVAL to 1 when the
# binary was processed, 0 when it shares no architectures with ARCHS.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}

# Embed the pod frameworks for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/CPAPIService/CPAPIService.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/CPAPIService/CPAPIService.framework"
fi
# Join any backgrounded codesign jobs before the phase reports success.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
parallel --jobs 6 < ./results/exp_6t_5i_n35/run-4/sea_cp_5n_6t_6d_1000f_617m_5i/jobs/jobs_n2.txt
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // Copyright (c) Microsoft Corporation. All rights reserved. //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// #ifndef _DRIVERS_TIMESERVICE_DECL_H_ #define _DRIVERS_TIMESERVICE_DECL_H_ 1 #define EVENT_TIMESERVICE_SYSTEMTIMECHANGED 1 #define EVENT_TIMESERVICE_TIMESYNCFAILED 2 /// <summary> /// TimeService settings flags that are configuarble via API. /// </summary> enum TimeService_Settings_Flags { TimeService_Settings_Flags_ForceSyncAtWakeUp = 0x1, TimeService_Settings_Flags_AutoDST = 0x2, }; enum TimeService_Status_Flags { TimeService_Status_Flags_Success = 0x0, TimeService_Status_Flags_Failed = 0x1, }; /// <summary> /// TimeService settings (configuarble via API). /// </summary> struct TimeService_Settings { UINT32 Flags; /// Flag values from TimeService_Settings_Flags UINT32 PrimaryServerIP; /// Main Server IP address. UINT32 AlternateServerIP; /// Alaternate Server IP address. UINT32 RefreshTime; /// Default refresh period in seconds. UINT32 Tolerance; /// Amount of deviation from that is acceptable. }; /// <summary> /// Status structure for Time Service, usually returned from its APIs. /// </summary> struct TimeService_Status { UINT32 Flags; /// Status flags. UINT32 ServerIP; /// Server IP that is involved in this status. UINT32 SyncOffset; /// INT64 CurrentTimeUTC; /// Latest know UTC time value. This saves an additional call to Time_GetUtcTime(). }; /// <summary> /// Initializes time service, must be called before using any TimeService APIs. /// </summary> HRESULT TimeService_Initialize(); /// <summary> /// Releases all resources used by TimeService, shuts down scheduled refresh. 
/// After Uninitialized TimeService APIs may not function or return undefined results. /// </summary> HRESULT TimeService_UnInitialize(); /// <summary> /// Starts scheduled time synchronization service. For periodic refresh it uses previously set refreshtime. /// Refresh time is updateable dynamically, and effective immediately. /// </summary> HRESULT TimeService_Start(); /// <summary> /// Stops periodic time synchronization service. Timeservice APIs are still available, may return stale data /// unless manual sync is performed. /// </summary> HRESULT TimeService_Stop(); /// <summary> /// Manual update of system time value from a given server. This can be called orthogonally along with /// scheduled time service. /// </summary> HRESULT TimeService_Update(UINT32 serverIP, UINT32 tolerance, TimeService_Status* status); /// <summary> /// Returns latest sync status, it may be scheduled or forced sync, whichever occured last. This /// can be verified from the TimeService_Status.Flags field. Optionally this will also /// return the latest UTC time. /// </summary> HRESULT TimeService_GetLastSyncStatus(TimeService_Status* status); /// <summary> /// Loads the existing TimeService_Settings information. /// </summary> HRESULT TimeService_LoadSettings(TimeService_Settings* settings); /// <summary> /// Saves existing TimeService_Settings information, effective immediately. /// </summary> HRESULT TimeService_SaveSettings(TimeService_Settings* settings); /// We redefine the error code to map 1-o-1 onto EBS stack error codes #define HAL_TIMESERVICE_TIME_OUT -3 // SNTP_TIME_OUT #define HAL_TIMESERVICE_WANT_READ_WRITE -2 // SNTP_WANT_READ_WRITE #define HAL_TIMESERVICE_ERROR -1 // SNTP_ERROR #define HAL_TIMESERVICE_SUCCESS 0 INT32 HAL_TIMESERVICE_GetTimeFromSNTPServer(UINT8* serverIP, SYSTEMTIME* systemTime); INT32 HAL_TIMESERVICE_GetTimeFromSNTPServerList(UINT8* serverIP, INT32 serverNum, SYSTEMTIME* systemTime); // -- // #endif // _DRIVERS_TIMESERVICE_DECL_H_
#!/bin/sh BUNDLE_DIR=perl6_bundle if [ $# -ne 1 ]; then echo "ERROR - Must supply a path to a perl6 installation" exit 1 fi perl6_install_dir=$1 if [ ! -d $perl6_install_dir ]; then echo "ERROR - '$perl6_install_dir' does not exist or is not a directory" exit 1 fi # wipe out previous bundle, if it exists rm -rf $BUNDLE_DIR mkdir -p $BUNDLE_DIR/bin mkdir -p $BUNDLE_DIR/lib mkdir -p $BUNDLE_DIR/share/nqp mkdir -p $BUNDLE_DIR/share/perl6 cp $perl6_install_dir/bin/perl6-m $BUNDLE_DIR/bin/perl6-m cp $perl6_install_dir/lib/libmoar.so $BUNDLE_DIR/lib/libmoar.so cp -r $perl6_install_dir/share/nqp/lib $BUNDLE_DIR/share/nqp/lib cp -r $perl6_install_dir/share/perl6/lib $BUNDLE_DIR/share/perl6/lib cp -r $perl6_install_dir/share/perl6/site $BUNDLE_DIR/share/perl6/site cp -r $perl6_install_dir/share/perl6/core $BUNDLE_DIR/share/perl6/core cp -r $perl6_install_dir/share/perl6/vendor $BUNDLE_DIR/share/perl6/vendor cp -r $perl6_install_dir/share/perl6/runtime $BUNDLE_DIR/share/perl6/runtime
// import React, { useState } from "react"; // import {useDispatch} from 'react-redux'; // import { useSelector } from "react-redux"; // // var coordinates // // const yaMapMainPage = () => { // // ymaps.ready(init); // // function init() { // // var myPlacemark, // // myMap = new ymaps.Map('map', { // // center: [55.753994, 37.622093], // // zoom: 9 // // }, { // // searchControlProvider: 'yandex#search' // // }); // // // Слушаем клик на карте. // // myMap.events.add('click', function (e) { // // var coords = e.get('coords'); // // // Если метка уже создана – просто передвигаем ее. // // if (myPlacemark) { // // myPlacemark.geometry.setCoordinates(coords); // // } // // // Если нет – создаем. // // else { // // myPlacemark = createPlacemark(coords); // // myMap.geoObjects.add(myPlacemark); // // // Слушаем событие окончания перетаскивания на метке. // // myPlacemark.events.add('dragend', function () { // // getAddress(myPlacemark.geometry.getCoordinates()); // // }); // // } // // getAddress(coords); // // }); // // // Создание метки. // // function createPlacemark(coords) { // // return new ymaps.Placemark(coords, { // // iconCaption: 'поиск...' // // }, { // // preset: 'islands#violetDotIconWithCaption', // // draggable: true // // }); // // } // // // Определяем адрес по координатам (обратное геокодирование). // // function getAddress(coords) { // // myPlacemark.properties.set('iconCaption', 'поиск...'); // // ymaps.geocode(coords).then(function (res) { // // var firstGeoObject = res.geoObjects.get(0); // // myPlacemark.properties // // .set({ // // // Формируем строку с данными об объекте. // // iconCaption: [ // // // Название населенного пункта или вышестоящее административно-территориальное образование. // // firstGeoObject.getLocalities().length ? firstGeoObject.getLocalities() : firstGeoObject.getAdministrativeAreas(), // // // Получаем путь до топонима, если метод вернул null, запрашиваем наименование здания. 
// // firstGeoObject.getThoroughfare() || firstGeoObject.getPremise() // // ].filter(Boolean).join(', '), // // // В качестве контента балуна задаем строку с адресом объекта. // // balloonContent: firstGeoObject.getAddressLine() // // }); // // const dispatch = useDispatch(); // // dispatch(addCoordinates(coordinates)) // // coordinates = myPlacemark.properties._data.balloonContent // // console.log(myPlacemark.properties._data.balloonContent) // // }); // // } // // console.log(myPlaceMark) // // } // // } // // export default yaMapMainPage // import { addCoordinatesFrom } from "../../../../redux/ac/coordinatesActions" // const yaMapMainPage = () => { // var arg = [] // ymaps.ready(init); // // const [inputFrom,setFrom] = useState('') // function init() { // // Стоимость за километр. // var DELIVERY_TARIFF = 20, // // Минимальная стоимость. // MINIMUM_COST = 500, // myMap = new ymaps.Map('map', { // center: [60.906882, 30.067233], // zoom: 9, // controls: [] // }), // // Создадим панель маршрутизации. // routePanelControl = new ymaps.control.RoutePanel({ // options: { // // Добавим заголовок панели. // showHeader: true, // title: 'Расчёт доставки' // } // }), // zoomControl = new ymaps.control.ZoomControl({ // options: { // size: 'small', // float: 'none', // position: { // bottom: 145, // right: 10 // } // } // }); // // Пользователь сможет построить только автомобильный маршрут. 
// routePanelControl.routePanel.options.set({ // types: {auto: true} // }); // console.log('11') // console.log(routePanelControl) // // if (routePanelControl._layout) { // // console.log(routePanelControl._layout._toInput?._stateMonitor?._values?.hint) // // } // // сonsole.log(routePanel?._layout?._toInput?._stateMonitor?._values?.hint) //адрес куда // // сonsole.log(routePanel?._layout?._toInput?._stateMonitor?._values?.value) //координаты куда // // сonsole.log(routePanel?._layout?._fromInput?._stateMonitor?._values?.hint) //адрекс откуда // // сonsole.log(routePanel?._layout?._fromInput?._stateMonitor?._values?.value) //адрекс куда // // console.log(routePanelControl) // // Если вы хотите задать неизменяемую точку "откуда", раскомментируйте код ниже. // /*routePanelControl.routePanel.state.set({ // fromEnabled: false, // from: 'Москва, Льва Толстого 16' // });*/ // myMap.controls.add(routePanelControl).add(zoomControl); // // Получим ссылку на маршрут. // routePanelControl.routePanel.getRouteAsync().then(function (route) { // // Зададим максимально допустимое число маршрутов, возвращаемых мультимаршрутизатором. // route.model.setParams({results: 1}, true); // // Повесим обработчик на событие построения маршрута. // route.model.events.add('requestsuccess', function () { // var activeRoute = route.getActiveRoute(); // if (activeRoute) { // // Получим протяженность маршрута. // var length = route.getActiveRoute().properties.get("distance"), // // Вычислим стоимость доставки. // price = calculate(Math.round(length.value / 1000)), // // Создадим макет содержимого балуна маршрута. // balloonContentLayout = ymaps.templateLayoutFactory.createClass( // '<span>Расстояние: ' + length.text + '.</span><br/>' + // '<span style="font-weight: bold; font-style: italic">Стоимость доставки: ' + price + ' р.</span>'); // // Зададим этот макет для содержимого балуна. // route.options.set('routeBalloonContentLayout', balloonContentLayout); // // Откроем балун. 
// activeRoute.balloon.open(); // let coordinatesFrom = routePanelControl.routePanel._layout._fromInput._stateMonitor._values.value // arg.push(routePanelControl.routePanel._layout._toInput._stateMonitor._values.value,routePanelControl.routePanel._layout._fromInput._stateMonitor._values.value) // // return console.log(coordinatesFrom) // const dispatch = useDispatch(); // dispatch(addCoordinatesFrom(coordinatesFrom)) // console.log('1') // const mapForm = useSelector(state=>state.coordinateFrom) // console.log('mapForm2',mapForm) // // setFrom(RoutePanelControl.routePanel._layout._fromInput._stateMonitor._values.value) // console.log(routePanelControl.routePanel._layout._toInput._stateMonitor._values.value) // console.log(routePanelControl.routePanel._layout._fromInput._stateMonitor._values.value) // } // }); // }); // // Функция, вычисляющая стоимость доставки. // function calculate(routeLength) { // return Math.max(routeLength * DELIVERY_TARIFF, MINIMUM_COST); // } // } // } // export default yaMapMainPage
#!/bin/bash # Copyright 2015 David Snyder # Apache 2.0. # # This example demonstrates music/speech discrimination. This recipe trains # three GMMs on the music, speech and noise portions of the MUSAN corpus. # We test the systems on Broadcast News. The Broadcast News test data consists # of short segments of either speech or music. The classification decisions # are made at a segment level from the average likelihoods of two GMMs. # Results (EERs) are inline in comments below. # # See README.txt for more info on data required. . ./cmd.sh . ./path.sh set -e mfccdir=`pwd`/mfcc vaddir=`pwd`/mfcc local/make_bn.sh /export/corpora5/LDC/LDC97S44 \ /export/corpora/LDC/LDC97T22 data steps/data/make_musan.sh --sampling-rate 16000 /export/corpora/JHU/musan data steps/make_mfcc.sh --mfcc-config conf/mfcc.conf --nj 30 --cmd "$train_cmd" \ data/musan_speech exp/make_mfcc $mfccdir steps/make_mfcc.sh --mfcc-config conf/mfcc.conf --nj 30 --cmd "$train_cmd" \ data/musan_music exp/make_mfcc $mfccdir steps/make_mfcc.sh --mfcc-config conf/mfcc.conf --nj 5 --cmd "$train_cmd" \ data/musan_noise exp/make_mfcc $mfccdir steps/make_mfcc.sh --mfcc-config conf/mfcc.conf --nj 30 --cmd "$train_cmd" \ data/bn exp/make_mfcc $mfccdir utils/fix_data_dir.sh data/musan_speech utils/fix_data_dir.sh data/musan_music utils/fix_data_dir.sh data/musan_noise utils/fix_data_dir.sh data/bn sid/compute_vad_decision.sh --nj 20 --cmd "$train_cmd" \ data/musan_speech exp/make_vad $vaddir sid/compute_vad_decision.sh --nj 5 --cmd "$train_cmd" \ data/musan_noise exp/make_vad $vaddir sid/compute_vad_decision.sh --nj 20 --cmd "$train_cmd" \ data/musan_music exp/make_vad $vaddir sid/compute_vad_decision.sh --nj 20 --cmd "$train_cmd" \ data/bn exp/make_vad $vaddir sid/train_diag_ubm.sh --nj 10 --cmd "$train_cmd" --delta-window 2 \ data/musan_noise 32 exp/diag_ubm_noise & sid/train_diag_ubm.sh --nj 20 --cmd "$train_cmd" --delta-window 2 \ data/musan_speech 32 exp/diag_ubm_speech & sid/train_diag_ubm.sh --nj 20 --cmd 
"$train_cmd" --delta-window 2 \ data/musan_music 32 exp/diag_ubm_music wait; sid/train_full_ubm.sh --nj 20 --cmd "$train_cmd" \ --remove-low-count-gaussians false data/musan_noise \ exp/diag_ubm_noise exp/full_ubm_noise & sid/train_full_ubm.sh --nj 20 --cmd "$train_cmd" \ --remove-low-count-gaussians false data/musan_speech \ exp/diag_ubm_speech exp/full_ubm_speech & sid/train_full_ubm.sh --nj 20 --cmd "$train_cmd" \ --remove-low-count-gaussians false data/musan_music \ exp/diag_ubm_music exp/full_ubm_music wait; sid/music_id.sh --cmd "$train_cmd" --nj 40 \ exp/full_ubm_music exp/full_ubm_speech \ data/bn exp/bn_music_speech sid/music_id.sh --cmd "$train_cmd" --nj 40 \ exp/full_ubm_noise exp/full_ubm_speech \ data/bn exp/bn_noise_speech printf "EER using GMMs trained on music and speech" compute-eer <(local/print_scores.py exp/bn_music_speech/ratio) # Equal error rate is 0.344234%, at threshold 0.525752 printf "\nEER using GMM trained on noise instead of music" compute-eer <(local/print_scores.py exp/bn_noise_speech/ratio) # Equal error rate is 0.860585%, at threshold 0.123218 # The following script replaces the VAD decisions originally computed by # the energy-based VAD. It uses the GMMs trained earlier in the script # to make frame-level decisions. Due to the mapping provided in # conf/merge_vad_map.txt, "0" corresponds to silence, "1" to speech, and # "2" to music. sid/compute_vad_decision_gmm.sh --nj 40 --cmd "$train_cmd" \ --merge-map-config conf/merge_vad_map.txt --use-energy-vad true \ data/bn exp/full_ubm_noise exp/full_ubm_speech/ \ exp/full_ubm_music/ exp/vad_gmm exp/vad_gmm/
SELECT employee_name, age FROM employee_table ORDER BY age DESC LIMIT 1;
import displayUtils from '../utils/displayUtils.js' import plotUtils from '../utils/plotUtils.js' export default function projectedDataStep({ projectedData, palette }) { displayUtils.labelStep("Datos graficados sobre\n'Ejes Principales'") plotUtils.drawAxes() plotUtils.plot2d(projectedData, palette) }