text
stringlengths
1
1.05M
-- Redo log history report: one row per archived log switch recorded in the
-- control file, listed chronologically. Useful for judging log-switch
-- frequency and mapping SCN ranges to wall-clock time.
select thread#,
       to_char(first_time, 'DD-MON-YYYY') creation_date,
       to_char(first_time, 'HH24:MI')     time,
       sequence#,
       first_change#                      lowest_SCN_in_log,
       next_change#                       highest_SCN_in_log,
       recid                              controlfile_record_id,
       stamp                              controlfile_record_stamp
  from v$log_history
 order by first_time;
The sum of squares of all the elements in the given array is 29 (2^2 + 3^2 + 4^2 = 4 + 9 + 16).
#!/bin/bash
# Full system update via yaourt (Arch Linux AUR helper):
#   -S   sync packages   -y  refresh package databases
#   -u   upgrade         -a  include AUR packages
#   --devel also rebuilds VCS (-git/-svn) packages.
yaourt -Syua --devel
'use strict'; const CoinstacClientCore = require('coinstac-client-core'); const { merge } = require('lodash'); const parseCLIInput = require('./parse-cli-input.js'); module.exports = function configureCore( config, logger, userId, appDirectory, clientServerURL, token ) { const coreConfiguration = merge( JSON.parse(config.toString()), parseCLIInput.get(), { logger, userId, appDirectory, clientServerURL, token, } ); const core = new CoinstacClientCore(coreConfiguration); return core.initialize().then(() => core); };
package malte0811.controlengineering.util.energy;

import net.minecraftforge.energy.IEnergyStorage;

/**
 * Wraps an {@link IEnergyStorage} so that consumers can only <em>extract</em>
 * energy from it: extraction is forwarded to the wrapped storage while all
 * insertion attempts are refused.
 *
 * <p>Bug fix: the original implementation had the two sides inverted
 * (it forwarded {@code receiveEnergy} and blocked {@code extractEnergy}),
 * which made the wrapper behave as receive-only, contradicting its name.
 */
public class ExtractOnlyEnergyWrapper implements IEnergyStorage {
    private final IEnergyStorage wrapped;

    public ExtractOnlyEnergyWrapper(IEnergyStorage wrapped) {
        this.wrapped = wrapped;
    }

    @Override
    public int receiveEnergy(int maxReceive, boolean simulate) {
        // Insertion is blocked: an extract-only view never accepts energy.
        return 0;
    }

    @Override
    public int extractEnergy(int maxExtract, boolean simulate) {
        // Forward extraction to the underlying storage.
        return wrapped.extractEnergy(maxExtract, simulate);
    }

    @Override
    public int getEnergyStored() {
        return wrapped.getEnergyStored();
    }

    @Override
    public int getMaxEnergyStored() {
        return wrapped.getMaxEnergyStored();
    }

    @Override
    public boolean canExtract() {
        // Only claim extractability if the wrapped storage supports it.
        return wrapped.canExtract();
    }

    @Override
    public boolean canReceive() {
        return false;
    }
}
<!-- Basic contact-details form: POSTs name/email/address to
     capture_user_details.php. Every field is mandatory via the HTML5
     "required" attribute (client-side validation only; the PHP handler
     presumably re-validates — confirm server side). -->
<form method="POST" action="capture_user_details.php">
  <label for="name">Name</label>
  <input type="text" name="name" id="name" required>
  <label for="email">Email</label>
  <input type="email" name="email" id="email" required>
  <label for="address">Address</label>
  <input type="text" name="address" id="address" required>
  <input type="submit" value="Submit">
</form>
package org.rajawali3d.util.debugvisualizer;

import org.rajawali3d.Object3D;
import org.rajawali3d.renderer.RajawaliRenderer;

/**
 * Container object for debug visualizations. Children added through
 * {@link #addChild(DebugObject3D)} get the renderer injected so they can
 * hook into the render loop.
 *
 * @author dennis.ippel
 */
public class DebugVisualizer extends Object3D {
    private RajawaliRenderer mRenderer;

    public DebugVisualizer(RajawaliRenderer renderer) {
        mRenderer = renderer;
    }

    // NOTE: this is an overload of Object3D.addChild (narrower parameter
    // type), not an override — plain Object3D children bypass the renderer
    // injection below.
    public void addChild(DebugObject3D child) {
        super.addChild(child);
        child.setRenderer(mRenderer);
    }
}
use std::io; use std::error::Error; use std::fmt; // Define a custom error type #[derive(Debug)] struct BuiltinError { message: String, source: Option<Box<dyn Error>>, } impl fmt::Display for BuiltinError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "BuiltinError: {}", self.message) } } impl Error for BuiltinError { fn source(&self) -> Option<&(dyn Error + 'static)> { self.source.as_ref().map(|e| e.as_ref()) } } // Implement the conversion from io::Error to BuiltinError impl From<io::Error> for BuiltinError { fn from(error: io::Error) -> Self { BuiltinError { message: format!("IO error: {}", error), source: Some(Box::new(error)), } } } fn main() { // Example usage of the custom error handling let file_open_result = std::fs::File::open("nonexistent_file.txt"); match file_open_result { Ok(_) => println!("File opened successfully"), Err(e) => { let custom_error: BuiltinError = e.into(); println!("Custom error: {}", custom_error); if let Some(source) = custom_error.source() { println!("Original error: {}", source); } } } }
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

# Ascend 310 inference driver: preprocess -> build app -> infer -> accuracy.
# Fix: "weather" -> "whether" in the two user-facing messages below.
if [[ $# -lt 3 || $# -gt 4 ]]; then
    echo "Usage: bash run_infer_310.sh [MINDIR_PATH] [DATASET_PATH] [NEED_PREPROCESS] [DEVICE_ID]
    NEED_PREPROCESS means whether need preprocess or not, it's value is 'y' or 'n'.
    DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero"
    exit 1
fi

# Resolve a possibly-relative path to an absolute one.
get_real_path(){
    if [ "${1:0:1}" == "/" ]; then
        echo "$1"
    else
        echo "$(realpath -m $PWD/$1)"
    fi
}

model=$(get_real_path $1)
dataset_path=$(get_real_path $2)

if [ "$3" == "y" ] || [ "$3" == "n" ];then
    need_preprocess=$3
else
    echo "whether need preprocess or not, it's value must be in [y, n]"
    exit 1
fi

device_id=0
if [ $# == 4 ]; then
    device_id=$4
fi

echo "mindir name: "$model
echo "dataset path: "$dataset_path
echo "need preprocess: "$need_preprocess
echo "device id: "$device_id

# Set up the Ascend toolchain environment; the layout differs depending on
# whether the ascend-toolkit package is installed.
export ASCEND_HOME=/usr/local/Ascend/
if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then
    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
    export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
    export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
else
    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
    export ASCEND_OPP_PATH=$ASCEND_HOME/opp
fi

# Run the dataset preprocessor into a fresh ./preprocess_Result directory.
function preprocess_data()
{
    if [ -d preprocess_Result ]; then
        rm -rf ./preprocess_Result
    fi
    mkdir preprocess_Result
    python ../preprocess.py --dataset_path=$dataset_path --result_path=./preprocess_Result/
}

# Build the C++ Ascend 310 inference application.
function compile_app()
{
    cd ../ascend310_infer || exit
    bash build.sh &> build.log
}

# Execute inference with the exported MINDIR model.
function infer()
{
    cd - || exit
    if [ -d result_Files ]; then
        rm -rf ./result_Files
    fi
    if [ -d time_Result ]; then
        rm -rf ./time_Result
    fi
    mkdir result_Files
    mkdir time_Result
    ../ascend310_infer/out/main --mindir_path=$model --input0_path=./preprocess_Result/00_data --input1_path=./preprocess_Result/01_data --device_id=$device_id &> infer.log
}

# Post-process inference output and compute accuracy.
function cal_acc()
{
    python ../postprocess.py --result_dir=./result_Files --label_dir=./preprocess_Result/01_data &> acc.log
}

if [ $need_preprocess == "y" ]; then
    preprocess_data
    if [ $? -ne 0 ]; then
        echo "preprocess dataset failed"
        exit 1
    fi
fi
compile_app
if [ $? -ne 0 ]; then
    echo "compile app code failed"
    exit 1
fi
infer
if [ $? -ne 0 ]; then
    echo " execute inference failed"
    exit 1
fi
cal_acc
if [ $? -ne 0 ]; then
    echo "calculate accuracy failed"
    exit 1
fi
package com.sweetkrista.redstonesensor;

import com.sweetkrista.redstonesensor.handler.ConfigurationHelper;
import com.sweetkrista.redstonesensor.init.ModBlocks;
import com.sweetkrista.redstonesensor.init.ModItems;
import com.sweetkrista.redstonesensor.init.Recipes;
import com.sweetkrista.redstonesensor.proxy.IProxy;
import com.sweetkrista.redstonesensor.reference.Reference;
import com.sweetkrista.redstonesensor.utility.LogHelper;

import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.SidedProxy;
import cpw.mods.fml.common.event.FMLInitializationEvent;
import cpw.mods.fml.common.event.FMLPostInitializationEvent;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;

/**
 * Main Forge (FML) mod entry point for Redstone Sensor. Wires up config,
 * item/block registration and recipes across the three FML lifecycle
 * phases (pre-init, init, post-init).
 */
@Mod(modid = Reference.MOD_ID, version = Reference.MOD_VERSION, name = Reference.MOD_NAME, guiFactory = Reference.GUI_FACTORY_CLASS)
public class RedstoneSensor {

    // Singleton instance populated by FML via the @Mod.Instance annotation.
    @Mod.Instance(Reference.MOD_ID)
    public static RedstoneSensor instance;

    // Side-specific proxy: FML injects the client or server implementation.
    @SidedProxy(clientSide = Reference.CLIENT_PROXY_CLASS, serverSide = Reference.SERVER_PROXY_CLASS)
    public static IProxy proxy;

    /**
     * Pre-init: load config, register the config-change listener, and
     * register items and blocks.
     */
    @Mod.EventHandler
    public void preInit(FMLPreInitializationEvent event) {
        ConfigurationHelper.init(event.getSuggestedConfigurationFile());
        // Listen on the FML bus for in-game config changes.
        FMLCommonHandler.instance().bus().register(new ConfigurationHelper());
        ModItems.init();
        ModBlocks.init();
        LogHelper.info("Pre-initialisation Complete.");
    }

    /** Init: register crafting recipes. */
    @Mod.EventHandler
    public void init(FMLInitializationEvent event) {
        Recipes.init();
        LogHelper.info("Initialisation Complete.");
    }

    /** Post-init: nothing to do beyond logging. */
    @Mod.EventHandler
    public void postInit(FMLPostInitializationEvent event) {
        LogHelper.info("Post-initialisation Complete.");
    }
}
#!/bin/sh
# Regenerate the autotools build system (aclocal/libtoolize/autoconf/
# autoheader/automake) from scratch.
set -ex
rm -rf autom4te.cache Makefile.in aclocal.m4
aclocal --force
# GNU libtool is named differently on some systems. This code tries several
# variants like glibtoolize (MacOSX) and libtoolize1x (FreeBSD)
set +ex
echo "Looking for a version of libtoolize (which can have different names)..."
libtoolize=""
for l in glibtoolize libtoolize15 libtoolize14 libtoolize ; do
    # Probe by asking for the version; discard all output.
    $l --version > /dev/null 2>&1
    if [ $? = 0 ]; then
        libtoolize=$l
        echo "Found $l"
        break
    fi
    echo "Did not find $l"
done
if [ "x$libtoolize" = "x" ]; then
    echo "Can't find libtoolize on your system"
    exit 1
fi
# Re-enable echo + exit-on-error for the actual regeneration steps.
set -ex
$libtoolize -c -f
autoconf -f -W all,no-obsolete
autoheader -f -W all
# automake -a -c -f -W all
automake --add-missing --foreign --copy -c -W all
rm -rf autom4te.cache
exit 0
# end autogen.sh
import numpy as np


class Transformation:
    """Holds an array and applies a thresholded tent-style transformation.

    Elements whose magnitude is within a threshold ``l`` are mapped to
    ``1 - |r| / l``; all other elements pass through unchanged.
    """

    def __init__(self, r):
        # Array-like of values to transform; converted lazily on use.
        self.r = r

    def apply_transformation(self, l):
        """
        Apply a transformation function to each element of the array self.r
        based on the given condition.

        Args:
            l: float, the threshold value for the transformation

        Returns:
            transformed_array: numpy array, the array after applying the
            transformation function. Always floating point: the original
            wrote results into ``np.zeros_like(self.r)``, which truncated
            ``1 - |r|/l`` to 0/1 for integer input (bug fixed here).
        """
        r = np.asarray(self.r)
        abs_r = np.abs(r)
        # Vectorized replacement for the original per-element loop:
        # inside the threshold -> tent value, outside -> original element.
        return np.where(abs_r <= l, 1.0 - abs_r / l, r.astype(float))
<reponame>JetBrains/TeamCity.PowerShell /* * Copyright 2000-2022 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jetbrains.buildServer.powershell.agent; import jetbrains.buildServer.BaseTestCase; import jetbrains.buildServer.powershell.common.PowerShellEdition; import org.testng.annotations.DataProvider; /** * Created with IntelliJ IDEA. * * @author <NAME> (<EMAIL>) */ public abstract class BasePowerShellUnitTest extends BaseTestCase { @DataProvider(name = "editionProvider") public Object[][] editionProvider() { Object[][] result = new Object[2][]; result[0] = new Object[] {PowerShellEdition.CORE}; result[1] = new Object[] {PowerShellEdition.DESKTOP}; return result; } }
/** * Regular functions and definitions */ ( function() { if ( typeof svg4everybody === 'function' ) { svg4everybody(); } if ( typeof particlesJS === 'function' ) { particlesJS( 'sbanner-particles', { "particles": { "number": { "value": 80, "density": { "enable": true, "value_area": 800 } }, "color": { "value": "#ffffff" }, "shape": { "type": "circle", "stroke": { "width": 0, "color": "#000000" }, "polygon": { "nb_sides": 5 }, "image": { "src": "img/github.svg", "width": 100, "height": 100 } }, "opacity": { "value": 0.5, "random": false, "anim": { "enable": false, "speed": 1, "opacity_min": 0.1, "sync": false } }, "size": { "value": 5, "random": true, "anim": { "enable": false, "speed": 40, "size_min": 0.1, "sync": false } }, "line_linked": { "enable": true, "distance": 150, "color": "#ffffff", "opacity": 0.4, "width": 1 }, "move": { "enable": true, "speed": 6, "direction": "none", "random": false, "straight": false, "out_mode": "out", "attract": { "enable": false, "rotateX": 600, "rotateY": 1200 } } }, "interactivity": { "detect_on": "canvas", "events": { "onhover": { "enable": true, "mode": "repulse" }, "onclick": { "enable": true, "mode": "push" }, "resize": true }, "modes": { "grab": { "distance": 400, "line_linked": { "opacity": 1 } }, "bubble": { "distance": 400, "size": 40, "duration": 2, "opacity": 8, "speed": 3 }, "repulse": { "distance": 200 }, "push": { "particles_nb": 4 }, "remove": { "particles_nb": 2 } } }, "retina_detect": true, "config_demo": { "hide_card": false, "background_color": "#b61924", "background_image": "", "background_position": "50% 50%", "background_repeat": "no-repeat", "background_size": "cover" } } ); } } )(); /** * jQuery based functions and definitions */ ( function( $ ) { "use strict"; /** * Utility object */ var FSFlexUtilities = { /** * Get scrollbar width */ getScrollBarWidth : function() { return window.innerWidth - $( window ).width(); }, /** * Generate an unique id */ getUniqId : function() { var guid = function() { return 
Math.floor( ( 1 + Math.random() ) * 0x10000 ).toString( 16 ).substring( 1 ); }; return guid() + guid(); }, /** * Perform toggles. */ performToggle: function( elem, closeOnly ) { var expanded = elem.attr( 'aria-expanded' ), controls = $( '#' + elem.attr( 'aria-controls' ) ), stopScroll = $( '#' + elem.attr( 'aria-controls' ) ).attr( 'data-stop-body-scroll' ), parentActive = $( '#' + elem.attr( 'aria-controls' ) ).attr( 'data-parent-active' ); if ( 'true' == expanded || closeOnly ) { elem.removeClass( 'active' ); elem.attr( 'aria-expanded', 'false' ); controls.removeClass( 'active' ); controls.attr( 'aria-expanded', 'false' ); $( 'body' ).removeClass( elem.attr( 'aria-controls' ) + '-active' ); if ( 'true' == stopScroll ) { $( 'body' ).css( { 'overflow': '', 'padding-right': '' } ); } if ( parentActive ) { controls.parent().removeClass( 'active' ); } elem.blur(); elem.find( 'input, textarea, select' ).blur(); } else { elem.addClass( 'active' ); elem.attr( 'aria-expanded', 'true' ); controls.addClass( 'active' ); controls.attr( 'aria-expanded', 'true' ); $( 'body' ).addClass( elem.attr( 'aria-controls' ) + '-active' ); elem.blur(); if ( 'true' == stopScroll ) { $( 'body' ).css( { 'overflow': 'hidden', 'padding-right': getScrollBarWidth() } ); } if ( parentActive ) { controls.parent().addClass( 'active' ); } } }, }; /** * The main object * @type {Object} */ var FSFlex = { _initialized : false, _lastScrollTop : 0, _touchSupport : false, init : function() { if ( this._initialized ) { return; } this._initialized = true; this._lastScrollTop = $( window ).scrollTop(); if ( 'ontouchstart' in document ) { this._touchSupport = true; } this.toggleClose( this ); this.clickAnyWhere( this ); this.carousels( this ); this.mainNavigation( this ); }, /** * Handle event when user click on body * We do close every toggles if certain contittions are met */ toggleClose: function( o ) { $( '[aria-controls]' ).on( 'click', function( event ) { event.stopPropagation(); 
FSFlexUtilities.performToggle( $( '[aria-expanded="true"]' ).not( $( this ) ), true ); FSFlexUtilities.performToggle( $( this ), false ); } ); }, /** * Click handle when user is clicking anywhere on the page */ clickAnyWhere: function( o ) { // Close Toggles var processTouch = function( event ) { var target = $( event.target ); // Close toggles $( '[aria-expanded="true"][aria-controls]' ).each( function() { var id = $( this ).attr( 'aria-controls' ); if ( target.closest( '#' + id ).length ) { return; } else { FSFlexUtilities.performToggle( $( this ), true ); } } ); }; // Touch support if ( o._touchSupport ) { $( document ).on( 'touchstart', processTouch ); } else { $( document ).on( 'click', processTouch ); } $( document ).on( 'keyup', function( event ) { if ( event.keyCode == 27 ) { $( '[aria-expanded="true"]' ).each( function() { FSFlexUtilities.performToggle( $( this ), true ); } ); } } ); // Nav menu on touch $( document ).on( 'click', function( event ) { // Simulate 2nd tap on touch devices for dropdown if ( o._touchSupport ) { var nav_touch = $( '#primary-menu [data-touch="true"]' ); if ( nav_touch.length && target.attr( 'id' ) != 'primary-menu' && ! 
target.closest( '#primary-menu' ).length ) { nav_touch.attr( 'data-touch', 'false' ); nav_touch.parent().removeClass( 'hover' ); } } } ); }, /** * Theme carousels */ carousels: function( o ) { var Events = { owlInitialized : function( e ) { window.setTimeout( function() { $( e.target.querySelectorAll( '.owl-item.active' ) ).addClass( 'activated' ); }, 100 ); }, onTranslateStart : function( e ) { var elements = e.target.querySelectorAll( '.owl-item' ); var activeElem = e.target.querySelectorAll( '.owl-item.activated' ); $( elements ).removeClass( 'activating' ).removeClass( 'activated' ); $( elements[e.item.index] ).addClass( 'activating' ); }, onTranslateEnd : function( e ) { var elements = e.target.querySelectorAll( '.owl-item' ); $( elements[e.item.index] ).removeClass( 'activating' ).addClass( 'activated' ); } }; $( '.carousel[data-theme-carousel="true"]' ).each( function() { var _this = $( this ); var options = _this.data( 'options' ); var settings = $.extend( { nav: true, navText: ['←','→'], dots: false, margin: 30, smartSpeed: 600, autoHeight: true, }, options ); console.log(settings); _this.on( 'initialized.owl.carousel', Events.owlInitialized ); _this.owlCarousel( settings ); _this.removeAttr( 'data-options' ); _this.removeAttr( 'data-theme-carousel' ); _this.on( 'translate.owl.carousel', Events.onTranslateStart ); _this.on( 'translated.owl.carousel', Events.onTranslateEnd ); } ); }, /** * Main navigation scripts. 
Including: * - Mobile Nav * - Touch screen support * - Side Navigation Events */ mainNavigation: function( o ) { $( '[data-close-controls="smenu-block"]' ).on( 'click', function() { $( '[aria-controls="smenu-block"]' ).trigger( 'click' ); return false; }); // Add data attribute for touch support $( ' #smenu a' ).each( function() { $( this ).attr( 'data-touch', 'false' ); } ); // Touch event support $( '#smenu a' ).on( 'click', function( event ) { //if ( o._touchSupport ) { event.stopPropagation(); if ( 'false' == $( this ).attr( 'data-touch' ) && $( this ).next( 'ul' ).length ) { event.preventDefault(); var otherParents = $( this ).parent().siblings(); otherParents.removeClass( 'hover' ); otherParents.find( 'a' ).attr( 'data-touch', 'false' ); otherParents.find( '.hover' ).removeClass( 'hover' ); $( this ).parent().addClass( 'hover' ); $( this ).attr( 'data-touch', 'true' ); } else { $( this ).parent().removeClass( 'hover' ); $( this ).attr( 'data-touch', 'false' ); } //} } ); // Prevent submenu get out of window boundary $( '#smenu li' ).each( function() { var submenu = $( this ).children( 'ul' ); var submenuParent = null; if ( ! submenu.length || submenu.hasClass( 'oposite' ) || submenu.offset().left + submenu.innerWidth() < window.innerWidth ) { return true; } // We'll get the deepest sub menu while ( submenu.length ) { if ( ! submenu.hasClass( 'oposite' ) ) { if ( submenu.offset().left + submenu.innerWidth() > window.innerWidth ) { submenu.addClass( 'oposite' ); } } submenu = submenu.find( '> li > ul' ); } } ); $( '#smenu' ).on( 'mouseenter', 'li', function( event ) { var submenu = $( this ).children( 'ul' ); if ( submenu.length > 0 && ! 
submenu.hasClass( 'oposite' ) ) { if ( submenu.offset().left + submenu.innerWidth() > window.innerWidth ) { submenu.addClass( 'oposite' ); } } } ); if ( $( 'a#backtotoplink' ).length ) { $( 'a#backtotoplink' ).on( 'click', function( event ) { event.stopPropagation(); $( 'html, body' ).stop().animate( { scrollTop: 0 }, 1500, 'swing' ); } ); $( window ).on( 'scroll', function() { if ( $( window ).scrollTop() > 480 ) { $( 'a#backtotoplink' ).addClass( 'active' ); } else { $( 'a#backtotoplink' ).removeClass( 'active' ); } } ); } } }; $( document ).ready( function( e ) { FSFlex.init(); } ); } )( jQuery );
/* Primary heading style: bold 18px Lato, falling back to the generic
   sans-serif family when the webfont is unavailable. */
.heading {
  font-family: 'Lato', sans-serif;
  font-weight: bold;
  font-size: 18px;
}
def words_start_with_a(words):
    """Return the words that start with a lowercase "a".

    Args:
        words: iterable of strings.

    Returns:
        list: the input words whose first character is "a", in original
        order. Matching is case-sensitive ("Apple" is excluded), matching
        the original behavior.

    Note:
        The original indexed ``word[0]`` and therefore raised IndexError
        on empty strings; ``startswith`` simply rejects them instead.
    """
    return [word for word in words if word.startswith("a")]
import database_module


class DatabaseSession:
    """Context manager wrapping a database_module connection.

    Opens the connection on entry, rolls back on an in-context exception,
    and closes the connection on exit.

    NOTE(review): successful exits do not commit — presumably the caller
    commits explicitly through the yielded session; confirm against
    database_module's API.
    """

    def __init__(self, db_url):
        # Connection URL only; the actual connection is deferred to __enter__.
        self.db_url = db_url
        self.session = None

    def __enter__(self):
        # Open the connection and hand the live session to the with-body.
        self.session = database_module.connect(self.db_url)
        return self.session

    def __exit__(self, exc_type, exc_value, traceback):
        # Guard: __enter__ may have failed before assigning a session.
        if self.session is not None:
            if exc_type is not None:
                # An exception occurred within the context, handle it appropriately
                self.session.rollback()
            # Returning None (implicitly) lets any exception propagate.
            self.session.close()
#ifndef INCLUDED_CORE_ACTOR_EVENT_H
#define INCLUDED_CORE_ACTOR_EVENT_H

#include "actor.h"
#include "opt.h"

// Event describing a change in an actor's lifecycle.
struct ActorEvent : public Event
{
    // Lifecycle states an actor can be reported in.
    enum State
    {
        Added = 0,
        Died,
        Removed
    };
    Opt<Actor> mActor; // the actor concerned; Opt semantics defined in opt.h
    State mState;      // which lifecycle transition this event reports
    ActorEvent( Opt<Actor> actor, State state )
        : mActor( actor )
        , mState( state )
    {
    }
};

// Payload-free marker event signalling a scene load.
struct SceneLoadEvent : public Event
{
};

#endif//INCLUDED_CORE_ACTOR_EVENT_H
#!/usr/bin/env bash # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # # Runs all the tests. Currently includes FE tests, BE unit tests, and the end-to-end # test suites. # Exit on reference to uninitialized variables and non-zero exit codes set -euo pipefail trap 'echo Error in $0 at line $LINENO: $(cd "'$PWD'" && awk "NR == $LINENO" $0)' ERR . "$IMPALA_HOME/bin/set-pythonpath.sh" # Allow picking up strategy from environment : ${EXPLORATION_STRATEGY:=core} : ${NUM_TEST_ITERATIONS:=1} : ${MAX_PYTEST_FAILURES:=10} KERB_ARGS="" . "${IMPALA_HOME}/bin/impala-config.sh" > /dev/null 2>&1 . "${IMPALA_HOME}/testdata/bin/run-step.sh" if "${CLUSTER_DIR}/admin" is_kerberized; then KERB_ARGS="--use_kerberos" fi # Parametrized Test Options # Disable KRPC for test cluster and test execution : ${DISABLE_KRPC:=false} # Run FE Tests : ${FE_TEST:=true} # Run Backend Tests : ${BE_TEST:=true} # Run End-to-end Tests : ${EE_TEST:=true} : ${EE_TEST_FILES:=} # Run JDBC Test : ${JDBC_TEST:=true} # Run Cluster Tests : ${CLUSTER_TEST:=true} # Extra arguments passed to start-impala-cluster for tests. These do not apply to custom # cluster tests. 
: ${TEST_START_CLUSTER_ARGS:=} # Extra args to pass to run-tests.py : ${RUN_TESTS_ARGS:=} # Extra args to pass to run-custom-cluster-tests.sh : ${RUN_CUSTOM_CLUSTER_TESTS_ARGS:=} if [[ "${TARGET_FILESYSTEM}" == "local" ]]; then # TODO: Remove abort_on_config_error flag from here and create-load-data.sh once # checkConfiguration() accepts the local filesystem (see IMPALA-1850). TEST_START_CLUSTER_ARGS="${TEST_START_CLUSTER_ARGS} --cluster_size=1 "` `"--impalad_args=--abort_on_config_error=false" FE_TEST=false else TEST_START_CLUSTER_ARGS="${TEST_START_CLUSTER_ARGS} --cluster_size=3" fi if [[ "${ERASURE_CODING}" = true ]]; then # We do not run FE tests when erasure coding is enabled because planner tests # would fail. FE_TEST=false fi # If KRPC tests are disabled, pass the flag to disable KRPC during cluster start. if [[ "${DISABLE_KRPC}" == "true" ]]; then TEST_START_CLUSTER_ARGS="${TEST_START_CLUSTER_ARGS} --disable_krpc" fi # Indicates whether code coverage reports should be generated. : ${CODE_COVERAGE:=false} # parse command line options while getopts "e:n:c" OPTION do case "$OPTION" in e) EXPLORATION_STRATEGY="$OPTARG" ;; n) NUM_TEST_ITERATIONS="$OPTARG" ;; c) CODE_COVERAGE=true ;; ?) echo "run-all-tests.sh [-e <exploration_strategy>] [-n <num_iters>]" echo "[-e] The exploration strategy to use. Default exploration is 'core'." echo "[-n] The number of times to run the tests. Default is 1." echo "[-c] Set this option to generate code coverage reports." exit 1; ;; esac done # IMPALA-3947: "Exhaustive" tests are actually based on workload. This # means what we colloquially call "exhaustive" tests are actually # "exhaustive tests whose workloads are in this set below". Not all # workloads are able to be exhaustively run through buildall.sh. 
For # example, the tpch workload is never run exhaustively, because the # relatively large size of tpch means data loading in all exhaustive # formats takes much longer and data load snapshots containing tpch in # all exhaustive formats are much larger to store and take longer to # load. # # XXX If you change the --workload_exploration_strategy set below, # please update the buildall.sh help text for -testexhaustive. COMMON_PYTEST_ARGS="--maxfail=${MAX_PYTEST_FAILURES} --exploration_strategy=core"` `" --workload_exploration_strategy="` `"functional-query:${EXPLORATION_STRATEGY},"` `"targeted-stress:${EXPLORATION_STRATEGY}" if [[ "${TARGET_FILESYSTEM}" == "local" ]]; then # Only one impalad is supported when running against local filesystem. COMMON_PYTEST_ARGS+=" --impalad=localhost:21000" fi # If KRPC tests are disabled, pass test_no_krpc flag to pytest. # This includes the end-to-end tests and the custom cluster tests. if [[ "${DISABLE_KRPC}" == "true" ]]; then COMMON_PYTEST_ARGS+=" --test_no_krpc" fi # For logging when using run-step. LOG_DIR="${IMPALA_EE_TEST_LOGS_DIR}" # Enable core dumps ulimit -c unlimited || true for i in $(seq 1 $NUM_TEST_ITERATIONS) do TEST_RET_CODE=0 run-step "Starting Impala cluster" start-impala-cluster.log \ "${IMPALA_HOME}/bin/start-impala-cluster.py" --log_dir="${IMPALA_EE_TEST_LOGS_DIR}" \ ${TEST_START_CLUSTER_ARGS} if [[ "$BE_TEST" == true ]]; then if [[ "$TARGET_FILESYSTEM" == "local" ]]; then # This test will fail the configuration checks on local filesystem. # TODO: Don't skip this test once checkConfiguration() accepts the local # filesystem (see IMPALA-1850). export SKIP_BE_TEST_PATTERN="session*" fi # Run backend tests. if ! "${IMPALA_HOME}/bin/run-backend-tests.sh"; then TEST_RET_CODE=1 fi fi # Run some queries using run-workload to verify run-workload has not been broken. if ! 
run-step "Run test run-workload" test-run-workload.log \ "${IMPALA_HOME}/bin/run-workload.py" -w tpch --num_clients=2 --query_names=TPCH-Q1 \ --table_format=text/none --exec_options="disable_codegen:False" ${KERB_ARGS}; then TEST_RET_CODE=1 fi if [[ "$FE_TEST" == true ]]; then # Run JUnit frontend tests # Requires a running impalad cluster because some tests (such as DataErrorTest and # JdbcTest) queries against an impala cluster. pushd "${IMPALA_FE_DIR}" MVN_ARGS="" if [[ "${TARGET_FILESYSTEM}" == "s3" ]]; then # When running against S3, only run the S3 frontend tests. MVN_ARGS="-Dtest=S3* " fi if [[ "$CODE_COVERAGE" == true ]]; then MVN_ARGS+="-DcodeCoverage" fi if ! "${IMPALA_HOME}/bin/mvn-quiet.sh" -fae test ${MVN_ARGS}; then TEST_RET_CODE=1 fi popd fi if [[ "$EE_TEST" == true ]]; then # Run end-to-end tests. # KERBEROS TODO - this will need to deal with ${KERB_ARGS} if ! "${IMPALA_HOME}/tests/run-tests.py" ${COMMON_PYTEST_ARGS} \ ${RUN_TESTS_ARGS} ${EE_TEST_FILES}; then #${KERB_ARGS}; TEST_RET_CODE=1 fi fi if [[ "$JDBC_TEST" == true ]]; then # Run the JDBC tests with background loading disabled. This is interesting because # it requires loading missing table metadata. "${IMPALA_HOME}/bin/start-impala-cluster.py" --log_dir="${IMPALA_EE_TEST_LOGS_DIR}" \ --catalogd_args=--load_catalog_in_background=false \ ${TEST_START_CLUSTER_ARGS} pushd "${IMPALA_FE_DIR}" if ! "${IMPALA_HOME}/bin/mvn-quiet.sh" test -Dtest=JdbcTest; then TEST_RET_CODE=1 fi popd fi if [[ "$CLUSTER_TEST" == true ]]; then # For custom cluster tests only, set an unlimited log rotation # policy, for the mini cluster is restarted many times. So as not to # pollute the directory with too many files, remove what was there # before. Also, save the IMPALA_MAX_LOG_FILES value for re-set # later. 
rm -rf "${IMPALA_CUSTOM_CLUSTER_TEST_LOGS_DIR}" mkdir -p "${IMPALA_CUSTOM_CLUSTER_TEST_LOGS_DIR}" IMPALA_MAX_LOG_FILES_SAVE="${IMPALA_MAX_LOG_FILES:-10}" export IMPALA_MAX_LOG_FILES=0 # Run the custom-cluster tests after all other tests, since they will restart the # cluster repeatedly and lose state. # TODO: Consider moving in to run-tests.py. if ! "${IMPALA_HOME}/tests/run-custom-cluster-tests.sh" ${COMMON_PYTEST_ARGS} \ ${RUN_CUSTOM_CLUSTER_TESTS_ARGS}; then TEST_RET_CODE=1 fi export IMPALA_MAX_LOG_FILES="${IMPALA_MAX_LOG_FILES_SAVE}" fi # Finally, run the process failure tests. # Disabled temporarily until we figure out the proper timeouts required to make the test # succeed. # ${IMPALA_HOME}/tests/run-process-failure-tests.sh if [[ $TEST_RET_CODE == 1 ]]; then exit $TEST_RET_CODE fi done
require 'omniauth-oauth2'
require 'base64'

module OmniAuth
  module Strategies
    # OmniAuth OAuth2 strategy for Battle.net (Blizzard) accounts.
    class Bnet < OmniAuth::Strategies::OAuth2
      option :region, 'us'
      option :client_options, { :scope => 'wow.profile sc2.profile' }

      # Lazily fill in region-dependent endpoint URLs unless the caller
      # already supplied them.
      #
      # BUG FIX: the original called +has_key+ (no question mark), which is
      # not the Hash/Mash predicate — the correct method is +has_key?+, so
      # caller-supplied URLs were never honored as intended.
      def client
        # Setup urls based on region option
        if !options.client_options.has_key?(:authorize_url)
          options.client_options[:authorize_url] = "https://#{getHost(options.region)}/oauth/authorize"
        end
        if !options.client_options.has_key?(:token_url)
          options.client_options[:token_url] = "https://#{getHost(options.region)}/oauth/token"
        end
        if !options.client_options.has_key?(:site)
          options.client_options[:site] = "https://#{getHost(options.region)}/"
        end
        super
      end

      def request_phase
        super
      end

      # Allow scope/client_options to be overridden per-request via params.
      def authorize_params
        super.tap do |params|
          %w[scope client_options].each do |v|
            if request.params[v]
              params[v.to_sym] = request.params[v]
            end
          end
        end
      end

      uid { raw_info['id'].to_s }

      info do
        raw_info
      end

      # Fetch and memoize the userinfo payload from the provider.
      def raw_info
        return @raw_info if @raw_info
        access_token.options[:mode] = :query
        @raw_info = access_token.get('oauth/userinfo').parsed
      end

      private

      def callback_url
        full_host + script_name + callback_path
      end

      # Map a region code to its Battle.net OAuth host; China uses a
      # dedicated domain.
      def getHost(region)
        case region
        when "cn"
          "www.battlenet.com.cn"
        else
          "#{region}.battle.net"
        end
      end
    end
  end
end
package br.com.alinesolutions.anotaai.model.produto;

import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlRootElement;

import com.fasterxml.jackson.annotation.JsonBackReference;

import br.com.alinesolutions.anotaai.model.BaseEntity;
import br.com.alinesolutions.anotaai.model.domain.DiaSemana;

/**
 * JPA entity recording on which weekday ({@link DiaSemana}) a
 * {@link Produto} is available.
 */
@NamedQueries({ })
@Entity
@XmlRootElement
public class Disponibilidade extends BaseEntity<Long, Disponibilidade> {

    private static final long serialVersionUID = 1L;

    public Disponibilidade() {
        super();
    }

    /**
     * Convenience constructor that builds a detached Produto stub from its
     * id and description (used for projection-style queries).
     */
    public Disponibilidade(Long id, DiaSemana dia, Long idProduto, String descricaoProduto) {
        super();
        setId(id);
        this.dia = dia;
        this.produto = new Produto();
        this.produto.setId(idProduto);
        this.produto.setDescricao(descricaoProduto);
    }

    // Weekday of availability, persisted by ordinal position.
    @NotNull
    @Enumerated(EnumType.ORDINAL)
    private DiaSemana dia;

    // Owning product; back side of the "diasDisponibilidade" JSON relation.
    @NotNull
    @JsonBackReference(value = "diasDisponibilidade")
    @ManyToOne(cascade = CascadeType.DETACH)
    private Produto produto;

    public DiaSemana getDia() {
        return dia;
    }

    public void setDia(DiaSemana dia) {
        this.dia = dia;
    }

    public Produto getProduto() {
        return produto;
    }

    public void setProduto(Produto produto) {
        this.produto = produto;
    }

    public interface DisponibilidadeConstant {
    }
}
// Doxygen-generated search index (do not edit by hand).
// Each entry: [lower-cased keyword, [display name, [target page/anchor, flags, owner]...]].
var searchData=
[
  ['game',['Game',['../class_game.html',1,'']]],
  ['gamerunning',['gameRunning',['../class_game.html#a7054ffca77182d85a716bc8dd26e4d0f',1,'Game']]],
  ['getcardid',['getCardId',['../class_card_interface.html#acf6e4d125e8a7a5a0f57fe8e189f6dfe',1,'CardInterface::getCardId()'],['../class_game.html#a7c6b0036336206134810771a5860d4db',1,'Game::getCardId()']]],
  ['getgameid',['getGameId',['../class_settings.html#ad307f6be10ef3abba43d6ee96d1b5fcd',1,'Settings']]]
];
package age.calculator.homepage;

import com.jfoenix.controls.JFXDatePicker;
import com.jfoenix.controls.JFXDialog;
import java.io.IOException;
import java.net.URL;
import java.time.LocalDate;
import java.time.Period;
import java.util.ResourceBundle;
import javafx.event.ActionEvent;
import javafx.event.Event;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.geometry.Rectangle2D;
import javafx.scene.control.Alert;
import javafx.scene.control.Label;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuBar;
import javafx.scene.control.MenuItem;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.StackPane;
import javafx.stage.Screen;
import javafx.stage.Stage;

/**
 * JavaFX controller for the age-calculator home page: window dragging and
 * maximize/restore for the undecorated stage, plus the age computation itself.
 */
public class appController implements Initializable {

    //=======================
    // Drag offsets: cursor position inside the window when a drag starts.
    private double xOffset = 0;
    private double yOffset = 0;
    //=======================

    @FXML
    private StackPane AP;
    @FXML
    private Label daysLabel;
    @FXML
    private Label monthLabel;
    @FXML
    private Label yearLable;
    @FXML
    private MenuBar menuBar;
    @FXML
    private Menu file;
    @FXML
    private MenuItem About;
    @FXML
    private MenuItem close;
    @FXML
    private JFXDatePicker birthDatePicker;
    @FXML
    private JFXDatePicker currentDatePicker;

    // Shared dialog instance so the About pane is loaded only once.
    public static JFXDialog aboutDialog;

    /**
     * Defaults the "current date" picker to today and pre-loads the About dialog.
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        currentDatePicker.setValue(LocalDate.now());
        try {
            AnchorPane aboutPane = FXMLLoader.load(getClass().getResource("/age/calculator/about/about.fxml"));
            aboutDialog = new JFXDialog(AP, aboutPane, JFXDialog.DialogTransition.TOP);
        } catch (IOException ex) {
            System.out.println("error in loading about fxml");
        }
    }

    // Record where inside the window the drag began (no-op when maximized).
    @FXML
    private void RootMousePressed(Event event) {
        if (isMaximized()) {
            return;
        }
        MouseEvent e = (MouseEvent) event;
        xOffset = e.getSceneX();
        yOffset = e.getSceneY();
    }

    // Move the undecorated window so it follows the cursor during a drag.
    @FXML
    private void RootMouseDragged(Event event) {
        if (isMaximized()) {
            return;
        }
        MouseEvent e = (MouseEvent) event;
        ((Stage) (AP.getScene().getWindow())).setX(e.getScreenX() - xOffset);
        ((Stage) (AP.getScene().getWindow())).setY(e.getScreenY() - yOffset);
    }

    @FXML
    private void closeWindow(Event event) {
        System.exit(0);
    }

    // The stage is undecorated, so "maximized" is detected by comparing its
    // size against the primary screen's visual bounds.
    private boolean isMaximized() {
        Stage s = ((Stage) (AP.getScene().getWindow()));
        Screen screen = Screen.getPrimary();
        Rectangle2D bounds = screen.getVisualBounds();
        return s.getWidth() == bounds.getWidth() && s.getHeight() == bounds.getHeight();
    }

    // Toggle between maximized and a fixed 600x600 window centered on screen.
    @FXML
    private void MaxWindow(Event event) {
        Stage s = ((Stage) (AP.getScene().getWindow()));
        if (isMaximized()) {
            s.setWidth(600);
            s.setHeight(600);
            Screen screen = Screen.getPrimary();
            Rectangle2D bounds = screen.getVisualBounds();
            s.setX(bounds.getWidth() / 2 - (600 / 2));
            s.setY(bounds.getHeight() / 2 - (600 / 2));
        } else {
            Screen screen = Screen.getPrimary();
            Rectangle2D bounds = screen.getVisualBounds();
            s.setWidth(bounds.getWidth());
            s.setHeight(bounds.getHeight());
            s.setX(0);
            s.setY(0);
        }
    }

    // ESC restores the window from its maximized state.
    @FXML
    private void AP_keyListener(KeyEvent event) {
        if (event.getCode() == KeyCode.ESCAPE) {
            Stage s = ((Stage) (AP.getScene().getWindow()));
            if (isMaximized()) {
                MaxWindow(event);
            }
        }
    }

    /**
     * Compute the year/month/day difference between the two picked dates and
     * display it; missing/invalid input falls through to the error alert.
     */
    @FXML
    private void CalcAction(ActionEvent event) {
        try {
            LocalDate birthDate = birthDatePicker.getValue();
            LocalDate currentDate = currentDatePicker.getValue();
            Period period = Period.between(birthDate, currentDate);
            // Negative days component means the birth date is after the current date.
            if (period.getDays() < 0) {
                showErrorAlert();
                resetData();
                return;
            }
            daysLabel.setText(String.valueOf(period.getDays()));
            monthLabel.setText(String.valueOf(period.getMonths()));
            yearLable.setText(String.valueOf(period.getYears()));
        } catch (Exception e) {
            showErrorAlert();
            resetData();
        }
    }

    @FXML
    private void aboutWidow(ActionEvent event) {
        if (aboutDialog.isVisible()) {
            return;
        }
        aboutDialog.show();
    }

    private void showErrorAlert() {
        Alert a = new Alert(Alert.AlertType.ERROR);
        a.setHeaderText("Enter Correct Date !");
        a.showAndWait();
    }

    private void resetData() {
        daysLabel.setText("");
        monthLabel.setText("");
        yearLable.setText("");
    }
}
#ifndef INCLUDED_MAP_EDITOR_RENDERABLE_LAYER_SYSTEM_H
#define INCLUDED_MAP_EDITOR_RENDERABLE_LAYER_SYSTEM_H

#include "core/scene.h"
#include "engine/system.h"
#include "editor_mode_changed_event.h"
#include "editor_back_event.h"
#include "level_generator/room_desc.h"
#include "input/mouse.h"

namespace map {

// Editor subsystem that exposes the renderable layers of the scene to the
// editor UI (layer names, currently selected layer) and reacts to editor
// mode changes / "back" navigation events.
class EditorRenderableLayerSystem : public engine::System
{
public:
    DEFINE_SYSTEM_BASE(EditorRenderableLayerSystem)
    EditorRenderableLayerSystem();
    static Opt<EditorRenderableLayerSystem> Get();
protected:
    virtual void Init();
    virtual void Update( double DeltaTime );
private:
    Scene& mScene;
    // Auto-registered event subscription for editor mode changes.
    AutoReg mOnEditorModeChanged;
    void OnEditorModeChanged(map::EditorModeChangedEvent const& Evt);
    // Enable/disable dependent subsystems as a group.
    void EnableSubsystems( bool enable );
    AutoReg mOnEditorBack;
    void OnEditorBack( map::EditorBackEvent const& Evt );
    // UI data-binding models: current layer, available layer names, selection hook.
    ModelValue mEditorLayerModel;
    ModelValue mLayerNamesModel;
    std::vector<std::string> mLayerNames;
    ModelValue mLayerSelectedModel;
    void OnLayerSelected( std::string groupName );
};

} // namespace map

#endif//INCLUDED_MAP_EDITOR_RENDERABLE_LAYER_SYSTEM_H

//command: "classgenerator.exe" -g "system" -n "map" -c "editor_actor_system" -e "map-editorModeChanged"
#! /bin/bash
# Build the native library with CMake, then install the Python bindings.
mkdir build
cd build
cmake ..
make

# install python package
# NOTE(review): we are still inside ./build at this point, so this expects
# build/python-package to exist. If the package lives at the repo root, the
# intended path is probably ../python-package — confirm before relying on this.
cd python-package
if command -v python2; then
    sudo python2 setup.py install
fi
#pragma once #ifndef __MSC_VER #include <cmath> #endif #include "HyperionMath/Trig.h" #include "TestConstants.h" #include "gtest/gtest.h" namespace hyperion::math::test { using test::DOUBLE_ACCEPTED_ERROR; TEST(TrigFuncsTestDouble, cosCase1) { double input = Constants<double>::pi; ASSERT_NEAR(Trig::cos(input), std::cos(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, cosCase2) { double input = Constants<double>::piOver2; ASSERT_NEAR(Trig::cos(input), std::cos(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, cosCase3) { double input = Constants<double>::piOver4; ASSERT_NEAR(Trig::cos(input), std::cos(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, cosCase4) { double input = -Constants<double>::piOver4; ASSERT_NEAR(Trig::cos(input), std::cos(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, sinCase1) { double input = Constants<double>::pi; ASSERT_NEAR(Trig::sin(input), std::sin(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, sinCase2) { double input = Constants<double>::piOver2; ASSERT_NEAR(Trig::sin(input), std::sin(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, sinCase3) { double input = Constants<double>::piOver4; ASSERT_NEAR(Trig::sin(input), std::sin(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, sinCase4) { double input = -Constants<double>::piOver4; ASSERT_NEAR(Trig::sin(input), std::sin(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, tanCase1) { double input = Constants<double>::pi; ASSERT_NEAR(Trig::tan(input), std::tan(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, tanCase2) { double input = Constants<double>::piOver12; ASSERT_NEAR(Trig::tan(input), std::tan(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, tanCase3) { double input = Constants<double>::piOver4; ASSERT_NEAR(Trig::tan(input), std::tan(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, tanCase4) { double input = -Constants<double>::piOver4; ASSERT_NEAR(Trig::tan(input), 
std::tan(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, atanCase1) { double input = Constants<double>::pi; ASSERT_NEAR(Trig::atan(input), std::atan(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, atanCase2) { double input = Constants<double>::piOver12; ASSERT_NEAR(Trig::atan(input), std::atan(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, atanCase3) { double input = Constants<double>::piOver4; ASSERT_NEAR(Trig::atan(input), std::atan(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, atanCase4) { double input = -Constants<double>::piOver4; ASSERT_NEAR(Trig::atan(input), std::atan(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, tanhCase1) { double input = Constants<double>::pi; ASSERT_NEAR(Trig::tanh(input), std::tanh(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, tanhCase2) { double input = Constants<double>::piOver12; ASSERT_NEAR(Trig::tanh(input), std::tanh(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, tanhCase3) { double input = Constants<double>::piOver4; ASSERT_NEAR(Trig::tanh(input), std::tanh(input), DOUBLE_ACCEPTED_ERROR); } TEST(TrigFuncsTestDouble, tanhCase4) { double input = -Constants<double>::piOver4; ASSERT_NEAR(Trig::tanh(input), std::tanh(input), DOUBLE_ACCEPTED_ERROR); } } // namespace hyperion::math::test
<filename>huatuo/src/main/java/com/huatuo/net/thread/WangJiMiMaInvokeItem.java package com.huatuo.net.thread; import org.json.JSONObject; import com.huatuo.base.MyApplication; import com.huatuo.dictionary.MsgId; import com.huatuo.net.http.ActionResponse; import com.huatuo.net.http.HttpAgent; import com.huatuo.util.DateUtil; import android.content.Context; import android.os.Handler; import android.os.Message; public class WangJiMiMaInvokeItem implements Runnable { private Handler mHandler; private Context mContext; private String mobileNo, identifyingCode, password; public WangJiMiMaInvokeItem(Handler mHandler, Context mContext, String mobileNo, String identifyingCode, String password) { this.mHandler = mHandler; this.mContext = mContext; this.mobileNo = mobileNo; this.identifyingCode = identifyingCode; this.password = password; } @Override public void run() { try { JSONObject registerJsonObject = new JSONObject(); registerJsonObject.put("mobileNo", mobileNo); registerJsonObject.put("captcha", identifyingCode); registerJsonObject.put("password", password); HttpAgent httpAgent = new HttpAgent("user_SetPassword", registerJsonObject, mContext); ActionResponse response = httpAgent.sendRequest(null); int code = response.getCode(); Message message = mHandler.obtainMessage(); if (code == 0) { JSONObject userJsonObject = response.getRsbody(); userJsonObject.put("mobileNo", mobileNo); userJsonObject.put("useServiceNumber", "0"); userJsonObject.put("addTime", DateUtil.getDateTimeNow()); MyApplication.setLoginFlag(true); MyApplication.setUserJSON(userJsonObject); message.what = MsgId.REGISTER_S; message.obj = response.getMsg(); mHandler.sendMessage(message); } else if (code == MsgId.NET_NOT_CONNECT) { mHandler.sendEmptyMessage(MsgId.NET_NOT_CONNECT); } else { message.what = MsgId.REGISTER_F; message.obj = response.getMsg(); mHandler.sendMessage(message); } } catch (Exception e) { // TODO: handle exception } } }
import { ChangeEvent, FC } from "react";
import { Flex, Input } from "@linkto/gamut";
import { Theme, CSSstring, ctl } from "@linkto/core";

/** Props for one selectable theme preview card. */
export interface ThemeCardProps extends Theme {
  /** Name of the theme the user currently has selected. */
  userCurrentTheme: string;
  /** Change handler wired to the hidden radio input. */
  onThemeChange(event: ChangeEvent<HTMLInputElement>): void;
  /** When true, the card renders dimmed and cannot be picked. */
  disableSelect?: boolean;
}

/**
 * Renders a clickable preview card for a page theme: a mock background with
 * four placeholder "buttons" styled from the theme, plus a hidden radio input
 * so the whole card behaves as a form control.
 */
const ThemeCard: FC<ThemeCardProps> = ({
  id,
  name,
  userCurrentTheme,
  onThemeChange,
  style,
  disableSelect = false,
}) => {
  // Case/whitespace-insensitive comparison against the active theme name.
  const isSelected =
    userCurrentTheme.toLowerCase().trim() === name.toLowerCase().trim();

  const rootClass = ctl(`
    rounded-xl
    relative
    p-0.5
    xs:p-1
    cursor-pointer
    transform-gpu
    duration-300
    ${!isSelected && "hover:scale-105"}
    ${disableSelect && "select-none opacity-70"}
  `);

  return (
    <label
      htmlFor={id}
      className={rootClass}
      style={{
        borderWidth: "3px",
        // Blue highlight border only for the selected theme.
        borderColor: isSelected ? "hsl(206 100% 50.0%)" : "transparent",
      }}
    >
      <div
        className="rounded-lg select-none border border-mauve-500"
        style={CSSstring(style.background.css)}
      >
        <div className="pt-8 pb-6 px-5 relative z-10">
          {/* Four placeholder bars previewing the theme's button styling. */}
          {Array(4)
            .fill(0)
            .map((_, idx) => (
              <div
                key={`btn__key--${idx}`}
                className="mb-3 last:mb-0 h-6"
                style={CSSstring(style.button.css)}
              />
            ))}
        </div>
        <Flex
          align="center"
          justify="center"
          className="h-10 bg-blackAlpha-700 text-mauve-50 text-sm rounded-b-lg"
        >
          {name}
        </Flex>
      </div>
      {/* Visually hidden radio: the surrounding label provides the click target. */}
      <Input
        id={id}
        type="radio"
        name="page-theme"
        className="sr-only"
        value={name}
        checked={isSelected}
        onChange={onThemeChange}
        disabled={disableSelect}
      />
    </label>
  );
};

export default ThemeCard;
#!/bin/bash
# mountIMG.sh
# mount an img file for analysis
# Last Edited: 6/21/18 Julian Thies

# check if we have the privs we need
if [ "$(whoami)" != "root" ] ; then
	echo "Script must be run as root or with sudo privileges"
	exit
fi

# check if anything was passed in to the script
if [ -z "$1" ] ; then
	echo "No IMG file passed in"
	exit
else
	printf "Mount the .img file? (y/n) -> "
	read A
	if [ "$A" == "n" ] ; then
		exit
	elif [ "$A" == "y" ] ; then
		echo "Mounting..."
		# BUGFIX: prefer the invoking (sudo) user; `id -un` under the root
		# check above always resolved to "root".
		userName="${SUDO_USER:-$(id -un)}"
		# BUGFIX: the original ran `mount -t "$1"`, which passes the image
		# path as a filesystem *type* and names no device or mount point.
		# Loop-mount the image onto a directory under /media/<user> instead.
		mountPoint="/media/$userName"
		mkdir -p "$mountPoint"
		mount -o loop "$1" "$mountPoint"
		echo "/media/$userName ====="
		ls -lR "/media/$userName"
	else
		echo "Invalid input"
		exit
	fi
fi
#!/bin/sh
# CocoaPods-generated "Copy Pods Resources" build phase script (do not edit by
# hand). Compiles storyboards/xibs/Core Data models, copies frameworks and
# plain resources, then compiles any asset catalogs found under the project.
set -e

mkdir -p "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"

# Manifest of plain files to rsync in a single pass at the end.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"

# Dispatch one resource by extension: compile what needs compiling, queue the
# rest into the copy manifest.
install_resource()
{
  case $1 in
    *.storyboard)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile ${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .storyboard`.storyboardc ${PODS_ROOT}/$1 --sdk ${SDKROOT}"
      ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .storyboard`.storyboardc" "${PODS_ROOT}/$1" --sdk "${SDKROOT}"
      ;;
    *.xib)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile ${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .xib`.nib ${PODS_ROOT}/$1 --sdk ${SDKROOT}"
      ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .xib`.nib" "${PODS_ROOT}/$1" --sdk "${SDKROOT}"
      ;;
    *.framework)
      echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync -av ${PODS_ROOT}/$1 ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      rsync -av "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      echo "xcrun momc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1"`.mom\""
      xcrun momc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      echo "xcrun momc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodeld`.momd\""
      xcrun momc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      echo "xcrun mapc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcmappingmodel`.cdm\""
      xcrun mapc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      # Asset catalogs are handled in bulk by actool below.
      ;;
    /*)
      echo "$1"
      echo "$1" >> "$RESOURCES_TO_COPY"
      ;;
    *)
      echo "${PODS_ROOT}/$1"
      echo "${PODS_ROOT}/$1" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}

install_resource "${BUILT_PRODUCTS_DIR}/RKColorSlider.bundle"

# Copy all queued plain resources in one rsync pass.
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]]; then
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"

# Compile asset catalogs for the device families this target builds for.
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ `find . -name '*.xcassets' | wc -l` -ne 0 ]
then
  case "${TARGETED_DEVICE_FAMILY}" in
    1,2)
      TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
      ;;
    1)
      TARGET_DEVICE_ARGS="--target-device iphone"
      ;;
    2)
      TARGET_DEVICE_ARGS="--target-device ipad"
      ;;
    *)
      TARGET_DEVICE_ARGS="--target-device mac"
      ;;
  esac
  find "${PWD}" -name "*.xcassets" -print0 | xargs -0 actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${IPHONEOS_DEPLOYMENT_TARGET}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
/**
 * This package has all the custom exceptions this CTag library throws in its
 * encode or decode process.
 */
package ctag.exception;
// Streaming parser for Amazon Kindle "My Clippings.txt" files.
var split = require('split2'),
    stream = require('stream'),
    combine = require('stream-combiner');

module.exports = parseKindle;

/**
 * Build a transform stream that splits the clippings file on its
 * "==========" record separator and parses each record into an object
 * ({ title, details, snippet }).
 */
function parseKindle() {
  return combine(
    split(/==========\r\n/),
    parseClipping()
  );
}

/**
 * Object-mode transform: one raw clipping record in, one parsed object out.
 * Line 1 is the book title, line 2 the details line, line 3 the snippet.
 */
function parseClipping() {
  var ts = stream.Transform({ objectMode: true });
  ts._transform = function (data, enc, cb) {
    var lines = data.split('\r\n')
      .map(trim)
      .filter(Boolean);
    var clipping = {
      title: lines[0],
      details: parseDetails(lines[1]),
      snippet: lines[2] || ''
    };
    ts.push(clipping);
    cb();
  }
  return ts;
}

// e.g. "- Your Highlight on page 12-13" / "- Your Note on location 345"
var highlightRe = /^- Your (Highlight|Bookmark|Note) on (page|location)s? (\d+)(-(\d+))?$/i;
// e.g. "location 345-350" (second pipe-separated segment)
var locationRe = /^locations? (\d+)(-(\d+))?$/i;
// e.g. "Added on Monday, 1 January 2018 10:00:00"
var timeRe = /^Added on (.*)$/i;

/**
 * Parse the pipe-separated details line into { type, page|location, time }.
 * Ranges collapse to from===to when only a single number is present.
 */
function parseDetails(detailsStr) {
  var details = {}
  var parts = detailsStr.split('|').map(trim).filter(Boolean);
  var m;
  var part = parts.shift();
  if (m = highlightRe.exec(part)) {
    details.type = m[1].toLowerCase();
    var locationPage = m[2].toLowerCase();
    details[locationPage] = {};
    details[locationPage].from = parseInt(m[3], 10);
    if (m[5]) {
      details[locationPage].to = parseInt(m[5], 10);
    } else {
      details[locationPage].to = details[locationPage].from;
    }
  }
  // A three-segment details line carries a separate location range.
  if (parts.length === 2) {
    part = parts.shift();
    if (m = locationRe.exec(part)) {
      details.location = details.location || {};
      details.location.from = parseInt(m[1], 10);
      if (m[3]) {
        details.location.to = parseInt(m[3], 10);
      } else {
        details.location.to = details.location.from;
      }
    }
  }
  if (parts.length) {
    part = parts.shift();
    if (m = timeRe.exec(part)) {
      details.time = new Date(m[1]);
    }
  }
  return details;
}

function trim(s) {
  return s.trim();
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.quantconnect.lean.interfaces;

import java.io.Closeable;
import java.math.BigDecimal;
import java.time.LocalDateTime;
import java.util.List;

import com.quantconnect.lean.AlgorithmControl;
import com.quantconnect.lean.AlgorithmStatus;
import com.quantconnect.lean.Language;
import com.quantconnect.lean.StoragePermissions;
import com.quantconnect.lean.api.Backtest;
import com.quantconnect.lean.api.BacktestList;
import com.quantconnect.lean.api.Compile;
import com.quantconnect.lean.api.Live.LiveList;
import com.quantconnect.lean.api.Project;
import com.quantconnect.lean.api.ProjectFile;
import com.quantconnect.lean.api.ProjectList;
import com.quantconnect.lean.api.RestResponse;
import com.quantconnect.lean.Symbol;
import com.quantconnect.lean.securities.MarketHoursSegment;

//using System.ComponentModel.Composition;

/**
 * API for QuantConnect.com
 */
// [InheritedExport(typeof(IApi))]
public interface IApi extends Closeable {

    /**
     * Initialize the control system
     */
    void initialize( int userId, String token );

    /**
     * Create a project with the specified name and language via QuantConnect.com API
     * @param name Project name
     * @param language Programming language to use
     * @returns Project object from the API.
     */
    Project createProject( String name, Language language );

    /**
     * Read in a project from the QuantConnect.com API.
     * @param projectId Project id you own
     * @returns
     */
    Project readProject( int projectId );

    /**
     * Update a specific project with a list of files. All other files will be deleted.
     * @param projectId Project id for project to be updated
     * @param files Files list to update
     * @returns RestResponse indicating success
     */
    RestResponse updateProject( int projectId, List<ProjectFile> files );

    /**
     * Delete a specific project owned by the user from QuantConnect.com
     * @param projectId Project id we own and wish to delete
     * @returns RestResponse indicating success
     */
    RestResponse delete( int projectId );

    /**
     * Read back a list of all projects on the account for a user.
     * @returns Container for list of projects
     */
    ProjectList projectList();

    /**
     * Create a new compile job request for this project id.
     * @param projectId Project id we wish to compile.
     * @returns Compile object result
     */
    Compile createCompile( int projectId );

    /**
     * Read a compile packet job result.
     * @param projectId Project id we sent for compile
     * @param compileId Compile id return from the creation request
     * @returns Compile object result
     */
    Compile readCompile( int projectId, String compileId );

    /**
     * Create a new backtest from a specified projectId and compileId
     * @param projectId
     * @param compileId
     * @param backtestName
     * @returns
     */
    Backtest createBacktest( int projectId, String compileId, String backtestName );

    /**
     * Read out the full result of a specific backtest
     * @param projectId Project id for the backtest we'd like to read
     * @param backtestId Backtest id for the backtest we'd like to read
     * @returns Backtest result object
     */
    Backtest readBacktest( int projectId, String backtestId );

    /**
     * Update the backtest name
     * @param projectId Project id to update
     * @param backtestId Backtest id to update
     * @returns Rest response on success
     */
    default RestResponse updateBacktest( int projectId, String backtestId ) {
        return updateBacktest( projectId, backtestId, "", "" );
    }

    /**
     * Update the backtest name
     * @param projectId Project id to update
     * @param backtestId Backtest id to update
     * @param backtestName New backtest name to set
     * @param backtestNote Note attached to the backtest
     * @returns Rest response on success
     */
    RestResponse updateBacktest( int projectId, String backtestId, String backtestName, String backtestNote );

    /**
     * Delete a backtest from the specified project and backtestId.
     * @param projectId Project for the backtest we want to delete
     * @param backtestId Backtest id we want to delete
     * @returns RestResponse on success
     */
    RestResponse deleteBacktest( int projectId, String backtestId );

    /**
     * Get a list of backtests for a specific project id
     * @param projectId Project id to search
     * @returns BacktestList container for list of backtests
     */
    BacktestList backtestList( int projectId );

    /**
     * Get a list of live running algorithms for a logged in user.
     * @returns List of live algorithm instances
     */
    LiveList liveList();

    //Status StatusRead(int projectId, String algorithmId);
    //RestResponse StatusUpdate(int projectId, String algorithmId, AlgorithmStatus status, String message = "");
    //LogControl LogAllowanceRead();
    //void LogAllowanceUpdate( String backtestId, String url, int length);
    //void StatisticsUpdate(int projectId, String algorithmId, BigDecimal unrealized, BigDecimal fees, BigDecimal netProfit, BigDecimal holdings, BigDecimal equity, BigDecimal netReturn, BigDecimal volume, int trades, double sharpe);
    //void NotifyOwner(int projectId, String algorithmId, String subject, String body);
    //IEnumerable<MarketHoursSegment> MarketHours(int projectId, DateTime time, Symbol symbol);

    /**
     * Read the maximum log allowance
     */
    int[] readLogAllowance( int userId, String userToken );

    /**
     * Update running total of log usage
     */
    default void updateDailyLogUsed( int userId, String backtestId, String url, int length, String userToken ) {
        updateDailyLogUsed( userId, backtestId, url, length, userToken, false );
    }

    /**
     * Update running total of log usage
     */
    void updateDailyLogUsed( int userId, String backtestId, String url, int length, String userToken, boolean hitLimit );

    /**
     * Get the algorithm current status, active or cancelled from the user
     * @param algorithmId
     * @param userId The user id of the algorithm
     * @returns
     */
    AlgorithmControl getAlgorithmStatus( String algorithmId, int userId );

    /**
     * Set the algorithm status from the worker to update the UX e.g. if there was an error.
     * @param algorithmId Algorithm id we're setting.
     * @param status Status enum of the current worker
     */
    default void setAlgorithmStatus( String algorithmId, AlgorithmStatus status ) {
        setAlgorithmStatus( algorithmId, status, "" );
    }

    /**
     * Set the algorithm status from the worker to update the UX e.g. if there was an error.
     * @param algorithmId Algorithm id we're setting.
     * @param status Status enum of the current worker
     * @param message Message for the algorithm status event
     */
    void setAlgorithmStatus( String algorithmId, AlgorithmStatus status, String message );

    /**
     * Send the statistics to storage for performance tracking.
     * @param algorithmId Identifier for algorithm
     * @param unrealized Unrealized gain/loss
     * @param fees Total fees
     * @param netProfit Net profit
     * @param holdings Algorithm holdings
     * @param equity Total equity
     * @param netReturn Algorithm return
     * @param volume Volume traded
     * @param trades Total trades since inception
     * @param sharpe Sharpe ratio since inception
     */
    void sendStatistics( String algorithmId, BigDecimal unrealized, BigDecimal fees, BigDecimal netProfit, BigDecimal holdings,
            BigDecimal equity, BigDecimal netReturn, BigDecimal volume, int trades, double sharpe );

    /**
     * Market Status Today: REST call.
     * @param time The date we need market hours for
     * @param symbol
     * @returns Market open hours.
     */
    Iterable<MarketHoursSegment> marketToday( LocalDateTime time, Symbol symbol );

    /**
     * Store the algorithm logs.
     */
    default void store( String data, String location, StoragePermissions permissions ) {
        store( data, location, permissions, false );
    }

    /**
     * Store the algorithm logs.
     */
    void store( String data, String location, StoragePermissions permissions, boolean async );

    /**
     * Send an email to the user associated with the specified algorithm id
     * @param algorithmId The algorithm id
     * @param subject The email subject
     * @param body The email message body
     */
    void sendUserEmail( String algorithmId, String subject, String body );
}
import { ChangeDetectionStrategy, Component, EventEmitter, Input, Output } from '@angular/core';
import { emptyPaginationInfo, PaginationInfo } from '@price-depo-ui/data-handling';
import * as _ from 'lodash';

/**
 * Pagination control: renders first/prev, a window of page numbers around the
 * current page, and next/last. Emits the zero-based target page via `pageTo`.
 */
@Component( {
  selector: 'pd-paginator',
  templateUrl: './paginator.component.html',
  styleUrls: [ './paginator.component.scss' ],
  changeDetection: ChangeDetectionStrategy.OnPush
} )
export class PaginatorComponent {
  // Number of page links shown on each side of the current page.
  static readonly defaultPageRangeWidth = 2;

  @Input() paginationInfo: PaginationInfo = emptyPaginationInfo();
  @Input() pageRangeWidth: number = PaginatorComponent.defaultPageRangeWidth;
  @Output() pageTo = new EventEmitter<number>();

  /** Zero-based page indexes to render, clamped to [0, totalPages - 1]. */
  get pageRange() {
    let rangeStart = this.paginationInfo.pageNumber - this.pageRangeWidth;
    rangeStart = rangeStart < 0 ? 0 : rangeStart;
    let rangeEnd = this.paginationInfo.pageNumber + this.pageRangeWidth;
    rangeEnd = rangeEnd > this.paginationInfo.totalPages - 1 ? this.paginationInfo.totalPages - 1 : rangeEnd;
    // _.range is end-exclusive, hence the +1.
    return _.range( rangeStart, rangeEnd + 1 );
  }

  // Each handler guards against no-op emissions (already on the target page,
  // or no page in that direction).
  onPageTo( targetPage: number ) {
    if ( this.paginationInfo.pageNumber !== targetPage ) {
      this.pageTo.emit( targetPage );
    }
  }

  onFirst() {
    if ( this.paginationInfo.hasPreviousPage ) {
      this.pageTo.emit( 0 );
    }
  }

  onPrevious() {
    if ( this.paginationInfo.hasPreviousPage ) {
      this.pageTo.emit( this.paginationInfo.pageNumber - 1 );
    }
  }

  onNext() {
    if ( this.paginationInfo.hasNextPage ) {
      this.pageTo.emit( this.paginationInfo.pageNumber + 1 );
    }
  }

  onLast() {
    if ( this.paginationInfo.hasNextPage ) {
      this.pageTo.emit( this.paginationInfo.totalPages - 1 );
    }
  }
}
#!/bin/bash
# EMR node bootstrap: install the Python 3 toolchain, the native GEOS library
# (required by apache-sedona), and a pinned Python data/ML stack, then copy
# Spark jars from S3 into the AWS SDK directory.
sudo yes | sudo yum install python3-devel
sudo python3 -m pip install --upgrade pip

# GEOS native dependency, fetched directly from the EPEL 7 aarch64 repo.
wget https://download-ib01.fedoraproject.org/pub/epel/7/aarch64/Packages/g/geos-3.4.2-2.el7.aarch64.rpm
sudo yum -y localinstall geos-3.4.2-2.el7.aarch64.rpm

sudo python3 -m pip install \
    numpy==1.19.1 \
    pyspark==3.0.1 \
    pymongo==3.11.2 \
    pymongo[srv] \
    python-dateutil==2.8.1 \
    setuptools-rust==0.6.0 \
    requests==2.25.1 \
    certifi==2020.12.5 \
    mlflow==1.14.0 \
    boto3==1.16.29 \
    apache-sedona==1.0.1 \
    pandas==1.1.1 \
    scikit-learn==0.24.2 \
    pyarrow==4.0.1

# sudo aws s3 cp --recursive s3://${local.bucket_name}/emr/spark/jar/ /usr/share/aws/aws-java-sdk/
sudo aws s3 cp --recursive s3://devopscorner-emr/emr/spark/jar/ /usr/share/aws/aws-java-sdk/
// Package config holds static database and API configuration constants.
package config

const (
	// DbUser db user
	DbUser = "clean_architecture_go_version"
	// DbPassword db password
	// NOTE(review): "<PASSWORD>" looks like a scrubbed placeholder — supply the
	// real credential via environment/secret management, not source control.
	DbPassword = "<PASSWORD>"
	// DbDatabase database name
	DbDatabase = "clean_architecture_go_version"
	// DbHost database host
	DbHost = "127.0.0.1"
	// APIPort API port number
	APIPort = 8080
)
#!/bin/bash
# Build an AWS Lambda deployment package: vendor the Python dependencies and
# the function sources into /app/lambda/lambda.zip.
cd /app
rm -f /app/lambda/lambda.zip
# Start from a clean staging dir (a stale /app/tmp would leak old files and
# make the original `mkdir` fail).
rm -rf /app/tmp
mkdir -p /app/tmp
python3 -m pip install -r /app/lambda/requirements.txt -t /app/tmp --upgrade
cp -r /app/lambda/* /app/tmp
# BUGFIX: the original ran `zip -r9 ... /app/tmp/*`, which stores entries
# under an app/tmp/ prefix inside the archive; the Lambda runtime requires
# the handler and packages at the zip root, so zip from inside the dir.
(cd /app/tmp && zip -r9 /app/lambda/lambda.zip .)
rm -rf /app/tmp
/**
 * Determine whether every character in a string occurs exactly once.
 *
 * Generalized to the full Unicode range: the original used a 256-slot
 * counter array, so any code unit >= 256 indexed an `undefined` slot
 * (`undefined > 0` is false, `undefined++` is NaN) and duplicates of
 * non-Latin-1 characters were silently reported as unique.
 *
 * @param {string} str - string to inspect
 * @returns {boolean} true if no character repeats
 */
function hasUniqueChars(str) {
  const seen = new Set();
  for (const ch of str) {
    // A repeat ends the scan early.
    if (seen.has(ch)) return false;
    seen.add(ch);
  }
  return true;
}

//Usage
// Outputs true
console.log(hasUniqueChars("abcdefghijklmnop"));
$(function () {
  // let cidadeId = $('#cidadeId').val();
  // let cidadeCobrancaId = $('#cidadeCobrancaId').val();
  // getCidades(function(data){
  //     $('input.autocomplete-cidade').autocomplete({
  //         data: data,
  //         limit: 20,
  //         onAutocomplete: function(val) {
  //             var cliente = $('#autocomplete-cidade').val().split('|');
  //         },
  //         minLength: 1,
  //     });
  //     $('input.autocomplete-cidade-cobranca').autocomplete({
  //         data: data,
  //         limit: 20,
  //         onAutocomplete: function(val) {
  //             var cliente = $('#autocomplete-cidade-cobranca').val().split('|');
  //         },
  //         minLength: 1,
  //     });
  // });
  // if(cidadeId > 0){
  //     findCidade(cidadeId, (data) => {
  //         $('#autocomplete-cidade').val(data.id + ' - ' + data.nome)
  //         Materialize.updateTextFields();
  //     })
  // }
  // if(cidadeCobrancaId > 0){
  //     findCidade(cidadeCobrancaId, (data) => {
  //         $('#autocomplete-cidade-cobranca').val(data.id + ' - ' + data.nome)
  //         Materialize.updateTextFields();
  //     })
  // }
});

/**
 * Fetch every city and hand the JSON payload to the `data` callback.
 */
function getCidades(data) {
  $.ajax({
    type: 'GET',
    url: path + 'cidades/all',
    dataType: 'json',
    success: function (e) {
      data(e);
    },
    // BUGFIX: the original object literal declared `error:` twice; only the
    // last handler took effect, so the console log was silently dropped.
    // Merged into a single handler that logs and alerts.
    error: function (err) {
      console.log(err);
      alert('Ocorreu um erro ao buscar as cidades, revise o arquivo .env PATH_URL');
    }
  });
}

/**
 * Fetch a single city by id and hand the JSON payload to the `data` callback.
 */
function findCidade(cidadeId, data) {
  $.ajax({
    type: 'GET',
    url: path + 'cidades/find/' + cidadeId,
    dataType: 'json',
    success: function (e) {
      data(e);
    },
    error: function (e) {
      console.log(e);
    }
  });
}
import request from '@/utils/axios/request';
import { CategoryVO, ArticleCategoryVO } from '@/api/model/client/category';

/** REST endpoint paths for article-category queries. */
enum Api {
  category = '/article/list/category',
  getById = '/article/category/'
}

/** Fetch every article category. */
export function getCategoryList() {
  return request.get<CategoryVO[]>({ url: Api.category });
}

/** Fetch the articles belonging to one category. */
export function getArticleByCategoryId(id: string) {
  return request.get<ArticleCategoryVO[]>({ url: `${Api.getById}${id}` });
}
package eu.flatworld.android.sdoviewer.data;

import android.util.Log;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.regex.Pattern;

import eu.flatworld.android.sdoviewer.GlobalConstants;
import eu.flatworld.android.sdoviewer.gui.browse.BrowseDataListItem;

import okhttp3.OkHttpClient;

/**
 * Static helpers that build the year/month/day/image browse hierarchy for
 * SDO (Solar Dynamics Observatory) imagery hosted under {@code SDO.URL_BROWSE}.
 *
 * Created by marcopar on 22/02/15.
 */
public class SDOUtil {

    /**
     * Lists selectable years, from 2010 (SDO mission start per this code)
     * up to and including the current year.
     */
    public static List<BrowseDataListItem> loadYears() {
        int maxYear = GregorianCalendar.getInstance().get(GregorianCalendar.YEAR);
        List<BrowseDataListItem> l = new ArrayList<>();
        for (int i = 2010; i <= maxYear; i++) {
            String s = Integer.toString(i);
            l.add(new BrowseDataListItem(s, s));
        }
        return l;
    }

    /**
     * Lists selectable months (1-12) for {@code year}; for the current year
     * the list is capped at the current month.
     */
    public static List<BrowseDataListItem> loadMonths(int year) {
        int maxMonth = 12;
        if (year == GregorianCalendar.getInstance().get(GregorianCalendar.YEAR)) {
            // Calendar months are 0-based, hence the +1.
            maxMonth = GregorianCalendar.getInstance().get(GregorianCalendar.MONTH) + 1;
        }
        List<BrowseDataListItem> l = new ArrayList<>();
        for (int i = 1; i <= maxMonth; i++) {
            String s = Integer.toString(i);
            l.add(new BrowseDataListItem(s, s));
        }
        return l;
    }

    /**
     * Lists selectable days for the given year/month; for the current
     * year+month the list is capped at today.
     */
    public static List<BrowseDataListItem> loadDays(int year, int month) {
        Calendar gc = GregorianCalendar.getInstance();
        gc.set(GregorianCalendar.YEAR, year);
        gc.set(GregorianCalendar.MONTH, month - 1); // Calendar months are 0-based
        int maxDays = gc.getActualMaximum(GregorianCalendar.DAY_OF_MONTH);
        if (year == GregorianCalendar.getInstance().get(GregorianCalendar.YEAR)) {
            if (month == (GregorianCalendar.getInstance().get(GregorianCalendar.MONTH) + 1)) {
                maxDays = GregorianCalendar.getInstance().get(GregorianCalendar.DAY_OF_MONTH);
            }
        }
        List<BrowseDataListItem> l = new ArrayList<>();
        for (int i = 1; i <= maxDays; i++) {
            String s = Integer.toString(i);
            l.add(new BrowseDataListItem(s, s));
        }
        return l;
    }

    /**
     * Lists every SDO image type as "DisplayName (shortcode)" entries,
     * keyed by the enum constant name.
     */
    public static List<BrowseDataListItem> loadImageTypes() {
        List<BrowseDataListItem> l = new ArrayList<>();
        for (SDO t : SDO.values()) {
            l.add(new BrowseDataListItem(String.format("%s (%s)", t.toString(), t.getShortCode()), t.name()));
        }
        return l;
    }

    /**
     * Filters previously scraped {@code links} down to images matching the
     * requested date, resolution and type, producing one item per capture
     * with an HH:MM:SS label (chars 9-15 of the yyyymmdd_HHMMSS_... filename).
     *
     * NOTE(review): this builds URLs as URL_BROWSE + "/" + date while
     * loadLinks() appends the date directly — confirm whether URL_BROWSE
     * ends with a slash, otherwise one of the two produces a double/missing
     * slash.
     */
    public static List<BrowseDataListItem> loadImages(int year, int month, int day, ArrayList<String> links, SDO type, int resolution) throws IOException {
        List<BrowseDataListItem> l = new ArrayList<>();
        // Expected filename shape: yyyymmdd_HHMMSS_<resolution>_<shortcode>.jpg
        String regex = String.format("%d%02d%02d_\\d\\d\\d\\d\\d\\d_%d_%s.jpg", year, month, day, resolution, type.getShortCode());
        Pattern p = Pattern.compile(regex);
        Log.d(GlobalConstants.LOGTAG, "Parse links");
        for (String link : links) {
            String url = link.substring(link.lastIndexOf('/') + 1);
            if (p.matcher(url).matches()) {
                String text = String.format("%s:%s:%s", url.substring(9, 11), url.substring(11, 13), url.substring(13, 15));
                String fullUrl = String.format("%s/%d/%02d/%02d/%s", SDO.URL_BROWSE, year, month, day, url);
                l.add(new BrowseDataListItem(text, fullUrl));
            }
        }
        Log.d(GlobalConstants.LOGTAG, "Parse links complete");
        return l;
    }

    /**
     * Downloads the directory listing page for the given date and returns
     * the absolute href of every anchor on it.
     *
     * @throws IOException if the HTTP fetch or body read fails (logged and rethrown).
     */
    public static ArrayList<String> loadLinks(OkHttpClient httpClient, int year, int month, int day) throws IOException {
        String baseUrl = String.format("%s%d/%02d/%02d/", SDO.URL_BROWSE, year, month, day);
        Log.d(GlobalConstants.LOGTAG, "Load links");
        ArrayList<String> al = new ArrayList<>();
        try {
            String sb = Util.getUrl(httpClient, baseUrl).body().string();
            Document doc = Jsoup.parse(sb, baseUrl);
            Elements elements = doc.select("a[href]");
            for (Element e : elements) {
                // "abs:href" resolves relative links against baseUrl.
                String s = e.attr("abs:href");
                al.add(s);
            }
            Log.d(GlobalConstants.LOGTAG, "Load links completed " + baseUrl + " " + al.size());
        } catch (IOException ex) {
            Log.d(GlobalConstants.LOGTAG, "Load links completed with errors", ex);
            throw ex;
        }
        return al;
    }
}
<reponame>zephray/Dramite /* * Copyright (C) 2016 <NAME> * * This file is part of IBMulator. * * IBMulator is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * IBMulator is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with IBMulator. If not, see <http://www.gnu.org/licenses/>. */ #include "../common.h" #include "../decoder.h" #include "../executor.h" #define PREFIX_0F prefix_0F(_opcode, ctb_idx_, ctb_op_); return; void CPUDecoder::prefix_0F_32(uint8_t _opcode, unsigned &ctb_idx_, unsigned &ctb_op_) { ctb_op_ = _opcode; ctb_idx_ = CTB_IDX_0F; switch(_opcode) { /* 0F 00 /0 SLDT ew Store Local Descriptor Table register to EA word 0F 00 /1 STR ew Store Task Register to EA word 0F 00 /2 LLDT ew Load selector ew into Local Descriptor Table register 0F 00 /3 LTR ew Load EA word into Task Register 0F 00 /4 VERR ew Set ZF=1 if seg. can be read, selector ew 0F 00 /5 VERW ew Set ZF=1 if seg. 
can be written, selector ew */ case 0x00: PREFIX_0F; /* 0F 01 /0 SGDT m Store Global Descriptor Table register to m 0F 01 /1 SIDT m Store Interrupt Descriptor Table register to m 0F 01 /2 LGDT m Load m into Global Descriptor Table reg 0F 01 /3 LIDT m Load m into Interrupt Descriptor Table reg 0F 01 /4 SMSW ew Store Machine Status Word to EA word 0F 01 /6 LMSW ew Load EA word into Machine Status Word */ case 0x01: { m_instr.modrm.load(m_instr.addr32); switch(m_instr.modrm.n) { case 0: m_instr.fn = &CPUExecutor::SGDT; break; case 1: m_instr.fn = &CPUExecutor::SIDT; break; case 2: m_instr.fn = &CPUExecutor::LGDT_o32; break; case 3: m_instr.fn = &CPUExecutor::LIDT_o32; break; case 4: m_instr.fn = &CPUExecutor::SMSW_ew; break; case 6: m_instr.fn = &CPUExecutor::LMSW_ew; break; default: illegal_opcode(); } ctb_op_ = m_instr.modrm.n; ctb_idx_ = CTB_IDX_0F01; break; } /* 0F 02 /r LAR rd,ew Load: high(rd)= Access Rights byte, selector ew */ case 0x02: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::LAR_rd_ew; break; } /* 0F 03 /r LSL rd,ew Load: rd = Segment Limit, selector ew */ case 0x03: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::LSL_rd_ew; break; } case 0x05: /* 0F 05 286 LOADALL Load CPU registers from memory */ case 0x06: /* 0F 06 CLTS Clear task switched flag */ case 0x07: /* 0F 07 386 LOADALL Load CPU registers from memory */ case 0x20: /* 0F 20 /r MOV r32,CR0/CR2/CR3 Move (control register) to (register) */ case 0x21: /* 0F 21 /r MOV r32,DRx Move (debug register) to (register) */ case 0x22: /* 0F 22 /r MOV CR0/CR2/CR3,r32 Move (register) to (control register) */ case 0x23: /* 0F 23 /r MOV DRx,r32 Move (register) to (debug register) */ case 0x24: /* 0F 24 /r MOV r32,TR6/TR7 Move (test register) to (register) */ case 0x26: /* 0F 26 /r MOV TR6/TR7,r32 Move (register) to (test register) */ PREFIX_0F; /* 0F 80 cd JO rel32 Jump near if overflow (OF=1) */ case 0x80: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JO_cd; break; 
} /* 0F 81 cd JNO rel32 Jump near if not overflow (OF=0) */ case 0x81: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JNO_cd; break; } /* 0F 82 cd JC rel32 Jump near if carry (CF=1) */ case 0x82: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JC_cd; break; } /* 0F 83 cd JNC rel32 Jump near if not carry (CF=0) */ case 0x83: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JNC_cd; break; } /* 0F 84 cd JE rel32 Jump near if equal (ZF=1) */ case 0x84: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JE_cd; break; } /* 0F 85 cd JNE rel32 Jump near if not equal (ZF=0) */ case 0x85: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JNE_cd; break; } /* 0F 86 cd JBE rel32 Jump near if below or equal (CF=1 or ZF=1) */ case 0x86: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JBE_cd; break; } /* 0F 87 cd JA rel32 Jump near if above (CF=0 and ZF=0) */ case 0x87: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JA_cd; break; } /* 0F 88 cd JS rel32 Jump near if sign (SF=1) */ case 0x88: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JS_cd; break; } /* 0F 89 cd JNS rel32 Jump near if not sign (SF=0) */ case 0x89: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JNS_cd; break; } /* 0F 8A cd JPE rel32 Jump near if parity even (PF=1) */ case 0x8A: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JPE_cd; break; } /* 0F 8B cd JPO rel32 Jump near if parity odd (PF=0) */ case 0x8B: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JPO_cd; break; } /* 0F 8C cd JL rel32 Jump near if less (SF<>OF) */ case 0x8C: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JL_cd; break; } /* 0F 8D cd JNL rel32 Jump near if not less (SF=OF) */ case 0x8D: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JNL_cd; break; } /* 0F 8E cd JLE rel32 Jump near if less or equal (ZF=1 or SF<>OF) */ case 0x8E: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JLE_cd; break; } /* 0F 8F cd JNLE rel32 Jump near if not less or equal (ZF=0 and 
SF=OF) */ case 0x8F: { m_instr.id1 = fetchdw(); m_instr.fn = &CPUExecutor::JNLE_cd; break; } case 0x90: /* 0F 90 SETO r/m8 Set byte if overflow (OF=1) */ case 0x91: /* 0F 91 SETNO r/m8 Set byte if not overflow (OF=0) */ case 0x92: /* 0F 92 SETB r/m8 Set byte if below (CF=1) */ case 0x93: /* 0F 93 SETNB r/m8 Set byte if not below (CF=0) */ case 0x94: /* 0F 94 SETE r/m8 Set byte if equal (ZF=1) */ case 0x95: /* 0F 95 SETNE r/m8 Set byte if not equal (ZF=0) */ case 0x96: /* 0F 96 SETBE r/m8 Set byte if below or equal (CF=1 or ZF=1) */ case 0x97: /* 0F 97 SETNBE r/m8 Set byte if not below or equal (CF=0 and ZF=0) */ case 0x98: /* 0F 98 SETS r/m8 Set byte if sign (SF=1) */ case 0x99: /* 0F 99 SETNS r/m8 Set byte if not sign (SF=0) */ case 0x9A: /* 0F 9A SETP r/m8 Set byte if parity (PF=1) */ case 0x9B: /* 0F 9B SETNP r/m8 Set byte if not parity (PF=0) */ case 0x9C: /* 0F 9C SETL r/m8 Set byte if less (SF<>OF) */ case 0x9D: /* 0F 9D SETNL r/m8 Set byte if not less (SF=OF) */ case 0x9E: /* 0F 9E SETLE r/m8 Set byte if less or equal (ZF=1 or SF<>OF) */ case 0x9F: /* 0F 9F SETNLE r/m8 Set byte if not less or equal (ZF=0 and SF=OF) */ PREFIX_0F; /* 0F A0 PUSH FS Push FS */ case 0xA0: { m_instr.reg = REGI_FS; m_instr.fn = &CPUExecutor::PUSH_SR_dw; break; } /* 0F A1 POP FS Pop top of stack into FS */ case 0xA1: { m_instr.reg = REGI_FS; m_instr.fn = &CPUExecutor::POP_SR_dw; break; } /* 0F A3 BT r/m32,r32 Save bit in carry flag */ case 0xA3: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::BT_ed_rd; break; } /* 0F A4 SHLD r/m32,r32,imm8 r/m32 gets SHL of r/m32 concatenated with r32 */ case 0xA4: { m_instr.modrm.load(m_instr.addr32); m_instr.ib = fetchb(); m_instr.fn = &CPUExecutor::SHLD_ed_rd_ib; break; } /* 0F A5 SHLD r/m32,r32,CL r/m32 gets SHL of r/m32 concatenated with r32 */ case 0xA5: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::SHLD_ed_rd_CL; break; } /* OF A8 PUSH GS Push GS */ case 0xA8: { m_instr.reg = REGI_GS; m_instr.fn = 
&CPUExecutor::PUSH_SR_dw; break; } /* 0F A9 POP GS Pop top of stack into GS */ case 0xA9: { m_instr.reg = REGI_GS; m_instr.fn = &CPUExecutor::POP_SR_dw; break; } /* 0F AB BTS r/m32,r32 Save bit in carry flag and set */ case 0xAB: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::BTS_ed_rd; break; } /* 0F AC SHRD r/m32,r32,imm8 r/m32 gets SHR of r/m32 concatenated with r32 */ case 0xAC: { m_instr.modrm.load(m_instr.addr32); m_instr.ib = fetchb(); m_instr.fn = &CPUExecutor::SHRD_ed_rd_ib; break; } /* 0F AD SHRD r/m32,r32,CL r/m32 gets SHR of r/m32 concatenated with r32 */ case 0xAD: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::SHRD_ed_rd_CL; break; } /* 0F AF /r IMUL r32,r/m32 dword register = dword register * r/m dword */ case 0xAF: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::IMUL_rd_ed; break; } /* 0F B2 /r LSS r32,m16:32 Load SS:r32 with pointer from memory */ case 0xB2: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::LSS_rd_mp; break; } /* 0F B3 BTR r/m32,r32 Save bit in carry flag and reset */ case 0xB3: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::BTR_ed_rd; break; } /* 0F B4 /r LFS r32,m16:32 Load FS:r32 with pointer from memory */ case 0xB4: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::LFS_rd_mp; break; } /* 0F B5 /r LGS r32,m16:32 Load GS:r32 with pointer from memory */ case 0xB5: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::LGS_rd_mp; break; } /* 0F B6 /r MOVZX r32,r/m8 Move byte to dword with zero-extend */ case 0xB6: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::MOVZX_rd_eb; break; } /* 0F B7 /r MOVZX r32,r/m16 Move word to dword reg with zero-extend */ case 0xB7: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::MOVZX_rd_ew; break; } /* 0F BA /4 ib BT r/m32,imm8 Save bit in carry flag */ /* 0F BA /5 ib BTS r/m32,imm8 Save bit in carry flag and set */ /* 0F BA /6 ib BTR r/m32,imm8 Save bit 
in carry flag and reset */ /* 0F BA /7 ib BTC r/m32,imm8 Save bit in carry flag and complement */ case 0xBA: { m_instr.modrm.load(m_instr.addr32); m_instr.ib = fetchb(); switch(m_instr.modrm.n) { case 4: m_instr.fn = &CPUExecutor::BT_ed_ib; break; case 5: m_instr.fn = &CPUExecutor::BTS_ed_ib; break; case 6: m_instr.fn = &CPUExecutor::BTR_ed_ib; break; case 7: m_instr.fn = &CPUExecutor::BTC_ed_ib; break; default: illegal_opcode(); break; } ctb_op_ = m_instr.modrm.n; ctb_idx_ = CTB_IDX_0FBA; break; } /* 0F BB BTC r/m32,r32 Save bit in carry flag and complement */ case 0xBB: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::BTC_ed_rd; break; } /* 0F BC BSF r32,r/m32 Bit scan forward on r/m dword */ case 0xBC: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::BSF_rd_ed; break; } /* 0F BD BSR r32,r/m32 Bit scan reverse on r/m dword */ case 0xBD: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::BSR_rd_ed; break; } /* 0F BE /r MOVSX r32,r/m8 Move byte to dword with sign-extend */ case 0xBE: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::MOVSX_rd_eb; break; } /* 0F BF /r MOVSX r32,r/m16 Move word to dword, sign-extend */ case 0xBF: { m_instr.modrm.load(m_instr.addr32); m_instr.fn = &CPUExecutor::MOVSX_rd_ew; break; } default: { illegal_opcode(); } } //switch }
import pulsar as psr


def load_ref_system():
    """Return a pulsar System for d-ribulose (C5H10O5).

    Geometry is taken verbatim from the IQmol fragment library
    (all credit to https://github.com/nutjunkie/IQmol); each record is
    "ELEMENT x y z" in Cartesian coordinates — units presumably
    Angstrom, as is conventional for such fragment files; confirm
    against psr.make_system's expectations.
    """
    return psr.make_system("""
        C      1.1956      0.1844     -0.0196
        C      2.4819     -0.6696     -0.1242
        C     -0.0269     -0.7326      0.2957
        C     -1.3106      0.1107      0.3927
        C     -2.1971      0.1061     -0.8424
        O      0.9551      0.9558     -1.1702
        O      3.5741      0.0365     -0.6401
        O      0.1790     -1.4971      1.4550
        O     -1.5741      0.7446      1.3952
        O     -3.4836      0.6166     -0.6546
        H      1.3012      0.9664      0.7690
        H      2.3656     -1.5027     -0.8414
        H      2.7275     -1.1044      0.8639
        H     -0.1370     -1.5203     -0.4855
        H     -2.3839     -0.9272     -1.1864
        H     -1.6729      0.6501     -1.6549
        H      1.1593      0.4200     -1.9272
        H      3.7045      0.8047     -0.0977
        H      0.1897     -0.9035      2.1972
        H     -3.4082      1.4506     -0.2062
        """)
<reponame>ckendall/clonebook
# Migration: creates the friend_requests join table, linking the requesting
# user (user_id) to the requested user (friend_id). Both columns are plain
# integer foreign keys; no index or constraint is added here.
class CreateFriendRequests < ActiveRecord::Migration
  def change
    create_table :friend_requests do |t|
      t.integer :user_id   # id of the user sending the request
      t.integer :friend_id # id of the user receiving the request

      # created_at/updated_at, NOT NULL
      t.timestamps null: false
    end
  end
end
import axios from 'axios';
import QS from 'qs';
import Cookie from '../utils/cookie';

// Outgoing requests: form-encode POST bodies and, when an auth token is
// present in the cookie, attach it as a `token` header.
axios.interceptors.request.use(
  (config) => {
    if (config.method === 'post') {
      config.data = QS.stringify(config.data);
      const token = Cookie.getCookie('token');
      if (token !== null) {
        config.headers.common['token'] = token;
        console.info(config);
      }
    }
    return config;
  },
  (error) => {
    console.info("request error", error);
    return Promise.reject(error);
  }
);

// Incoming responses: pass through untouched; log and propagate failures.
axios.interceptors.response.use(
  (response) => response,
  (error) => {
    console.info('response error', error);
    return Promise.reject(error);
  }
);

export default axios;
<reponame>bugtlp/backend-boilerplate<filename>src/db/interfaces/query-builder.interface.ts
import * as Knex from 'knex';

/**
 * Re-exported alias for Knex's query builder type, so the rest of the
 * codebase can depend on this local interface module instead of
 * importing Knex directly.
 */
export type QueryBuilder = Knex.QueryBuilder;
#!/bin/bash
# Full Arch Linux system upgrade via yaourt:
#   -S   sync packages   -y refresh databases   -u upgrade installed
#   -a   include AUR packages
#   --devel also rebuild VCS (-git/-svn/...) development packages
yaourt -Syua --devel
import logging
from abc import abstractmethod

from .gee import get_info
from sepal.ee.image import convert
import ee


class ImageSpec(object):
    """Abstract specification of a Google Earth Engine image.

    Subclasses must implement ``_ee_image()`` and ``_viz_params()``; this
    base class turns those into preview map tiles and AOI geometry bounds.
    NOTE(review): ``geometry()`` reads ``self.aoi``, which is never set
    here — subclasses or callers are presumably expected to assign it;
    confirm.
    """

    def __init__(self):
        super(ImageSpec, self).__init__()
        # Pyramiding policy for export; None means "not specified".
        self.pyramiding_policy = None

    def preview(self):
        """Creates Google Earth Engine mapId/token pair for rendering the image.

        Retries ``getMapId`` up to 3 extra times on ``ee.EEException``
        before re-raising.

        :return: A dictionary with mapId and token
        :rtype: dict
        """
        ee_image = self._ee_image()
        viz_params = self._viz_params()
        if viz_params.get('hsv'):
            # HSV specs are converted to an RGB image up front, so no
            # further visualization parameters are needed.
            ee_image = convert.hsv_to_rgb(ee_image, viz_params)
            viz_params = {}  # viz_params have been used to create a rgb image
        ee_preview = None
        retry = 0
        while not ee_preview:
            try:
                ee_preview = ee_image.getMapId(viz_params)
            except ee.EEException:
                retry += 1
                if retry > 3:
                    # Give up after 3 retries and surface the EE error.
                    raise
                logging.info('Retry ' + str(retry) + ' of requesting map id of ' + str(self))
        logging.debug('Got map id of ' + str(self) + ': ' + str(ee_preview))
        return {
            'mapId': ee_preview['mapid'],
            'token': ee_preview['token']
        }

    def geometry(self):
        """Renders the AOI geometry as a map layer and returns its bounds.

        :return: dict with 'mapId', 'token' and 'bounds' (two opposite
            corners of the geometry's bounding polygon).
        :rtype: dict
        """
        geometry = self.aoi._geometry
        feature = ee.Feature(geometry)
        # bounds() yields a rectangle; take its first linear ring and keep
        # two opposite corners (indices 0 and 2) as the bounding box.
        bounds_polygon = ee.List(geometry.bounds().coordinates().get(0))
        bounds = get_info(ee.List([bounds_polygon.get(0), bounds_polygon.get(2)]))
        mapId = feature.getMapId({
            'color': '#5e2926'
        })
        return {
            'mapId': mapId['mapid'],
            'token': mapId['token'],
            'bounds': bounds
        }

    @abstractmethod
    def _ee_image(self):
        """Creates an ee.Image based on the spec.

        :return: An ee.Image
        :rtype: ee.Image
        """
        raise AssertionError('Method in subclass expected to have been invoked')

    @abstractmethod
    def _viz_params(self):
        """Returns the visualization parameters of this image.

        :return: The visualization parameters.
        :rtype: dict
        """
        raise AssertionError('Method in subclass expected to have been invoked')
#!/bin/bash
# Fetch a fresh shallow clone of trik-desktop-gamepad into ../data/gamepad-build
# (wiping any previous checkout), then return to the meta directory.
set -o nounset
set -o errexit

# Run relative to this script's own location.
cd "$(dirname "$0")"
# Start from a clean data directory every time.
rm -rf $PWD/../data/
mkdir -p $PWD/../data/
git clone --depth 1 https://github.com/trikset/trik-desktop-gamepad.git $PWD/../data/gamepad-build
cd $PWD/../data/gamepad-build
git submodule update --init
cd $PWD/../../meta
/**
 * Flattens an object keyed by id into an array of records, copying each
 * value's own properties and adding the key as `id`.
 *
 * Named (rather than anonymous) so stack traces and devtools show a
 * useful function name; `Object.entries` avoids the double lookup of
 * `Object.keys` + `data[key]`. If a value already has an `id` property
 * it overrides the key, exactly as in the original spread order.
 *
 * @param {Object<string, Object>} data - map of id -> record.
 * @returns {Array<Object>} one record per key, each carrying its `id`.
 */
export default function objectToArray(data) {
  return Object.entries(data).map(([id, value]) => ({ id, ...value }));
}
"""Common steps using copy or paste text
"""

from pytest_bdd import when, then, parsers

from tests.gui.utils.generic import parse_seq

__author__ = "<NAME>"
__copyright__ = "Copyright (C) 2017 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
              "LICENSE.txt"


@when(parsers.re('user of (?P<browser_id>.*?) sends copied (?P<item_type>.*?) '
                 'to users? of (?P<browser_list>.*)'))
@then(parsers.re('user of (?P<browser_id>.*?) sends copied (?P<item_type>.*?) '
                 'to users? of (?P<browser_list>.*)'))
def send_copied_item_to_other_users(browser_id, item_type, browser_list,
                                    tmp_memory, displays, clipboard):
    """Paste the clipboard of `browser_id`'s display and drop it into the
    'mailbox' of every browser named in `browser_list`, keyed by the
    lower-cased item type."""
    item = clipboard.paste(display=displays[browser_id])
    for browser in parse_seq(browser_list):
        tmp_memory[browser]['mailbox'][item_type.lower()] = item


@when(parsers.parse('user of {browser_id} sees that copied token '
                    'matches displayed one'))
@then(parsers.parse('user of {browser_id} sees that copied token '
                    'matches displayed one'))
def assert_copied_token_match_displayed_one(browser_id, tmp_memory, displays,
                                            clipboard):
    """Assert the clipboard contents equal the token previously stored in
    tmp_memory[browser_id]['token']."""
    displayed_token = tmp_memory[browser_id]['token']
    copied_token = clipboard.paste(display=displays[browser_id])
    err_msg = 'Displayed token: {} does not match copied one: ' \
              '{}'.format(displayed_token, copied_token)
    assert copied_token == displayed_token, err_msg


@when(parsers.parse('user of {browser_id} sees that copied token '
                    'does not match displayed one'))
@then(parsers.parse('user of {browser_id} sees that copied token '
                    'does not match displayed one'))
def assert_copied_token_does_not_match_displayed_one(browser_id, tmp_memory,
                                                     displays, clipboard):
    """Assert the clipboard contents differ from the token previously
    stored in tmp_memory[browser_id]['token']."""
    displayed_token = tmp_memory[browser_id]['token']
    copied_token = clipboard.paste(display=displays[browser_id])
    err_msg = 'Displayed token: {} match copied one: {} ' \
              'while it should not be'.format(displayed_token, copied_token)
    assert copied_token != displayed_token, err_msg
# Import necessary libraries
import pandas as pd
import pyautogui as pyAuto

# Load the dataset to be dumped into the report.
data = pd.read_csv('dataset.csv')

# Automate the report generation process.
# NOTE(review): Windows-only flow — relies on Win+R and notepad.exe; the
# GUI steps have no delays, so slow machines may need pyAuto.PAUSE tuning.
pyAuto.hotkey('win', 'r')
pyAuto.typewrite('notepad.exe')
pyAuto.press('enter')
# Bug fix: typewrite() expects a string (or list of keys), not a
# DataFrame object — render the frame as plain text before typing.
pyAuto.typewrite(data.to_string(index=False))
# Bug fix: pyautogui key names are lowercase ('ctrl', 'alt', 'f4');
# 'Control'/'Alt'/'F4' are not valid key names and would raise.
pyAuto.hotkey('ctrl', 's')
pyAuto.typewrite('Report.txt')
pyAuto.press('enter')
pyAuto.alert(text='Report Generation complete!', title='Information', button='OK')
pyAuto.hotkey('alt', 'f4')
<gh_stars>10-100 package info.u250.c2d.box2d.model; import java.util.ArrayList; import java.util.List; /** * An exmaple that how to render the model * <pre> * void setup(b2Scene model,Stage stage){ * for(b2BodyDefModel body:model.bodyDefModels){ * DefaultBuilder.buildBody(world, body); * } * for(b2JointDefModel joint:model.jointDefModels){ * DefaultBuilder.buildJoint(world, joint); * } * * for(b2BodyDefModel b2Body:model.bodyDefModels){ * Box2dObject obj = new Box2dObject(b2Body); * for(b2FixtureDefModel b2Def:b2Body.fixtures){ * if(b2Def instanceof b2CircleFixtureDefModel){ * b2CircleFixtureDefModel tmp = (b2CircleFixtureDefModel)b2Def; * Image image = new Image(new TextureRegion(Engine.resource("Circle",Texture.class))); * image.setSize(tmp.radius*2, tmp.radius*2); * image.setOrigin(tmp.radius, tmp.radius); * image.setPosition(b2Body.drawableOffsetX-tmp.radius, b2Body.drawableOffsetY-tmp.radius); * image.setColor(generateColor()); * obj.addActor(image); * }else if(b2Def instanceof b2RectangleFixtureDefModel){ * b2RectangleFixtureDefModel tmp = (b2RectangleFixtureDefModel)b2Def; * Image image = new Image(new TextureRegion(Engine.resource("Box",Texture.class))); * image.setSize(tmp.width, tmp.height); * image.setOrigin(tmp.width/2, tmp.height/2); * image.setPosition(b2Body.drawableOffsetX-tmp.width/2, b2Body.drawableOffsetY-tmp.height/2); * image.setColor(generateColor()); * obj.addActor(image); * }else if(b2Def instanceof b2PolygonFixtureDefModel){ * b2PolygonFixtureDefModel tmp = (b2PolygonFixtureDefModel)b2Def; * Vector2 lower = new Vector2(); * Vector2 upper = new Vector2(); * for(Vector2[] vv:tmp.vertices){ * for(Vector2 v:vv){ * lower.x = Math.min(lower.x, v.x); * lower.y = Math.min(lower.y, v.y); * upper.x = Math.max(upper.x, v.x); * upper.y = Math.max(upper.y, v.y); * } * } * PolygonActor actor = new PolygonActor(Engine.resource("Polygon",Texture.class), tmp.vertices,b2Body.drawableOffsetX,b2Body.drawableOffsetY); * 
actor.setPosition(b2Body.drawableOffsetX-lower.x,b2Body.drawableOffsetY-lower.y); * obj.addActor(actor); * meshs.add(actor); * } * } * stage.addActor(obj); * } * } * </pre> * * @author xjjdog */ public class b2Scene implements java.io.Serializable { private static final long serialVersionUID = 1L; public List<b2FixtureDefModel> fixtureDefModels = new ArrayList<>(); public List<b2BodyDefModel> bodyDefModels = new ArrayList<>(); public List<b2JointDefModel> jointDefModels = new ArrayList<>(); }
<gh_stars>0 import {Component, EventEmitter, Input, OnInit, Output} from '@angular/core'; import {UtilsServiceService} from "../../../utils-service.service"; import {DatePipe} from "@angular/common"; @Component({ selector: 'ngx-edit-facture', templateUrl: './edit-facture.component.html', styleUrls: ['./edit-facture.component.scss'] }) export class EditFactureComponent implements OnInit { @Input() facture = { factureId: null, factureNumber: '', factureCurrency: 'TND', factureDate: null, customer: null, products: null, totalHTBrut: 0, totalHTBrutS:'0', remise: 0, remiseS:'0', totalHT: 0, totalHTS:'0', totalTVA: 0, totalTVAS:'0', totalFodec: 0, totalFodecS:'0', totalTaxe: 0, totalTaxeS:'0', timbreFiscal: 0.600, timbreFiscalS:'0.600', totalTTC: 0, totalTTCS:'0', montantFacture: 0, montantFactureS:'0', factureLines: [], factureCondition:'', commercialName: '', //factureDeadlineDate:null, //factureDeadlineInNumberOfDays:0, } @Output() editFactureEvent = new EventEmitter(); @Output() cancelEvent = new EventEmitter(); clients = []; produits = []; productGroups = []; maxDateFactureDate; minDateDeadlineDate; timeLine = { timeLineId: null, timeLineTable: [], }; line = { product: { productId: null, productLabel: '', productReference: '', productDescription: '', productUrlImage: '', productPrixHT: 0, productTVA: 0, productFodec: 0, productTTC: 0, productUnite: 'PIECE', productType: 'MATERIEL' }, productGroup:null, quantity: 1, remiseTaux:0, remiseValeur:null, montantHt:null, montantHtBrut:null, montantTva:null, montantFaudec:null, }; produit = null; showProduitWindow = false; factureNumber=""; facturePrefix=""; constructor(private UtilsService: UtilsServiceService, private datePipe: DatePipe) { } ngOnInit(): void { if (this.facture.factureDate == null) { this.facture.factureDate = this.datePipe.transform(new Date(), 'yyyy-MM-dd'); } this.maxDateFactureDate = this.datePipe.transform(new Date(), 'yyyy-MM-dd'); this.minDateDeadlineDate = this.datePipe.transform(new Date(), 
'yyyy-MM-dd'); this.initiateLine(); this.getAllCustomers(); //this.getAllProdcuts(); this.getAllProductsGroups(); this.initProduit(); this.facture.factureLines.forEach(line=>{ this.timeLine.timeLineTable.push(line) }) if(this.facture.factureNumber != null && this.facture.factureNumber != ""){ this.facturePrefix=this.facture.factureNumber.substr(0,10); this.factureNumber=this.facture.factureNumber.substr(10,4); } this.calculPrixTotalFacture() } initProduit() { this.produit = { productId: null, productLabel: '', productReference: '', productDescription: '', productUrlImage: '', productPrixHT: 0, productTVA: 0, productFaudec: 0, productTTC: 0, productUnite: 'PIECE', productType: 'MATERIEL' }; } getAllCustomers() { const context = this; this.UtilsService.get(UtilsServiceService.API_CLIENT).subscribe(response => { context.clients = response; if (this.clients.length > 0 && this.facture.customer == null) { this.facture.customer = this.clients[0]; } }, error => { this.UtilsService.showToast('danger', 'Erreur interne', `Un erreur interne a été produit lors du chargement des clients`); }); } /* getAllProdcuts() { const context = this; this.UtilsService.get(UtilsServiceService.API_PRODUIT).subscribe(response => { context.produits = response; if (this.produit.length > 0) { this.line.product = this.produits[0]; } }, error => { this.UtilsService.showToast('danger', 'Erreur interne', `Un erreur interne a été produit lors du chargement des produits`); }); } */ compareCustomer(a: any, b: any): boolean { if (a == null || b == null) return true; return a.customerId === b.customerId; } compareCurrency(a: any, b: any): boolean { if (a == null || b == null) return true; return a === b; } initiateLine() { this.line = { product: { productId: null, productLabel: '', productReference: '', productDescription: '', productUrlImage: '', productPrixHT: 0, productTVA: 0, productFodec: 0, productTTC: 0, productUnite: 'PIECE', productType: 'MATERIEL' }, productGroup:null, quantity: 1, remiseTaux: 
0, remiseValeur:null, montantHt:null, montantHtBrut:null, montantTva:null, montantFaudec:null, }; } deleteLine(i,line) { if(line.factureLineId != null && line.factureLineId != ''){ this.UtilsService.delete(UtilsServiceService.API_FACTURELINE+'/'+ line.factureLineId ).subscribe(response => { this.timeLine.timeLineTable.splice(i, 1); this.calculPrixTotalFacture(); }, error => { this.UtilsService.showToast('danger', 'Erreur interne', `Un erreur interne a été produit lors du du Suppression De ligne`); }); }else{ this.timeLine.timeLineTable.splice(i, 1); this.calculPrixTotalFacture(); } } addLine() { this.line.remiseTaux=this.convertAmount(this.line.remiseTaux); this.line.remiseValeur=this.convertAmount(this.line.remiseValeur); this.line.montantFaudec=this.convertAmount(this.line.montantFaudec); this.line.montantHt=this.convertAmount(this.line.montantHt); this.line.montantHtBrut=this.convertAmount(this.line.montantHtBrut); this.line.montantTva=this.convertAmount(this.line.montantTva); this.timeLine.timeLineTable.push(this.line); this.calculPrixTotalFacture(); this.initiateLine(); } cancel() { this.cancelEvent.emit(); } checkGeneratedFactureValid() { return this.facture.factureNumber == "" || this.facture.factureNumber == null || this.facture.customer == "" || this.facture.customer == null || this.timeLine.timeLineTable.length == 0; } saveGeneratedFacture(){ this.facture.factureLines=this.timeLine.timeLineTable; this.facture.factureNumber=this.facturePrefix+this.factureNumber; this.editFactureEvent.emit(this.facture); } showProduitModal() { this.showProduitWindow = true; } hideProduitWindow() { this.showProduitWindow = false; } saveNewProduit($) { const context = this; this.UtilsService.post(UtilsServiceService.API_PRODUIT, this.produit).subscribe(response => { this.hideProduitWindow(); if (context.produit.productId == null) { this.UtilsService.showToast('success', 'produit ajouté avec succés', `Le produit ${this.produit.productLabel} a été ajouté avec succcés`); } else 
{ this.UtilsService.showToast('success', 'produit modfié avec succés', `Le produit ${this.produit.productLabel} a été modifié avec succcés`); } //context.getAllProdcuts(); context.initProduit(); }, error => { this.UtilsService.showToast('danger', 'Erreur interne', `Un erreur interne a été produit lors de la souvegarde du produit ${this.produit.productLabel}`); }); } changeProduct(event) { this.line.product = event; this.calculPrixTotal(); } convertLine(line) { line.remiseTaux=this.convertAmount(line.remiseTaux); line.remiseValeur=this.convertAmount(line.remiseValeur); line.montantFaudec=this.convertAmount(line.montantFaudec); line.montantHt=this.convertAmount(line.montantHt); line.montantHtBrut=this.convertAmount(line.montantHtBrut); line.montantTva=this.convertAmount(line.montantTva); } changeProductGroup(line){ line.product = line.productGroup.productList[0]; this.calculPrixTotalEdited(line); } calculPrixTotal(){ this.line.montantHtBrut=this.line.product.productPrixHT*this.line.quantity this.line.remiseValeur=this.line.montantHtBrut*(this.line.remiseTaux/100); this.line.montantHt=this.line.montantHtBrut-this.line.remiseValeur; this.line.montantFaudec=this.line.montantHt*(this.line.product.productFodec/100); this.line.montantTva=(this.line.montantHt+this.line.montantFaudec)*(this.line.product.productTVA/100); this.calculPrixTotalFacture(); this.convertLine(this.line); } calculPrixTotalEdited(line){ line.montantHtBrut=line.product.productPrixHT*line.quantity line.remiseValeur=line.montantHtBrut*(line.remiseTaux/100); line.montantHt=line.montantHtBrut-line.remiseValeur; line.montantHtBrut=this.convertAmount(line.montantHtBrut); line.montantFaudec=line.montantHt*(line.product.productFodec/100); line.montantTva=(line.montantHt+line.montantFaudec)*(line.product.productTVA/100); this.calculPrixTotalFacture(); this.convertLine(line); } calculPrixTotalFacture() { this.facture.totalHTBrut = 0; this.facture.totalHT = 0; this.facture.totalTVA = 0; this.facture.totalFodec = 
0; this.facture.remise = 0; this.facture.totalTaxe = this.facture.totalTVA + this.facture.totalFodec + this.facture.timbreFiscal; this.facture.totalTaxeS=this.UtilsService.convertAmountToString(this.facture.totalTaxe.toString()); this.facture.totalTTC = this.facture.totalHT + this.facture.totalTaxe; this.facture.totalTTCS=this.UtilsService.convertAmountToString(this.facture.totalTTC.toString()); this.timeLine.timeLineTable.forEach((line) => { this.facture.totalHTBrut += line.montantHtBrut; this.facture.totalHT += line.montantHt; this.facture.totalTVA += line.montantTva; this.facture.totalFodec += line.montantFaudec; this.facture.remise += line.remiseValeur; this.facture.totalTaxe = this.facture.totalTVA + this.facture.totalFodec + this.facture.timbreFiscal; this.facture.totalTTC = this.facture.totalHT + this.facture.totalTaxe; }) this.facture.totalHTBrut=this.convertAmount(this.facture.totalHTBrut); this.facture.totalHTBrutS=this.UtilsService.convertAmountToString(this.facture.totalHTBrut.toString()); this.facture.totalHT=this.convertAmount(this.facture.totalHT); this.facture.totalHTS=this.UtilsService.convertAmountToString(this.facture.totalHT.toString()); this.facture.totalTVA=this.convertAmount(this.facture.totalTVA); this.facture.totalTVAS=this.UtilsService.convertAmountToString(this.facture.totalTVA.toString()); this.facture.totalFodec=this.convertAmount(this.facture.totalFodec); this.facture.totalFodecS=this.UtilsService.convertAmountToString(this.facture.totalFodec.toString()); this.facture.remise=this.convertAmount(this.facture.remise); this.facture.remiseS=this.UtilsService.convertAmountToString(this.facture.remise.toString()); this.facture.totalTaxe=this.convertAmount(this.facture.totalTaxe); this.facture.totalTaxeS=this.UtilsService.convertAmountToString(this.facture.totalTaxe.toString()); this.facture.totalTTC=this.convertAmount(this.facture.totalTTC); this.facture.totalTTCS=this.UtilsService.convertAmountToString(this.facture.totalTTC.toString()); } 
convertAmount(amount):any { return amount=Math.round(amount * 1000) / 1000; } changeFactureDate() { /* this.minDateDeadlineDate = this.facture.factureDate; if (this.facture.factureDate > this.facture.factureDeadlineDate) { this.facture.factureDeadlineDate = this.facture.factureDate; } const factureDate = new Date(this.facture.factureDate); let limitDate: Date; limitDate = new Date(this.facture.factureDeadlineDate); const time = (limitDate.valueOf() - factureDate.valueOf()) / 86400000; this.facture.factureDeadlineInNumberOfDays = time;*/ } changeNumberOfDeadlineDaysNumber() { /* const factureDate = new Date(this.facture.factureDate); let limitDate: Date; limitDate = new Date(this.facture.factureDate); limitDate.setDate(factureDate.getDate() + this.facture.factureDeadlineInNumberOfDays); this.facture.factureDeadlineDate = this.datePipe.transform(limitDate, 'yyyy-MM-dd');*/ } changeDeadLineDate() { /*const limitDate = new Date(this.facture.factureDeadlineDate); const factureDate = new Date(this.facture.factureDate); const time = (limitDate.valueOf() - factureDate.valueOf()) / 86400000; this.facture.factureDeadlineInNumberOfDays = time;*/ } getAllProductsGroups() { this.UtilsService.get(UtilsServiceService.API_PRODUIT_GROUP).subscribe(response => { let productGroups = response; productGroups.forEach(productGroup => { if(productGroup.productList.length>0){ this.productGroups.push(productGroup); } }) }, error => { this.UtilsService.showToast('danger', 'Erreur interne', `Un erreur interne a été produit lors du chargement des familles de produits`); }); } }
"""Train a DQN agent on the Unity "Banana" environment until it reaches a
100-episode average score of `solved_score`, then save the model weights."""

from collections import deque  # bug fix: used below but was never imported

import numpy as np  # bug fix: used below but was never imported

from drltools.utils import dqn_config, trainer
from drltools.agent import DQNAgent
from unityagents import UnityEnvironment

# Launch the Unity environment (note: side effect at import time).
env = UnityEnvironment(file_name="unity_environments/Banana_mac.app", worker_id=1)

# Training configuration.
config = dqn_config
agent_class = DQNAgent
n_episodes = 2000     # maximum number of training episodes
max_t = 1000          # maximum steps per episode
solved_score = 13     # 100-episode average that counts as "solved"
title = 'DQN'

if __name__ == "__main__":
    # Initialize the DQN agent
    agent = agent_class(config)

    # Set up the environment
    brain_name = env.brain_names[0]
    brain = env.brains[brain_name]
    env_info = env.reset(train_mode=True)[brain_name]
    action_size = brain.vector_action_space_size
    state = env_info.vector_observations[0]
    state_size = len(state)

    scores = []                         # score from every episode
    scores_window = deque(maxlen=100)   # last 100 scores, for the rolling average

    for i_episode in range(1, n_episodes + 1):
        env_info = env.reset(train_mode=True)[brain_name]  # reset the environment
        state = env_info.vector_observations[0]            # get the initial state
        score = 0                                          # score for this episode
        for t in range(max_t):
            action = agent.act(state)                      # select an action
            env_info = env.step(action)[brain_name]        # send the action to the environment
            next_state = env_info.vector_observations[0]
            reward = env_info.rewards[0]
            done = env_info.local_done[0]
            agent.step(state, action, reward, next_state, done)  # learn from the transition
            state = next_state
            score += reward
            if done:
                break
        scores_window.append(score)  # save most recent score
        scores.append(score)
        print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_window)), end="")
        if i_episode % 100 == 0:
            print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_window)))
        if np.mean(scores_window) >= solved_score:
            print('\nEnvironment solved in {:d} episodes!\tAverage Score: {:.2f}'.format(i_episode - 100, np.mean(scores_window)))
            agent.save_model_weights(title + '_solved.pth')
            break

    env.close()
import java.util.ArrayList;

/**
 * 3x3 tic-tac-toe board. Cells hold 'X', 'O', or '-' (empty).
 * Also contains a rule-based computer move chooser for 'O'.
 */
public class Board {
    char [][] grid = new char [3][3];
    int score; // NOTE(review): never read or written in this class

    /** Returns the live grid (callers share and may mutate it). */
    public char[][] get(){
        return grid;
    }

    /** Creates an empty board (every cell '-'). */
    Board (){
        for (int i=0;i<3;i++) {
            for (int j=0;j<3;j++) {
                this.grid[i][j]='-';
            }
        }
    }

    /**
     * Places mark `a` at (x, y) if the cell is empty.
     * Returns true on success, false if the cell was occupied.
     */
    public Boolean move (int x , int y ,char a) {
        char [][] q =get();
        if (q[x][y]=='-') {
            q[x][y]=a;
            return true;
        }
        else {
            return false;
        }
    }

    /** Prints the board to stdout, one row per line. */
    public void print () {
        char [][] q =get();
        for (int i=0;i<3;i++) {
            for (int j=0;j<3;j++) {
                System.out.printf("%c ", q[i][j]);
            }
            System.out.printf("\n");
        }
    }

    /**
     * Game state: 1 = someone has three in a row (the winning line is
     * highlighted via TicTacToe.setcolor), 2 = game continues, 3 = draw.
     */
    public int status () {
        char [][] q =get();
        for (int i=0 ;i<3;i++) {
            // First cell of row i / column i; stored as int, compared against
            // chars below via integer promotion.
            int temp1=q[i][0];
            int temp2=q[0][i];
            int count1=0;
            int count2=0;
            for (int j=0;j<3;j++) {
                if (q[i][j]==temp1 && q[i][j]!='-') count1++;
                if (q[j][i]==temp2 && q[j][i]!='-') count2++;
            }
            if (count1==3) {
                TicTacToe.setcolor(i,0,i,1,i,2);
                return 1;
            }
            if (count2 == 3) {
                TicTacToe.setcolor(0,i,1,i,2,i);
                return 1;
            }
        }
        // Diagonals.
        if (q[0][0] == q[1][1] && q[1][1]==q[2][2] && q[1][1]!='-'){
            TicTacToe.setcolor(0,0,1,1,2,2);
            return 1; // win
        }
        else if (q[0][2] == q[1][1] && q[1][1]==q[2][0] && q[1][1]!='-'){
            TicTacToe.setcolor(0,2,1,1,2,0);
            return 1;
        }
        // Any empty cell left means the game continues.
        for (int i=0;i<3;i++) {
            for (int j=0;j<3;j++) {
                if (q[i][j]=='-') return 2; //continue
            }
        }
        return 3; // draw
    }

    /**
     * Returns the coordinates of all empty cells as (row, col) pairs.
     * NOTE(review): the array is sized 100x2 but at most 9 entries are
     * filled; trailing rows stay zeroed.
     */
    public int[][] post() {
        int[][] arr = new int[100][2];
        int k=0;
        for (int i=0;i<3;i++) {
            for (int j=0;j<3;j++) {
                if (grid[i][j]=='-') {
                    arr[k][0]=i;
                    arr[k][1]=j;
                    k++;
                }
            }
        }
        return arr;
    }

    /**
     * Rule-based move chooser for 'O' (despite the name, not a real minimax).
     * Priority: (1) complete an O win, (2) block an X win, (3) center, then
     * corners, then edges. Places the move on the board and returns its
     * [row, col]; returns null only if no move could be placed.
     * NOTE(review): parameter `ch` is never used.
     * Throughout, countx/county index 0 counts 'X' marks and index 1 counts
     * 'O' marks for the line being examined.
     */
    public ArrayList<Integer> minimax (char ch) {
        int []countx = new int [2];
        int []county = new int [2];
        ArrayList<Integer> cord =new ArrayList<>();
        // TO CHECK WIN: rows (countx) and columns (county)
        for (int i=0;i<3;i++) {
            countx[0]=0;
            countx[1]=0;
            county[0]=0;
            county[1]=0;
            for (int j=0;j<3;j++){
                if (grid[i][j]=='X') {
                    countx[0]++;
                }
                else if (grid[i][j]=='O') {
                    countx[1]++;
                }
                if (grid[j][i]=='X') {
                    county[0]++;
                }
                else if (grid[j][i]=='O') {
                    county[1]++;
                }
            }
            // Two O's and no X in row i: take the empty cell to win.
            if (countx[1]==2 && countx[0]==0){
                for (int k=0;k<3;k++) {
                    if (grid[i][k]=='-') {
                        move(i,k,'O');
                        cord.add(i);
                        cord.add(k);
                        return cord;
                    }
                }
                return null;
            }
            // Two O's and no X in column i: take the empty cell to win.
            else if (county[1]==2 && county[0]==0){
                for (int k=0;k<3;k++){
                    if (grid[k][i]=='-') {
                        move(k,i,'O');
                        cord.add(k);
                        cord.add(i);
                        return cord;
                    }
                }
                return null;
            }
        }
        // Win check on the two diagonals (countx = main, county = anti).
        countx[0]=0;
        countx[1]=0;
        county[0]=0;
        county[1]=0;
        for (int i=0;i<3;i++) {
            if (grid[i][i]=='X') {
                countx[0]++;
            }
            else if (grid[i][i]=='O') {
                countx[1]++;
            }
            if (grid[i][2-i]=='X') {
                county[0]++;
            }
            else if (grid[i][2-i]=='O') {
                county[1]++;
            }
        }
        if ( countx[1]==2 && countx[0]==0) {
            for (int k=0;k<3;k++) {
                if (grid[k][k]=='-') {
                    move(k,k,'O');
                    cord.add(k);
                    cord.add(k);
                    return cord;
                }
            }
            return null;
        }
        else if (county[1]==2 && county[0]==0) {
            for (int k=0;k<3;k++) {
                if (grid[k][2-k]=='-') {
                    move(k,2-k,'O');
                    cord.add(k);
                    cord.add(2-k);
                    return cord;
                }
            }
            return null;
        }
        // TO BLOCK: same scans, but now looking for two X's with no O.
        for (int i=0;i<3;i++) {
            countx[0]=0;
            countx[1]=0;
            county[0]=0;
            county[1]=0;
            for (int j=0;j<3;j++) {
                if (grid[i][j]=='X'){
                    countx[0]++;
                }
                else if (grid[i][j]=='O') {
                    countx[1]++;
                }
                if (grid[j][i]=='X') {
                    county[0]++;
                }
                else if (grid[j][i]=='O') {
                    county[1]++;
                }
            }
            if (countx[0]==2 && countx[1]==0) {
                for (int k=0;k<3;k++) {
                    if (grid[i][k]=='-') {
                        move(i,k,'O');
                        cord.add(i);
                        cord.add(k);
                        return cord;
                    }
                }
                return null;
            }
            else if (county[0]==2 && county[1]==0) {
                for (int k=0;k<3;k++) {
                    if (grid[k][i]=='-') {
                        move(k,i,'O');
                        cord.add(k);
                        cord.add(i);
                        return cord;
                    }
                }
                return null;
            }
        }
        // Block check on the two diagonals.
        countx[0]=0;
        countx[1]=0;
        county[0]=0;
        county[1]=0;
        for (int i=0;i<3;i++) {
            if (grid[i][i]=='X'){
                countx[0]++;
            }
            else if (grid[i][i]=='O') {
                countx[1]++;
            }
            if (grid[i][2-i]=='X') {
                county[0]++;
            }
            else if (grid[i][2-i]=='O') {
                county[1]++;
            }
        }
        if ( countx[0]==2 && countx[1]==0) {
            for (int k=0;k<3;k++) {
                if (grid[k][k]=='-') {
                    move(k,k,'O');
                    cord.add(k);
                    cord.add(k);
                    return cord;
                }
            }
            return null;
        }
        else if (county[0]==2 && county[1]==0) {
            for (int k=0;k<3;k++) {
                if (grid[k][2-k]=='-') {
                    move(k,2-k,'O');
                    cord.add(k);
                    cord.add(2-k);
                    return cord;
                }
            }
            return null;
        }
        // CENTER , CORNER , EDGE — positional fallback preferences.
        if (grid[1][1]=='-') {
            move(1,1,'O');
            cord.add(1);
            cord.add(1);
            return cord;
        }
        else if (grid[0][0]=='-') {
            move (0,0,'O');
            cord.add(0);
            cord.add(0);
            return cord;
        }
        else if (grid[0][2]=='-') {
            move (0,2,'O');
            cord.add(0);
            cord.add(2);
            return cord;
        }
        else if (grid[2][0]=='-') {
            move (2,0,'O');
            cord.add(2);
            cord.add(0);
            return cord;
        }
        else if (grid[2][2]=='-') {
            move (2,2,'O');
            cord.add(2);
            cord.add(2);
            return cord;
        }
        else if (grid[0][1]=='-') {
            move (0,1,'O');
            cord.add(0);
            cord.add(1);
            return cord;
        }
        else if (grid[2][1]=='-') {
            move (2,1,'O');
            cord.add(2);
            cord.add(1);
            return cord;
        }
        else if (grid[1][2]=='-') {
            move (1,2,'O');
            cord.add(1);
            cord.add(2);
            return cord;
        }
        else if (grid[1][0]=='-') {
            move (1,0,'O');
            cord.add(1);
            cord.add(0);
            return cord;
        }
        return null;
    }
}
package ca.bc.gov.educ.gtts.services;

import org.javers.core.diff.Diff;

/**
 * A reporting tool for creating detailed reports based on various data inputs.
 */
public interface ReportService {
    /**
     * Reports differences based on a JaVers {@link Diff} object.
     *
     * @param ref an optional reference label to include in the report
     * @param diff the JaVers diff whose changes are to be reported
     */
    void reportDifferences(String ref, Diff diff);
}
#! /bin/bash
# SLURM batch job for a SWEET REXI scalability run (14 MPI ranks, 1 thread each).
# The #SBATCH lines below are scheduler directives — do not edit casually.
#SBATCH -o /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_spec/run_rexi_m008192_t001_n0128_r0014_a1.txt
###SBATCH -e /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_spec/run_rexi_m008192_t001_n0128_r0014_a1.err
#SBATCH -J rexi_m008192_t001_n0128_r0014_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=14
#SBATCH --cpus-per-task=1
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=03:00:00

#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1

# Pin OpenMP threads; this run is MPI-only (1 thread per rank).
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=1

echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo

# Load the toolchain expected by the prebuilt binary.
. /etc/profile.d/modules.sh

module unload gcc
module unload fftw
module unload python

module load python/2.7_anaconda_nompi

module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5

cd /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_spec
cd ../../../

. local_software/env_vars.sh

# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T0"

# Run the benchmark: 14 ranks, one per node slot (-ppn 28 processes per node).
time -p mpiexec.hydra -genv OMP_NUM_THREADS 1 -envall -ppn 28 -n 14 ./build/rexi_m_tno_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 128 -U 0 -S 0 --use-specdiff-for-complex-array 1 --rexi-h 0.2 --timestepping-mode 1 --staggering 0 --rexi-m=8192 -C -5.0
package frc.robot.auto.commands.shooter;

import edu.wpi.first.wpilibj.Timer;
import frc.robot.auto.Command;

/**
 * Autonomous command that spins the shooter flywheels at target speeds for a
 * fixed duration, feeding balls with the indexer once the top wheel is up to
 * speed. The `robot` reference used below presumably comes from the Command
 * base class — confirm against frc.robot.auto.Command.
 */
public class ShootCommand extends Command {
    private double topWheelSpeed;     // target top flywheel speed, RPM
    private double bottomWheelSpeed;  // target bottom flywheel speed, RPM
    private double duration;          // how long to shoot, seconds
    private double startTime;         // FPGA timestamp captured at start()

    /**
     * Setup shoot command
     *
     * @param topWheelSpeed in RPM - should probably be 1300
     * @param bottomWheelSpeed in RPM
     * @param duration in seconds
     */
    public ShootCommand(double topWheelSpeed, double bottomWheelSpeed, double duration) {
        this.topWheelSpeed = topWheelSpeed;
        this.bottomWheelSpeed = bottomWheelSpeed;
        this.duration = duration;
    }

    /** Records the start time so isFinished() can measure elapsed time. */
    @Override
    public void start() {
        startTime = Timer.getFPGATimestamp();
    }

    /** Keeps the flywheels at speed; runs the indexer once the top wheel exceeds target. */
    @Override
    public void loop() {
        robot.getFlywheel().update(topWheelSpeed, bottomWheelSpeed);
        // Feed only when the top wheel has actually spun up past the setpoint.
        if(robot.getFlywheel().getTopVel() > topWheelSpeed)
            robot.getIndexer().drive(-1);
    }

    /** Done once `duration` seconds have elapsed since start(). */
    @Override
    public boolean isFinished() {
        return (Timer.getFPGATimestamp() - startTime) > duration;
    }

    /** Stops the flywheels and indexer. */
    @Override
    public void close() {
        robot.getFlywheel().updatePercentage(0, 0);
        robot.getIndexer().drive(0);
    }
}
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#title=Checking Java Version

# header.sh (next to this script) provides $JAVA, quit() and isValidJavaVersion.
source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/header.sh

echo "Checking Java version..."
$JAVA -version 2>&1 || quit "ERROR: Detect java version failed. Please set JAVA_HOME."

# Abort unless the detected Java is at least 1.8.
if [[ `isValidJavaVersion` == "false" ]]; then
    quit "ERROR: Java 1.8 or above is required for Byzer Notebook"
fi
#!/bin/bash
# Snap launcher for the Kafka broker: validates config, wires up logging and
# JMX, sources optional per-broker environment, then execs the server script.

set -eu

# The broker cannot start without its main configuration file.
if [ ! -f $SNAP_COMMON/server.properties ]; then
    echo "configuration file $SNAP_COMMON/server.properties does not exist."
    exit 1
fi

# Use custom log4j properties if found
if [ -f $SNAP_COMMON/log4j.properties ]; then
    export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$SNAP_COMMON/log4j.properties"
fi

export PATH=$SNAP/usr/lib/jvm/default-java/bin:$PATH
export LOG_DIR=$SNAP_COMMON/log

# JMX is only available on localhost:9999
export JMX_PORT=${JMX_PORT:-9999}
export KAFKA_JMX_OPTS="-Djava.rmi.server.hostname=localhost \
-Djava.net.preferIPv4Stack=true \
-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false"

# Optional per-broker overrides sourced into this shell.
if [ -e "$SNAP_COMMON/broker.env" ]; then
    . $SNAP_COMMON/broker.env
fi

$SNAP/opt/kafka/bin/kafka-server-start.sh $SNAP_COMMON/server.properties
# This example scans for any BLE advertisements and prints one advertisement and one scan response
# from every device found. This scan is more detailed than the simple test because it includes
# specialty advertising types.

from adafruit_ble import BLERadio
from adafruit_ble.advertising import Advertisement
from adafruit_ble.advertising.standard import ProvideServicesAdvertisement

ble = BLERadio()
print("scanning")

# Track which addresses we have already printed, separately for plain
# advertisements and for scan responses.
found = set()
scan_responses = set()

# By providing Advertisement as well we include everything, not just specific advertisements.
for advertisement in ble.start_scan(ProvideServicesAdvertisement, Advertisement):
    addr = advertisement.address
    # Pick the dedupe set that matches this packet type, then skip repeats.
    seen = scan_responses if advertisement.scan_response else found
    if addr in seen:
        continue
    seen.add(addr)
    print(addr, advertisement)
    print("\t" + repr(advertisement))
    print()

print("scan done")
// Settings page behaviour: sidebar menu init, password/pincode reset forms,
// and a shared confirmation modal. All handlers are delegated via
// $(document).on(...) so they survive dynamic DOM replacement.

$(function() {
  $('#side-menu').metisMenu();
});

$(document).ready(function(){
});

// Toggling any checkbox opens the confirmation modal with the matching
// msg-on / msg-off text.
$(document).on("change", "input[type='checkbox']", function(){
  hideAlert("success");
  hideAlert("danger");
  if(this.checked)
    $("#confirm-msg").text($(this).attr("msg-on"));
  else
    $("#confirm-msg").text($(this).attr("msg-off"));
  console.log($(this).attr("coin"));
  $("#confirm-modal").modal();
});

// Password reset: validate, confirm match, then ask for confirmation.
$(document).on("click", "#resetpwd", function(){
  hideAlert("success");
  hideAlert("danger");
  var newPwd = $("#newpwd").val();
  if(!Utils.isValidPassword(newPwd)){
    showAlert("danger", "Invalid password (min. 8 chars, one digit, one uppercase )");
    return;
  }
  if($("#confirmpwd").val() !== newPwd){
    showAlert("danger", "Password mismatch");
    return;
  }
  $("#confirm-msg").text("Are you sure you want to updated your password ?");
  $("#confirm-ok").data("operation", "password");
  $("#confirm-modal").modal();
});

// Pincode reset: validate, confirm match, then ask for confirmation.
$(document).on("click", "#resetcode", function(){
  hideAlert("success");
  hideAlert("danger");
  var newPin = $("#newcode").val();
  if(!Utils.isValidCode(newPin)){
    showAlert("danger", "Invalid code, please provide 5 digit");
    return;
  }
  if($("#confirmcode").val() !== newPin){
    showAlert("danger", "Pincode mismatch");
    return;
  }
  $("#confirm-msg").text("Are you sure you want to updated your pincode ?");
  // Bug fix: this handler previously tagged the operation as "password"
  // (copy-paste from #resetpwd), making the two flows indistinguishable
  // in the confirm handler.
  $("#confirm-ok").data("operation", "pincode");
  $("#confirm-modal").modal();
});

// Confirmation accepted: the tagged operation says which flow was confirmed.
$(document).on("click", "#confirm-ok", function(){
  console.log($(this).data("operation"));
  showAlert("success", "Operation success");
  $("#confirm-modal").modal('hide');
});

$(document).on("click", "#confirm-canc", function(){
  hideAlert("success");
  hideAlert("danger");
  $("#confirm-modal").modal('hide');
});

// Show the .alert-success / .alert-danger banner with the given message.
function showAlert(type, msg){
  $(".alert-" + type).text(msg);
  $(".alert-" + type).fadeIn();
}

// Hide the .alert-success / .alert-danger banner.
function hideAlert(type){
  $(".alert-" + type).fadeOut();
}
<filename>jena-3.0.1/jena-sdb/src/main/java/org/apache/jena/sdb/store/StoreBaseHSQL.java<gh_stars>0 /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.sdb.store; import org.apache.jena.sdb.Store ; import org.apache.jena.sdb.StoreDesc ; import org.apache.jena.sdb.compiler.QueryCompilerFactory ; import org.apache.jena.sdb.core.sqlnode.GenerateSQL ; import org.apache.jena.sdb.layout2.StoreBase ; import org.apache.jena.sdb.layout2.TableDescNodes ; import org.apache.jena.sdb.layout2.TableDescQuads ; import org.apache.jena.sdb.layout2.TableDescTriples ; import org.apache.jena.sdb.sql.SDBConnection ; import org.apache.jena.sdb.util.HSQLUtils ; import org.apache.jena.sdb.util.StoreUtils ; public abstract class StoreBaseHSQL extends StoreBase { protected boolean currentlyOpen = true ; public StoreBaseHSQL(SDBConnection connection, StoreDesc desc, StoreFormatter formatter, StoreLoader loader, QueryCompilerFactory compilerF, SQLBridgeFactory sqlBridgeF, TableDescTriples tripleTableDesc, TableDescQuads quadTableDesc, TableDescNodes nodeTableDesc) { super(connection, desc, formatter, loader, compilerF, sqlBridgeF, new GenerateSQL(), tripleTableDesc, quadTableDesc, nodeTableDesc) ; } @Override public void close() { if ( currentlyOpen ) { 
super.close() ; // This interacts with JDBC connection management HSQLUtils.shutdown(getConnection()) ; } currentlyOpen = false ; super.close(); } public static void close(Store store) { if ( StoreUtils.isHSQL(store) ) ((StoreBaseHSQL)store).close() ; } public static void checkpoint(Store store) { if ( StoreUtils.isHSQL(store) ) ((StoreBaseHSQL)store).checkpoint() ; } public void checkpoint() { if ( currentlyOpen ) HSQLUtils.checkpoint(getConnection()) ; } }
<gh_stars>0 import React from 'react'; import Icon from 'react-native-vector-icons/Feather'; import { Container } from './styles'; export default function BackButton() { return ( <Container> <Icon name="arrow-back" size={18} color={'#7159c1'} /> </Container> ); }
#!/usr/bin/env bash

# Recursively list all files in this Git checkout, including files in
# submodules.

set -euo pipefail

# Collect the relative path of every registered submodule.
submodules=($(git submodule foreach --quiet 'echo $path'))

# IMPORTANT: This script must never output a bare directory. That is, given a
# directory tree with files a/1 and a/2, this script must output "a/1 \n a/2"
# and not "a/ \n a/1 \n a/2". Bare directories will cause e.g. tar to include
# the entire directory tree, then re-include the files when the files in the
# directory are listed on the following lines. These duplicate files will break
# tar extraction horribly.
#
# git ls-files gets this right with the notable exception of submodules, which
# are always output as a bare directory. We filter them out manually with the
# parameter expansion below, which prefixes every path in the submodules array
# with `:(exclude)`, resulting in a final command like:
#
#   git ls-files . :(exclude)vendor :(exclude)c-deps/jemalloc...
#
git ls-files . "${submodules[@]/#/:(exclude)}"

# Then, we list all the files *within* each submodule, without listing the bare
# submodule directory. Each path is re-prefixed with the submodule's own path
# so it is relative to the repository root.
for submodule in "${submodules[@]}"; do
  git -C "$submodule" ls-files | sed -e "s,^,$submodule/,"
done
# Source pdf-mydoc.sh into the current shell so it runs in this shell's
# environment (no subshell). Note: no shebang — this file is itself meant to
# be sourced or run from an existing shell; pdf-mydoc.sh must be on $PATH or
# in the current directory.
. pdf-mydoc.sh;
#!/bin/sh
# Build the Hugo site inside the docker-compose "hugo" service container.

# Build Hugo Project
echo "🔨 Building Hugo Site"

## Hugo Options used for building the site
# -d dist directory
# -gc run some cleanup tasks (remove unused cache files) after the build
# --minify minify the output
# --cleanDestinationDir remove files from destination not found in static directories
# -v verbose mode

# NOTE(review): CURRENT_UID is assigned but not exported, so docker-compose
# will not see it in the environment — confirm whether this should be
# `export CURRENT_UID=...` (commonly used to run the container as the host user).
CURRENT_UID="$(id -u):$(id -g)"

docker-compose run --rm hugo build --minify --gc -v --cleanDestinationDir -d dist

echo "✅ build complete"
package com.jinke.kanbox;

import java.io.File;
import java.io.IOException;
import java.text.DateFormat;
import java.util.Date;
import java.util.List;

import com.jinke.calligraphy.activity.DownloadProgressActivity;
import com.jinke.calligraphy.app.branch.Start;
import com.jinke.calligraphy.database.CDBPersistent;

import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.widget.Toast;

/**
 * Background thread that backs up the user's calligraphy pages to Kanbox
 * cloud storage. Flow: first retry any previously-failed uploads queued in
 * Start.getDownloadList(); otherwise, zip each modified page, create the
 * remote directory, and upload page archives plus the app database.
 * Further steps are driven asynchronously through the RequestListener
 * callbacks (onComplete/onError) implemented at the bottom of this class.
 * Progress/errors are reported to DownloadProgressActivity.barTextHandler.
 */
public class UploadAllFileThread extends Thread implements RequestListener{

    String zipPath = "";          // local path of the whole-archive zip (whole-archive mode only)
    List<Integer> pageList;       // page numbers that need uploading
    String dstName;               // remote base directory, "/<user>/calligraphy"
    boolean iscopy ;              // if true, back up an existing remote dir by copying it first

    // NOTE(review): parameter name "iscoyp" is a typo for "iscopy".
    public UploadAllFileThread(boolean iscoyp){
        zipPath = Start.getStoragePath() + "/" + Start.username + ".zip";
        this.iscopy = iscoyp;
    }

    @Override
    public void run() {
        super.run();
        // A Looper is needed because Toasts are shown from this thread.
        Looper.prepare();
        CDBPersistent db = new CDBPersistent(Start.context);
        db.open();
        pageList = db.getUploadPage();
        if(Start.getDownloadList().size() != 0){
            // Retry path: re-upload entries that failed previously.
            Log.e("update", "----------------->>>>>>>>>>>>>>>>>>>>>>>> getDownloadList:" + Start.getDownloadList().size());
            RequestListener listener = new RequestListenerImplement(Start.getDownloadList().size());
            String path;
            String dstPath;
            DownloadEntity enty;
            for(int i=0;i<Start.getDownloadList().size();i++){
                enty = Start.getDownloadList().get(i);
                path = enty.getPath();
                Log.e("update", "----------------->>>>>>>>>>>>>>>>>>>>>>>> getDownloadList:" + path);
                dstPath = enty.getDestPath();
                try {
                    Kanbox.getInstance().upload(path, dstPath , Token.getInstance(),listener);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            Start.clearDownloadList();
        }else{
            // Normal path: upload pages flagged as modified/new in the database.
            if(pageList != null){
                // Log the page numbers that need uploading.
                for(int i : pageList)
                    Log.e("uploadpage", "page:" + i);
                Message msg = new Message();
                msg.what = Start.KANBOX_START_UPLOAD;
                msg.obj = "共" + pageList.size()+ "页需要上传";
                DownloadProgressActivity.barTextHandler.sendMessage(msg);
                dstName = "/"+ Start.username + "/calligraphy";
                // The rest of the flow continues in onComplete() once the
                // remote directory has been created.
                makeDir(dstName);
                // Whole-archive mode (unused): zipAllPage(); uploadAllFile();
            }
            else{
                // Nothing to upload — inform the user and the progress UI.
                Log.e("uploadpage", "no page need upload");
                Toast.makeText(Start.context, "没有 修改或新建 过的文件需要备份", Toast.LENGTH_LONG).show();
                Start.barText.setVisibility(View.INVISIBLE);
                Message msg = new Message();
                msg.what = DownloadProgressActivity.KANBOX_UPLOAD_NONEED;
                msg.obj = "没有 修改或新建 过的文件需要备份";
                DownloadProgressActivity.barTextHandler.sendMessage(msg);
            }
        }
        db.close();
        Looper.loop();
    }

    /**
     * Zip every page file into one archive; used only in whole-archive
     * upload mode (currently disabled).
     */
    private void zipAllPage(){
        try {
            File zipFile = new File(zipPath);
            if(zipFile.exists()) zipFile.delete();
            Compressor.zip( zipPath, Start.getStoragePath() + "/calldir");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Upload the single whole archive; used only in whole-archive upload
     * mode (currently disabled).
     */
    private void uploadAllFile() {
        try {
            Kanbox.getInstance().upload(zipPath, "/" + Start.username + ".zip", Token.getInstance(), new RequestListenerImplement(0));
        } catch (IOException e) {
            e.printStackTrace();
            Toast.makeText(Start.context, "操作失败\n\n" + e.toString(), Toast.LENGTH_LONG).show();
        }
    }

    /** Create a remote directory; result arrives in onComplete/onError (OP_MAKE_DIR). */
    private void makeDir(String dirName) {
        Kanbox.getInstance().makeDir(Token.getInstance(), dirName, this);
    }

    /** Copy the existing remote directory to a timestamp-suffixed backup name. */
    private void copyFile() {
        DateFormat format2 = new java.text.SimpleDateFormat( "yyyyMMddHHmmss");
        String s = format2.format(new Date());
        Kanbox.getInstance().copyFile(Token.getInstance(), dstName, dstName+s, this);
    }

    /** Delete a remote file. NOTE(review): hard-coded test path; never called. */
    private void deleteFile() {
        Kanbox.getInstance().deleteFile(Token.getInstance(), "/kanbox.png", this);
    }

    /**
     * Zip each page that needs uploading and upload it (plus its index
     * thumbnail when present), then upload the app database file.
     *
     * @param pageList page numbers to upload
     * @param dstName  remote base directory
     */
    public void zipUploadPage(List<Integer> pageList,String dstName){
        String tempPath = "";
        String dirPath = "";
        File zipFile;
        int indexCount = 0;
        // Count existing index thumbnails so the listener knows the total
        // number of expected uploads (pages + db + thumbnails).
        for(int i : pageList){
            tempPath = Start.getStoragePath() + "/calldir/free_" + i + "/index_"+i + ".jpg";
            Log.e("uploadindex", "tempPath:" + tempPath + " exit:" + (new File(tempPath)).exists());
            if((new File(tempPath)).exists()) indexCount++;
        }
        RequestListener listener = new RequestListenerImplement(pageList.size() + 1 + indexCount);
        Message msg = new Message();
        for(int i : pageList){
            dirPath = "/calldir/free_" + i;
            tempPath = Start.getStoragePath() + "/calldir/page" + i + ".zip";
            zipFile = new File(tempPath);
            if(zipFile.exists()) zipFile.delete();
            try {
                Compressor.zipPage( tempPath, Start.getStoragePath() + dirPath);
                Log.e("uploadpage", "upload page" + i);
                msg = new Message();
                msg.what = DownloadProgressActivity.KANBOX_START_UPLOAD_PAGE;
                msg.obj = dirPath + "开始上传";
                DownloadProgressActivity.barTextHandler.sendMessage(msg);
                Kanbox.getInstance().upload(tempPath, dstName + "/page" + i + ".zip" , Token.getInstance(),listener);
                // Also upload the page's index thumbnail when it exists.
                tempPath = Start.getStoragePath() + "/calldir/free_" + i + "/index_"+i + ".jpg";
                Log.e("uploadindex", "tempPath:" + tempPath + " exit:" + (new File(tempPath)).exists());
                if((new File(tempPath)).exists()){
                    Kanbox.getInstance().upload(tempPath, dstName + "/index_"+i + ".jpg" , Token.getInstance(),listener);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        // Finally upload the application database.
        try {
            msg = new Message();
            msg.what = DownloadProgressActivity.KANBOX_START_UPLOAD;
            msg.obj = "数据库文件开始上传";
            DownloadProgressActivity.barTextHandler.sendMessage(msg);
            Kanbox.getInstance().upload( "/data/data/com.jinke.calligraphy.app.branch/databases/calligraphy.db", dstName + "/calligraphy.db" , Token.getInstance(), listener);
            Log.e("uploadpage", "upload databases" + dstName);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Announce success of remote-directory setup and start uploading all pages. */
    public void uploadAll(){
        Message msg = new Message();
        msg.what = DownloadProgressActivity.KANBOX_START_UPLOAD;
        msg.obj = "酷盘文件夹建立成功";
        DownloadProgressActivity.barTextHandler.sendMessage(msg);
        zipUploadPage(pageList,dstName);  // zip and upload every pending page
    }

    /** Async success callback from the Kanbox API; drives the next step of the flow. */
    @Override
    public void onComplete(String response, int operationType) {
        Log.e("dir", "Upload AllFileThread complete response\n:" + response);
        switch (operationType) {
        case OP_MAKE_DIR:
            Log.e("dir", "mkdir response:" + response);
            // If the remote dir already exists and backup-by-copy was
            // requested, copy it aside first; otherwise upload directly.
            if(response.contains("ERROR_PATH_EXIST") && iscopy)
                copyFile();
            else{
                Log.e("dir", "do not copy");
                uploadAll();
            }
            break;
        case OP_UPLOAD:
            // Only the database upload triggers user-visible completion feedback.
            if(response.contains(".db")){
                Toast.makeText(Start.context, "数据库文件上传成功", Toast.LENGTH_LONG).show();
                Message msg = new Message();
                msg.what = DownloadProgressActivity.KANBOX_START_UPLOAD;
                msg.obj = "数据库文件上传完成";
                DownloadProgressActivity.barTextHandler.sendMessage(msg);
                break;
            }
            break;
        case OP_COPY:
            // Backup copy done; proceed with the uploads.
            Log.e("dir", "dir copy success");
            uploadAll();
            break;
        default:
            break;
        }
    }

    /** Async failure callback (no path context); reports the error to the UI. */
    @Override
    public void onError(KanboxException error, int operationType) {
        Log.e("dir", "Upload AllFileThread complete error\n:" + error);
        Message msg;
        switch (operationType) {
        case OP_MAKE_DIR:
            Toast.makeText(Start.context, "创建文件夹失败,请重试", Toast.LENGTH_LONG).show();
            msg = new Message();
            msg.what = DownloadProgressActivity.KANBOX_ERROR;
            msg.obj = "创建文件夹失败,请退出重试";
            DownloadProgressActivity.barTextHandler.sendMessage(msg);
            break;
        case OP_UPLOAD:
            break;
        case OP_COPY:
            Log.e("dir", "dir copy failed");
            msg = new Message();
            msg.what = DownloadProgressActivity.KANBOX_ERROR;
            msg.obj = "酷盘网络备份失败,请退出重试";
            DownloadProgressActivity.barTextHandler.sendMessage(msg);
            break;
        default:
            break;
        }
    }

    /** Progress callback; unused in this thread. */
    @Override
    public void downloadProgress(long currSize) {
    }

    /**
     * Async failure callback with path context. Failed uploads are queued in
     * Start's download list so a later run can retry them (see run()).
     */
    @Override
    public void onError(KanboxException error, int operationType, String path, String destPath) {
        Log.e("dir", "Upload AllFileThread complete error\n:" + error);
        Message msg;
        switch (operationType) {
        case OP_MAKE_DIR:
            Log.e("upload", "make dir error");
            msg = new Message();
            msg.what = DownloadProgressActivity.KANBOX_ERROR;
            msg.obj = "网络异常,建立远程文件夹失败,请稍后重新尝试:";
            DownloadProgressActivity.barTextHandler.sendMessage(msg);
            break;
        case OP_UPLOAD:
            Log.e("upload", "upload dir error");
            // Remember the failed pair for a retry on the next run.
            Start.addDownloadEnty(new DownloadEntity(path, destPath));
            msg = new Message();
            msg.what = DownloadProgressActivity.KANBOX_ERROR_UPLOAD;
            msg.obj = "传输过程中存在错误:";
            DownloadProgressActivity.barTextHandler.sendMessage(msg);
            break;
        }
    }
}
#!/bin/bash
# AMK pipeline step: screen candidate TS structures, optimize the reference
# minimum (min0) with MOPAC, then set up and launch IRC calculations (forward
# and reverse) plus thermochemistry for every TS in $tslistll.
# Usage: irc.sh [screening|proceed]   (default: proceed)
# default sbatch FT2
#SBATCH -o irc-%j.log
#SBATCH --time=04:00:00
# partition selection
#_remove_this_in_ft_SBATCH -p shared --qos=shared
#SBATCH -c 1 --mem-per-cpu=2048
#SBATCH -n 8

# SBATCH --partition=cola-corta,thinnodes
# SBATCH -c 1
# SBATCH -n 24

#exe=$(basename $0) # under batchs systems the scripts are copied to a generic script (in slurm slurm_script)
exe="irc.sh"
cwd=$PWD
sharedir=${AMK}/share

source utils.sh
#check the arguments of the script
if [ $# -gt 0 ]; then
   ci=$1
else
   ci="proceed"
fi

# The AMK input file must exist in the working directory.
if [ -f amk.dat ];then
   echo "amk.dat is in the current dir"
   inputfile=amk.dat
else
   echo "amk input file is missing. You sure you are in the right folder?"
   exit
fi

if [ $ci != "screening" ] && [ $ci != "proceed" ]; then
   echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
   echo " Wrong argument "
   echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
   echo "To check what screening has done execute this script as:"
   echo "$exe screening"
   echo ""
   echo "To proceed with the irc execute this script as:"
   echo "$exe"
   echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
   exit
fi

###Do screnning before anything else
screening.sh $inputfile
if [ $ci == "screening" ]; then
   echo ""
   echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
   echo " Please check redundant and fragmented structures indicated in screening.log "
   echo " If they are not what you expected you might change MAPEmax, BAPEmax and/or eigLmax "
   echo "Then, you can carry on with the IRC calculations, run this script without argument "
   echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
   echo ""
   exit
fi

###read input file
read_input
###
# Track TSs that MOPAC could not (re)optimize so they are skipped on reruns.
if [ ! -f ${tsdirll}/ts_mopac_failed ] && [ "$program_opt" != "mopac" ]; then
   echo "TSs not optimized with mopac" > ${tsdirll}/ts_mopac_failed
fi
##
#remove tmp files
tmp_files=(tmp* bbfs.* *.arc *.mop coordir $tsdirll/*_mop.mop $tsdirll/*_mop.arc)
trap 'err_report $LINENO' ERR
trap cleanup EXIT INT

if [ ! -d "$tsdirll/MINs" ]; then
   mkdir $tsdirll/MINs
fi
##create table for min
sqlite3 ${tsdirll}/MINs/min.db "create table if not exists min (id INTEGER PRIMARY KEY,natom INTEGER, name TEXT,energy REAL,zpe REAL,g REAL,geom TEXT,freq TEXT, sigma INTEGER,unique(name));"
# Optimize minref and calculate freqs
echo "$min_template" > ${molecule}_freq.mop
awk 'NF==4{print $0}' ${molecule}_ref.xyz >> ${molecule}_freq.mop
echo "$freq_template" >> ${molecule}_freq.mop
mopac ${molecule}_freq.mop 2>/dev/null
# First we copy min0 in MIN directory
echo "Moving min0 to its final location"
if [ -f $tsdirll/MINs/min0.out ]; then
   echo "Calcs completed for min0"
else
   # Build a thermochemistry input from the optimized geometry, run it, and
   # harvest energy / ZPE / G / frequencies / symmetry number into min.db.
   geom="$(get_geom_mopac.sh ${molecule}_freq.out | awk '{if(NF==4) print $0}')"
   sed 's/thermo/thermo('$temperature','$temperature')/;s/method/'"$method"' charge='$charge'/' $sharedir/thermo_template > $tsdirll/MINs/min0.mop
   echo "$geom" >> $tsdirll/MINs/min0.mop
   mopac $tsdirll/MINs/min0.mop 2>/dev/null
   e0=$(awk '/HEAT OF FORMATION =/{e=$5};END{print e}' $tsdirll/MINs/min0.out )
   zpe0=$(awk '/ ZERO POINT ENERGY/{zpe=$4};END{print zpe}' $tsdirll/MINs/min0.out )
   g_corr0=$(awk 'BEGIN{t='$temperature'} / ZERO POINT ENERGY/{zpe=$4} /CALCULATED THERMODYNAMIC PROPERTIES/{ok=1} {if(ok==1 && $1 == '$temperature') { getline;getline;getline;getline; h=$3/1000;s=$5/1000;print zpe+h-t*s;exit} }' $tsdirll/MINs/min0.out )
   name=min0_0
   freq="$(get_freq_mopac.sh $tsdirll/MINs/min0.out)"
   sigma=$(awk '/SYMMETRY NUMBER/{print $NF;exit}' $tsdirll/MINs/min0.out)
   sqlite3 ${tsdirll}/MINs/min.db "insert into min (natom,name,energy,zpe,g,geom,freq,sigma) values ($natom,'$name',$e0,$zpe0,$g_corr0,'$geom','$freq',$sigma);"
fi
# Now we do things specific of IRC
if [ ! -d "$tsdirll/IRC" ]; then mkdir $tsdirll/IRC ; fi
if [ ! -d "$tsdirll/TSs" ]; then mkdir $tsdirll/TSs ; fi
m=0
sqlite3 ${tsdirll}/inputs.db "drop table if exists mopac; create table mopac (id INTEGER PRIMARY KEY,name TEXT, unique(name));"
# For every TS: skip if thermo + both IRC runs already finished; otherwise
# prepare thermo and forward/reverse IRC inputs and queue the TS name.
for name in $(awk '{print $3}' $tslistll)
do
   if [ -f $tsdirll/TSs/${name}_thermo.out ] && [ -f $tsdirll/IRC/${name}_ircf.out ] && [ -f $tsdirll/IRC/${name}_ircr.out ]; then
      # calc?=0 means the corresponding MOPAC run finished ("MOPAC DONE").
      calc1=$(awk 'BEGIN{calc=1};/MOPAC DONE/{calc=0};END{print calc}' $tsdirll/TSs/${name}_thermo.out)
      calc2=$(awk 'BEGIN{calc=1};/MOPAC DONE/{calc=0};END{print calc}' $tsdirll/IRC/${name}_ircf.out)
      calc3=$(awk 'BEGIN{calc=1};/MOPAC DONE/{calc=0};END{print calc}' $tsdirll/IRC/${name}_ircr.out)
      if [ $calc1 -eq 0 ] && [ $calc2 -eq 0 ] && [ $calc3 -eq 0 ]; then
         calc=0
      else
         calc=1
      fi
   else
      calc=1
   fi
   if [ $calc -eq 0 ]; then
      echo "Calcs completed for" $name
   else
      if [ "$program_opt" != "mopac" ]; then
         # TS came from another program: re-optimize it with MOPAC first and
         # verify it still has one imaginary frequency.
         skip=$(awk 'BEGIN{skip=0};/'$name'/{skip=1};END{print skip}' ${tsdirll}/ts_mopac_failed)
         if [ $skip == 1 ]; then
            echo "TS $name has ben previously discarded-->(skip because mopac cannot optimize it)"
            continue
         else
            echo "$ts_template" > ${tsdirll}/${name}_mop.mop
            get_geom_g09.sh $tsdirll/${name}.out >> ${tsdirll}/${name}_mop.mop
            echo "$freq_template" >> ${tsdirll}/${name}_mop.mop
            mopac ${tsdirll}/${name}_mop.mop 2>/dev/null
            fe="$(mopac_freq_ts.sh ${tsdirll}/${name}_mop.out 1)"
            fx="$(echo "$fe" | awk '{printf "%10.0f",$1}')"
            if [[ ("$fx" -gt "0") ]]; then
               get_geom_mopac.sh ${tsdirll}/${name}_mop.out | awk '{if(NF==4) print $0}' > tmp_geom
            else
               echo ${name} >> ${tsdirll}/ts_mopac_failed
               rm ${tsdirll}/${name}_mop.out
               continue
            fi
         fi
      else
         get_geom_mopac.sh $tsdirll/${name}.out | awk '{if(NF==4) print $0}' > tmp_geom
      fi
      ((m=m+1))
      # Thermo input plus forward (irc= 1) and reverse (irc=-1) IRC inputs.
      sed 's/thermo/thermo('$temperature','$temperature')/;s/method/'"$method"' charge='$charge'/' $sharedir/thermo_template > $tsdirll/TSs/${name}_thermo.mop
      cat tmp_geom >> $tsdirll/TSs/${name}_thermo.mop
      sed 's/method/'"$method"' charge='$charge' irc= 1/g' $sharedir/freq_template1 > $tsdirll/IRC/${name}_ircf.mop
      sed 's/method/'"$method"' charge='$charge' irc=-1/g' $sharedir/freq_template1 > $tsdirll/IRC/${name}_ircr.mop
      cat tmp_geom >> $tsdirll/IRC/${name}_ircf.mop
      cat tmp_geom >> $tsdirll/IRC/${name}_ircr.mop
      sed 's/method/'"$method"' charge='$charge'/g' $sharedir/freq_template2 | sed 's/force/cycles=5000 recalc=1/g' >> $tsdirll/IRC/${name}_ircf.mop
      sed 's/method/'"$method"' charge='$charge'/g' $sharedir/freq_template2 | sed 's/force/cycles=5000 recalc=1/g' >> $tsdirll/IRC/${name}_ircr.mop
      echo -e "insert or ignore into mopac values (NULL,'$name');\n.quit" | sqlite3 ${tsdirll}/inputs.db
   fi
done
echo Performing a total of $m irc calculations
#Perform m parallel calculations
if [ $m -gt 0 ]; then
#ft2 slurm
   if [ ! -z $SLURM_JOB_ID ] && [ ! -z $SLURM_NTASKS ]; then
      if (( $m < $SLURM_NTASKS )); then
         echo "WARNING: Number of irc calculations ($m) lower than allocated tasks ($SLURM_NTASKS)."
      fi
   fi
   doparallel "runirc.sh {1} $tsdirll" "$(seq $m)"
fi
"""
Render pipeline
"""
# pylint: disable=line-too-long
import logging
from typing import Any, Dict, List

import pydot

from .models import KEGGPathway
from .resolver import KEGGPathwayResolver
from .utils import ColorGradient


class KEGGPathwayRenderer:
    """
    Renderer for KEGG Pathway.

    Builds Graphviz DOT strings from a parsed KEGGPathway (entries +
    relations), optionally colored by a differential-expression overlay,
    and can export the result via pydot.
    """

    # pylint: disable=too-many-instance-attributes

    def __init__(self, kegg_pathway: KEGGPathway):
        """
        Init renderer for KEGG Pathway

        :param kegg_pathway: KEGGPathway
        """
        self.pathway = kegg_pathway
        # gene-id -> fold expression; filled by set_overlay()
        self.overlay: Dict[int, Any] = {}
        # min/max overlay values, used to scale colors in _get_gene_color()
        self.exp_min = 0
        self.exp_max = 0
        # last DOT string produced by one of the render methods
        self.render_string = None
        # TODO: fix any typing
        # color maps (lists of color strings) built lazily in set_overlay()
        self.cmap_upreg: List[Any] = []
        self.cmap_downreg: List[Any] = []
        # gradient endpoints: red for up-regulated, blue for down-regulated
        self.upper_color = (255, 0, 0)
        self.lower_color = (0, 0, 255)
        # compound-id -> display-name mapping, used in raw_render()
        self.components = KEGGPathwayResolver.get_components()

    def set_overlay(self, gene_dict: dict):
        """
        Overlay dot rendering with differential expression data. {<gene-id>: <fold-exp>}

        NOTE(review): min()/max() raise ValueError on an empty gene_dict —
        callers presumably always pass at least one gene; verify.

        :param gene_dict: dict
        :return:
        """
        self.cmap_downreg = ColorGradient(start=(255, 255, 255), stop=self.lower_color, steps=100).get_list()
        self.cmap_upreg = ColorGradient(start=(255, 255, 255), stop=self.upper_color, steps=100).get_list()
        self.overlay = gene_dict
        self.exp_min = min(gene_dict.values())
        self.exp_max = max(gene_dict.values())

        # Clip log fold expression
        # if self.exp_min > 0:
        #     self.exp_min = 0
        # if self.exp_max < 0:
        #     self.exp_max = 0
        self.exp_min = min(self.exp_min, 0)
        self.exp_max = max(self.exp_max, 0)

    def _get_gene_color(self, gene_id: int):
        """
        Get overlay color for a gene; white when the gene has no overlay value.

        NOTE(review): when overlay[gene_id] equals exp_min or exp_max the
        computed index is 100, one past the end of a 100-step gradient list —
        confirm ColorGradient.get_list() length, this looks like an off-by-one.

        :param gene_id: int
        :return: str
        """
        if gene_id not in self.overlay:
            return "#ffffff"

        # Scale the value into the gradient: negative -> blue map, positive -> red map.
        if self.overlay[gene_id] < 0:
            return self.cmap_downreg[abs(int(self.overlay[gene_id] / self.exp_min * 100))]
        return self.cmap_upreg[abs(int(self.overlay[gene_id] / self.exp_max * 100))]

    def group_render(self):
        """
        Group render: like render(), but clusters related entries into dashed
        subgraphs (one cluster per connected component of the relation graph).

        :return: str
        """
        # TODO: fix rendering
        # pylint: disable=too-many-branches
        string_builder = []
        string_builder.append(f"digraph pathway{self.pathway.number} {{")
        string_builder.append("\tnode [shape=rectangle arrowhead=normal];")
        string_builder.append(f"\tlabel=\"{self.pathway.title}\";")
        string_builder.append("\tfontsize=25;\n\tlabelloc=\"t\";")

        # search for "path:<...>"

        # TODO : divide entries in relation groups [[<entry>, ...], [...]]
        # Union of relation endpoints: each group is a set of entry ids that
        # are (transitively) connected by relations.
        entry_groups = []
        genes_of_interest = []
        for rel in self.pathway.relations:
            if len(entry_groups) == 0:
                entry_groups.append([int(rel.entry1), int(rel.entry2)])
            else:
                # conn collects indices of existing groups touching this relation.
                conn = []
                # for n in range(0, len(entry_groups)):
                #     if int(rel.entry1) in entry_groups[n] or int(rel.entry2) in entry_groups[n]:
                #         conn.append(n)
                for entry_index, entry_item in enumerate(entry_groups):
                    if int(rel.entry1) in entry_item or int(rel.entry2) in entry_item:
                        conn.append(entry_index)

                if len(conn) == 0:
                    entry_groups.append([int(rel.entry1), int(rel.entry2)])
                elif len(conn) == 1:
                    entry_groups[conn[0]].extend([int(rel.entry1), int(rel.entry2)])
                else:
                    # more then 1 match -> merge groups
                    entry_groups[conn[0]].extend(entry_groups[conn[1]])
                    del entry_groups[conn[1]]
                    entry_groups[conn[0]].extend([int(rel.entry1), int(rel.entry2)])

        # print(entry_groups)

        # TODO : entry group in cluster (subgraph) --> annotate cluster with pathway
        # Emit one dashed blue cluster per group; node shape depends on entry type.
        # for n in range(0, len(entry_groups)):
        for entry_index, entry_item in enumerate(entry_groups):
            string_builder.append(f"\tsubgraph cluster{entry_index} {{")
            string_builder.append("\tlabel=\"\";\n\tcolor=blue;\n\tstyle=dashed;")
            for group_item in entry_item:
                for entry in self.pathway.entries:
                    # TODO : shape=oval for components --> get comp name
                    if int(entry.id) == group_item:
                        # stringBuilder.append("\tnode [label=\"{LABEL}\"]; entry{ID};"
                        #                      .format(LABEL=entry.graphics.name.split(", ")[0], ID=entry.id))
                        if entry.type == "gene":
                            # shape=rectangle, style=filled, fillcolor=\"#cccccc\"
                            entry_label = entry.graphics.name.split(", ")[0]
                            string_builder.append(f"\tnode [label=\"{entry_label}\", shape=rectangle,"
                                                  f" style=filled, fillcolor=\"#ffffff\"]; entry{entry.id};")
                        elif entry.type == "group":
                            # shape=rectangle, color="black" label=<<table border='0' cellborder='1'>
                            # <tr><td>comp 1</td></tr><tr><td>comp 2</td></tr></table>>];
                            labels = [self.pathway.get_entry_by_id(comp.id).graphics.name.split(", ")[0] for comp in entry.components]
                            s_label = "".join([f"<tr><td>{l}</td></tr>" for l in labels])
                            string_builder.append(f"\tnode [label=<<table border='0' cellborder='1'>{s_label}</table>>,"
                                                  " shape=rectangle,"
                                                  f" style=filled, color=black, fillcolor=\"#ffffff\"]; entry{entry.id};")
                        elif entry.type == "compound":
                            entry_label = entry.graphics.name.split(", ")[0]
                            string_builder.append(f"\tnode [label=\"{entry_label}\", shape=oval,"
                                                  f" style=filled, fillcolor=\"#ffffff\"]; entry{entry.id};")
                        # NOTE(review): appended even for entry types that emit
                        # no node (falls through all branches) — confirm intended.
                        genes_of_interest.append(entry.id)
            string_builder.append("\t}")

        # Edges only between entries that actually got rendered above.
        for rel in self.pathway.relations:
            # TODO : adjust arrowhead
            if rel.entry1 in genes_of_interest and rel.entry2 in genes_of_interest:
                string_builder.append(f"\tentry{rel.entry1} -> entry{rel.entry2};")

        string_builder.append("}")
        self.render_string = "\n".join(string_builder)
        return self.render_string

    def render(self):
        """
        Render the pathway to a DOT digraph string (no clustering, no overlay
        colors). Example output:

        digraph G {
            labelloc="t";
            label="Inflammatory bowel disease (IBD)";
            fontsize=25;
            node [shape=rectangle arrowhead=normal];
            node [label="Tlr2"]; entry1;
            node [label="Nfkb1"]; entry2;
            node [label="Nod2"]; entry3;
            entry1 -> entry2;
            entry1 -> entry3 [arrowhead="tee"];
        }
        """
        string_builder = []
        string_builder.append(f"digraph pathway{self.pathway.number} {{")
        string_builder.append("\tnode [shape=rectangle arrowhead=normal];")
        string_builder.append(f"\tlabel=\"{self.pathway.title}\";")
        string_builder.append("\tfontsize=25;\n\tlabelloc=\"t\";")

        # TODO : specifiy for each node: shape=circle|rectangle|oval|ellipse, style=filled, fillcolor=red|green|#cccccc
        # search for "path:<...>"
        # TODO : parse entry::type[group]
        # Ids appearing in at least one relation; everything else is skipped.
        related_entries = [int(p.entry1) for p in self.pathway.relations]
        related_entries.extend([int(p.entry2) for p in self.pathway.relations])

        for entry in self.pathway.entries:
            # only render genes with at least 1 relation
            if int(entry.id) in related_entries:
                # and entry.type == "gene"
                # case select for types gene, comp, group, ...
                if entry.type == "gene":
                    # shape=rectangle, style=filled, fillcolor=\"#cccccc\"
                    entry_label = entry.graphics.name.split(", ")[0]
                    string_builder.append(f"\tnode [label=\"{entry_label}\", shape=rectangle,"
                                          f" style=filled, fillcolor=\"#ffffff\"]; entry{entry.id};")
                elif entry.type == "group":
                    # shape=rectangle, color="black" label=<<table border='0' cellborder='1'>
                    # <tr><td>comp 1</td></tr><tr><td>comp 2</td></tr></table>>];
                    labels = [self.pathway.get_entry_by_id(comp.id).graphics.name.split(", ")[0] for comp in entry.components]
                    s_label = "".join([f"<tr><td>{l}</td></tr>" for l in labels])
                    string_builder.append(f"\tnode [label=<<table border='0' cellborder='1'>{s_label}</table>>,"
                                          " shape=rectangle,"
                                          f" style=filled, color=black, fillcolor=\"#ffffff\"]; entry{entry.id};")
                elif entry.type == "compound":
                    entry_label = entry.graphics.name.split(", ")[0]
                    string_builder.append(f"\tnode [label=\"{entry_label}\", shape=oval,"
                                          f" style=filled, fillcolor=\"#ffffff\"]; entry{entry.id};")

        for rel in self.pathway.relations:
            # TODO : adjust arrowhead
            string_builder.append(f"\tentry{rel.entry1} -> entry{rel.entry2};")

        string_builder.append("}")
        self.render_string = "\n".join(string_builder)
        return self.render_string

    def raw_render(self):
        """
        Render pydot graph: full-featured DOT output with overlay colors
        (via _get_gene_color), compound display names and relation-specific
        arrowheads (tee = inhibition/repression, none = binding/association,
        "+p" label = phosphorylation).

        :return: str
        """
        string_builder = []
        string_builder.append(f"digraph pathway{self.pathway.number} {{")
        string_builder.append("\tgraph [fontname = \"arial\"];\n\tnode [fontname = \"arial\"];")
        string_builder.append("\tedge [fontname = \"arial\"];")
        string_builder.append("\tnode [shape=rectangle arrowhead=normal];")
        string_builder.append(f"\tlabel=\"{self.pathway.title}\";")
        string_builder.append("\tfontsize=25;\n\tlabelloc=\"t\";")

        related_entries = [int(p.entry1) for p in self.pathway.relations]
        related_entries.extend([int(p.entry2) for p in self.pathway.relations])

        for entry in self.pathway.entries:
            # only render genes with at least 1 relation
            if int(entry.id) in related_entries:
                if entry.type == "gene":
                    entry_label = entry.graphics.name.split(", ")[0]
                    string_builder.append(f"\tnode [label=\"{entry_label}\", shape=rectangle,"
                                          f" style=filled, fillcolor=\"{self._get_gene_color(entry.get_gene_id())}\"]; entry{entry.id};")
                elif entry.type == "group":
                    labels = [self.pathway.get_entry_by_id(comp.id).graphics.name.split(", ")[0] for comp in entry.components]
                    s_label = "".join([f"<tr><td>{l}</td></tr>" for l in labels])
                    string_builder.append(f"\tnode [label=<<table border='0' cellborder='1'>{s_label}</table>>,"
                                          " shape=rectangle,"
                                          f" style=filled, color=black, fillcolor=\"#ffffff\"]; entry{entry.id};")
                elif entry.type == "compound":
                    # Map the compound id to a human-readable name when known.
                    name = entry.graphics.name.split(", ")[0]
                    if name in self.components:
                        name = self.components.get(name)
                    string_builder.append(f"\tnode [label=\"{name}\", shape=oval,"
                                          f" style=filled, fillcolor=\"#ffffff\"]; entry{entry.id};")

        for rel in self.pathway.relations:
            # adjust arrowhead
            label = ""
            arrowhead = "normal"
            if "inhibition" in rel.subtypes or "repression" in rel.subtypes:
                arrowhead = "tee"
            elif "binding/association" in rel.subtypes:
                arrowhead = "none"
            if "phosphorylation" in rel.subtypes:
                label = "+p"
            string_builder.append(f"\tentry{rel.entry1} -> entry{rel.entry2} [arrowhead=\"{arrowhead}\" label=\"{label}\"];")

        string_builder.append("}")
        self.render_string = "\n".join(string_builder)
        return self.render_string

    def to_dot(self):
        """
        Convert rendered string to pydot graph

        :return: pydot.Graph
        """
        return pydot.graph_from_dot_data(self.render_string)[0]

    def export(self, extension: str):
        """
        Export pydot graph in the given format (renders first if needed).

        :param extension: str
        :return: Any
        """
        # render with pydot to png|svg|pdf|
        if not self.render_string:
            self.render()
        return self.to_dot().create(format=extension)

    def render_legend(self):
        """
        Render svg label: a vertical color-bar legend from upper_color (top,
        exp_max) through white to lower_color (bottom, exp_min).

        :return: str
        """
        # Dont fix linting here, maybe this function will be removed
        # pylint: disable=line-too-long
        return f"""<?xml version="1.0" standalone="no"?>
<svg height="200" width="300" version="1.1" baseProfile="full" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ev="http://www.w3.org/2001/xml-events">
<defs>
<linearGradient id="cmap" x1="0%" y1="0%" x2="0%" y2="100%">
<stop offset="0%" style="stop-color:{ColorGradient.to_css(color=self.upper_color)};stop-opacity:1" />
<stop offset="50%" style="stop-color:rgb(255,255,255);stop-opacity:1" />
<stop offset="100%" style="stop-color:{ColorGradient.to_css(color=self.lower_color)};stop-opacity:1" />
</linearGradient>
</defs>
<g>
<rect x="20" y="50" width="20" height="100" fill="url(#cmap)" />
<rect x="20" y="50" width="20" height="100" style="stroke:black;stroke-width:2;fill-opacity:0;stroke-opacity:1" />
<text x="55" y="150" fill="black" alignment-baseline="central">{self.exp_min}</text>
<text x="55" y="100" fill="black" alignment-baseline="central">0</text>
<text x="55" y="50" fill="black" alignment-baseline="central">{self.exp_max}</text>
<line x1="40" y1="50" x2="50" y2="50" style="stroke:rgb(0,0,0);stroke-width:2" />
<line x1="40" y1="100" x2="50" y2="100" style="stroke:rgb(0,0,0);stroke-width:2" />
<line x1="40" y1="150" x2="50" y2="150" style="stroke:rgb(0,0,0);stroke-width:2" />
</g>
</svg>"""


if __name__ == "__main__":
    # Manual smoke test: fetch a mouse pathway and print its DOT rendering.
    logging.basicConfig(level=logging.DEBUG)
    logging.debug("Testing rendering...")

    PATHWAY = KEGGPathwayResolver(org="mmu").get_pathway("04659")
    RENDERER = KEGGPathwayRenderer(kegg_pathway=PATHWAY)

    # print(renderer.group_render())
    # print(renderer.render())
    print(RENDERER.raw_render())
# Randomized differential tester: endlessly generate a test case and compare
# the outputs of two candidate solutions (./a.out vs ./b.out); dump the case
# and both outputs whenever they disagree.

# Print a mismatch report for the current random.in.
# $1: output of ./a.out, $2: output of ./b.out
report_mismatch() {
    echo "----------------------------------------"
    echo "Wrong Answer"
    echo "[test case] "
    cat random.in
    echo "[./a.out] "
    echo "$1"
    echo "[./b.out] "
    echo "$2"
    echo "----------------------------------------"
}

while :; do
    python3 ./generate.py > random.in
    out_a=$(./a.out < random.in)
    out_b=$(./b.out < random.in)
    [ "$out_a" = "$out_b" ] || report_mismatch "$out_a" "$out_b"
done
# Adversarial Variational Bayes (AVB) on MNIST: a VAE whose KL term is
# replaced by an adversarially trained discriminator T(X, z).
import torch
import torch.nn
# NOTE(review): aliasing torch.nn.functional as `nn` is confusing (shadows the
# conventional `torch.nn as nn`) — kept as-is, used for binary_cross_entropy
# and sigmoid below.
import torch.nn.functional as nn
import torch.autograd as autograd
import torch.optim as optim
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import os
from torch.autograd import Variable
# NOTE(review): tensorflow.examples.tutorials.mnist was removed in TF2 — this
# script targets TF1-era tooling and legacy PyTorch (Variable, .data[0]).
from tensorflow.examples.tutorials.mnist import input_data


mnist = input_data.read_data_sets('../../MNIST_data', one_hot=True)
mb_size = 32        # minibatch size
z_dim = 10          # latent dimensionality
eps_dim = 4         # noise input to the (implicit) encoder
X_dim = mnist.train.images.shape[1]
y_dim = mnist.train.labels.shape[1]
h_dim = 128         # hidden layer width for all three networks
cnt = 0             # counter for saved sample-grid images
lr = 1e-3


def log(x):
    # Numerically-stable log (avoids log(0) from saturated sigmoids).
    return torch.log(x + 1e-8)


# Encoder: q(z|x,eps)
Q = torch.nn.Sequential(
    torch.nn.Linear(X_dim + eps_dim, h_dim),
    torch.nn.ReLU(),
    torch.nn.Linear(h_dim, z_dim)
)

# Decoder: p(x|z)
P = torch.nn.Sequential(
    torch.nn.Linear(z_dim, h_dim),
    torch.nn.ReLU(),
    torch.nn.Linear(h_dim, X_dim),
    torch.nn.Sigmoid()
)

# Discriminator: T(X, z)
T = torch.nn.Sequential(
    torch.nn.Linear(X_dim + z_dim, h_dim),
    torch.nn.ReLU(),
    torch.nn.Linear(h_dim, 1)
)


def reset_grad():
    # Zero the gradients of all three networks between updates.
    Q.zero_grad()
    P.zero_grad()
    T.zero_grad()


def sample_X(size, include_y=False):
    # Draw a minibatch from MNIST; optionally also return integer labels.
    X, y = mnist.train.next_batch(size)
    X = Variable(torch.from_numpy(X))

    if include_y:
        # NOTE(review): np.int is removed in modern NumPy — legacy API usage.
        y = np.argmax(y, axis=1).astype(np.int)
        y = Variable(torch.from_numpy(y))
        return X, y

    return X


Q_solver = optim.Adam(Q.parameters(), lr=lr)
P_solver = optim.Adam(P.parameters(), lr=lr)
T_solver = optim.Adam(T.parameters(), lr=lr)

for it in range(1000000):
    X = sample_X(mb_size)
    eps = Variable(torch.randn(mb_size, eps_dim))
    z = Variable(torch.randn(mb_size, z_dim))

    # Optimize VAE
    # Encoder/decoder step: maximize ELBO where the discriminator output
    # T(X, z_sample) stands in for the (intractable) KL term.
    z_sample = Q(torch.cat([X, eps], 1))
    X_sample = P(z_sample)
    T_sample = T(torch.cat([X, z_sample], 1))

    disc = torch.mean(-T_sample)
    # NOTE(review): size_average is deprecated in modern torch (use reduction).
    loglike = -nn.binary_cross_entropy(X_sample, X, size_average=False) / mb_size

    elbo = -(disc + loglike)

    elbo.backward()
    Q_solver.step()
    P_solver.step()
    reset_grad()

    # Discriminator T(X, z)
    # Train T to tell encoder samples (label 1) from prior samples (label 0).
    z_sample = Q(torch.cat([X, eps], 1))
    T_q = nn.sigmoid(T(torch.cat([X, z_sample], 1)))
    T_prior = nn.sigmoid(T(torch.cat([X, z], 1)))

    T_loss = -torch.mean(log(T_q) + log(1. - T_prior))

    T_loss.backward()
    T_solver.step()
    reset_grad()

    # Print and plot every now and then
    if it % 1000 == 0:
        # NOTE(review): .data[0] is legacy PyTorch (<0.4); modern code uses .item().
        print('Iter-{}; ELBO: {:.4}; T_loss: {:.4}'
              .format(it, -elbo.data[0], -T_loss.data[0]))

        # Decode 16 prior samples and save them as a 4x4 image grid.
        samples = P(z).data.numpy()[:16]

        fig = plt.figure(figsize=(4, 4))
        gs = gridspec.GridSpec(4, 4)
        gs.update(wspace=0.05, hspace=0.05)

        for i, sample in enumerate(samples):
            ax = plt.subplot(gs[i])
            plt.axis('off')
            ax.set_xticklabels([])
            ax.set_yticklabels([])
            ax.set_aspect('equal')
            plt.imshow(sample.reshape(28, 28), cmap='Greys_r')

        if not os.path.exists('out/'):
            os.makedirs('out/')

        plt.savefig('out/{}.png'.format(str(cnt).zfill(3)), bbox_inches='tight')
        cnt += 1
        plt.close(fig)
<filename>z3/context_test.go // Copyright 2017 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package z3 import ( "fmt" "regexp" "testing" ) func expectPanic(t *testing.T, pattern string, f func()) { re, err := regexp.Compile(pattern) if err != nil { t.Fatal("bad regexp: ", err) } defer func() { err := recover() if err == nil { t.Fatalf("want panic matching %q, got none", pattern) } else if s := fmt.Sprint(err); !re.MatchString(s) { t.Fatalf("want panic matching %q, got %s", pattern, err) } }() f() } func TestErrorHandling(t *testing.T) { ctx := NewContext(nil) x := ctx.BVConst("x", 1) y := ctx.BVConst("y", 2) expectPanic(t, "are incompatible", func() { x.Eq(y) }) }
#!/bin/bash
# Start the cluster monitoring/management services (swarm visualizer,
# Portainer, Weave Scope) as Docker Swarm services, in parallel, then wait
# for all of them to be created.
source .env
source $ENV_INIT_SCRIPT_PATH/env_init.sh

# Swarm Viewer Manomarks/visualizer
echo "Starting Swarm Viewer Ui.."
SERVICE_COMMAND="docker service create \
--name="$SWARM_VIEWER_UI_SERVICE_NAME" \
--publish="$VISUALIZER_PORT":8080/tcp \
--constraint=node.role==manager \
--mount=type=bind,src=/var/run/docker.sock,dst=/var/run/docker.sock \
$SWARM_VIEWER_UI_SERVICE_IMAGE:$SWARM_VIEWER_UI_SERVICE_IMAGE_TAG"
# Run in a subshell in the background so the three services start concurrently.
(
start_service "Swarm Viewer Ui" swarm-viewer "$SERVICE_COMMAND"
) &

# Portainer
echo "Starting Docker Management Web UI Portainer Ui.."
SERVICE_COMMAND="docker service create \
--name="$DOCKER_MANAGEMENT_WEB_UI_SERVICE_NAME" \
--publish="$PORTAINER_PORT":9000 \
--constraint=node.role==manager \
--mount=type=bind,src=/var/run/docker.sock,dst=/var/run/docker.sock \
$DOCKER_MANAGEMENT_WEB_UI_SERVICE_IMAGE:$DOCKER_MANAGEMENT_WEB_UI_SERVICE_IMAGE_TAG \
-H unix:///var/run/docker.sock"
(
start_service "Docker Management Web UI Portainer" portainer "$SERVICE_COMMAND"
) &

# Weave Scope
echo "Starting Microservices Viewer Web Ui Weave Scope.."
SERVICE_COMMAND="docker service create \
--name="$MICROSERVICES_VIEWER_WEAVE_SCOPE_WEB_UI_SERVICE_NAME" \
--mode=global \
--mount=type=bind,src=/var/run/docker.sock,dst=/var/run/docker.sock \
$MICROSERVICES_VIEWER_WEAVE_SCOPE_WEB_UI_SERVICE_IMAGE:$MICROSERVICES_VIEWER_WEAVE_SCOPE_WEB_UI_SERVICE_IMAGE_TAG \
scope launch"
(
start_service "Microservices Viewer Web UI weave-scope" weave-scope "$SERVICE_COMMAND"
) &

# Fixed typos in the two user-facing messages ("wating" -> "waiting",
# "cretaed" -> "created").
echo "waiting for services to be created..."
wait
echo "Services created successfully"
import { MessageBus, ServiceBus } from '../../src';
import CalculatorContainer from './Calculator';
import assert from 'assert';

// Wire a service bus onto a message bus and expose the calculator container.
const messageBus = new MessageBus();
const serviceBus = new ServiceBus();

serviceBus.connect(messageBus);
serviceBus.register('calculator', new CalculatorContainer());

// Option 1: address the service through explicit send() calls.
const cpmRequests = [
  serviceBus.send('calculator.cpm', { cost: 22, impressions: 0 }),
  serviceBus.send('calculator.cpm', { cost: 22, impressions: 100 }),
];

Promise.all(cpmRequests)
  .then(([cpmNoImpressions, cpmWithImpressions]) => {
    assert.equal(cpmNoImpressions, 0);
    assert.equal(cpmWithImpressions, 220);
  })
  .catch(err => console.log(err));

// or
// Option 2: extract a proxy object and call its methods directly.
const Calculator = serviceBus.extract('calculator');

Calculator.cost({ cpm: 220, impressions: 100 })
  .then(costResult => {
    assert.equal(costResult, 22);
  })
  .catch(err => console.log(err));
import React from 'react'; const LoadingIndicator = props => ( <div> { props.loading && <div> <h1>Loading...</h1> </div> } </div> ); export default LoadingIndicator;
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Verifies that the checked-in generated client/deepcopy code matches what
# the code generators would produce; fails CI when `make codegen` is needed.
set -o errexit
set -o nounset
set -o pipefail

SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")
PROJECT_ROOT="$SCRIPT_ROOT/.."
# Scratch directory where the existing generated sources are staged for comparison.
GENERATED_ROOT="$PROJECT_ROOT/.generated"
PKG_NAME="github.com/apache/apisix-ingress-controller"

# Make sure no pollution
rm -rf "$GENERATED_ROOT"
trap 'rm -rf "$GENERATED_ROOT"' EXIT

# Prepare existed codes
mkdir -p "${GENERATED_ROOT}/${PKG_NAME}/pkg/kube/apisix"
cp -r "${PROJECT_ROOT}/pkg/kube/apisix/client" "${GENERATED_ROOT}/${PKG_NAME}/pkg/kube/apisix"

# Copy every zz_generated.deepcopy.go under $1 (relative to the project root)
# into the staging tree, preserving the directory layout.
cp_deepcopy() {
    local SRC_PATH="$1"
    local CP_SCRIPT='FILE_PATH=$1; TARGET_PATH=$2; DIR="$(dirname $FILE_PATH)"; mkdir -p $TARGET_PATH/$DIR && cp $FILE_PATH $TARGET_PATH/$DIR'
    find "${PROJECT_ROOT}/$SRC_PATH" -type f -name 'zz_generated.deepcopy.go' \
        -exec sh -c "$CP_SCRIPT" _ {} "${GENERATED_ROOT}/${PKG_NAME}" ';'
}

cp_deepcopy "pkg/kube/apisix/"
cp_deepcopy "pkg/types/apisix/"

# Verify
# Run the generators in --verify-only mode; a non-zero exit means the staged
# copies differ from freshly generated output.
ret=0
bash "${SCRIPT_ROOT}"/generate-groups.sh "deepcopy,client,informer,lister" \
  ${PKG_NAME}/pkg/kube/apisix/client ${PKG_NAME}/pkg/kube/apisix/apis \
  config:v2beta3,v2beta2,v2beta1 ${PKG_NAME} \
  --output-base "$GENERATED_ROOT" \
  --go-header-file "${SCRIPT_ROOT}"/boilerplate.go.txt \
  --verify-only || ret=$?

if [[ $ret -eq 0 ]]; then
    bash "${SCRIPT_ROOT}"/generate-groups.sh "deepcopy" \
      ${PKG_NAME}/pkg/types ${PKG_NAME}/pkg/types \
      apisix:v1 ${PKG_NAME} \
      --output-base "$GENERATED_ROOT" \
      --go-header-file "${SCRIPT_ROOT}"/boilerplate.go.txt \
      --verify-only|| ret=$?
fi

if [[ $ret -eq 0 ]]; then
    echo "Generated codes up to date."
else
    echo "Generated codes out of date. Please run \`make codegen\`"
    exit 1
fi
#!/bin/sh
# File is auto-generated by cmake compilation, do not edit.

# Map the NuttX-style variable definition 'set <var> <value>' to something that
# bash and alternatives understand
# define _set first because sh (POSIX shell) does not like overwriting set directly
_set() {
	eval $1=$2
}
alias set=_set
# alternative method with an alias:
# alias set='f(){ set -- "$1=$2"; eval "$1"; unset -f f; }; eval f'

# Execute another shell script.
# $1: Path to the script, (optionally starts with /, to match with the NuttX
#     scripts)
sh() {
	script="$1"
	case "$script" in
		"/"*)
			script="$script"
			;;
		*)
			script="/$script"
			;;
	esac
	. "$(pwd)$script"
}

# Don't stop on errors.
#set -e

# Arguments passed to this script:
# $1: optional instance id
px4_instance=0
[ -n "$1" ] && px4_instance=$1

# Each alias below maps a NuttX-style command name to the corresponding
# px4-<name> host binary, pinned to this simulation instance.
alias cdev_test='px4-cdev_test --instance $px4_instance'
alias controllib_test='px4-controllib_test --instance $px4_instance'
alias rc_tests='px4-rc_tests --instance $px4_instance'
alias uorb='px4-uorb --instance $px4_instance'
alias uorb_tests='px4-uorb_tests --instance $px4_instance'
alias batt_smbus='px4-batt_smbus --instance $px4_instance'
alias camera_trigger='px4-camera_trigger --instance $px4_instance'
alias ets_airspeed='px4-ets_airspeed --instance $px4_instance'
alias ms4525_airspeed='px4-ms4525_airspeed --instance $px4_instance'
alias ms5525_airspeed='px4-ms5525_airspeed --instance $px4_instance'
alias sdp3x_airspeed='px4-sdp3x_airspeed --instance $px4_instance'
alias cm8jl65='px4-cm8jl65 --instance $px4_instance'
alias leddar_one='px4-leddar_one --instance $px4_instance'
alias ll40ls='px4-ll40ls --instance $px4_instance'
alias mb12xx='px4-mb12xx --instance $px4_instance'
alias pga460='px4-pga460 --instance $px4_instance'
alias sf0x='px4-sf0x --instance $px4_instance'
alias sf0x_tests='px4-sf0x_tests --instance $px4_instance'
alias sf1xx='px4-sf1xx --instance $px4_instance'
alias srf02='px4-srf02 --instance $px4_instance'
alias teraranger='px4-teraranger --instance $px4_instance'
alias tfmini='px4-tfmini --instance $px4_instance'
alias ulanding_radar='px4-ulanding_radar --instance $px4_instance'
alias vl53lxx='px4-vl53lxx --instance $px4_instance'
alias gps='px4-gps --instance $px4_instance'
alias pwm_out_sim='px4-pwm_out_sim --instance $px4_instance'
alias tone_alarm='px4-tone_alarm --instance $px4_instance'
alias attitude_estimator_q='px4-attitude_estimator_q --instance $px4_instance'
alias camera_feedback='px4-camera_feedback --instance $px4_instance'
alias commander='px4-commander --instance $px4_instance'
alias commander_tests='px4-commander_tests --instance $px4_instance'
alias dataman='px4-dataman --instance $px4_instance'
alias ekf2='px4-ekf2 --instance $px4_instance'
alias send_event='px4-send_event --instance $px4_instance'
alias fw_att_control='px4-fw_att_control --instance $px4_instance'
alias fw_pos_control_l1='px4-fw_pos_control_l1 --instance $px4_instance'
alias gnd_att_control='px4-gnd_att_control --instance $px4_instance'
alias gnd_pos_control='px4-gnd_pos_control --instance $px4_instance'
alias land_detector='px4-land_detector --instance $px4_instance'
alias landing_target_estimator='px4-landing_target_estimator --instance $px4_instance'
alias load_mon='px4-load_mon --instance $px4_instance'
alias local_position_estimator='px4-local_position_estimator --instance $px4_instance'
alias logger='px4-logger --instance $px4_instance'
alias mavlink='px4-mavlink --instance $px4_instance'
alias mavlink_tests='px4-mavlink_tests --instance $px4_instance'
alias mc_att_control='px4-mc_att_control --instance $px4_instance'
alias mc_pos_control='px4-mc_pos_control --instance $px4_instance'
alias navigator='px4-navigator --instance $px4_instance'
alias replay='px4-replay --instance $px4_instance'
alias sensors='px4-sensors --instance $px4_instance'
alias simulator='px4-simulator --instance $px4_instance'
alias accelsim='px4-accelsim --instance $px4_instance'
alias measairspeedsim='px4-measairspeedsim --instance $px4_instance'
alias barosim='px4-barosim --instance $px4_instance'
alias gpssim='px4-gpssim --instance $px4_instance'
alias gyrosim='px4-gyrosim --instance $px4_instance'
alias vmount='px4-vmount --instance $px4_instance'
alias vtol_att_control='px4-vtol_att_control --instance $px4_instance'
alias wind_estimator='px4-wind_estimator --instance $px4_instance'
alias dyn='px4-dyn --instance $px4_instance'
alias esc_calib='px4-esc_calib --instance $px4_instance'
alias led_control='px4-led_control --instance $px4_instance'
alias mixer='px4-mixer --instance $px4_instance'
alias motor_ramp='px4-motor_ramp --instance $px4_instance'
alias param='px4-param --instance $px4_instance'
alias perf='px4-perf --instance $px4_instance'
alias pwm='px4-pwm --instance $px4_instance'
alias reboot='px4-reboot --instance $px4_instance'
alias sd_bench='px4-sd_bench --instance $px4_instance'
alias shutdown='px4-shutdown --instance $px4_instance'
alias tests='px4-tests --instance $px4_instance'
alias hrt_test='px4-hrt_test --instance $px4_instance'
alias top='px4-top --instance $px4_instance'
alias listener='px4-listener --instance $px4_instance'
alias tune_control='px4-tune_control --instance $px4_instance'
alias ver='px4-ver --instance $px4_instance'
alias bottle_drop='px4-bottle_drop --instance $px4_instance'
alias ex_fixedwing_control='px4-ex_fixedwing_control --instance $px4_instance'
alias hello='px4-hello --instance $px4_instance'
alias position_estimator_inav='px4-position_estimator_inav --instance $px4_instance'
alias px4_mavlink_debug='px4-px4_mavlink_debug --instance $px4_instance'
alias px4_simple_app='px4-px4_simple_app --instance $px4_instance'
alias rover_steering_control='px4-rover_steering_control --instance $px4_instance'
alias segway='px4-segway --instance $px4_instance'
package com.lewy.videoutil.managers;

import android.os.AsyncTask;

import com.lewy.videoutil.interfaces.YouTubeTitleCallback;

import org.apache.commons.io.IOUtils;
import org.json.JSONObject;

import java.net.URL;

/**
 * Created by lewy on 05.06.2016.
 *
 * Fetches a YouTube video's title off the UI thread via the oEmbed endpoint
 * and delivers it through a {@link YouTubeTitleCallback}.
 */
public class YouTubeTitleManager extends AsyncTask {

    private static final String YOU_TUBE_BASE_URL = "https://www.youtube.com/watch?v=";

    private YouTubeTitleCallback youTubeTitleCallback;
    private String youTubeUrl;

    /** Registers the callback that will receive the resolved title. */
    public void setYouTubeTitleManager(YouTubeTitleCallback youTubeTitleCallback) {
        this.youTubeTitleCallback = youTubeTitleCallback;
    }

    /** Sets the video id/fragment appended to the watch URL. */
    public void setYouTubeUrl(String youTubeUrl) {
        this.youTubeUrl = youTubeUrl;
    }

    @Override
    protected Object doInBackground(Object[] params) {
        if (youTubeUrl == null) {
            return null;
        }
        try {
            // Query the oEmbed endpoint and extract the "title" field.
            String oembedEndpoint =
                    "http://www.youtube.com/oembed?url=" + YOU_TUBE_BASE_URL + youTubeUrl + "&format=json";
            JSONObject response = new JSONObject(IOUtils.toString(new URL(oembedEndpoint)));
            youTubeTitleCallback.title(response.getString("title"));
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}
<gh_stars>0 import store from "store"; import {ActionFactory} from "store/action"; import {DotButton} from "components/DotButton"; import "./RemoveVoteButton.scss"; import classNames from "classnames"; import {VoteClientModel} from "types/vote"; import {FC} from "react"; type RemoveVoteProps = { noteId: string; activeVoting: boolean; votes: VoteClientModel[]; ownVotes: VoteClientModel[]; tabIndex: number; }; export const RemoveVoteButton: FC<RemoveVoteProps> = ({noteId, activeVoting, votes, ownVotes, tabIndex}) => { const deleteVote = () => { store.dispatch(ActionFactory.deleteVote(noteId)); }; return ( <DotButton tabIndex={tabIndex} className={classNames("vote-button-remove", {"vote-button-remove--own-vote": ownVotes.length > 0})} disabled={!activeVoting} onClick={deleteVote} > <span className="vote-button-remove__folded-corner" /> <span>{votes.length.toString()}</span> </DotButton> ); };
# Federated-learning hyperparameter sweep.
#
# Replaces the original 80 hand-unrolled `python main.py` invocations with a
# loop that emits the exact same commands in the exact same order:
#   runs  1-4 : FedAvg  MLP, r=50
#   runs  5-8 : FedProx MLP, r=50  (mu=1e-2)
#   runs  9-12: FedAvg  CNN, r=50
#   runs 13-16: FedProx CNN, r=100 (mu=1e-2)
# Each run covers local-epoch counts 1/5/10/20 (one run id per value) and all
# five data-distribution settings (-nd 0..4).

# Global run counter; advanced by sweep() so run ids match the original script.
RUN=0

# sweep <rounds> [extra flags...]
# For each le in {1,5,10,20}: bump RUN, then execute main.py for nd 0..4 with
# the given round count and pass-through flags (model/mu/algo).
sweep() {
    rounds="$1"
    shift
    for le in 1 5 10 20; do
        RUN=$((RUN + 1))
        for nd in 0 1 2 3 4; do
            python main.py -le="$le" -r="$rounds" -nd="$nd" --verbose=0 "$@" --run="$RUN"
        done
    done
}

sweep 50 --model=MLP
sweep 50 --model=MLP --mu=1e-2 --algo=FedProx
sweep 50 --model=CNN
sweep 100 --model=CNN --mu=1e-2 --algo=FedProx
#!/bin/bash
# Registers the argocd-example-apps Git repository with Argo CD.
#
# Credentials: GIT_USERNAME defaults to "admin"; GIT_PASSWORD must be
# supplied (e.g. via the environment). The script aborts when the password
# is empty — previously the unquoted, empty expansion simply vanished from
# the command line, leaving argocd with a dangling --password flag.
set -euo pipefail

PROG_PATH=${BASH_SOURCE[0]}   # this script's path as invoked
PROG_NAME=${PROG_PATH##*/}    # basename of script (strip path)
PROG_DIR="$(cd "$(dirname "${PROG_PATH:-$PWD}")" 2>/dev/null 1>&2 && pwd)"
APP_DIR="$(cd "${PROG_DIR}/.." 2>/dev/null 1>&2 && pwd)"   # parent dir; kept for compatibility with callers that source this file

# Allow overriding via the environment while keeping the original defaults.
declare GIT_USERNAME=${GIT_USERNAME:-admin}
declare GIT_PASSWORD=${GIT_PASSWORD:-}

if [ -z "${GIT_PASSWORD}" ]; then
    echo "${PROG_NAME}: GIT_PASSWORD is not set" >&2
    exit 1
fi

# Quote the expansions so empty or space-containing values cannot break the
# argument list.
argocd repo add https://github.com/argoproj/argocd-example-apps \
    --username "${GIT_USERNAME}" \
    --password "${GIT_PASSWORD}"
<reponame>codefoxut/la-casa-de-papel package main import ( "fmt" "math" "sort" ) func minArray(a []int) int { minValue := int(math.MaxInt32) for _, x := range a { if minValue > x { minValue = x } } return minValue } func coinChange1(coins []int, amount int) int { sort.Ints(coins) if amount < 0 { return -1 } if amount == 0 { return 0 } minCoins := make([]int, len(coins)) for i, coin := range coins { val := coinChange1(coins, amount-coin) if val < 0 { val = int(math.MaxInt32) } else { val = 1 + val } minCoins[i] = val } v := minArray(minCoins) if int(math.MaxInt32) == v { return -1 } return v } func coinChange2(coins []int, amount int) int { if amount < 0 { return -1 } if amount == 0 { return 0 } sort.Sort(sort.Reverse(sort.IntSlice(coins))) coinCount, flag := solutionFinder(coins, amount, 0) if flag { return coinCount } return -1 } func solutionFinder(coins []int, amount int, coinCount int) (int, bool) { if amount < 0 { return -1, false } if amount == 0 { return coinCount, true } for i := 0; i < len(coins); i++ { count, val := solutionFinder(coins, amount-coins[i], coinCount+1) fmt.Println(val) if val { return count, true } } return -1, false } func contains(s []int, num int) bool { for _, v := range s { if v == num { return true } } return false } func coinChangeLoop(coins []int, amount int) int { if amount <= 0 { return 0 } dp := make([]int, amount+1) for i := 1; i <= amount; i++ { dp[i] = amount + 1 } for i := 1; i <= amount; i++ { var values []int values = append(values, dp[i]) for _, coin := range coins { if i >= coin { values = append(values, dp[i-coin]+1) } } dp[i] = minArray(values) } fmt.Println(dp) if dp[amount] > amount { return -1 } return dp[amount] } func coinChange(coins []int, amount int) int { if amount <= 0 { return 0 } dp := make([]int, amount+1) for _, coin := range coins { if coin <= amount { dp[coin] = 1 } } for i := 1; i <= amount; i++ { var values []int var val int if !contains(coins, i) { for _, c := range coins { if i> c { val = dp[i-c] 
values = append(values, val+1) } } if len(values) == 0 { dp[i] = int(math.MaxInt32) } else { dp[i] = minArray(values) } } } // fmt.Println(dp) if dp[amount] == int(math.MaxInt32) { return -1 } return dp[amount] } func main() { coins := []int{1, 2, 5} amount := 11 fmt.Println(coinChange(coins, amount)) fmt.Println(coinChangeLoop(coins, amount)) }
package com.bogerweather.app.util;

/**
 * Callback contract for asynchronous HTTP requests: the requester invokes
 * exactly one of these methods once the request completes.
 */
public interface HttpCallbackListener {

    /**
     * Called when the request finishes successfully.
     *
     * @param response the raw response body
     */
    void onFinish(String response);

    /**
     * Called when the request fails.
     *
     * @param e the exception describing the failure
     */
    void onError(Exception e);
}
import java.util.HashMap;

/**
 * Minimal in-memory banking system. Accounts are stored in a map keyed by a
 * sequentially assigned account number starting at 1. Errors are reported on
 * System.out to preserve the original console-oriented contract.
 *
 * <p>Not thread-safe.
 */
public class BankingSystem {
    private HashMap<Integer, BankAccount> accounts; // accountNumber -> account
    private int accountCounter;                     // next account number to assign

    public BankingSystem() {
        accounts = new HashMap<>();
        accountCounter = 1;
    }

    /**
     * Opens an account for {@code name} with {@code initialBalance} and
     * assigns it the next sequential account number (1, 2, ...).
     */
    public void createAccount(String name, double initialBalance) {
        accounts.put(accountCounter, new BankAccount(accountCounter, name, initialBalance));
        accountCounter++;
    }

    /**
     * Adds {@code amount} to the given account's balance.
     *
     * <p>Fix over the original: non-positive amounts are rejected instead of
     * being blindly added (a negative "deposit" used to drain the account).
     */
    public void deposit(int accountNumber, double amount) {
        BankAccount account = accounts.get(accountNumber);
        if (account == null) {
            System.out.println("Account not found");
            return;
        }
        if (amount <= 0) {
            System.out.println("Invalid amount");
            return;
        }
        account.setBalance(account.getBalance() + amount);
    }

    /**
     * Removes {@code amount} from the account when funds suffice.
     *
     * <p>Fix over the original: non-positive amounts are rejected (a negative
     * "withdrawal" used to act as an unchecked deposit).
     */
    public void withdraw(int accountNumber, double amount) {
        BankAccount account = accounts.get(accountNumber);
        if (account == null) {
            System.out.println("Account not found");
            return;
        }
        if (amount <= 0) {
            System.out.println("Invalid amount");
            return;
        }
        if (account.getBalance() < amount) {
            System.out.println("Insufficient funds");
            return;
        }
        account.setBalance(account.getBalance() - amount);
    }

    /**
     * @return the account's balance, or -1 if the account does not exist
     *         (legacy sentinel kept for interface compatibility; note that a
     *         genuine balance of -1 is indistinguishable from "not found")
     */
    public double checkBalance(int accountNumber) {
        BankAccount account = accounts.get(accountNumber);
        if (account == null) {
            System.out.println("Account not found");
            return -1;
        }
        return account.getBalance();
    }

    /** Prints every account's number, holder and balance to System.out. */
    public void displayAllAccounts() {
        for (BankAccount account : accounts.values()) {
            System.out.println("Account Number: " + account.getAccountNumber());
            System.out.println("Account Holder: " + account.getAccountHolder());
            System.out.println("Balance: " + account.getBalance());
            System.out.println("------------------------");
        }
    }
}

/** Simple mutable data holder for a single account. */
class BankAccount {
    private int accountNumber;
    private String accountHolder;
    private double balance;

    public BankAccount(int accountNumber, String accountHolder, double balance) {
        this.accountNumber = accountNumber;
        this.accountHolder = accountHolder;
        this.balance = balance;
    }

    public int getAccountNumber() {
        return accountNumber;
    }

    public String getAccountHolder() {
        return accountHolder;
    }

    public double getBalance() {
        return balance;
    }

    public void setBalance(double balance) {
        this.balance = balance;
    }
}
# shellcheck shell=bash

# toolbelt.sh
# https://github.com/TobyGiacometti/toolbelt.sh
# Copyright (c) 2021 Toby Giacometti and contributors
# Apache License 2.0

# Backup-tool plugin interface: every public__* function below is a
# documented no-op stub (`:`) that a concrete backend is expected to replace.

#---
# Print an ID (alphanumeric characters and `_` only) that uniquely
# identifies the backup on this device.
public__print_id() { :; }

#---
# Register a backup destination.
#
# $1: destination ID used during later operations to select this
#     destination (alphanumeric characters and `_` only)
# $2: URL or path of the destination
# $3: backup ID differentiating multiple backups in the same destination
#     (alphanumeric characters and `_` only)
public__add_destination() { :; }

#---
# $1: ID of the destination that should be removed.
public__remove_destination() { :; }

#---
# Print the IDs of the available destinations separated by `/`; print
# nothing when no destinations are available.
public__print_destinations() { :; }

#---
# Print the path (without trailing newline) to a file containing
# include/exclude patterns for the backup.
public__print_filter_path() { :; }

#---
# Run a backup.
#
# $1: ID of the destination where the backup should be stored.
public__run() { :; }

#---
# List the snapshots stored in a destination, one per line:
#
#   id (yyyy-mm-dd hh:mm)
#
# e.g.:
#
#   1 (2021-03-03 03:00)
#   2 (2021-03-03 09:00)
#
# Nothing is printed when no backup has been run.
#
# $1: ID of the destination to list.
public__print_snapshots() { :; }

#---
# Restore a snapshot.
#
# $1:   ID of the destination to restore from
# $2:   ID of the snapshot to restore
# $3:   restore directory; when omitted the backup is restored to the
#       source directory
# $4..: include/exclude patterns for the restore; when omitted the full
#       backup is restored
public__restore() { :; }

#---
# Maintain the backup: remove old snapshots according to the retention
# policies and verify the integrity of the backup.
#
# $1:   ID of the destination to maintain
# $2:   retention policies, e.g. `7:1,30:7,365:30,1095:0` meaning:
#         older than 7 days    -> keep 1 snapshot per day
#         older than 30 days   -> keep 1 snapshot every 7 days
#         older than 365 days  -> keep 1 snapshot every 30 days
#         older than 1095 days -> remove all snapshots
#       when empty, old snapshots are not removed
# $3..: flags; `deep` runs a more elaborate maintenance pass that consumes
#       extra resources and can take very long, so it should not be
#       executed frequently
public__maintain() { :; }
// Integration tests for task ("tarea") creation in a Play Framework app,
// exercised against a MySQL test database that is (re)initialised with a
// DbUnit dataset before every test. HTTP-level tests run a test server on
// port 3333 and authenticate via session cookies.
import org.junit.*;
import play.test.*;
import play.Application;
import play.mvc.*;
import static play.test.Helpers.*;
import static org.junit.Assert.*;
import play.db.jpa.*;
import java.util.List;
import models.*;
import org.dbunit.*;
import org.dbunit.dataset.*;
import org.dbunit.dataset.xml.*;
import java.util.HashMap;
import java.io.FileInputStream;
import java.util.Map;
import java.util.ArrayList;
import play.libs.ws.*;

public class CrearTareasTests {

    JndiDatabaseTester databaseTester; // DbUnit tester bound to the JNDI datasource
    Application app;                   // fake Play application under test

    // Returns the settings needed to create the fake application
    // using the integration database.
    private static HashMap<String, String> settings() {
        HashMap<String, String> settings = new HashMap<String, String>();
        settings.put("db.default.url", "jdbc:mysql://localhost:3306/mads_test");
        settings.put("db.default.username", "root");
        // Original note: root has no password set.
        // NOTE(review): "<PASSWORD>" looks like a scrubbed placeholder —
        // confirm the real test-DB credential before running.
        settings.put("db.default.password", "<PASSWORD>");
        settings.put("db.default.jndiName", "DefaultDS");
        settings.put("jpa.default", "mySqlPersistenceUnit");
        return(settings);
    }

    // Creates the connection with the test database and initialises it
    // with the tables defined by the JPA entities.
    @BeforeClass
    public static void createTables() {
        Application fakeApp = Helpers.fakeApplication(settings());
        // Open a transaction so that JPA creates the tables corresponding
        // to the entities in the database.
        running (fakeApp, () -> {
            JPA.withTransaction(() -> {});
        });
    }

    // Runs before every test, initialising the database with the
    // dataset contents.
    @Before
    public void inicializaBaseDatos() throws Exception {
        app = Helpers.fakeApplication(settings());
        databaseTester = new JndiDatabaseTester("DefaultDS");
        IDataSet initialDataSet = new FlatXmlDataSetBuilder().build(new FileInputStream("test/resources/tareas_dataset_1.xml"));
        databaseTester.setDataSet(initialDataSet);
        databaseTester.onSetup();
    }

    @After
    public void cierraBaseDatos() throws Exception {
        databaseTester.onTearDown();
    }

    // Creating a task through the DAO returns the persisted task linked
    // to its owner.
    @Test
    public void testCreaTareaDevuelveTarea() {
        running (app, () -> {
            JPA.withTransaction(() -> {
                Usuario usuario = UsuarioDAO.find(1);
                Tarea tarea = new Tarea("Una tarea muy chula",usuario);
                tarea = TareaDAO.create(tarea);
                assertEquals(tarea.descripcion,"Una tarea muy chula");
                usuario = tarea.usuario;
                assertEquals(usuario.login, "pepito");
                assertEquals(4,usuario.tareas.size());
            });
        });
    }

    // Same creation round-trip through the service layer for a second user.
    @Test
    public void testTareaServiceCreaTareaDevuelveTarea() {
        running (app, () -> {
            JPA.withTransaction(() -> {
                Usuario usuario = UsuarioDAO.find(2);
                Tarea tarea = new Tarea("Una tarea muy chula",usuario);
                tarea = TareaService.grabaTarea(tarea);
                assertEquals(tarea.descripcion,"Una tarea muy chula");
                usuario = tarea.usuario;
                assertEquals(usuario.login, "julia");
                assertEquals(2,usuario.tareas.size());
            });
        });
    }

    // The "new task" page renders for a logged-in user.
    @Test
    public void testWebPaginaCrearTarea() {
        running(testServer(3333, app), () -> {
            int timeout = 4000;
            WSResponse response = WS
                .url("http://localhost:3333/usuarios/1/tareas/nueva")
                .setHeader("Cookie",WSUtils.getSessionCookie("pepito","perez"))
                .get()
                .get(timeout);
            assertEquals(OK, response.getStatus());
            String body = response.getBody();
            assertTrue(body.contains("<h3>Nueva tarea para el usuario pepito</h3>"));
        });
    }

    // Posting the creation form adds the task to the user's list.
    @Test
    public void testWebCreaTareaEnForm() {
        running(testServer(3333, app), () -> {
            int timeout = 4000;
            WSResponse response = WS
                .url("http://localhost:3333/tareas/nueva")
                .setFollowRedirects(false)
                .setHeader("Cookie",WSUtils.getSessionCookie("pepito","perez"))
                .setContentType("application/x-www-form-urlencoded")
                .post("descripcion=Hay que refactorizar amigos&id_usuario=1")
                .get(timeout);
            response = WS
                .url("http://localhost:3333/usuarios/1/tareas")
                .setHeader("Cookie",WSUtils.getSessionCookie("pepito","perez"))
                .get()
                .get(timeout);
            assertEquals(OK, response.getStatus());
            assertTrue(response.getBody().contains("Hay que refactorizar amigos"));
        });
    }

    // Tag handling on creation: another user's tag is rejected, a malformed
    // tag list is a bad request, and owned tags end up attached to the task.
    @Test
    public void testWebCreaTareaEnFormConTag() {
        running(testServer(3333, app), () -> {
            JPA.withTransaction(() -> {
                int timeout = 4000;
                WSResponse response = WS
                    .url("http://localhost:3333/tareas/nueva")
                    .setFollowRedirects(false)
                    .setHeader("Cookie",WSUtils.getSessionCookie("pepito","perez"))
                    .setContentType("application/x-www-form-urlencoded")
                    .post("descripcion=Hay que refactorizar amigos&tags=1;2;3;&&id_usuario=1")
                    .get(timeout);
                assertEquals(UNAUTHORIZED, response.getStatus()); // tag 2 is not his
                response = WS
                    .url("http://localhost:3333/tareas/nueva")
                    .setFollowRedirects(false)
                    .setHeader("Cookie",WSUtils.getSessionCookie("pepito","perez"))
                    .setContentType("application/x-www-form-urlencoded")
                    .post("descripcion=Hay que refactorizar amigos&tags=hola;&&id_usuario=1")
                    .get(timeout);
                assertEquals(BAD_REQUEST, response.getStatus()); // "hola" is not a valid tag list
                response = WS
                    .url("http://localhost:3333/tareas/nueva")
                    .setFollowRedirects(false)
                    .setHeader("Cookie",WSUtils.getSessionCookie("pepito","perez"))
                    .setContentType("application/x-www-form-urlencoded")
                    .post("descripcion=Hay que refactorizar amigos&tags=1;3;&&id_usuario=1")
                    .get(timeout);
                assertEquals(303, response.getStatus()); // tags 1 and 3 are his, so redirect
                // fetch the user's tasks
                List<Tarea> tareas = TareaService.findAllTareasUsuario(1);
                Tarea tarea = null;
                // find the newly created one
                for(int i=0; i<tareas.size(); i++) {
                    if(tareas.get(i).descripcion.equals("Hay que refactorizar amigos")) {
                        tarea=tareas.get(i);
                    }
                }
                // check it carries both tags
                assertEquals(2, tarea.etiquetas.size());
            });
        });
    }

    // Deleting a tag must also detach it from tasks that referenced it.
    @Test
    public void testWebCreaTareaPropagaBorradoTag() {
        running(testServer(3333, app), () -> {
            JPA.withTransaction(() -> {
                int timeout = 4000;
                WSResponse response = WS
                    .url("http://localhost:3333/tareas/nueva")
                    .setFollowRedirects(false)
                    .setHeader("Cookie",WSUtils.getSessionCookie("pepito","perez"))
                    .setContentType("application/x-www-form-urlencoded")
                    .post("descripcion=Hay que refactorizar amigos&tags=1;&&id_usuario=1")
                    .get(timeout);
                assertEquals(303, response.getStatus());
                response = WS
                    .url("http://localhost:3333/usuarios/1/tareas")
                    .setHeader("Cookie",WSUtils.getSessionCookie("pepito","perez"))
                    .get()
                    .get(timeout);
                assertEquals(OK, response.getStatus());
                assertTrue(response.getBody().contains("Hay que refactorizar amigos"));
                // NOTE(review): `e` is never used — presumably kept to show the
                // tag exists before it is deleted; verify intent.
                Etiqueta e = EtiquetaDAO.find(1);
                EtiquetaDAO.delete(1); // delete tag 1
                // fetch user 1's tasks
                List<Tarea> tareas = TareaService.findAllTareasUsuario(1);
                Tarea tarea = null;
                // find the newly created one
                for(int i=0; i<tareas.size(); i++) {
                    if(tareas.get(i).descripcion.equals("Hay que refactorizar amigos")) {
                        tarea=tareas.get(i);
                    }
                }
                // check that deleting the tag also removed it from the task
                assertEquals(0, tarea.etiquetas.size());
            });
        });
    }
}
#!/bin/sh
# Copyright (c) 2014-2015 The Syndicate Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Attaches a set of detached macOS code signatures to an unsigned app
# tarball: unpacks the tarball, allocates signature space in each target
# binary with codesign_allocate, writes the signature bytes at the offset
# reported by pagestuff, and emits the signed bundle under signed-app/.
set -e

UNSIGNED="$1"     # path to the unsigned app tarball
SIGNATURE="$2"    # directory containing the *.sign files (plus helper tools)
ARCH=x86_64
ROOTDIR=dist
TEMPDIR=signed.temp
OUTDIR=signed-app

# Both arguments are mandatory.
if [ -z "$UNSIGNED" ] || [ -z "$SIGNATURE" ]; then
  echo "usage: $0 <unsigned app> <signature>"
  exit 1
fi

rm -rf ${TEMPDIR} && mkdir -p ${TEMPDIR}
tar -C ${TEMPDIR} -xf ${UNSIGNED}
cp -rf "${SIGNATURE}"/* ${TEMPDIR}

# Helper binaries can be overridden via the environment; otherwise use the
# copies shipped alongside the signatures.
if [ -z "${PAGESTUFF}" ]; then
  PAGESTUFF=${TEMPDIR}/pagestuff
fi
if [ -z "${CODESIGN_ALLOCATE}" ]; then
  CODESIGN_ALLOCATE=${TEMPDIR}/codesign_allocate
fi

find ${TEMPDIR} -name "*.sign" | while read i; do
  SIZE=$(stat -c %s "${i}")
  TARGET_FILE="$(echo "${i}" | sed 's/\.sign$//')"
  echo "Allocating space for the signature of size ${SIZE} in ${TARGET_FILE}"
  # Reserve space for the signature inside the Mach-O binary.
  ${CODESIGN_ALLOCATE} -i "${TARGET_FILE}" -a ${ARCH} ${SIZE} -o "${i}.tmp"
  # pagestuff reports where the reserved signature region starts.
  OFFSET=$(${PAGESTUFF} "${i}.tmp" -p | tail -2 | grep offset | sed 's/[^0-9]*//g')
  if [ -z ${QUIET} ]; then
    echo "Attaching signature at offset ${OFFSET}"
  fi
  # Splice the detached signature bytes into the reserved region.
  dd if="$i" of="${i}.tmp" bs=1 seek=${OFFSET} count=${SIZE} 2>/dev/null
  mv "${i}.tmp" "${TARGET_FILE}"
  rm "${i}"
  echo "Success."
done

mv ${TEMPDIR}/${ROOTDIR} ${OUTDIR}
rm -rf ${TEMPDIR}
echo "Signed: ${OUTDIR}"
package auth

import (
	"fmt"

	"github.com/MakeNowJust/heredoc"
	"github.com/spf13/cobra"
)

// AlibabaCloudOptions carries the credential pair collected by the login
// subcommand.
type AlibabaCloudOptions struct {
	AccessKey    string
	AccessSecret string
}

// NewAlibabaCloudAuthCmd builds the `aliyun` command group with its
// login, logout and show subcommands.
func NewAlibabaCloudAuthCmd() *cobra.Command {
	root := &cobra.Command{
		Use:   "aliyun",
		Short: "Login, logout, and refresh your authentication",
		Long: heredoc.Doc(`
			manage authentication.
		`),
	}
	root.AddCommand(newAlibabaCloudLoginCmd())
	root.AddCommand(newAlibabaCloudLogoutCmd())
	root.AddCommand(newAlibabaCloudShowCmd())
	return root
}

// newAlibabaCloudLoginCmd builds the `login` subcommand, which records the
// access key/secret passed as the two positional arguments.
func newAlibabaCloudLoginCmd() *cobra.Command {
	options := &AlibabaCloudOptions{}
	login := &cobra.Command{
		Use:     "login",
		Args:    cobra.ExactArgs(2),
		Short:   "Authenticate with a Alibaba Cloud,Please Use `login <accessKey> <accessToken>`",
		Long:    heredoc.Doc("Authenticate with a Alibaba Cloud"),
		Example: heredoc.Doc(""),
		RunE: func(cmd *cobra.Command, args []string) error {
			options.AccessKey = args[0]
			options.AccessSecret = args[1]
			return nil
		},
	}
	login.PersistentFlags().StringVarP(&options.AccessKey, "key", "k", "", "AccessKey")
	login.PersistentFlags().StringVarP(&options.AccessSecret, "secret", "s", "", "AccessSecret")
	login.MarkFlagRequired("key")
	login.MarkFlagRequired("secret")
	return login
}

// newAlibabaCloudShowCmd builds the `show` subcommand.
func newAlibabaCloudShowCmd() *cobra.Command {
	show := &cobra.Command{
		Use:     "show",
		Args:    cobra.ExactArgs(0),
		Short:   "Show your Authenticate Token For Alibaba Cloud ",
		Long:    heredoc.Doc("Show Authenticate For Alibaba Cloud"),
		Example: heredoc.Doc(""),
		RunE: func(cmd *cobra.Command, args []string) error {
			fmt.Println("Alibaba Cloud Access_Key,Access_Secret")
			return nil
		},
	}
	return show
}

// newAlibabaCloudLogoutCmd builds the `logout` subcommand.
func newAlibabaCloudLogoutCmd() *cobra.Command {
	logout := &cobra.Command{
		Use:     "logout",
		Args:    cobra.ExactArgs(0),
		Short:   "Logout with Alibaba Cloud",
		Long:    heredoc.Doc("Logout with Alibaba Cloud"),
		Example: heredoc.Doc(""),
		RunE: func(cmd *cobra.Command, args []string) error {
			fmt.Println("Logout with Alibaba Cloud")
			return nil
		},
	}
	return logout
}
def on_cancel(self, button):
    """Abort the in-progress operation and restore the dialog's controls.

    Stops the busy spinner, re-enables the widgets that were locked while
    the operation ran, and closes the dialog.

    Args:
        button: The button widget that triggered the cancel action
            (unused; presumably a Gtk signal handler argument — verify
            against the connecting code).
    """
    # Stop the spinner animation
    self.spinner.stop()
    # Enable the specified buttons for user interaction
    self.friend_info.set_sensitive(True)
    self.save_bt.set_sensitive(True)
    self.close_bt.set_sensitive(True)
    self.generate_keys_bt.set_sensitive(True)
    self.copy_clipboard_bt.set_sensitive(True)
    # Add code to handle the cancel action, such as closing a dialog or resetting the input fields
    # For example, if this method is part of a dialog window, you can close the dialog
    self.dialog.close()
#!/bin/bash
# converted from: ../cloudbiolinux/ggd-recipes/hg19/platinum-genome-NA12878.yaml
#
# GGD recipe: installs the Illumina Platinum Genomes NA12878 truth set
# (hg19) into $PREFIX/share/ggd — downloads the truth VCF and the
# confident-regions BED, coordinate-sorts both against the hg19 genome
# file, then bgzip-compresses and tabix-indexes them.
set -eo pipefail

# Work inside the GGD data directory for hg19.
mkdir -p $PREFIX/share/ggd/Homo_sapiens/hg19/ && cd $PREFIX/share/ggd/Homo_sapiens/hg19/
dir=validation/platinum-genome-NA12878
mkdir -p $dir

# Raw downloads from the Illumina Platinum Genomes FTP (read-only account,
# empty password; -c resumes partial downloads).
wget --quiet --no-check-certificate -c -O $dir/truth_small_variants.vcf.gz ftp://platgene_ro:@ussd-ftp.illumina.com/hg19/8.0.1/NA12878/NA12878.vcf.gz
wget --quiet --no-check-certificate -c -O - ftp://platgene_ro:@ussd-ftp.illumina.com/hg19/8.0.1/NA12878/ConfidentRegions.bed.gz | gunzip -c > $dir/truth_regions.bed

cd $dir/
# Chromosome-order file used by gsort.
wget --quiet https://raw.githubusercontent.com/gogetdata/ggd-recipes/master/genomes/hg19/hg19.genome

# Sort into hg19 chromosome order in a scratch dir, then swap the results in.
mkdir tmp
gsort truth_small_variants.vcf.gz hg19.genome | bgzip -c > tmp/truth_small_variants.vcf.gz
gsort truth_regions.bed hg19.genome | bgzip -c > tmp/truth_regions.bed.gz
mv tmp/* .
rm -r tmp

# Index the sorted outputs and drop intermediates.
tabix truth_small_variants.vcf.gz
tabix truth_regions.bed.gz
rm truth_regions.bed hg19.genome
import { CommonModule } from '@angular/common'; import { FormsModule } from '@angular/forms'; import { ChangeDetectionStrategy, Component, EventEmitter, Input, Output, ViewChild, NgModule } from '@angular/core'; import { __awaiter, __generator, __spread, __extends, __values } from 'tslib'; import { BrowserMultiFormatReader, ChecksumException, FormatException, NotFoundException, ArgumentException, BarcodeFormat, DecodeHintType } from '@zxing/library'; import { BehaviorSubject } from 'rxjs'; /** * @fileoverview added by tsickle * @suppress {checkTypes,extraRequire,missingReturn,uselessCode} checked by tsc */ /** * Based on zxing-typescript BrowserCodeReader */ var /** * Based on zxing-typescript BrowserCodeReader */ BrowserMultiFormatContinuousReader = /** @class */ (function (_super) { __extends(BrowserMultiFormatContinuousReader, _super); function BrowserMultiFormatContinuousReader() { var _this = _super !== null && _super.apply(this, arguments) || this; /** * Says if there's a torch available for the current device. */ _this._isTorchAvailable = new BehaviorSubject(undefined); return _this; } Object.defineProperty(BrowserMultiFormatContinuousReader.prototype, "isTorchAvailable", { /** * Exposes _tochAvailable . */ get: /** * Exposes _tochAvailable . * @return {?} */ function () { return this._isTorchAvailable.asObservable(); }, enumerable: true, configurable: true }); /** * Starts the decoding from the current or a new video element. * * @param callbackFn The callback to be executed after every scan attempt * @param deviceId The device's to be used Id * @param videoSource A new video element */ /** * Starts the decoding from the current or a new video element. * * @param {?=} deviceId The device's to be used Id * @param {?=} videoSource A new video element * @return {?} */ BrowserMultiFormatContinuousReader.prototype.continuousDecodeFromInputVideoDevice = /** * Starts the decoding from the current or a new video element. 
* * @param {?=} deviceId The device's to be used Id * @param {?=} videoSource A new video element * @return {?} */ function (deviceId, videoSource) { var _this = this; this.reset(); // Keeps the deviceId between scanner resets. if (typeof deviceId !== 'undefined') { this.deviceId = deviceId; } if (typeof navigator === 'undefined') { return; } /** @type {?} */ var scan$ = new BehaviorSubject({}); try { // this.decodeFromInputVideoDeviceContinuously(deviceId, videoSource, (result, error) => scan$.next({ result, error })); this.getStreamForDevice({ deviceId: deviceId }) .then(function (stream) { return _this.attachStreamToVideoAndCheckTorch(stream, videoSource); }) .then(function (videoElement) { return _this.decodeOnSubject(scan$, videoElement, _this.timeBetweenScansMillis); }); } catch (e) { scan$.error(e); } this._setScanStream(scan$); // @todo Find a way to emit a complete event on the scan stream once it's finished. return scan$.asObservable(); }; /** * Gets the media stream for certain device. * Falls back to any available device if no `deviceId` is defined. */ /** * Gets the media stream for certain device. * Falls back to any available device if no `deviceId` is defined. * @param {?} __0 * @return {?} */ BrowserMultiFormatContinuousReader.prototype.getStreamForDevice = /** * Gets the media stream for certain device. * Falls back to any available device if no `deviceId` is defined. * @param {?} __0 * @return {?} */ function (_a) { var deviceId = _a.deviceId; return __awaiter(this, void 0, void 0, function () { var constraints, stream; return __generator(this, function (_b) { switch (_b.label) { case 0: constraints = this.getUserMediaConstraints(deviceId); return [4 /*yield*/, navigator.mediaDevices.getUserMedia(constraints)]; case 1: stream = _b.sent(); return [2 /*return*/, stream]; } }); }); }; /** * Creates media steram constraints for certain `deviceId`. * Falls back to any environment available device if no `deviceId` is defined. 
*/ /** * Creates media steram constraints for certain `deviceId`. * Falls back to any environment available device if no `deviceId` is defined. * @param {?} deviceId * @return {?} */ BrowserMultiFormatContinuousReader.prototype.getUserMediaConstraints = /** * Creates media steram constraints for certain `deviceId`. * Falls back to any environment available device if no `deviceId` is defined. * @param {?} deviceId * @return {?} */ function (deviceId) { /** @type {?} */ var video = typeof deviceId === 'undefined' ? { facingMode: { exact: 'environment' } } : { deviceId: { exact: deviceId } }; /** @type {?} */ var constraints = { video: video }; return constraints; }; /** * Enables and disables the device torch. */ /** * Enables and disables the device torch. * @param {?} on * @return {?} */ BrowserMultiFormatContinuousReader.prototype.setTorch = /** * Enables and disables the device torch. * @param {?} on * @return {?} */ function (on) { if (!this._isTorchAvailable.value) { // compatibility not checked yet return; } /** @type {?} */ var tracks = this.getVideoTracks(this.stream); if (on) { this.applyTorchOnTracks(tracks, true); } else { this.applyTorchOnTracks(tracks, false); // @todo check possibility to disable torch without restart this.restart(); } }; /** * Update the torch compatibility state and attachs the stream to the preview element. */ /** * Update the torch compatibility state and attachs the stream to the preview element. * @param {?} stream * @param {?} videoSource * @return {?} */ BrowserMultiFormatContinuousReader.prototype.attachStreamToVideoAndCheckTorch = /** * Update the torch compatibility state and attachs the stream to the preview element. * @param {?} stream * @param {?} videoSource * @return {?} */ function (stream, videoSource) { this.updateTorchCompatibility(stream); return this.attachStreamToVideo(stream, videoSource); }; /** * Checks if the stream supports torch control. * * @param stream The media stream used to check. 
*/ /** * Checks if the stream supports torch control. * * @param {?} stream The media stream used to check. * @return {?} */ BrowserMultiFormatContinuousReader.prototype.updateTorchCompatibility = /** * Checks if the stream supports torch control. * * @param {?} stream The media stream used to check. * @return {?} */ function (stream) { return __awaiter(this, void 0, void 0, function () { var e_1, _a, tracks, tracks_1, tracks_1_1, track, e_1_1; return __generator(this, function (_b) { switch (_b.label) { case 0: tracks = this.getVideoTracks(stream); _b.label = 1; case 1: _b.trys.push([1, 6, 7, 8]); tracks_1 = __values(tracks), tracks_1_1 = tracks_1.next(); _b.label = 2; case 2: if (!!tracks_1_1.done) return [3 /*break*/, 5]; track = tracks_1_1.value; return [4 /*yield*/, this.isTorchCompatible(track)]; case 3: if (_b.sent()) { this._isTorchAvailable.next(true); return [3 /*break*/, 5]; } _b.label = 4; case 4: tracks_1_1 = tracks_1.next(); return [3 /*break*/, 2]; case 5: return [3 /*break*/, 8]; case 6: e_1_1 = _b.sent(); e_1 = { error: e_1_1 }; return [3 /*break*/, 8]; case 7: try { if (tracks_1_1 && !tracks_1_1.done && (_a = tracks_1.return)) _a.call(tracks_1); } finally { if (e_1) throw e_1.error; } return [7 /*endfinally*/]; case 8: return [2 /*return*/]; } }); }); }; /** * * @param stream The video stream where the tracks gonna be extracted from. */ /** * * @param {?} stream The video stream where the tracks gonna be extracted from. * @return {?} */ BrowserMultiFormatContinuousReader.prototype.getVideoTracks = /** * * @param {?} stream The video stream where the tracks gonna be extracted from. * @return {?} */ function (stream) { /** @type {?} */ var tracks = []; try { tracks = stream.getVideoTracks(); } finally { return tracks || []; } }; /** * * @param track The track that will be checked for compatibility. */ /** * * @param {?} track The track that will be checked for compatibility. 
* @return {?} */ BrowserMultiFormatContinuousReader.prototype.isTorchCompatible = /** * * @param {?} track The track that will be checked for compatibility. * @return {?} */ function (track) { return __awaiter(this, void 0, void 0, function () { var compatible, imageCapture, capabilities; return __generator(this, function (_a) { switch (_a.label) { case 0: compatible = false; _a.label = 1; case 1: _a.trys.push([1, , 3, 4]); imageCapture = new ImageCapture(track); return [4 /*yield*/, imageCapture.getPhotoCapabilities()]; case 2: capabilities = _a.sent(); compatible = !!capabilities['torch'] || ('fillLightMode' in capabilities && capabilities.fillLightMode.length !== 0); return [3 /*break*/, 4]; case 3: return [2 /*return*/, compatible]; case 4: return [2 /*return*/]; } }); }); }; /** * Apply the torch setting in all received tracks. */ /** * Apply the torch setting in all received tracks. * @param {?} tracks * @param {?} state * @return {?} */ BrowserMultiFormatContinuousReader.prototype.applyTorchOnTracks = /** * Apply the torch setting in all received tracks. * @param {?} tracks * @param {?} state * @return {?} */ function (tracks, state) { tracks.forEach(function (track) { return track.applyConstraints({ advanced: [(/** @type {?} */ ({ torch: state, fillLightMode: state ? 'torch' : 'none' }))] }); }); }; /** * Correctly sets a new scanStream value. */ /** * Correctly sets a new scanStream value. * @param {?} scan$ * @return {?} */ BrowserMultiFormatContinuousReader.prototype._setScanStream = /** * Correctly sets a new scanStream value. * @param {?} scan$ * @return {?} */ function (scan$) { // cleans old stream this._cleanScanStream(); // sets new stream this.scanStream = scan$; }; /** * Cleans any old scan stream value. */ /** * Cleans any old scan stream value. * @return {?} */ BrowserMultiFormatContinuousReader.prototype._cleanScanStream = /** * Cleans any old scan stream value. 
* @return {?} */ function () { if (this.scanStream && !this.scanStream.isStopped) { this.scanStream.complete(); } this.scanStream = null; }; /** * Decodes values in a stream with delays between scans. * * @param scan$ The subject to receive the values. * @param videoElement The video element the decode will be applied. * @param delay The delay between decode results. */ /** * Decodes values in a stream with delays between scans. * * @param {?} scan$ The subject to receive the values. * @param {?} videoElement The video element the decode will be applied. * @param {?} delay The delay between decode results. * @return {?} */ BrowserMultiFormatContinuousReader.prototype.decodeOnSubject = /** * Decodes values in a stream with delays between scans. * * @param {?} scan$ The subject to receive the values. * @param {?} videoElement The video element the decode will be applied. * @param {?} delay The delay between decode results. * @return {?} */ function (scan$, videoElement, delay) { var _this = this; // stops loop if (scan$.isStopped) { return; } /** @type {?} */ var result; try { result = this.decode(videoElement); scan$.next({ result: result }); } catch (error) { // stream cannot stop on fails. if (!error || // scan Failure - found nothing, no error error instanceof NotFoundException || // scan Error - found the QR but got error on decoding error instanceof ChecksumException || error instanceof FormatException) { scan$.next({ error: error }); } else { scan$.error(error); } } finally { /** @type {?} */ var timeout = !result ? 0 : delay; setTimeout(function () { return _this.decodeOnSubject(scan$, videoElement, delay); }, timeout); } }; /** * Restarts the scanner. */ /** * Restarts the scanner. * @return {?} */ BrowserMultiFormatContinuousReader.prototype.restart = /** * Restarts the scanner. 
* @return {?} */ function () { // reset // start return this.continuousDecodeFromInputVideoDevice(this.deviceId, this.videoElement); }; return BrowserMultiFormatContinuousReader; }(BrowserMultiFormatReader)); /** * @fileoverview added by tsickle * @suppress {checkTypes,extraRequire,missingReturn,uselessCode} checked by tsc */ var ZXingScannerComponent = /** @class */ (function () { /** * Constructor to build the object and do some DI. */ function ZXingScannerComponent() { /** * How the preview element shoud be fit inside the :host container. */ this.previewFitMode = 'cover'; // instance based emitters this.autostarted = new EventEmitter(); this.autostarting = new EventEmitter(); this.torchCompatible = new EventEmitter(); this.scanSuccess = new EventEmitter(); this.scanFailure = new EventEmitter(); this.scanError = new EventEmitter(); this.scanComplete = new EventEmitter(); this.camerasFound = new EventEmitter(); this.camerasNotFound = new EventEmitter(); this.permissionResponse = new EventEmitter(true); this.hasDevices = new EventEmitter(); this.deviceChange = new EventEmitter(); this._device = null; this._enabled = true; this._hints = new Map(); this.autofocusEnabled = true; this.autostart = true; this.formats = [BarcodeFormat.QR_CODE]; // computed data this.hasNavigator = typeof navigator !== 'undefined'; this.isMediaDevicesSuported = this.hasNavigator && !!navigator.mediaDevices; } Object.defineProperty(ZXingScannerComponent.prototype, "codeReader", { /** * Exposes the current code reader, so the user can use it's APIs. */ get: /** * Exposes the current code reader, so the user can use it's APIs. * @return {?} */ function () { return this._codeReader; }, enumerable: true, configurable: true }); Object.defineProperty(ZXingScannerComponent.prototype, "device", { /** * User device acessor. */ get: /** * User device acessor. 
* @return {?} */ function () { return this._device; }, /** * User device input */ set: /** * User device input * @param {?} device * @return {?} */ function (device) { if (!device && device !== null) { throw new ArgumentException('The `device` must be a valid MediaDeviceInfo or null.'); } if (this.isCurrentDevice(device)) { console.warn('Setting the same device is not allowed.'); return; } if (this.isAutostarting) { // do not allow setting devices during auto-start, since it will set one and emit it. console.warn('Avoid setting a device during auto-start.'); return; } if (!this.hasPermission) { console.warn('Permissions not set yet, waiting for them to be set to apply device change.'); // this.permissionResponse // .pipe( // take(1), // tap(() => console.log(`Permissions set, applying device change${device ? ` (${device.deviceId})` : ''}.`)) // ) // .subscribe(() => this.device = device); // return; } // in order to change the device the codeReader gotta be reseted this._reset(); this._device = device; // if enabled, starts scanning if (this._enabled && device !== null) { this.scanFromDevice(device.deviceId); } }, enumerable: true, configurable: true }); Object.defineProperty(ZXingScannerComponent.prototype, "formats", { /** * Returns all the registered formats. */ get: /** * Returns all the registered formats. * @return {?} */ function () { return this.hints.get(DecodeHintType.POSSIBLE_FORMATS); }, /** * Registers formats the scanner should support. * * @param input BarcodeFormat or case-insensitive string array. */ set: /** * Registers formats the scanner should support. * * @param {?} input BarcodeFormat or case-insensitive string array. 
* @return {?} */ function (input) { var _this = this; if (typeof input === 'string') { throw new Error('Invalid formats, make sure the [formats] input is a binding.'); } // formats may be set from html template as BarcodeFormat or string array /** @type {?} */ var formats = input.map(function (f) { return _this.getBarcodeFormatOrFail(f); }); /** @type {?} */ var hints = this.hints; // updates the hints hints.set(DecodeHintType.POSSIBLE_FORMATS, formats); this.hints = hints; }, enumerable: true, configurable: true }); Object.defineProperty(ZXingScannerComponent.prototype, "hints", { /** * Returns all the registered hints. */ get: /** * Returns all the registered hints. * @return {?} */ function () { return this._hints; }, /** * Does what it takes to set the hints. */ set: /** * Does what it takes to set the hints. * @param {?} hints * @return {?} */ function (hints) { this._hints = hints; // @note avoid restarting the code reader when possible // new instance with new hints. this.restart(); }, enumerable: true, configurable: true }); Object.defineProperty(ZXingScannerComponent.prototype, "isAutostarting", { /** * */ set: /** * * @param {?} state * @return {?} */ function (state) { this._isAutostarting = state; this.autostarting.next(state); }, enumerable: true, configurable: true }); Object.defineProperty(ZXingScannerComponent.prototype, "isAutstarting", { /** * */ get: /** * * @return {?} */ function () { return this._isAutostarting; }, enumerable: true, configurable: true }); Object.defineProperty(ZXingScannerComponent.prototype, "torch", { /** * Allow start scan or not. */ set: /** * Allow start scan or not. * @param {?} on * @return {?} */ function (on) { this.getCodeReader().setTorch(on); }, enumerable: true, configurable: true }); Object.defineProperty(ZXingScannerComponent.prototype, "enable", { /** * Allow start scan or not. */ set: /** * Allow start scan or not. 
* @param {?} enabled * @return {?} */ function (enabled) { this._enabled = Boolean(enabled); if (!this._enabled) { this.reset(); } else if (this.device) { this.scanFromDevice(this.device.deviceId); } }, enumerable: true, configurable: true }); Object.defineProperty(ZXingScannerComponent.prototype, "enabled", { /** * Tells if the scanner is enabled or not. */ get: /** * Tells if the scanner is enabled or not. * @return {?} */ function () { return this._enabled; }, enumerable: true, configurable: true }); Object.defineProperty(ZXingScannerComponent.prototype, "tryHarder", { /** * If is `tryHarder` enabled. */ get: /** * If is `tryHarder` enabled. * @return {?} */ function () { return this.hints.get(DecodeHintType.TRY_HARDER); }, /** * Enable/disable tryHarder hint. */ set: /** * Enable/disable tryHarder hint. * @param {?} enable * @return {?} */ function (enable) { /** @type {?} */ var hints = this.hints; if (enable) { hints.set(DecodeHintType.TRY_HARDER, true); } else { hints.delete(DecodeHintType.TRY_HARDER); } this.hints = hints; }, enumerable: true, configurable: true }); /** * Gets and registers all cammeras. */ /** * Gets and registers all cammeras. * @return {?} */ ZXingScannerComponent.prototype.askForPermission = /** * Gets and registers all cammeras. 
* @return {?} */ function () { return __awaiter(this, void 0, void 0, function () { var stream, permission, err_1; return __generator(this, function (_a) { switch (_a.label) { case 0: if (!this.hasNavigator) { console.error('@zxing/ngx-scanner', 'Can\'t ask permission, navigator is not present.'); this.setPermission(null); return [2 /*return*/, this.hasPermission]; } if (!this.isMediaDevicesSuported) { console.error('@zxing/ngx-scanner', 'Can\'t get user media, this is not supported.'); this.setPermission(null); return [2 /*return*/, this.hasPermission]; } _a.label = 1; case 1: _a.trys.push([1, 3, 4, 5]); return [4 /*yield*/, this.getAnyVideoDevice()]; case 2: // Will try to ask for permission stream = _a.sent(); permission = !!stream; return [3 /*break*/, 5]; case 3: err_1 = _a.sent(); return [2 /*return*/, this.handlePermissionException(err_1)]; case 4: this.terminateStream(stream); return [7 /*endfinally*/]; case 5: this.setPermission(permission); // Returns the permission return [2 /*return*/, permission]; } }); }); }; /** * */ /** * * @return {?} */ ZXingScannerComponent.prototype.getAnyVideoDevice = /** * * @return {?} */ function () { return navigator.mediaDevices.getUserMedia({ video: true }); }; /** * Terminates a stream and it's tracks. */ /** * Terminates a stream and it's tracks. * @param {?} stream * @return {?} */ ZXingScannerComponent.prototype.terminateStream = /** * Terminates a stream and it's tracks. * @param {?} stream * @return {?} */ function (stream) { if (stream) { stream.getTracks().forEach(function (t) { return t.stop(); }); } stream = undefined; }; /** * Initializes the component without starting the scanner. */ /** * Initializes the component without starting the scanner. * @return {?} */ ZXingScannerComponent.prototype.initAutostartOff = /** * Initializes the component without starting the scanner. 
* @return {?} */ function () { // do not ask for permission when autostart is off this.isAutostarting = null; // just update devices information this.updateVideoInputDevices(); }; /** * Initializes the component and starts the scanner. * Permissions are asked to accomplish that. */ /** * Initializes the component and starts the scanner. * Permissions are asked to accomplish that. * @return {?} */ ZXingScannerComponent.prototype.initAutostartOn = /** * Initializes the component and starts the scanner. * Permissions are asked to accomplish that. * @return {?} */ function () { return __awaiter(this, void 0, void 0, function () { var hasPermission, e_1, devices; return __generator(this, function (_a) { switch (_a.label) { case 0: this.isAutostarting = true; _a.label = 1; case 1: _a.trys.push([1, 3, , 4]); return [4 /*yield*/, this.askForPermission()]; case 2: // Asks for permission before enumerating devices so it can get all the device's info hasPermission = _a.sent(); return [3 /*break*/, 4]; case 3: e_1 = _a.sent(); console.error('Exception occurred while asking for permission:', e_1); return [2 /*return*/]; case 4: if (!hasPermission) return [3 /*break*/, 6]; return [4 /*yield*/, this.updateVideoInputDevices()]; case 5: devices = _a.sent(); this.autostartScanner(__spread(devices)); _a.label = 6; case 6: return [2 /*return*/]; } }); }); }; /** * Checks if the given device is the current defined one. */ /** * Checks if the given device is the current defined one. * @param {?} device * @return {?} */ ZXingScannerComponent.prototype.isCurrentDevice = /** * Checks if the given device is the current defined one. * @param {?} device * @return {?} */ function (device) { return this.device && device && device.deviceId === this.device.deviceId; }; /** * Executed after the view initialization. */ /** * Executed after the view initialization. * @return {?} */ ZXingScannerComponent.prototype.ngAfterViewInit = /** * Executed after the view initialization. 
* @return {?} */ function () { var _this = this; // makes torch availability information available to user this.getCodeReader().isTorchAvailable.subscribe(function (x) { return _this.torchCompatible.emit(x); }); if (!this.autostart) { console.warn('New feature \'autostart\' disabled, be careful. Permissions and devices recovery has to be run manually.'); // does the necessary configuration without autostarting this.initAutostartOff(); return; } // configurates the component and starts the scanner this.initAutostartOn(); }; /** * Executes some actions before destroy the component. */ /** * Executes some actions before destroy the component. * @return {?} */ ZXingScannerComponent.prototype.ngOnDestroy = /** * Executes some actions before destroy the component. * @return {?} */ function () { this.reset(); }; /** * Stops old `codeReader` and starts scanning in a new one. */ /** * Stops old `codeReader` and starts scanning in a new one. * @return {?} */ ZXingScannerComponent.prototype.restart = /** * Stops old `codeReader` and starts scanning in a new one. * @return {?} */ function () { /** @type {?} */ var prevDevice = this._reset(); if (!prevDevice) { return; } // @note apenas necessario por enquanto causa da Torch this._codeReader = undefined; this.device = prevDevice; }; /** * Discovers and updates known video input devices. */ /** * Discovers and updates known video input devices. * @return {?} */ ZXingScannerComponent.prototype.updateVideoInputDevices = /** * Discovers and updates known video input devices. 
* @return {?} */ function () { return __awaiter(this, void 0, void 0, function () { var devices, hasDevices; return __generator(this, function (_a) { switch (_a.label) { case 0: // permissions aren't needed to get devices, but to access them and their info return [4 /*yield*/, this.getCodeReader().listVideoInputDevices()]; case 1: devices = (_a.sent()) || []; hasDevices = devices && devices.length > 0; // stores discovered devices and updates information this.hasDevices.next(hasDevices); this.camerasFound.next(__spread(devices)); if (!hasDevices) { this.camerasNotFound.next(); } return [2 /*return*/, devices]; } }); }); }; /** * Starts the scanner with the back camera otherwise take the last * available device. */ /** * Starts the scanner with the back camera otherwise take the last * available device. * @param {?} devices * @return {?} */ ZXingScannerComponent.prototype.autostartScanner = /** * Starts the scanner with the back camera otherwise take the last * available device. * @param {?} devices * @return {?} */ function (devices) { /** @type {?} */ var matcher = function (_a) { var label = _a.label; return /back|trás|rear|traseira|environment|ambiente/gi.test(label); }; // select the rear camera by default, otherwise take the last camera. /** @type {?} */ var device = devices.find(matcher) || devices.pop(); if (!device) { throw new Error('Impossible to autostart, no input devices available.'); } this.device = device; // @note when listening to this change, callback code will sometimes run before the previous line. this.deviceChange.emit(device); this.isAutostarting = false; this.autostarted.next(); }; /** * Dispatches the scan success event. * * @param result the scan result. */ /** * Dispatches the scan success event. * * @param {?} result the scan result. * @return {?} */ ZXingScannerComponent.prototype.dispatchScanSuccess = /** * Dispatches the scan success event. * * @param {?} result the scan result. 
* @return {?} */ function (result) { this.scanSuccess.next(result.getText()); }; /** * Dispatches the scan failure event. */ /** * Dispatches the scan failure event. * @param {?=} reason * @return {?} */ ZXingScannerComponent.prototype.dispatchScanFailure = /** * Dispatches the scan failure event. * @param {?=} reason * @return {?} */ function (reason) { this.scanFailure.next(reason); }; /** * Dispatches the scan error event. * * @param error the error thing. */ /** * Dispatches the scan error event. * * @param {?} error the error thing. * @return {?} */ ZXingScannerComponent.prototype.dispatchScanError = /** * Dispatches the scan error event. * * @param {?} error the error thing. * @return {?} */ function (error) { this.scanError.next(error); }; /** * Dispatches the scan event. * * @param result the scan result. */ /** * Dispatches the scan event. * * @param {?} result the scan result. * @return {?} */ ZXingScannerComponent.prototype.dispatchScanComplete = /** * Dispatches the scan event. * * @param {?} result the scan result. * @return {?} */ function (result) { this.scanComplete.next(result); }; /** * Returns the filtered permission. */ /** * Returns the filtered permission. * @param {?} err * @return {?} */ ZXingScannerComponent.prototype.handlePermissionException = /** * Returns the filtered permission. 
* @param {?} err * @return {?} */ function (err) { // failed to grant permission to video input console.error('@zxing/ngx-scanner', 'Error when asking for permission.', err); /** @type {?} */ var permission; switch (err.name) { // usually caused by not secure origins case 'NotSupportedError': console.warn('@zxing/ngx-scanner', err.message); // could not claim permission = null; // can't check devices this.hasDevices.next(null); break; // user denied permission case 'NotAllowedError': console.warn('@zxing/ngx-scanner', err.message); // claimed and denied permission permission = false; // this means that input devices exists this.hasDevices.next(true); break; // the device has no attached input devices case 'NotFoundError': console.warn('@zxing/ngx-scanner', err.message); // no permissions claimed permission = null; // because there was no devices this.hasDevices.next(false); // tells the listener about the error this.camerasNotFound.next(err); break; case 'NotReadableError': console.warn('@zxing/ngx-scanner', 'Couldn\'t read the device(s)\'s stream, it\'s probably in use by another app.'); // no permissions claimed permission = null; // there are devices, which I couldn't use this.hasDevices.next(false); // tells the listener about the error this.camerasNotFound.next(err); break; default: console.warn('@zxing/ngx-scanner', 'I was not able to define if I have permissions for camera or not.', err); // unknown permission = null; // this.hasDevices.next(undefined; break; } this.setPermission(permission); // tells the listener about the error this.permissionResponse.error(err); return permission; }; /** * Returns a valid BarcodeFormat or fails. */ /** * Returns a valid BarcodeFormat or fails. * @param {?} format * @return {?} */ ZXingScannerComponent.prototype.getBarcodeFormatOrFail = /** * Returns a valid BarcodeFormat or fails. * @param {?} format * @return {?} */ function (format) { return typeof format === 'string' ? 
BarcodeFormat[format.trim().toUpperCase()] : format; }; /** * Retorna um code reader, cria um se nenhume existe. */ /** * Retorna um code reader, cria um se nenhume existe. * @return {?} */ ZXingScannerComponent.prototype.getCodeReader = /** * Retorna um code reader, cria um se nenhume existe. * @return {?} */ function () { if (!this._codeReader) { this._codeReader = new BrowserMultiFormatContinuousReader(this.hints); } return this._codeReader; }; /** * Starts the continuous scanning for the given device. * * @param deviceId The deviceId from the device. */ /** * Starts the continuous scanning for the given device. * * @param {?} deviceId The deviceId from the device. * @return {?} */ ZXingScannerComponent.prototype.scanFromDevice = /** * Starts the continuous scanning for the given device. * * @param {?} deviceId The deviceId from the device. * @return {?} */ function (deviceId) { var _this = this; /** @type {?} */ var videoElement = this.previewElemRef.nativeElement; /** @type {?} */ var codeReader = this.getCodeReader(); /** @type {?} */ var decodingStream = codeReader.continuousDecodeFromInputVideoDevice(deviceId, videoElement); if (!decodingStream) { throw new Error('Undefined decoding stream, aborting.'); } /** @type {?} */ var next = function (x) { return _this._onDecodeResult(x.result, x.error); }; /** @type {?} */ var error = function (err) { return _this._onDecodeError(err); }; /** @type {?} */ var complete = function () { _this.reset(); console.log('completed'); }; decodingStream.subscribe(next, error, complete); }; /** * Handles decode errors. */ /** * Handles decode errors. * @param {?} err * @return {?} */ ZXingScannerComponent.prototype._onDecodeError = /** * Handles decode errors. * @param {?} err * @return {?} */ function (err) { this.dispatchScanError(err); this.reset(); }; /** * Handles decode results. */ /** * Handles decode results. 
* @param {?} result * @param {?} error * @return {?} */ ZXingScannerComponent.prototype._onDecodeResult = /** * Handles decode results. * @param {?} result * @param {?} error * @return {?} */ function (result, error) { if (result) { this.dispatchScanSuccess(result); } else { this.dispatchScanFailure(error); } this.dispatchScanComplete(result); }; /** * Stops the code reader and returns the previous selected device. */ /** * Stops the code reader and returns the previous selected device. * @return {?} */ ZXingScannerComponent.prototype._reset = /** * Stops the code reader and returns the previous selected device. * @return {?} */ function () { if (!this._codeReader) { return; } /** @type {?} */ var device = this.device; // do not set this.device inside this method, it would create a recursive loop this._device = null; this._codeReader.reset(); return device; }; /** * Resets the scanner and emits device change. */ /** * Resets the scanner and emits device change. * @return {?} */ ZXingScannerComponent.prototype.reset = /** * Resets the scanner and emits device change. * @return {?} */ function () { this._reset(); this.deviceChange.emit(null); }; /** * Sets the permission value and emmits the event. */ /** * Sets the permission value and emmits the event. * @param {?} hasPermission * @return {?} */ ZXingScannerComponent.prototype.setPermission = /** * Sets the permission value and emmits the event. 
* @param {?} hasPermission * @return {?} */ function (hasPermission) { this.hasPermission = hasPermission; this.permissionResponse.next(hasPermission); }; ZXingScannerComponent.decorators = [ { type: Component, args: [{ selector: 'zxing-scanner', template: "<video #preview [style.object-fit]=\"previewFitMode\">\r\n <p>\r\n Your browser does not support this feature, please try to upgrade it.\r\n </p>\r\n <p>\r\n Seu navegador n\u00E3o suporta este recurso, por favor tente atualiz\u00E1-lo.\r\n </p>\r\n</video>\r\n", changeDetection: ChangeDetectionStrategy.OnPush, styles: [":host{display:block}video{width:100%;height:auto;-o-object-fit:contain;object-fit:contain}"] }] } ]; /** @nocollapse */ ZXingScannerComponent.ctorParameters = function () { return []; }; ZXingScannerComponent.propDecorators = { previewElemRef: [{ type: ViewChild, args: ['preview',] }], autofocusEnabled: [{ type: Input }], autostarted: [{ type: Output }], autostarting: [{ type: Output }], autostart: [{ type: Input }], previewFitMode: [{ type: Input }], torchCompatible: [{ type: Output }], scanSuccess: [{ type: Output }], scanFailure: [{ type: Output }], scanError: [{ type: Output }], scanComplete: [{ type: Output }], camerasFound: [{ type: Output }], camerasNotFound: [{ type: Output }], permissionResponse: [{ type: Output }], hasDevices: [{ type: Output }], device: [{ type: Input }], deviceChange: [{ type: Output }], formats: [{ type: Input }], torch: [{ type: Input }], enable: [{ type: Input }], tryHarder: [{ type: Input }] }; return ZXingScannerComponent; }()); /** * @fileoverview added by tsickle * @suppress {checkTypes,extraRequire,missingReturn,uselessCode} checked by tsc */ var ZXingScannerModule = /** @class */ (function () { function ZXingScannerModule() { } ZXingScannerModule.decorators = [ { type: NgModule, args: [{ imports: [ CommonModule, FormsModule ], declarations: [ZXingScannerComponent], exports: [ZXingScannerComponent], },] } ]; return ZXingScannerModule; }()); /** * 
@fileoverview added by tsickle * @suppress {checkTypes,extraRequire,missingReturn,uselessCode} checked by tsc */ /** * @fileoverview added by tsickle * @suppress {checkTypes,extraRequire,missingReturn,uselessCode} checked by tsc */ export { ZXingScannerModule, ZXingScannerComponent }; //# sourceMappingURL=zxing-ngx-scanner.js.map
# Termux package recipe (build.sh metadata) for RGBDS, the Game Boy
# assembler/linker toolchain. Consumed by Termux's build-package.sh;
# no build steps are overridden, so the default autotools/make flow applies.
TERMUX_PKG_HOMEPAGE=https://rgbds.gbdev.io
TERMUX_PKG_DESCRIPTION="Rednex Game Boy Development System - An assembly toolchain for the Nintendo Game Boy & Game Boy Color"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_VERSION=0.4.2
# Source tarball is fetched from the upstream GitHub release matching the version above.
TERMUX_PKG_SRCURL=https://github.com/gbdev/rgbds/releases/download/v${TERMUX_PKG_VERSION}/rgbds-${TERMUX_PKG_VERSION}.tar.gz
# SHA-256 of the tarball above; must be updated together with TERMUX_PKG_VERSION.
TERMUX_PKG_SHA256=0bac46f0d3cfabf8683c62145b9f01a607c703295ef2978a3e548f106f50fac1
# Runtime dependency: rgbgfx links against libpng.
TERMUX_PKG_DEPENDS="libpng"
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Apache Wicket release script: builds a release candidate, signs and stages
# the source/binary distributions, and generates the promote/rollback scripts
# plus the vote/announce email templates.
#
# NOTE: set -e is deliberately left off — the script checks $? explicitly and
# uses `[ ... ] && cmd` patterns that would abort under errexit.
# set -e
# set -x

# Print a red fatal-error message to stderr and abort the script.
function fail {
>&2 echo "\033[31m FATAL ERROR ----------- $1 \033[0m"
exit 1
}

# Read maven.compiler.source from pom.xml via xmllint's interactive shell.
function getJavaVersionFromPom {
cat << EOF | xmllint --noent --shell pom.xml | grep content | cut -f2 -d=
setns pom=http://maven.apache.org/POM/4.0.0
xpath /pom:project/pom:properties/pom:maven.compiler.source/text()
EOF
}

# Read the project <version> from pom.xml via xmllint's interactive shell.
function getProjectVersionFromPom {
cat << EOF | xmllint --noent --shell pom.xml | grep content | cut -f2 -d=
setns pom=http://maven.apache.org/POM/4.0.0
xpath /pom:project/pom:version/text()
EOF
}

# Resolve the jdkHome configured in ~/.m2/toolchains.xml for $JAVA_VERSION.
function getJdkToolchain {
xmllint ~/.m2/toolchains.xml --xpath "/toolchains/toolchain[provides/version/text() = '$JAVA_VERSION']/configuration/jdkHome/text()"
}

# Write promote-$version.sh: the script the release manager runs after a
# successful vote (pushes the tag, moves the dist to the release area,
# releases the Nexus staging repo and bumps the development version).
function generate_promotion_script {
echo "Generating release promotion script 'promote-$version.sh'"
read -d '' script <<- EOF
#!/bin/bash
echo "Promoting release $version

Actions about to be performed:
------------------------------
\$(cat \$0 | tail -n +14)

------------------------------------------"
read -p "Press enter to continue or CTRL-C to abort"
# push the release tag to ASF git repo
git push origin $tag
# promote the source distribution by moving it from the staging area to the release area
svn mv https://dist.apache.org/repos/dist/dev/wicket/$version https://dist.apache.org/repos/dist/release/wicket -m "Upload release to the mirrors"
mvn org.sonatype.plugins:nexus-staging-maven-plugin:1.6.7:rc-release -DstagingRepositoryId=$stagingrepoid -DnexusUrl=https://repository.apache.org -DserverId=apache.releases.https -Ddescription="Release vote has passed"
# Renumber the next development iteration $next_version:
git checkout $GIT_BRANCH
mvn release:update-versions --batch-mode
mvn versions:set versions:commit -DnewVersion=$next_version
git add --all
echo "
Check the new versions and commit and push them to origin:

    git commit -m \"Start next development version\"
    git push

Remove the previous version of Wicket using this command:

    svn rm https://dist.apache.org/repos/dist/release/wicket/$previous_version -m \\\"Remove previous version from mirrors\\\"
"
EOF
echo "$script" > promote-$version.sh
chmod +x promote-$version.sh
git add promote-$version.sh
}

# Write revert-$version.sh: the script the release manager runs after a
# failed vote (removes local/staging branches and tags, drops the staging
# dist area and the Nexus staging repo, deletes leftover release files).
function generate_rollback_script {
echo "Generating release rollback script 'revert-$version.sh'"
read -d '' script <<- EOF
#!/bin/bash
echo -n "Reverting release $version

Actions about to be performed:
------------------------------
\$(cat \$0 | tail -n +14)

------------------------------------------
Press enter to continue or CTRL-C to abort"
read
# clean up local repository
git checkout $GIT_BRANCH
git branch -D $branch
git tag -d $tag
# clean up staging repository
git push staging --delete refs/heads/$branch
git push staging --delete $tag
# clean up staging dist area
svn rm https://dist.apache.org/repos/dist/dev/wicket/$version -m "Release vote has failed"
# clean up staging maven repository
mvn org.sonatype.plugins:nexus-staging-maven-plugin:LATEST:rc-drop -DstagingRepositoryId=$stagingrepoid -DnexusUrl=https://repository.apache.org -DserverId=apache.releases.https -Ddescription="Release vote has failed"
# clean up remaining release files
find . -name "*.releaseBackup" -exec rm {} \\;
[ -f release.properties ] && rm release.properties
EOF
echo "$script" > revert-$version.sh
chmod +x revert-$version.sh
git add revert-$version.sh
}

# Collect the GPG signatures of the source archives plus the CHANGELOG
# section for this version into /tmp/release-$version-sigs.txt; this file is
# later appended to both the vote and the announce emails.
function generate_signatures_from_release {
echo "========================================================================

The signatures for the source release artefacts:
" > /tmp/release-$version-sigs.txt
pushd target/dist > /dev/null
for i in apache-wicket*{zip,tar.gz}
do
echo "Signature for $i:

$(cat $i.asc)
" >> /tmp/release-$version-sigs.txt
done
popd > /dev/null
echo "========================================================================

CHANGELOG for $version:
" >> /tmp/release-$version-sigs.txt
# Prefer pre-generated release notes; otherwise extract this version's
# section from the CHANGELOG-<major>.x file between its header and the
# next "=====" separator.
if [ -f "/tmp/release-notes-$version.txt" ] ; then
tail -n +4 /tmp/release-notes-$version.txt >> /tmp/release-$version-sigs.txt
else
awk "/Release Notes - Wicket - Version $version/{flag=1;next} /==================/{flag=0} flag { print }" CHANGELOG-$major_version.x >> /tmp/release-$version-sigs.txt
fi
}

# Produce release-vote.txt, the email template sent to start the vote.
# The leading `tail -n+2` strips the blank first line of the echo.
function generate_release_vote_email {
echo "Generating Vote email"
echo "
This is a vote to release Apache Wicket $version

Please download the source distributions found in our staging area
linked below.

I have included the signatures for both the source archives. This vote
lasts for 72 hours minimum.

[ ] Yes, release Apache Wicket $version
[ ] No, don't release Apache Wicket $version, because ...

Distributions, changelog, keys and signatures can be found at:

    https://dist.apache.org/repos/dist/dev/wicket/$version

Staging repository:

    https://repository.apache.org/content/repositories/$stagingrepoid/

The binaries are available in the above link, as are a staging
repository for Maven. Typically the vote is on the source, but should
you find a problem with one of the binaries, please let me know, I can
re-roll them some way or the other.

Staging git repository data:

    Repository: $(git config --get remote.staging.url)
    Branch: $branch
    Release tag: $tag
" | tail -n+2 > release-vote.txt
cat /tmp/release-$version-sigs.txt >> release-vote.txt
git add release-vote.txt
}

# Produce release-announce.txt, the email template sent after a passed vote.
function generate_announce_email {
echo "
The Apache Wicket PMC is proud to announce Apache Wicket $version!

Apache Wicket is an open source Java component oriented web application
framework that powers thousands of web applications and web sites for
governments, stores, universities, cities, banks, email providers, and
more. You can find more about Apache Wicket at https://wicket.apache.org

This release marks another minor release of Wicket $major_version. We
use semantic versioning for the development of Wicket, and as such no
API breaks are present breaks are present in this release compared to
$major_version.0.0.

<OPTIONAL> New and noteworthy
<OPTIONAL> ------------------
<OPTIONAL>

Using this release
------------------

With Apache Maven update your dependency to (and don't forget to update
any other dependencies on Wicket projects to the same version):

<dependency>
    <groupId>org.apache.wicket</groupId>
    <artifactId>wicket-core</artifactId>
    <version>$version</version>
</dependency>

Or download and build the distribution yourself, or use our
convenience binary package you can find here:

 * Download: http://wicket.apache.org/start/wicket-$major_version.x.html#manually

Upgrading from earlier versions
-------------------------------

If you upgrade from $major_version.y.z this release is a drop in replacement. If you
come from a version prior to $major_version.0.0, please read our Wicket $major_version
migration guide found at

 * http://s.apache.org/wicket${major_version}migrate

Have fun!

— The Wicket team
" | tail -n+2 > release-announce.txt
cat /tmp/release-$version-sigs.txt >> release-announce.txt
git add release-announce.txt
}

# Convert release-announce.txt into a Jekyll blog post
# (wicket-$version-released.md) by wrapping the Maven snippet and PGP blocks
# in highlight markup and turning the download URLs into Markdown links.
function generate_announce_md {
echo $'---\nlayout: post\ntitle: Apache Wicket' $version $'released\n---' > wicket-$version-released.md
sed -e "s/$optionOpenTag/\{\% highlight xml\%\}\n$optionOpenTag/g" release-announce.txt | sed -e "s/$optionCloseTag/$optionCloseTag\n\{\% endhighlight\%\}/g" | sed -e s/' \*'/' \*'/g | sed -e "s/ CHANGELOG for $version/### This Release\n\n#### CHANGELOG for $version/g" | sed -e s/'\*\*'/'#####'/g | sed -e "s/ $beginPgp/<div class='highlight'><pre>\n$beginPgp/g" | sed -e "s/$endPgp/$endPgp\n<\/pre><\/div>\n/g" | sed -e "s/Source: http:\/\/www.apache.org\/dyn\/closer.cgi\/wicket\/$version/Source: [$version source download]\(http:\/\/www.apache.org\/dyn\/closer.cgi\/wicket\/$version\)/g" | sed -e "s/Binary: http:\/\/www.apache.org\/dyn\/closer.cgi\/wicket\/$version\/binaries/Binary: [$version binary download]\(http:\/\/www.apache.org\/dyn\/closer.cgi\/wicket\/$version\/binaries\)/g" | sed -e "s/Upgrading from earlier versions/<!--more-->\n\nUpgrading from earlier versions/g" >> wicket-$version-released.md
git add wicket-$version-released.md
}

# ---------------------------------------------------------------------------
# Main script
# ---------------------------------------------------------------------------

# the branch on which the code base lives for this version (master is
# always current development version)
GIT_BRANCH=master

JAVA_VERSION=$(getJavaVersionFromPom)

echo "
Apache Wicket Release script
----------------------------

Building a release for Apache Wicket. This script assumes you are running
on OS X, it hasn't been tested on any other operating systems, and you
can bet it won't work on Windows...

REQUIREMENTS:

 - A Java version $JAVA_VERSION configured through the Maven toolchain
 - Maven 3.3.0 (older releases are b0rked, just don't bother)
 - gpg, gpg-agent and pinentry for signing
"

# Pre-flight check: a "staging" git remote (usually a personal GitHub fork)
# must exist so the build branch and tag can be published during the vote.
if [ ! $( git config --get remote.staging.url ) ] ; then
fail "
No staging remote git repository found. The staging repository is used
to temporarily publish the build artifacts during the voting process.
Since no staging repository is available at Apache, it is best to use a
git mirror on your personal github account.

First fork the github Apache Wicket mirror (https://github.com/apache/wicket)
and then add the remote staging repository with the following command:

    $ git remote add staging git@github.com:<your github username>/wicket.git
    $ git fetch staging
    $ git push staging

This will bring the staging area in sync with the origin and the release
script can push the build branch and the tag to the staging area.
"
fi

# Pre-flight check: a Maven toolchains file must exist...
if [ ! -f ~/.m2/toolchains.xml ] ; then
fail "
Maven will load the Java $JAVA_VERSION environment from the toolchain
specified in ~/.m2/toolchains.xml

You don't have a toolchains.xml file in your .m2 folder. Please specify
your JDK's in the toolchains.xml file.
"
fi

# ...and it must actually provide the required Java version.
grep -q "<version>$JAVA_VERSION</version>" ~/.m2/toolchains.xml
if [ $? -ne 0 ] ; then
fail "
Your ~/.m2/toolchains.xml file doesn't provide a Java $JAVA_VERSION toolchain.
"
fi

echo "Java version for running Maven is: $(java -version 2>&1 | tail -n 2 | head -n 1)
Java used to compile (from toolchain) is: $(getJdkToolchain)
"

agentcount=`ps aux|grep gpg-agent|wc -l`

# Derive the release version from the current -SNAPSHOT version in the POM.
current_version=$(getProjectVersionFromPom)
major_version=$(expr $current_version : '\(.*\)\..*\..*\-.*')
minor_version=$(expr $current_version : '.*\.\(.*\)\..*\-.*')
bugfix_version=$(expr $current_version : '.*\..*\.\(.*\)-.*')
version="$major_version.$minor_version.0"

# Markers used by generate_announce_md's sed pipeline.
optionOpenTag='<dependency>'
optionCloseTag='<\/dependency>'
beginPgp='-----BEGIN PGP SIGNATURE-----'
endPgp='-----END PGP SIGNATURE-----'

# Prompt until the user supplies (or accepts) a well-formed version,
# optionally with a milestone suffix such as 9.0.0-M3.
default_version="$version"
version=
while [[ ! $version =~ ^[0-9]+\.[0-9]+\.[0-9]+(-M[0-9]+)?$ ]]
do
read -p "Version to release (default is $default_version): " -e t1
if [ -n "$t1" ]
then
version="$t1"
else
version="$default_version"
fi
done

# recalculate the version coordinates for the current release
major_version=$(expr $version : '\(.*\)\..*\..*')
minor_version=$(expr $version : '.*\.\(.*\)\..*')
bugfix_version=$(expr $version : '.*\..*\.\(.*\)')
if [[ $version =~ .+-M[0-9]+ ]]
then
milestone_version=$(expr $version : '.*\..*-M\(.*\)')
fi

# Compute the next development version and the previous released version;
# milestone releases iterate M-numbers, normal releases iterate the minor.
if [ ! -z "$milestone_version" ] ; then
next_version="$major_version.0.0-M$(expr $milestone_version + 1)-SNAPSHOT"
previous_version="$major_version.0.0-M$milestone_version-SNAPSHOT"
else
next_version="$major_version.$(expr $minor_version + 1).0-SNAPSHOT"
previous_minor_version=$(expr $minor_version - 1)
if [ $previous_minor_version -lt 0 ] ; then
previous_version="$major_version.0.0-SNAPSHOT"
else
previous_version="$major_version.$(expr $minor_version - 1).0"
fi
fi

# work around for versions upgrade (TODO maybe no longer necessary?)
mvn_version_to_replace="$major_version.$minor_version.1-SNAPSHOT"
mvn_version_to_replace2="$major_version.$minor_version.0-SNAPSHOT"

# Check if the changelog has the issues this release
grep -q "$version\$" CHANGELOG-$major_version.x
if [ $? -ne 0 ] ; then
fail "You have forgotten to add the closed tickets for Wicket $version to the CHANGELOG-$major_version.x file

Use build-changelog.sh to add the release notes to the changelog.
"
fi

# Refuse to run with uncommitted changelog modifications.
git status --porcelain CHANGELOG-$major_version.x | grep -q "M"
if [ $? -eq 0 ] ; then
fail "You have changes in your workspace that have not been committed.

$(git status)
"
fi

echo "Cleaning up any release artifacts that might linger"
mvn -q release:clean

log=$(pwd)/release.out

if [ -f $log ] ; then
rm $log
fi

branch="build/wicket-$version"
tag="rel/wicket-$version"

# Seed release.properties with the tag, then let build-versions.py append
# the per-module version properties for the maven-release-plugin.
echo "# Release configuration for Wicket-$version
scm.tag=${tag}
" > release.properties

./build-versions.py $version $next_version >> release.properties

echo "Contents of the release properties generated for Maven:
-------------------------------------------------------------------------------
$(cat ./release.properties)
-------------------------------------------------------------------------------

Writing detailed log to $log

This script will release version: Apache Wicket $version and continue
development with $next_version

Press enter to continue or CTRL-C to abort \c"
read

echo "Ensuring we are starting from wicket-$major_version.x"
# otherwise we can't remove a previous release branch that failed
git checkout $GIT_BRANCH

# Remove any leftovers of a previously failed attempt for this version,
# locally and on the staging remote, before creating a fresh build branch.
echo "Removing previous release tag $tag (if exists)"
oldtag=`git tag -l |grep -e "$tag"|wc -l` >> release.out
[ "$oldtag" -ne 0 ] && git tag -d $tag >> release.out

echo "Removing previous build branch $branch (if exists)"
oldbranch=`git branch |grep -e "$branch"|wc -l` >> release.out
[ "$oldbranch" -ne 0 ] && git branch -D $branch >> release.out

echo "Removing previous staging branch (if exists)"
git push staging --delete refs/heads/$branch >> release.out
git push staging --delete $tag >> release.out

echo "Creating release branch"
git checkout -b $branch >> release.out

# Clear the current NOTICE.txt file
echo "Creating notice file."

NOTICE=NOTICE

echo "Apache Wicket
Copyright 2006-$(date +%Y) The Apache Software Foundation

This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).

This is an aggregated NOTICE file for the Apache Wicket projects included
in this distribution.

NB: DO NOT ADD LICENSES/NOTICES/ATTRIBUTIONS TO THIS FILE, BUT IN THE
NOTICE FILE OF THE CORRESPONDING PROJECT. THE RELEASE PROCEDURE WILL
AUTOMATICALLY INCLUDE THE NOTICE IN THIS FILE.
" > $NOTICE

# next concatenate all NOTICE files from sub projects to the root file
for i in `find . -name "NOTICE" -not -regex ".*/target/.*" -not -regex "./NOTICE"`
do
echo "---------------------------------------------------------------------------" >> $NOTICE
echo "src/"$i | sed -e "s/\/src.*//g" >> $NOTICE
echo "---------------------------------------------------------------------------" >> $NOTICE
cat $i >> $NOTICE
echo >> $NOTICE
done

echo "Committing changes"
git commit -am "Changes to notice files"

# clean all projects
echo "Clean all projects"
mvn -q clean -Pall

# package and assemble the release
echo "Prepare the release"
mvn --batch-mode release:prepare -X -l $log -DpreparationGoals="clean" -Dtag=$tag -Papache-release,release
if [ $? -ne 0 ] ; then
fail "ERROR: mvn release:prepare was not successful"
fi

echo "Performing the release using Maven"
mvn -Dgpg.passphrase="$passphrase" -ff -l $log release:perform -DlocalCheckout=true -Dtag=$tag -Papache-release,release
if [ $? -ne 0 ] ; then
fail "ERROR: mvn release:perform was not successful"
fi

# Determine the staging repository and close it after deploying the release to the staging area
stagingrepoid=$(mvn org.sonatype.plugins:nexus-staging-maven-plugin:LATEST:rc-list -DnexusUrl=https://repository.apache.org -DserverId=apache.releases.https | grep -v "CLOSED" | grep -Eo "(orgapachewicket-\d+)";)

echo "Closing staging repository with id $stagingrepoid"
mvn org.sonatype.plugins:nexus-staging-maven-plugin:LATEST:rc-close -DstagingRepositoryId=$stagingrepoid -DnexusUrl=https://repository.apache.org -DserverId=apache.releases.https -Ddescription="Release has been built, awaiting vote"

generate_promotion_script
generate_rollback_script

# Source archives are produced straight from the release tag so they contain
# exactly what was tagged, then detach-signed and checksummed.
echo "Create and sign the source tarballs"
mkdir -p target/dist/binaries
git archive --format=tar.gz --prefix=apache-wicket-$version/ -o target/dist/apache-wicket-$version.tar.gz $tag
git archive --format=zip --prefix=apache-wicket-$version/ -o target/dist/apache-wicket-$version.zip $tag
gpg --armor --detach-sign --use-agent --sign target/dist/apache-wicket-$version.tar.gz
gpg --armor --detach-sign --use-agent --sign target/dist/apache-wicket-$version.zip
pushd target/dist
sha256sum apache-wicket-$version.tar.gz > apache-wicket-$version.tar.gz.sha256
sha256sum apache-wicket-$version.zip > apache-wicket-$version.zip.sha256
popd

# Gather the built wicket-* jars (and their signatures) from the release
# checkout, excluding sources/javadoc jars, the quickstart archetype and
# the common test jars, into the convenience binary package.
echo "Create and sign the binaries"
mkdir target/apache-wicket-$version-bin
pushd target/apache-wicket-$version-bin
find ../checkout ! \( -type d -name "WEB-INF" -prune \) -regex ".*wicket-[^/]*.[j]ar" ! -name "*-sources*" ! -name "*-javadoc*" ! -name "*wicket-archetype-quickstart*" ! -name "wicket-common-tests*" -type f -exec cp {} . \;
find ../checkout ! \( -type d -name "WEB-INF" -prune \) -regex ".*wicket-[^/]*.[j]ar\.asc" ! -name "*-sources*" ! -name "*-javadoc*" ! -name "*wicket-archetype-quickstart*" ! -name "wicket-common-tests*" -type f -exec cp {} . \;
cp ../../LICENSE .
cp ../../README .
cp ../../NOTICE .
cp ../../CHANGELOG* .
popd

pushd target
tar cfz dist/binaries/apache-wicket-$version-bin.tar.gz apache-wicket-$version-bin
zip -r dist/binaries/apache-wicket-$version-bin.zip apache-wicket-$version-bin
gpg --armor --detach-sign --use-agent --sign dist/binaries/apache-wicket-$version-bin.tar.gz
gpg --armor --detach-sign --use-agent --sign dist/binaries/apache-wicket-$version-bin.zip
pushd dist/binaries
sha256sum apache-wicket-$version-bin.tar.gz > apache-wicket-$version-bin.tar.gz.sha256
sha256sum apache-wicket-$version-bin.zip > apache-wicket-$version-bin.zip.sha256
popd
popd

echo "Uploading release to dist.apache.org"
pushd target/dist
svn mkdir https://dist.apache.org/repos/dist/dev/wicket/$version -m "Create $version release staging area"
svn co --force --depth=empty https://dist.apache.org/repos/dist/dev/wicket/$version .
cp ../../CHANGELOG* .
svn add *
svn commit -m "Upload wicket-$version to staging area"
popd

generate_signatures_from_release
generate_release_vote_email
generate_announce_email
generate_announce_md

# Done with the tasks, now print out the next things the release manager
# needs to do

# Sanity check: verify every signature we just uploaded.
pushd target/dist
find . -name "*.asc" -exec gpg --verify {} \;
popd

echo "
The release has been created. It is up to you to check if the release is up
to par, and perform the following commands yourself when you start the vote
to enable future development during the vote and after.

A vote email has been generated in release-vote.txt, you can copy/paste it using:

    cat release-vote.txt | pbcopy

You can find the distribution in target/dist.

Failed release
--------------

To rollback a release due to a failed vote or some other complication use:

    $ ./revert-$version.sh

This will clean up the artfifacts from the staging areas, including Nexus,
dist.apache.org and the release branch and tag.

Successful release
------------------

Congratulations on the successful release vote!

Use the release-announce.txt as a starter for the release announcement:

    cat release-announce.txt | pbcopy

A Markdown file called wicket-$version-released.md has been also generated.
You can use it to update the site with the release announcement.

To promote the release after a successful vote, run:

    $ ./promote-$version.sh

This will promote the Nexus staging repository to Maven Central, and move
the release artifacts from the staging area to dist.apache.org. It will
also sign the release tag and push the release branch to git.apache.org

You can read this message at a later time using:

    $ cat release.txt

Happy voting!
" > release.txt

git add release.txt

echo "Adding post-release scripts and vote/release email templates to build branch"
git commit -m "Added post-release scripts and vote/release email templates"

echo "Signing the release tag"
git checkout $tag
git tag --sign --force --message \"Signed release tag for Apache Wicket $version\" $tag >> $log
git checkout $branch

echo "Pushing build artifacts to the staging repository"
git push staging $branch:refs/heads/$branch

echo "Pushing release tag to the staging repository"
git push staging $tag

cat release.txt
#!/usr/bin/env bash
# Docker entrypoint for irohad.
#
# Environment:
#   KEY                  - keypair name passed to irohad via --keypair_name (required)
#   IROHA_POSTGRES_HOST  - optional; when set, wait for Postgres before starting
#   IROHA_POSTGRES_PORT  - optional Postgres port (defaults to 5432)
#
# Fix: all variable expansions are quoted so hostnames/keys containing
# whitespace or glob characters cannot word-split or expand.

echo "key=$KEY"
echo "$PWD"

if [ -n "$IROHA_POSTGRES_HOST" ]; then
  echo "NOTE: IROHA_POSTGRES_HOST should match 'host' option in config file"
  PG_PORT="${IROHA_POSTGRES_PORT:-5432}"
  # Block (up to 30s) until Postgres accepts connections; `-- true` makes
  # wait-for-it run a no-op instead of a follow-up command.
  /wait-for-it.sh -h "$IROHA_POSTGRES_HOST" -p "$PG_PORT" -t 30 -- true
else
  echo "WARNING: IROHA_POSTGRES_HOST is not defined. Do not wait for Postgres to become ready. Iroha may fail to start up"
fi

# Start the daemon with the bundled genesis block and docker config.
irohad --genesis_block genesis.block --config config.docker --keypair_name "$KEY" --overwrite-ledger
#!/bin/bash
## "nodes" file should not include the last receiving node
### NOTE: after setting the vlan, need to use vlan ip for hostname in "nodes"

# Launches a set of concurrent RDMA perftest runs over ssh: every node
# listed in "nodes" runs ib_write_bw against $ip_dst, except the LAST
# listed node, which runs ib_write_lat. Run this script once with
# "receiver" (starts the matching server processes on $receiver) and once
# with "sender". Each node pair gets its own TCP port ($port_base + index)
# so the sessions do not collide.

ib_write_bw=/proj/rsc-PG0/yiwen/frdma/perftest-4.2/ib_write_bw
ib_write_lat=/proj/rsc-PG0/yiwen/frdma/perftest-4.2/ib_write_lat
out_dir=/tmp
ip_dst=10.10.1.10          # RDMA destination address senders connect to
receiver=10.10.1.10        # host the server-side processes are started on
bw_size=1000000            # bandwidth-test message size (bytes)
lat_size=16                # latency-test message size (bytes)
bw_iters=8000
lat_iter=1000000
port_base=8887
ib_dev=mlx5_1
gidx=5                     # GID index (RoCE)
cnt=1
num_senders=$(wc -l < nodes)
#echo $num_senders

if [ "$#" -ne 1 ]; then
echo "Usage: bash $0.sh <sender/receiver>"
exit
fi

# Reference invocations kept for manual debugging:
#./ib_write_bw -F -e -d mlx5_1 -x 3 -s 1000000 -n 10000 -l 1 -t 1 -p 8888 192.168.1.2
#./ib_write_lat -F -d mlx5_1 -x 3 -s 16 -n 1000000 -l 1 -t 1 -p 9999 192.168.1.2

if [ $1 == "sender" ]; then
# Start one client per node in the background; the last node gets the
# latency test, the rest get bandwidth tests (with -L log output).
for node in $(cat nodes); do
let port="$port_base + $cnt"
if [[ $cnt -eq $num_senders ]]; then
sleep 1
output="$out_dir/lat_result_dcqcn_$node.txt"
cmd="$ib_write_lat -F -d $ib_dev -x $gidx -s $lat_size -n $lat_iter -l 1 -t 1 -p $port -S 3 $ip_dst |tee $output"
else
sleep 1
output="$out_dir/bw_result_dcqcn_$node.txt"
log="$out_dir/bw_log_dcqcn_$node.txt"
cmd="$ib_write_bw -F -e -d $ib_dev -x $gidx -s $bw_size -n $bw_iters -l 1 -t 1 -p $port -S 3 $ip_dst -L $log |tee $output"
fi
echo "On $node: execute $cmd"
ssh -o "StrictHostKeyChecking no" -p 22 $node $cmd &
let cnt="$cnt + 1"
#((++cnt))
done
# Wait for all backgrounded ssh sessions to finish before reporting.
wait
echo "DONE"
elif [ $1 == "receiver" ]; then
# Start the matching server-side processes on $receiver, one per sender,
# listening on the same per-node ports (output discarded).
for node in $(cat nodes); do
let port="$port_base + $cnt"
if [[ $cnt -eq $num_senders ]]; then
cmd="$ib_write_lat -F -d $ib_dev -x $gidx -s $lat_size -n $lat_iter -l 1 -t 1 -p $port -S 3 &> /dev/null"
else
cmd="$ib_write_bw -F -e -d $ib_dev -x $gidx -s $bw_size -n $bw_iters -l 1 -t 1 -p $port -S 3 &> /dev/null"
fi
echo "On $receiver: execute $cmd"
ssh -o "StrictHostKeyChecking no" -p 22 $receiver $cmd &
let cnt="$cnt + 1"
done
# NOTE(review): unlike the sender branch there is no `wait` here, so DONE
# prints while the servers are still starting — presumably intentional so
# the senders can be launched immediately afterwards; confirm.
echo "DONE"
else
echo "Usage: bash $0.sh <sender/receiver>"
exit
fi
using System;

/// <summary>
/// A single transaction entry: when it happened and which category it belongs to.
/// </summary>
public class TransactionRecord
{
    /// <summary>
    /// Date of the transaction.
    /// </summary>
    public DateTime RecordDate { get; set; }

    /// <summary>
    /// Name of the transaction category.
    /// </summary>
    public string CategoryName { get; set; }

    /// <summary>
    /// Returns a formatted string representation of the transaction record.
    /// </summary>
    /// <returns>Formatted string in the format "Date: [RecordDate], Category: [CategoryName]".</returns>
    public string GetFormattedRecord() => $"Date: {RecordDate}, Category: {CategoryName}";
}
// Copyright © 2019 The Things Network Foundation, The Things Industries B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Shared fixtures and helpers for the grpc tests in this package.
package grpc_test

import (
	"context"
	"time"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"go.thethings.network/lorawan-stack/v3/pkg/component"
	"go.thethings.network/lorawan-stack/v3/pkg/ttnpb"
	"google.golang.org/grpc"
)

// testRights is the fixed set of application rights granted to the test caller.
var testRights = []ttnpb.Right{
	ttnpb.Right_RIGHT_APPLICATION_INFO,
	ttnpb.Right_RIGHT_APPLICATION_DEVICES_READ,
	ttnpb.Right_RIGHT_APPLICATION_DEVICES_WRITE,
	ttnpb.Right_RIGHT_APPLICATION_TRAFFIC_READ,
	ttnpb.Right_RIGHT_APPLICATION_TRAFFIC_DOWN_WRITE,
	ttnpb.Right_RIGHT_APPLICATION_TRAFFIC_UP_WRITE,
}

// mockRegisterer adapts an AppAs server implementation so it can be
// registered on a component; it embeds the context used for handler
// registration and the server to expose.
type mockRegisterer struct {
	context.Context
	ttnpb.AppAsServer
}

// Roles reports that the mock fills no cluster roles.
func (m *mockRegisterer) Roles() []ttnpb.ClusterRole {
	return nil
}

// RegisterServices registers the embedded AppAs server on the gRPC server.
func (m *mockRegisterer) RegisterServices(s *grpc.Server) {
	ttnpb.RegisterAppAsServer(s, m.AppAsServer)
}

// RegisterHandlers registers the gRPC-gateway handlers for AppAs on the mux,
// using the embedded context.
func (m *mockRegisterer) RegisterHandlers(s *runtime.ServeMux, conn *grpc.ClientConn) {
	ttnpb.RegisterAppAsHandler(m.Context, s, conn)
}

// mustHavePeer polls (up to 20 times, 20ms apart) until the component can
// reach a peer with the given role; panics if no peer appears in time.
func mustHavePeer(ctx context.Context, c *component.Component, role ttnpb.ClusterRole) {
	for i := 0; i < 20; i++ {
		time.Sleep(20 * time.Millisecond)
		if _, err := c.GetPeer(ctx, role, nil); err == nil {
			return
		}
	}
	panic("could not connect to peer")
}

// mockFetcher is a canned end-device-identifiers fetcher: it records the
// identifiers it was called with and returns preconfigured values.
type mockFetcher struct {
	calledWithIdentifiers *ttnpb.EndDeviceIdentifiers // last ids passed to Get
	ids                   *ttnpb.EndDeviceIdentifiers // canned return value
	err                   error                       // canned return error
}

// Get records ids for later inspection and returns the canned result.
func (f *mockFetcher) Get(_ context.Context, ids *ttnpb.EndDeviceIdentifiers) (*ttnpb.EndDeviceIdentifiers, error) {
	f.calledWithIdentifiers = ids
	return f.ids, f.err
}
<filename>src/app/contact/contact-array-input/contact-array-input.component.ts import { Component, EventEmitter, Input, OnInit, Output } from '@angular/core'; import { FormArray, FormControl } from '@angular/forms'; @Component({ selector: 'app-contact-array-input', templateUrl: './contact-array-input.component.html', styleUrls: ['./contact-array-input.component.css'] }) export class ContactArrayInputComponent implements OnInit { @Input() array: FormArray; @Input() label: string; @Input() arrayName: string; @Input() groupName?: string; @Input() root: string; constructor() { } ngOnInit() { } onAdd() { this.array.push(new FormControl()); } }