text
stringlengths
1
1.05M
<reponame>VasilStoyanov/holiday-extras-tasks const HtmlPlugin = require('html-webpack-plugin'); const { logger } = require('./server/utils/logger/logger'); const WEBPACK_BUILD_PROCESS_STARTED_MESSAGE = '> Started application build...'; const logInfoMessage = logger({ printer: console, method: 'info', colourKey: 'help', }); logInfoMessage(WEBPACK_BUILD_PROCESS_STARTED_MESSAGE); module.exports = { entry: [ 'babel-polyfill', 'react-hot-loader/patch', './client/index.js', ], output: { filename: 'app.js', path: `${__dirname}/client/dist/`, publicPath: '/', }, module: { rules: [{ test: /\.(js|jsx)$/, exclude: /node_modules/, loader: 'babel-loader', }, { test: /\.css$/, use: ['style-loader', 'css-loader'], }, { test: /\.(png|jpg|gif|svg|eot|ttf|woff|woff2)$/, loader: 'url-loader', options: { limit: 10000, }, }], }, resolve: { extensions: ['.js', '.jsx'], }, plugins: [ new HtmlPlugin({ template: './client/index.html', }), ], };
import { css, StyleSheet } from 'aphrodite' import * as React from 'react' import { iosArrowDown } from 'react-icons-kit/ionicons/' import { Motion, presets, spring } from 'react-motion' import Button from './Button' const styles = StyleSheet.create({ button: { display: 'flex', justifyContent: 'center', alignItems: 'center', width: 40, height: 40, borderRadius: 22, border: '2px #333 solid', fontWeight: 'bolder', fontSize: 'x-large', cursor: 'pointer' } }) interface CircularButtonProps { vertical?: boolean toggled: boolean onClick: () => void } const getRotation = (vertical: boolean, toggled: boolean) => { if (vertical) { return toggled ? 270 : 90 } else { return toggled ? 180 : 0 } } const getButtonStyle = (rotation: number) => ({ transform: `rotate(${rotation}deg)` }) const CircularButton = ({ vertical = false, toggled, onClick }: CircularButtonProps) => { return ( <Motion style={{ rotation: spring(getRotation(vertical, toggled), presets.stiff) }} > {({ rotation }) => <div style={getButtonStyle(rotation)}> <Button icon={iosArrowDown} text={''} onClick={onClick} /> </div>} </Motion> ) } export default CircularButton
<gh_stars>1000+ /** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. */ #ifndef _FA_POSNFA_PACK_TRIV_H_ #define _FA_POSNFA_PACK_TRIV_H_ #include "FAConfig.h" #include "FASetImageA.h" #include "FARSNfaCA.h" #include "FAOffsetTable_pack.h" #include "FAChains_pack_triv.h" #include "FAIwMap_pack.h" namespace BlingFire { /// /// This class is able to interpret automaton image stored by FAPosNfaPack_triv /// class FAPosNfa_pack_triv : public FASetImageA, public FARSNfaCA { public: FAPosNfa_pack_triv (); public: void SetImage (const unsigned char * pAutImage); public: const int GetInitials (const int ** ppStates) const; const bool IsFinal (const int State) const; const int GetDest ( const int State, const int Iw, int * pDstStates, const int MaxCount ) const; private: // pointer to the automaton image const unsigned char * m_pAutImage; // Iw2Iw map FAIwMap_pack m_iw2iw; // State -> Offset mapping FAOffsetTable_pack m_state2offset; // destination sets keeper FAChains_pack_triv m_dest_sets; // initial state count int m_InitialCount; // pointer to the array of the initial states const int * m_pInitials; }; } #endif
sudo dpkg -i tb-gateway-1.4.0.deb
#!/bin/bash # Create a date stamped journal file # (UTC fits my day/night cycle better) journalfile=$(date --utc +%Y%m%d-%u.md); if [[ ! -f $journalfile ]]; then # Create journal file with today's date as Heading 1 echo "# "$(date "+%A, %e %B, %Y") >> $journalfile; # Add to git repo git add $journalfile; else echo "File already exists: $journalfile"; fi; vim $journalfile; git commit -m "Changes in journal file $journalfile" $journalfile; git push origin master;
<gh_stars>0 /** * @format */ import { AppRegistry, Platform, UIManager } from "react-native"; import App from "./App"; import { name as appName } from "./app.json"; import NotificationService from "./src/services/NotificationService"; AppRegistry.registerHeadlessTask( "RNFirebaseBackgroundMessage", () => NotificationService.backgroundPushes ); AppRegistry.registerComponent(appName, () => App);
package pl.project13.scala.words.verbs import collection.mutable /** * Enables you to create unique collections of any type, such as Lists, Maps etc, * by simply proving a "key" predicate, so you can filter arbitrary complex objects for your * own definition of uniqueness. */ trait UniquifyVerb { implicit def list2uniquifiable[A](list: List[A]) = new UniquifiableList(list) private def identity[A](a: A): A = a class UniquifiableList[A](list: List[A]) { def uniquify: List[A] = UniquifyVerb.uniquifyBy[A, A](list)(identity) def uniquified: List[A] = UniquifyVerb.uniquifyBy[A, A](list)(identity) def uniquifyOn[B](onKey: A => B): List[A] = UniquifyVerb.uniquifyBy[A, B](list)(onKey) def uniquifyByMerge[B](onKey: A => B)(merge: (A, A) => A): List[A] = UniquifyVerb.uniquifyByMerge[A, B](list)(onKey, merge) } def uniquify[A](list: List[A]): List[A] = uniquifyBy(list)(a => a).toList def uniquify[A](traversable: Traversable[A]): Traversable[A] = uniquifyBy(traversable) { a => a } /** * Create a unique copy of the given list. Uniqueness is determined by the `onKey` predicate. * * You should NOT depend on imlpementation details about which (first? last?) item will be kept * in the output collection. 
*/ def uniquifyBy[A, B](list: Traversable[A])(onKey: A => B): List[A] = { (mutable.Map[B, A]() ++ list.map(el => (onKey(el) -> el))).values.toList } def uniquifyByMerge[A, B](list: List[A])(onKey: A => B, merge: (A, A) => A): List[A] = { val uniques = uniquifyBy(list)(onKey) val uniqueKeys = uniques.map(onKey).toSet if (uniqueKeys.size == list.size) { return uniques } // todo not preformant alg val notDuplicatedElements = uniques.filter(a => list.count(k => onKey(k) == onKey(a)) == 1).map(a => (onKey(a), a)) val res = collection.mutable.HashMap[B, A](notDuplicatedElements: _*) for(u <- uniqueKeys; if list.count(a => onKey(a) == u) > 1) { // if there are duplicates for this key val dups = list.filter(a => onKey(a) == u) res(u) = dups.drop(1).foldLeft(dups.head)(merge) } res.values.toList } } object UniquifyVerb extends UniquifyVerb
#!/usr/bin/env python """ Refine a given mesh """ import argparse import pymesh import numpy as np def parse_args(): parser = argparse.ArgumentParser(__doc__); parser.add_argument("--max-edge-length", "-l", type=float, help="maximum edge length", default=1.0); parser.add_argument("input_mesh"); parser.add_argument("output_mesh"); return parser.parse_args(); def main(): args = parse_args(); mesh = pymesh.load_mesh(args.input_mesh); out_mesh, info = pymesh.split_long_edges(mesh, args.max_edge_length); if mesh.has_attribute("corner_texture"): pymesh.map_corner_attribute(mesh, out_mesh, "corner_texture"); pymesh.save_mesh(args.output_mesh, out_mesh, *out_mesh.attribute_names); if __name__ == "__main__": main();
import json import tarfile import glob import os def test_sdist(utils, tmpdir, sh): tmpdir.chdir() # Work in separate tmp dir repo_dir = 'repo' # Prepare venv and clone repository to repo dir utils.clone_repo_with_fresh_venv(repo_dir) # Sdist with setup.py tmpdir.join(repo_dir).chdir() result = sh(utils.python, 'setup.py', 'sdist') assert result.was_successful, \ 'Could not sdist via setup.py: {}'.format(result.stderr) assert 'warning' not in result.outerr, \ 'There are some warnings in sdist output' # Check content of distributed .tar.gz file tmpdir.join(repo_dir).join('dist').chdir() dist_tgz_files = glob.glob('*.tar.gz') assert len(dist_tgz_files) != 0, \ 'No dist .tar.gz file has been produced' distfile = dist_tgz_files[0] tar_dist = tarfile.open(distfile) counts = { 'license': 0, 'template_html': 0, 'readme': 0 } for info in tar_dist.getmembers(): if not info.isfile(): continue path, file = os.path.split(info.name) if file in utils.get_set('license'): if info.size > 0: counts['license'] += 1 elif file in utils.get_set('readme'): if info.size > 0: counts['readme'] += 1 elif file.endswith(('.html', '.j2')) and path.endswith('templates'): # Sorry if you are using some custom name for templates dir if info.size > 0: counts['template_html'] += 1 # There should be exactly one non-empty license info file assert counts['license'] > 0, \ 'No LICENSE/COPYING file provided in distributed .tar.gz' assert counts['license'] == 1, \ 'Multiple LICENSE/COPYING files provided in distributed .tar.gz' # There should be exactly one non-empty readme file assert counts['readme'] > 0, \ 'No README(.md,.rst) file provided in distributed .tar.gz' assert counts['readme'] == 1, \ 'Multiple README(.md,.rst) files provided in distributed .tar.gz' # There should be some HTML templates for web app assert counts['template_html'] > 0, \ 'No web app templates included in distributed .tar.gz' def test_package_info(utils, tmpdir, sh): tmpdir.chdir() # Work in separate tmp dir repo_dir = 'repo' # 
Prepare venv and clone repository to repo dir utils.clone_repo_with_fresh_venv(repo_dir) # Sdist with setup.py tmpdir.join(repo_dir).chdir() result = sh(utils.python, 'setup.py', 'install') assert result.was_successful, \ 'Could not sdist via setup.py: {}'.format(result.stderr) tmpdir.chdir() # Read package metadata via external script (in fixtures) result = sh(utils.package_info, utils.package_name) assert result.was_successful, \ 'Could not retrieve information about package {}'.format(utils.package_name) info_items = json.loads(result.stdout) classifiers = [x[1] for x in info_items if x[0] == 'Classifier'] assert len(classifiers) > 4, 'Need to have at least 5 classifiers' assert 'Framework :: Flask' in classifiers assert 'Environment :: Console' in classifiers assert 'Environment :: Web Environment' in classifiers mandatory = ['Author', 'Author-email', 'License', 'Name', 'Summary', 'Description', 'Keywords', 'Version'] values = {k: '' for k in mandatory} for m in mandatory: found = False for item in info_items: if item[0] == m: values[m] = item[1] found = True assert len(item[1]) > 0, 'Metadata {} is empty'.format(m) break assert found, 'Metadata {} is not present'.format(m) # Check name, version, keywords as @hroncok mentioned during tutorials assert values['Name'] == utils.package_name.replace('_', '-'), \ 'Bad package name in metadata' keywords = values['Keywords'].replace(',', ' ').split(' ') keywords = [k for k in keywords if k != ''] assert len(keywords) > 3, \ 'Less than 4 package keywords in metadata'
<reponame>NickGraeff/launchkey-java<filename>sdk/src/test/java/com/iovation/launchkey/sdk/transport/domain/DirectoryV3SessionsDeleteRequestTest.java package com.iovation.launchkey.sdk.transport.domain; /** * Copyright 2017 iovation, Inc. * <p> * Licensed under the MIT License. * You may not use this file except in compliance with the License. * A copy of the License is located in the "LICENSE.txt" file accompanying * this file. This file is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import static org.junit.Assert.assertEquals; public class DirectoryV3SessionsDeleteRequestTest { @Test public void getIdentifier() throws Exception { assertEquals("Expected", new DirectoryV3SessionsDeleteRequest("Expected").getIdentifier()); } @Test public void toJSON() throws Exception { assertEquals("{\"identifier\":\"Expected\"}", new ObjectMapper().writeValueAsString(new DirectoryV3SessionsDeleteRequest("Expected"))); } }
var __extends = (this && this.__extends) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; define(["require", "exports", '../OuterSubscriber', '../util/subscribeToResult'], function (require, exports, OuterSubscriber_1, subscribeToResult_1) { "use strict"; /** * Projects each source value to the same Observable which is flattened multiple * times with {@link switch} in the output Observable. * * <span class="informal">It's like {@link switchMap}, but maps each value * always to the same inner Observable.</span> * * <img src="./img/switchMapTo.png" width="100%"> * * Maps each source value to the given Observable `innerObservable` regardless * of the source value, and then flattens those resulting Observables into one * single Observable, which is the output Observable. The output Observables * emits values only from the most recently emitted instance of * `innerObservable`. * * @example <caption>Rerun an interval Observable on every click event</caption> * var clicks = Rx.Observable.fromEvent(document, 'click'); * var result = clicks.switchMapTo(Rx.Observable.interval(1000)); * result.subscribe(x => console.log(x)); * * @see {@link concatMapTo} * @see {@link switch} * @see {@link switchMap} * @see {@link mergeMapTo} * * @param {Observable} innerObservable An Observable to replace each value from * the source Observable. * @param {function(outerValue: T, innerValue: I, outerIndex: number, innerIndex: number): any} [resultSelector] * A function to produce the value on the output Observable based on the values * and the indices of the source (outer) emission and the inner Observable * emission. 
The arguments passed to this function are: * - `outerValue`: the value that came from the source * - `innerValue`: the value that came from the projected Observable * - `outerIndex`: the "index" of the value that came from the source * - `innerIndex`: the "index" of the value from the projected Observable * @return {Observable} An Observable that emits items from the given * `innerObservable` every time a value is emitted on the source Observable. * @return {Observable} An Observable that emits items from the given * `innerObservable` (and optionally transformed through `resultSelector`) every * time a value is emitted on the source Observable, and taking only the values * from the most recently projected inner Observable. * @method switchMapTo * @owner Observable */ function switchMapTo(innerObservable, resultSelector) { return this.lift(new SwitchMapToOperator(innerObservable, resultSelector)); } exports.switchMapTo = switchMapTo; var SwitchMapToOperator = (function () { function SwitchMapToOperator(observable, resultSelector) { this.observable = observable; this.resultSelector = resultSelector; } SwitchMapToOperator.prototype.call = function (subscriber, source) { return source._subscribe(new SwitchMapToSubscriber(subscriber, this.observable, this.resultSelector)); }; return SwitchMapToOperator; }()); /** * We need this JSDoc comment for affecting ESDoc. 
* @ignore * @extends {Ignored} */ var SwitchMapToSubscriber = (function (_super) { __extends(SwitchMapToSubscriber, _super); function SwitchMapToSubscriber(destination, inner, resultSelector) { _super.call(this, destination); this.inner = inner; this.resultSelector = resultSelector; this.index = 0; } SwitchMapToSubscriber.prototype._next = function (value) { var innerSubscription = this.innerSubscription; if (innerSubscription) { innerSubscription.unsubscribe(); } this.add(this.innerSubscription = subscribeToResult_1.subscribeToResult(this, this.inner, value, this.index++)); }; SwitchMapToSubscriber.prototype._complete = function () { var innerSubscription = this.innerSubscription; if (!innerSubscription || innerSubscription.isUnsubscribed) { _super.prototype._complete.call(this); } }; SwitchMapToSubscriber.prototype._unsubscribe = function () { this.innerSubscription = null; }; SwitchMapToSubscriber.prototype.notifyComplete = function (innerSub) { this.remove(innerSub); this.innerSubscription = null; if (this.isStopped) { _super.prototype._complete.call(this); } }; SwitchMapToSubscriber.prototype.notifyNext = function (outerValue, innerValue, outerIndex, innerIndex, innerSub) { var _a = this, resultSelector = _a.resultSelector, destination = _a.destination; if (resultSelector) { this.tryResultSelector(outerValue, innerValue, outerIndex, innerIndex); } else { destination.next(innerValue); } }; SwitchMapToSubscriber.prototype.tryResultSelector = function (outerValue, innerValue, outerIndex, innerIndex) { var _a = this, resultSelector = _a.resultSelector, destination = _a.destination; var result; try { result = resultSelector(outerValue, innerValue, outerIndex, innerIndex); } catch (err) { destination.error(err); return; } destination.next(result); }; return SwitchMapToSubscriber; }(OuterSubscriber_1.OuterSubscriber)); }); //# sourceMappingURL=switchMapTo.js.map
#!/bin/bash set -euxo pipefail if [[ "$@" == "--help" || "$@" == "-h" ]]; then set +x echo " Usage: ./test.sh command-to-run arguments Examples: - run the default (pytest): ./test.sh - run pytest with arguments: ./test.sh pytest -k mytest - make migrations: ./test.sh django-admin makemigrations - do interactive stuff: ./test.sh bash - disable building: NOBUILD=1 ./test.sh - disable service teardown (faster but less safe runs): NOCLEAN=1 ./test.sh " exit 0 fi PROJECT_NAME=$(grep COMPOSE_PROJECT_NAME .env | cut -d= -f2 || basename $PWD | sed -r 's/(\w)\w*($|\W+)/\1/g') export COMPOSE_PROJECT_NAME="${PROJECT_NAME}test" export COMPOSE_FILE=docker-compose.yml:docker-compose.test.yml USER="${USER:-$(id -nu)}" if [[ "$(uname)" == "Darwin" ]]; then USER_UID=1000 USER_GID=1000 else USER_UID="$(id --user "$USER")" USER_GID="$(id --group "$USER")" fi if [[ -z "${NOBUILD:-}" ]]; then docker-compose build \ --build-arg "USER_UID=$USER_UID" \ --build-arg "USER_GID=$USER_GID" \ test fi if [[ -z "$*" ]]; then set -- pytest fi homedir=$(dirname ${BASH_SOURCE[0]})/.home if [[ ! -e $homedir ]]; then # create it here so Docker don't create with root ownership mkdir $homedir fi function cleanup() { echo "Cleaning up ..." docker-compose down && docker-compose rm -fv } if [[ -n "${NODEPS:-}" ]]; then exec docker-compose run -e NODEPS=yes --no-deps --rm --user=$USER_UID test "$@" else if [[ -z "${NOCLEAN:-}" ]]; then trap cleanup EXIT cleanup || echo "Already clean :-)" fi docker-compose run --rm --user=$USER_UID test "$@" fi
#!/bin/bash if [ -z "$BASH_VERSION" ]; then echo "Invalid shell, re-running using bash..." exec bash "$0" "$@" exit $? fi SRCLOC="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" source "$SRCLOC/../../../build/utils/functions.sh" # Cleanup environment cleanupEnvironment # Verify input targetOS=$1 compiler=$2 targetArch=$3 if [[ "$targetOS" != "android" ]]; then echo "'android' is the only supported target, while '${targetOS}' was specified" exit 1 fi if [[ "$compiler" != "clang" ]]; then echo "'clang' is the only supported compilers, while '${compiler}' was specified" exit 1 fi if [[ "$targetArch" != "arm64-v8a" ]] && [[ "$targetArch" != "armeabi-v7a" ]] && [[ "$targetArch" != "x86" ]]; then echo "'arm64-v8a', 'armeabi-v7a', 'x86' are the only supported target architectures, while '${targetArch}' was specified" exit 1 fi echo "Going to build embedded Qt for ${targetOS}/${compiler}/${targetArch}" # Verify environment if [[ -z "$ANDROID_SDK" ]]; then echo "ANDROID_SDK is not set" exit 1 fi if [[ ! -d "$ANDROID_SDK" ]]; then echo "ANDROID_SDK '${ANDROID_SDK}' is set incorrectly" exit 1 fi export ANDROID_SDK_ROOT=$ANDROID_SDK echo "Using ANDROID_SDK '${ANDROID_SDK}'" if [[ -z "$ANDROID_NDK" ]]; then echo "ANDROID_NDK is not set" exit 1 fi if [[ ! 
-d "$ANDROID_NDK" ]]; then echo "ANDROID_NDK '${ANDROID_NDK}' is set incorrectly" exit 1 fi export ANDROID_NDK_ROOT=$ANDROID_NDK echo "Using ANDROID_NDK '${ANDROID_NDK}'" if [[ "$(uname -a)" =~ Linux ]]; then if [[ "$(uname -m)" == x86_64 ]] && [ -d "$ANDROID_NDK/prebuilt/linux-x86_64" ]; then export ANDROID_NDK_HOST=linux-x86_64 elif [ -d "$ANDROID_NDK/prebuilt/linux-x86" ]; then export ANDROID_NDK_HOST=linux-x86 else export ANDROID_NDK_HOST=linux fi if [[ -z "$OSMAND_BUILD_CPU_CORES_NUM" ]]; then OSMAND_BUILD_CPU_CORES_NUM=`nproc` fi elif [[ "$(uname -a)" =~ Darwin ]]; then if [[ "$(uname -m)" == x86_64 ]] && [ -d "$ANDROID_NDK/prebuilt/darwin-x86_64" ]; then export ANDROID_NDK_HOST=darwin-x86_64 elif [ -d "$ANDROID_NDK/prebuilt/darwin-x86" ]; then export ANDROID_NDK_HOST=darwin-x86 else export ANDROID_NDK_HOST=darwin fi if [[ -z "$OSMAND_BUILD_CPU_CORES_NUM" ]]; then OSMAND_BUILD_CPU_CORES_NUM=`sysctl hw.ncpu | awk '{print $2}'` fi else echo "'$(uname -a)' host is not supported" exit 1 fi if [[ -z "$ANDROID_SDK" ]]; then echo "ANDROID_NDK '${ANDROID_NDK}' contains no valid host prebuilt tools" exit 1 fi echo "Using ANDROID_NDK_HOST '${ANDROID_NDK_HOST}'" if [[ "$compiler" == "clang" ]]; then export ANDROID_NDK_TOOLCHAIN_VERSION=4.9 fi echo "Using ANDROID_NDK_TOOLCHAIN_VERSION '${ANDROID_NDK_TOOLCHAIN_VERSION}'" TOOLCHAIN_PATH="" if [[ "$targetArch" == "armeabi-v7a" ]]; then export ANDROID_NDK_PLATFORM=android-14 TOOLCHAIN_PATH="${ANDROID_NDK}/toolchains/arm-linux-androideabi-${ANDROID_NDK_TOOLCHAIN_VERSION}" elif [[ "$targetArch" == "arm64-v8a" ]]; then export ANDROID_NDK_PLATFORM=android-21 TOOLCHAIN_PATH="${ANDROID_NDK}/toolchains/aarch64-linux-android-${ANDROID_NDK_TOOLCHAIN_VERSION}" elif [[ "$targetArch" == "x86" ]]; then export ANDROID_NDK_PLATFORM=android-14 TOOLCHAIN_PATH="${ANDROID_NDK}/toolchains/x86-${ANDROID_NDK_TOOLCHAIN_VERSION}" fi if [[ ! 
-d "$TOOLCHAIN_PATH" ]]; then echo "Toolchain at '$TOOLCHAIN_PATH' not found" exit 1 fi echo "Using toolchain '${TOOLCHAIN_PATH}'" if [[ ! -d "${ANDROID_NDK}/platforms/${ANDROID_NDK_PLATFORM}" ]]; then echo "Platform '${ANDROID_NDK}/platforms/${ANDROID_NDK_PLATFORM}' does not exist" exit 1 fi echo "Using ANDROID_NDK_PLATFORM '${ANDROID_NDK_PLATFORM}'" export ANDROID_TARGET_ARCH=$targetArch targetArchFamily="" if [[ "$targetArch" == "armeabi-v7a" ]]; then targetArchFamily="arm" elif [[ "$targetArch" == "arm64-v8a" ]]; then targetArchFamily="arm64" elif [[ "$targetArch" == "x86" ]]; then targetArchFamily="x86" fi if [[ ! -d "${ANDROID_NDK}/platforms/${ANDROID_NDK_PLATFORM}/arch-${targetArchFamily}" ]]; then echo "Architecture headers '${ANDROID_NDK}/platforms/${ANDROID_NDK_PLATFORM}/arch-${targetArchFamily}' does not exist" exit 1 fi echo "Using ANDROID_TARGET_ARCH '${ANDROID_TARGET_ARCH}'" # Prepare configuration QTBASE_CONFIGURATION=$(echo " -release -opensource -confirm-license -c++std c++11 -sql-sqlite -qt-sqlite -qt-zlib -no-gif -no-libpng -no-libjpeg -no-openssl -qt-pcre -no-use-gold-linker -nomake tests -nomake examples -nomake tools -no-gui -no-widgets -no-cups -no-iconv -no-icu -no-dbus -no-opengl -no-evdev -no-warnings-are-errors -v " | tr '\n' ' ') # -no-gcc-sysroot -qt-xcb # Function: makeFlavor(type) makeFlavor() { local type=$1 local name="${compiler}-${targetArch}.${type}" local path="$SRCLOC/upstream.patched.${targetOS}.${name}" # Configure if [ ! -d "$path" ]; then cp -rpf "$SRCLOC/upstream.patched" "$path" (cd "$path" && ./configure -xplatform android-clang -android-toolchain-version 4.9 -android-arch ${targetArch} $QTBASE_CONFIGURATION -$type -prefix $path) retcode=$? if [ $retcode -ne 0 ]; then echo "Failed to configure 'qtbase-android' for '$name', aborting..." rm -rf "$path" exit $retcode fi fi # Build (cd "$path" && make -j$OSMAND_BUILD_CPU_CORES_NUM) retcode=$? 
if [ $retcode -ne 0 ]; then echo "Failed to build 'qtbase-android' for '$name', aborting..." rm -rf "$path" exit $retcode fi } makeFlavor "shared" makeFlavor "static"
def latest_semantic_version(git_tags: list) -> str: # Remove the 'v' prefix and split the version numbers into tuples of integers versions = [tuple(map(int, tag[1:].split('.'))) for tag in git_tags] # Sort the versions in descending order sorted_versions = sorted(versions, reverse=True) # Convert the sorted version back to the string format latest_version = 'v' + '.'.join(map(str, sorted_versions[0])) return latest_version
<gh_stars>0 class User < ActiveRecord::Base belongs_to :team belongs_to :discipline validates :name, :presence => true validates :username, :length => { :maximum => 10 }, :uniqueness => true validates :country, :format => { :with => /[A-Za-z]/, :allow_blank => true } validates :age, :numericality => { :only_integer => true, :greater_than => 13 } validates :bio, :presence => true validates :password, :format => { :with => /.+/ }, :confirmation => true validates :accepted, :acceptance => true validates :gender, :inclusion => { :in => ["male", "female", "other", "withheld"] } validates :dob, :presence => true validates :team_id, :presence => true validates :time_zone, :presence => true validates :discipline_id, :presence => true validates :city, :city => true validates :name, :length => { :maximum => 10 }, :if => Proc.new { false } validates :bio, :uniqueness => true validates :country, :uniqueness => { :case_sensitive => false } validates :dob, :uniqueness => true, :unless => Proc.new { true } validates :team_id, :numericality => { :only_integer => true, :judge => :ignore } validates :discipline_id, :uniqueness => { :judge => :force }, :if => Proc.new { false } validates :time_zone, :presence => { :judge => :ignore } validates :gender, :presence => { :judge => :unknown_option } end
#!/bin/bash EXEC_PATH=${1:-/home/$USER} COMMAND=${@:2:($#-1)} docker run -it --gpus all \ -w ${EXEC_PATH} \ -v /etc/group:/etc/group:ro \ -v /etc/passwd:/etc/passwd:ro \ -v /home/$USER/:/home/$USER \ -v /mnt:/mnt \ -u $(id -u $USER):$(id -g $USER) bottlenome/gpt2:latest ${COMMAND}
<filename>src/Boj18253.java<gh_stars>1-10 import java.io.BufferedReader; import java.io.InputStreamReader; import java.util.StringTokenizer; public class Boj18253 { private static long[][] dp; private static final String NEW_LINE = "\n"; public static void main(String[] args) throws Exception{ BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); StringTokenizer st = new StringTokenizer(br.readLine()); int N = Integer.parseInt(st.nextToken()); int M = Integer.parseInt(st.nextToken()); int[][] arr = new int[N][M]; for(int i = 0; i < N; i++) { st = new StringTokenizer(br.readLine()); for(int j = 0; j < M; j++) { arr[i][j] = Integer.parseInt(st.nextToken()); } } makePath(N, M, arr); StringBuilder sb = new StringBuilder(); int Q = Integer.parseInt(br.readLine()); while(Q-- > 0) { } } private static void makePath(int n, int m, int[][] arr) { // 4 방향 다 고려해야함. dp = new long[n][m]; dp[0][0] = arr[0][0]; for(int i = 1; i < n; i++) { dp[i][0] = dp[i - 1][0] + arr[i][0]; } for(int i = 1; i < m; i++) { dp[0][i] = dp[0][i - 1] + arr[0][i]; } for(int i = 1; i < n; i++) { for(int j = 1; j < m; j++) { dp[i][j] = Math.min(dp[i - 1][j], dp[j][i - 1]) + arr[i][j]; } } } }
'''Rewrite your pay computation with time-and-a-half for overtime and create a function called "computepay" which takes two parameters: "hours" and "rate" ''' def computepay(hr,r): try : hr = float(hr) r = float(r) # print hours of work print('Hours of work:', hr) # print rate of pay per worked hour print('Rate of pay:', r) if hr > 40 : extra_pay = (40*r + (hr-40)*r*1.5) print('Pay: ', extra_pay) else : print('Total pay: ', hr*r) except : print('Error, please enter numeric input') computepay(45, 10)
/* eslint-disable no-console */ import 'reflect-metadata'; import express, { Request, Response, NextFunction } from 'express'; import cors from 'cors'; import 'express-async-errors'; // Permite tratativa de erros assíncronos. // DEVE SER IMPORTADA LOGO APÓS A IMPORTAÇÃO DO EXPRESS. import uploadConfig from '@config/upload'; import AppError from '@shared/errors/AppError'; import routes from '@shared/infra/http/routes'; import '@shared/infra/typeorm'; // Apenas carrega o arquivo database -> n contem exports. import '@shared/container'; const app = express(); const port = 3333; app.use(cors()); // Evita que sites nao autenticados tenham acessos à api (ver docs). Só é necessário para requisições feitas atravez de browsers. Native e insomnia n usam o cors. app.use(express.json()); app.use('/files', express.static(uploadConfig.uploadsFolder)); // Permite servir uma pasta estática pelo express app.use(routes); // Tratativa de erros: Middlewares para tttiva de erros teem 4 parametros. Variaveis _ n sao usadas -> Config personalizada eslint app.use((err: Error, request: Request, response: Response, _: NextFunction) => { if (err instanceof AppError) { return response .status(err.statusCode) .json({ status: 'error', message: err.message }); } console.error(err); return response.status(500).json({ status: 'error', message: 'Internal server error', }); }); app.listen(port, () => { console.log(`🐓 GREAT SUCCESS! Server started on ${port}`); });
#!/bin/bash go clean go build go install js-bot -t "[TOKEN]"
public class TrackableItemTracker { private NativeArray<TrackableId> m_Added; private NativeArray<TrackableId> m_Updated; private NativeArray<TrackableId> m_Removed; private bool isCreated; // Constructor public TrackableItemTracker(NativeArray<TrackableId> added, NativeArray<TrackableId> updated, NativeArray<TrackableId> removed) { m_Added = added; m_Updated = updated; m_Removed = removed; isCreated = true; } // Method to get the changes public (NativeArray<TrackableId> added, NativeArray<TrackableId> updated, NativeArray<TrackableId> removed) GetChanges() { if (!isCreated) { throw new InvalidOperationException("Tracking system has not been initialized."); } // Simulated logic to retrieve changes NativeArray<TrackableId> addedItems = m_Added; NativeArray<TrackableId> updatedItems = m_Updated; NativeArray<TrackableId> removedItems = m_Removed; // Reset the state after retrieving changes m_Added = new NativeArray<TrackableId>(0, Allocator.Temp); m_Updated = new NativeArray<TrackableId>(0, Allocator.Temp); m_Removed = new NativeArray<TrackableId>(0, Allocator.Temp); return (addedItems, updatedItems, removedItems); } }
<filename>src/util/parser/MpXmlParser.js require('module-alias/register') const Parsers = require('@parser') const XmlDataParser = Parsers.XmlDataParser const Models = require('@model') const Politician = Models.Politician class MpXmlParser extends XmlDataParser { constructor (xml, mustBeACurrentMember = false) { super(xml) this.mustBeACurrentMember = mustBeACurrentMember } get tagName () { return 'MemberOfParliament' } get listTagName () { return 'ArrayOfMemberOfParliament' } generateNewParser (xml) { return new MpXmlParser(xml, this.mustBeACurrentMember) } buildJson () { const name = this.getDataInTag('PersonOfficialFirstName') + ' ' + this.getDataInTag('PersonOfficialLastName') const mp = Politician.builder(name.toLowerCase()) mp.withParty(this.getDataInTag('CaucusShortName').toLowerCase()) mp.withRiding(this.getDataInTag('ConstituencyName').toLowerCase()) mp.withStartYear(Number(this.getDataInTag('FromDateTime').substring(0, 4))) mp.withEndYear(Number(this.getDataInTag('ToDateTime').substring(0, 4))) return mp.build() } passesFilters () { return (!this.mustBeACurrentMember || this.isACurrentMember()) } isACurrentMember () { const dateEnded = this.getDataInTag('ToDateTime', true) return dateEnded === '' } hasData () { return super.hasData() || this.isTagInXml(this.tagName + 'Role') } } module.exports.MpXmlParser = MpXmlParser
const express = require('express');
const mongoose = require('mongoose');
const router = express.Router();

// Models
const Book = require('../models/book');

// GET - Read all books
router.get('/', async (req, res) => {
  try {
    const books = await Book.find().exec();
    res.status(200).json(books);
  } catch (err) {
    res.status(500).json({ error: err });
  }
});

// POST - Create a new book
router.post('/', async (req, res) => {
  const book = new Book({
    title: req.body.title,
    author: req.body.author
  });
  try {
    await book.save();
    res.status(201).json({
      message: 'Book successfully created',
      createdBook: book
    });
  } catch (err) {
    res.status(500).json({ error: err });
  }
});

// PUT - Update an existing book
router.put('/:bookId', async (req, res) => {
  const _id = req.params.bookId;
  try {
    await Book.updateOne(
      { _id },
      { $set: { title: req.body.title, author: req.body.author } }
    ).exec();
    res.status(200).json({ message: 'Book successfully updated' });
  } catch (err) {
    res.status(500).json({ error: err });
  }
});

// DELETE - Delete an existing book
router.delete('/:bookId', async (req, res) => {
  const _id = req.params.bookId;
  try {
    await Book.deleteOne({ _id }).exec();
    res.status(200).json({ message: 'Book successfully deleted' });
  } catch (err) {
    res.status(500).json({ error: err });
  }
});

module.exports = router;
/* * Copyright (c) 2019 <NAME> and others * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Apache License v2.0 is available at http://www.opensource.org/licenses/apache2.0.php. * * You may elect to redistribute this code under either of these licenses. * * Contributors: * * <NAME> */ package org.jnosql.diana.mongodb.document; import com.mongodb.AuthenticationMechanism; import com.mongodb.MongoCredential; import jakarta.nosql.CommunicationException; import jakarta.nosql.Settings; import org.junit.jupiter.api.Test; import java.util.Arrays; import static com.mongodb.AuthenticationMechanism.GSSAPI; import static com.mongodb.AuthenticationMechanism.PLAIN; import static com.mongodb.AuthenticationMechanism.SCRAM_SHA_1; import static com.mongodb.AuthenticationMechanism.SCRAM_SHA_256; import static org.jnosql.diana.mongodb.document.MongoDBDocumentConfigurations.AUTHENTICATION_MECHANISM; import static org.jnosql.diana.mongodb.document.MongoDBDocumentConfigurations.AUTHENTICATION_SOURCE; import static org.jnosql.diana.mongodb.document.MongoDBDocumentConfigurations.PASSWORD; import static org.jnosql.diana.mongodb.document.MongoDBDocumentConfigurations.USER; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; class MongoAuthenticationTest { @Test public void shouldReturnErrorWhenTheNumberParameterIsInvalid() { Settings settings = Settings.builder().put(USER.get(), "value") .build(); assertThrows(CommunicationException.class, () -> MongoAuthentication.of(settings)); } @Test public void shouldReturnOneAuthentication() { Settings settings = Settings.builder() .put(AUTHENTICATION_SOURCE.get(), "database") 
.put(PASSWORD.get(), "password") .put(USER.get(), "user") .build(); MongoCredential credential = MongoAuthentication.of(settings).get(); assertEquals("database", credential.getSource()); assertTrue(Arrays.equals("password".toCharArray(), credential.getPassword())); assertEquals("user", credential.getUserName()); assertEquals(PLAIN.getMechanismName(), credential.getMechanism()); } @Test public void shouldReturnOneAuthenticationWithGSSAPI() { Settings settings = Settings.builder() .put(AUTHENTICATION_SOURCE.get(), "database") .put(PASSWORD.get(), "password") .put(USER.get(), "user") .put(AUTHENTICATION_MECHANISM.get(), "GSSAPI") .build(); MongoCredential credential = MongoAuthentication.of(settings).get(); assertEquals("$external", credential.getSource()); assertEquals("user", credential.getUserName()); assertEquals(GSSAPI.getMechanismName(), credential.getMechanism()); } @Test public void shouldReturnOneAuthenticationWithMongoX509() { Settings settings = Settings.builder() .put(AUTHENTICATION_SOURCE.get(), "database") .put(PASSWORD.get(), "password") .put(USER.get(), "user") .put(AUTHENTICATION_MECHANISM.get(), "MONGODB-X509") .build(); MongoCredential credential = MongoAuthentication.of(settings).get(); assertEquals("$external", credential.getSource()); assertEquals("user", credential.getUserName()); assertEquals(AuthenticationMechanism.MONGODB_X509.getMechanismName(), credential.getMechanism()); } @Test public void shouldReturnOneAuthenticationWithSCRAMSHA1() { Settings settings = Settings.builder() .put(AUTHENTICATION_SOURCE.get(), "database") .put(PASSWORD.get(), "password") .put(USER.get(), "user") .put(AUTHENTICATION_MECHANISM.get(), "SCRAM-SHA-1") .build(); MongoCredential credential = MongoAuthentication.of(settings).get(); assertEquals("database", credential.getSource()); assertTrue(Arrays.equals("password".toCharArray(), credential.getPassword())); assertEquals("user", credential.getUserName()); assertEquals(SCRAM_SHA_1.getMechanismName(), 
credential.getMechanism()); } @Test public void shouldReturnOneAuthenticationWithSCRAMSHA256() { Settings settings = Settings.builder() .put(AUTHENTICATION_SOURCE.get(), "database") .put(PASSWORD.get(), "password") .put(USER.get(), "user") .put(AUTHENTICATION_MECHANISM.get(), "SCRAM-SHA-256") .build(); MongoCredential credential = MongoAuthentication.of(settings).get(); assertEquals("database", credential.getSource()); assertTrue(Arrays.equals("password".toCharArray(), credential.getPassword())); assertEquals("user", credential.getUserName()); assertEquals(SCRAM_SHA_256.getMechanismName(), credential.getMechanism()); } }
const app = require('../app');
const mongoose = require('mongoose');
const { expect } = require('chai');
const request = require('supertest')(app);
const seedDB = require('../seed/seed');
const { articles, comments, topics, users } = require('../seed/testData');

// Integration tests for the Northcoders News API, run against a freshly
// seeded test database for every spec.
describe('Northcoders News API', () => {
  // FIX: the original declared `topicsDocs` here while the beforeEach below
  // destructures into `topicDocs`, so every test read an implicit global
  // (and would throw under strict mode). Declare the name actually used.
  let topicDocs, userDocs, articleDocs, commentDocs;

  // Re-seed before each spec so tests cannot leak state into one another.
  beforeEach(() => {
    return seedDB(topics, users, articles, comments).then(docs => {
      [topicDocs, userDocs, articleDocs, commentDocs] = docs;
    });
  });

  // Close the mongoose connection once the whole suite has finished.
  after(() => {
    return mongoose.disconnect();
  });

  describe('/api', () => {
    it('GET homepage', () => {
      return request
        .get('/api')
        .expect(200)
        .then(res => {
          expect(res.text).to.contain('Northcoders News');
        });
    });
  });

  describe('/api/topics', () => {
    it('GET should return an array of topics', () => {
      return request
        .get('/api/topics')
        .expect(200)
        .then(({ body }) => {
          const { topics } = body;
          expect(body).to.have.all.keys('topics');
          expect(topics).to.be.an('array');
          expect(topics.length).to.equal(topicDocs.length);
          expect(topics[0]).to.be.an('object');
          expect(topics[0]).to.have.all.keys(['_id', '__v', 'slug', 'title']);
          expect(topics[0].title).to.equal(topicDocs[0].title);
        });
    });
  });

  describe('/api/topics/:topic_slug/articles', () => {
    it('GET should respond with a status code 200 and an array containing the articles', () => {
      return request
        .get(`/api/topics/${topicDocs[0].slug}/articles`)
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          const filteredDocs = articleDocs.filter(article => article.belongs_to === topicDocs[0].slug);
          expect(body).to.have.all.keys(['articles', 'count']);
          expect(articles).to.be.an('array');
          expect(count).to.equal(filteredDocs.length);
          expect(articles.length).to.equal(filteredDocs.length);
          expect(articles[0]).to.be.an('object');
          expect(articles[0].title).to.equal(filteredDocs[0].title);
        });
    });
    it('GET should return the first article when paged with pageSize 1 and page 1', () => {
      return request
        .get(`/api/topics/${topicDocs[0].slug}/articles?page=1&pageSize=1`)
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          const filteredDocs = articleDocs.filter(article => article.belongs_to === topicDocs[0].slug);
          expect(body).to.have.all.keys(['articles', 'count']);
          expect(count).to.equal(filteredDocs.length);
          expect(articles.length).to.equal(1);
          expect(articles[0]._id).to.equal(`${filteredDocs[0]._id}`);
        });
    });
    it('GET should return the last article when paged with pageSize 1 and page 2 (last page)', () => {
      return request
        .get(`/api/topics/${topicDocs[0].slug}/articles?page=2&pageSize=1`)
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          const filteredDocs = articleDocs.filter(article => article.belongs_to === topicDocs[0].slug);
          expect(body).to.have.all.keys(['articles', 'count']);
          expect(count).to.equal(filteredDocs.length);
          expect(articles.length).to.equal(1);
          expect(articles[0]._id).to.equal(`${filteredDocs[filteredDocs.length - 1]._id}`);
        });
    });
    it('GET should return the documents filtered by topic and sorted by created_at date asc (oldest)', () => {
      return request
        .get(`/api/topics/${topicDocs[0].slug}/articles?sort=created_at&direction=1`)
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          const filteredDocs = articleDocs.filter(article => article.belongs_to === topicDocs[0].slug);
          expect(body).to.have.all.keys(['articles', 'count']);
          expect(articles.length).to.equal(filteredDocs.length);
          expect(count).to.equal(filteredDocs.length);
          expect(articles[0]._id).to.equal(`${filteredDocs[0]._id}`);
        });
    });
    it('GET should return the documents filtered by topic sorted by created_at date desc (newest)', () => {
      return request
        .get(`/api/topics/${topicDocs[0].slug}/articles?sort=created_at&direction=-1`)
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          const filteredDocs = articleDocs.filter(article => article.belongs_to === topicDocs[0].slug);
          expect(body).to.have.all.keys(['articles', 'count']);
          expect(articles.length).to.equal(filteredDocs.length);
          expect(count).to.equal(filteredDocs.length);
          expect(articles[0]._id).to.equal(`${filteredDocs[filteredDocs.length - 1]._id}`);
        });
    });
    it('GET should respond with a status code 404 when passed a topic slug that does not exist', () => {
      return request
        .get('/api/topics/i-do-not-exist/articles')
        .expect(404)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(404);
          expect(body.msg).to.equal('Page Not Found');
        });
    });
    it('POST should respond with a status code 201 and an object representing the added article', () => {
      return request
        .post(`/api/topics/${topicDocs[0].slug}/articles`)
        .send({
          title: 'I am a test article',
          body: 'I am a test body content for a new article in mitch topic',
          created_by: userDocs[0]._id,
        })
        .expect(201)
        .then(({ body }) => {
          const { article } = body;
          expect(body).to.have.all.keys('article');
          expect(article).to.be.an('object');
          expect(article).to.have.all.keys(['_id', 'votes', 'title', 'body', 'created_by', 'belongs_to', 'created_at', 'comment_count', '__v']);
          expect(article.created_by).to.be.an('object');
          expect(article.created_by).to.have.all.keys(['_id', 'username', 'name', 'avatar_url', '__v']);
          expect(article.belongs_to).to.equal(topicDocs[0].slug);
        });
    });
    it('POST should respond with a status code 400 as object posted is missing a required title field', () => {
      return request
        .post(`/api/topics/${topicDocs[0].slug}/articles`)
        .send({
          body: 'I am a test body content for a new article in mitch topic',
          created_by: userDocs[0]._id,
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('POST should respond with a status code 400 as object posted is missing a required body field', () => {
      return request
        .post(`/api/topics/${topicDocs[0].slug}/articles`)
        .send({
          title: 'I am a test article',
          created_by: userDocs[0]._id,
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('POST should respond with a status code 400 as object posted is missing a required created_by field', () => {
      return request
        .post(`/api/topics/${topicDocs[0].slug}/articles`)
        .send({
          title: 'I am a test article',
          body: 'I am a test body content for a new article in mitch topic',
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('POST should respond with a status code 400 as object posted has an invalid created_by mongo id for user', () => {
      return request
        .post(`/api/topics/${topicDocs[0].slug}/articles`)
        .send({
          title: 'I am a test article',
          body: 'I am a test body content for a new article in mitch topic',
          created_by: 'mr-wibble',
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('POST should respond with a status code 400 as object posted has an valid mongoid for created_by but the id does not exist as a user', () => {
      return request
        .post(`/api/topics/${topicDocs[0].slug}/articles`)
        .send({
          title: 'I am a test article',
          body: 'I am a test body content for a new article in mitch topic',
          created_by: topicDocs[0]._id,
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
  });

  describe('/api/articles', () => {
    it('GET should return an array of articles', () => {
      return request
        .get('/api/articles')
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          const filteredDocs = userDocs.filter(user => user._id === articleDocs[0].created_by);
          expect(body).to.have.all.keys(['articles', 'count']);
          expect(count).to.equal(articleDocs.length);
          expect(articles).to.be.an('array');
          expect(articles.length).to.equal(articleDocs.length);
          expect(articles[0]._id).to.equal(`${articleDocs[0]._id}`);
          expect(articles[0].created_by).to.be.an('object');
          expect(articles[0].created_by.username).to.equal(filteredDocs[0].username);
        });
    });
    it('GET should return the articles matching a search term', () => {
      return request
        .get('/api/articles?search=shadow')
        .expect(200)
        .then(({ body }) => {
          const { articles } = body;
          const filteredDocs = articleDocs.filter(article => article.title.indexOf('shadow') > -1);
          expect(articles.length).to.equal(filteredDocs.length);
          expect(articles[0]._id).to.equal(`${filteredDocs[0]._id}`);
        });
    });
    it('GET should return the first article when paged with pageSize 1 and page 1', () => {
      return request
        .get('/api/articles?page=1&pageSize=1')
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          expect(count).to.equal(articleDocs.length);
          expect(articles.length).to.equal(1);
          expect(articles[0]._id).to.equal(`${articleDocs[0]._id}`);
        });
    });
    it('GET should return the last article when paged with pageSize 1 and page 4', () => {
      return request
        .get('/api/articles?page=4&pageSize=1')
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          expect(count).to.equal(articleDocs.length);
          expect(articles.length).to.equal(1);
          expect(articles[0]._id).to.equal(`${articleDocs[articleDocs.length - 1]._id}`);
        });
    });
    it('GET should return the documents sorted by created_at date asc (oldest)', () => {
      return request
        .get('/api/articles?sort=created_at&direction=1')
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          expect(count).to.equal(articleDocs.length);
          expect(articles.length).to.equal(articleDocs.length);
          expect(articles[0]._id).to.equal(`${articleDocs[0]._id}`);
        });
    });
    it('GET should return the documents sorted by created_at date desc (newest)', () => {
      return request
        .get('/api/articles?sort=created_at&direction=-1')
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          expect(count).to.equal(articleDocs.length);
          expect(articles.length).to.equal(articleDocs.length);
          expect(articles[0]._id).to.equal(`${articleDocs[articleDocs.length - 1]._id}`);
        });
    });
  });

  describe('/api/articles/:article_id', () => {
    it('GET should return a single article when a valid mongoid is used for a document in the articles collection', () => {
      return request
        .get(`/api/articles/${articleDocs[0]._id}`)
        .expect(200)
        .then(({ body }) => {
          const { article } = body;
          const filteredDocs = userDocs.filter(user => user._id === articleDocs[0].created_by);
          expect(body).to.have.all.keys('article');
          expect(article).to.be.an('object');
          expect(article).to.have.all.keys(['__v', '_id', 'title', 'body', 'votes', 'created_at', 'belongs_to', 'created_by', 'comment_count']);
          expect(article._id).to.equal(`${articleDocs[0]._id}`);
          expect(article.created_by).to.be.an('object');
          expect(article.created_by.username).to.equal(filteredDocs[0].username);
        });
    });
    it('GET should return a 400 status when the mongoid used is invalid', () => {
      return request
        .get(`/api/articles/something-terribly-geeky`)
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('GET should return a 404 status when the mongoid used is valid, but data with mongoid doesnt exist in articles collection', () => {
      return request
        .get(`/api/articles/${topicDocs[0]._id}`)
        .expect(404)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(404);
          expect(body.msg).to.equal('Page Not Found');
        });
    });
    it('PATCH should increment the votes of an article by 1', () => {
      return request
        .patch(`/api/articles/${articleDocs[0]._id}?vote=up`)
        .expect(200)
        .then(({ body }) => {
          const { article } = body;
          expect(body).to.have.all.keys('article');
          expect(article.votes).to.equal(articleDocs[0].votes + 1);
        });
    });
    it('PATCH should decrease the votes of an article by 1', () => {
      return request
        .patch(`/api/articles/${articleDocs[0]._id}?vote=down`)
        .expect(200)
        .then(({ body }) => {
          const { article } = body;
          expect(body).to.have.all.keys('article');
          expect(article.votes).to.equal(articleDocs[0].votes - 1);
        });
    });
    it('PATCH should return a 400 as article mongoid is invalid', () => {
      return request
        .patch(`/api/articles/something-terrible-geeky?vote=up`)
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('PATCH should return a 404 as article mongoid is valid, but data with mongoid doesnt exist in collection', () => {
      return request
        .patch(`/api/articles/${topicDocs[0]._id}?vote=up`)
        .expect(404)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(404);
          expect(body.msg).to.equal('Page Not Found');
        });
    });
    it('PATCH should return a 400 as vote key is missing in query', () => {
      return request
        .patch(`/api/articles/${topicDocs[0]._id}`)
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('PATCH should return a 400 as vote key is in query but unexpected value', () => {
      return request
        .patch(`/api/articles/${topicDocs[0]._id}?vote=test`)
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
  });

  describe('/api/articles/:article_id/comments', () => {
    it('GET should return comments for a single article', () => {
      return request
        .get(`/api/articles/${articleDocs[0]._id}/comments`)
        .expect(200)
        .then(({ body }) => {
          const { comments } = body;
          const filteredComments = commentDocs.filter(comment => comment.belongs_to === articleDocs[0]._id);
          const filteredUsers = userDocs.filter(user => user._id === filteredComments[0].created_by);
          expect(body).to.have.all.keys('comments');
          expect(comments).to.be.an('array');
          expect(comments.length).to.equal(filteredComments.length);
          expect(comments[0]).to.have.all.keys(['_id', '__v', 'votes', 'body', 'belongs_to', 'created_by', 'created_at']);
          expect(comments[0].body).to.equal(filteredComments[0].body);
          expect(comments[0].created_by).to.be.an('object');
          expect(comments[0].created_by._id).to.equal(`${filteredUsers[0]._id}`);
        });
    });
    it('GET should return 400 when requesting comments for an article by mongoid that is invalid', () => {
      return request
        .get('/api/articles/something-terribly-geeky/comments')
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('POST should create a new comment for a single article', () => {
      return request
        .post(`/api/articles/${articleDocs[0]._id}/comments`)
        .send({
          body: `I am a test comment for article ${articleDocs[0].title}`,
          created_by: userDocs[0]._id,
        })
        .expect(201)
        .then(({ body }) => {
          const { comment } = body;
          expect(body).to.have.all.keys('comment');
          expect(comment).to.be.an('object');
          expect(comment).to.have.all.keys(['_id', 'votes', 'body', 'created_by', 'belongs_to', 'created_at', '__v']);
          expect(comment.created_by).to.be.an('object');
          expect(comment.created_by).to.have.all.keys(['_id', 'username', 'name', 'avatar_url', '__v']);
          expect(comment.created_by._id).to.equal(`${userDocs[0]._id}`);
          expect(comment.belongs_to).to.be.an('object');
          expect(comment.belongs_to).to.have.all.keys(['__v', '_id', 'title', 'body', 'votes', 'created_at', 'belongs_to', 'created_by']);
        });
    });
    it('POST should respond with a status code 400 when attempting to add a comment with an invalid mongoid', () => {
      return request
        .post('/api/articles/something-really-geeky-here/comments')
        .send({
          body: `I am a test comment for article ${articleDocs[0].title}`,
          created_by: userDocs[0]._id,
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('POST should respond with a status code 400 as object posted has an invalid created_by mongo id for user', () => {
      return request
        .post(`/api/articles/${articleDocs[0]._id}/comments`)
        .send({
          body: `I am a test comment for article ${articleDocs[0].title}`,
          created_by: 'mr-wibble',
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('POST should respond with a status code 400 as object posted has an valid created_by mongo id but for another collection', () => {
      return request
        .post(`/api/articles/${articleDocs[0]._id}/comments`)
        .send({
          body: `I am a test comment for article ${articleDocs[0].title}`,
          created_by: topicDocs[0]._id,
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('POST should respond with a status code 400 when attempting to add a comment with an valid mongoid but for another collection', () => {
      return request
        .post(`/api/articles/${topicDocs[0]._id}/comments`)
        .send({
          body: `I am a test comment for article ${articleDocs[0].title}`,
          created_by: userDocs[0]._id,
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('POST should respond with a status code 400 as object posted is missing a required body field', () => {
      return request
        .post(`/api/articles/${articleDocs[0]._id}/comments`)
        .send({
          created_by: userDocs[0]._id,
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('POST should respond with a status code 400 as object posted is missing a created_by field', () => {
      return request
        .post(`/api/articles/${articleDocs[0]._id}/comments`)
        .send({
          body: `I am a test comment for article ${articleDocs[0].title}`,
        })
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
  });

  describe('/api/users/:username', () => {
    it('GET should return a single user', () => {
      return request
        .get(`/api/users/${userDocs[0].username}`)
        .expect(200)
        .then(({ body }) => {
          const { user } = body;
          expect(body).to.have.all.keys('user');
          expect(user).to.be.an('object');
          expect(user).to.have.all.keys(['__v', '_id', 'username', 'name', 'avatar_url']);
          expect(user.username).to.equal(userDocs[0].username);
        });
    });
    it('GET should return a 404 when user is not found in collection', () => {
      return request
        .get(`/api/users/benny-hill`)
        .expect(404)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(404);
          expect(body.msg).to.equal('Page Not Found');
        });
    });
  });

  describe('/api/users/:username/articles', () => {
    it('GET should return articles for a username', () => {
      return request
        .get(`/api/users/${userDocs[0].username}/articles`)
        .expect(200)
        .then(({ body }) => {
          const { articles, count } = body;
          const filteredDocs = articleDocs.filter(article => article.created_by === userDocs[0]._id);
          expect(body).to.have.all.keys(['articles', 'count']);
          expect(count).to.equal(filteredDocs.length);
          expect(articles.length).to.equal(filteredDocs.length);
          expect(articles[0]._id).to.equal(`${filteredDocs[0]._id}`);
        });
    });
  });

  describe('/api/comments/:comment_id', () => {
    it('DELETE should delete a single comment', () => {
      return request
        .delete(`/api/comments/${commentDocs[0]._id}`)
        .expect(200)
        .then(({ body }) => {
          const { comment } = body;
          expect(body).to.have.all.keys('comment');
          expect(comment).to.be.an('object');
          expect(comment).to.have.all.keys(['_id', '__v', 'body', 'votes', 'created_at', 'belongs_to', 'created_by']);
        });
    });
    it('DELETE should return a 404 status when trying to delete a valid mongoid that does not exist in the collection', () => {
      return request
        .delete(`/api/comments/${topicDocs[0]._id}`)
        .expect(404)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(404);
          expect(body.msg).to.equal('Page Not Found');
        });
    });
    it('DELETE should return a 400 status when trying to delete a invalid mongoid', () => {
      return request
        .delete('/api/comments/something-really-geeky-here')
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('PATCH should increment the votes of an comment by 1', () => {
      return request
        .patch(`/api/comments/${commentDocs[0]._id}?vote=up`)
        .expect(200)
        .then(({ body }) => {
          const { comment } = body;
          expect(body).to.have.all.keys('comment');
          expect(comment).to.be.an('object');
          expect(comment).to.have.all.keys(['_id', 'body', 'belongs_to', 'created_by', 'created_at', 'votes', '__v']);
          expect(comment.created_by).to.be.an('object');
          expect(comment.created_by).to.have.all.keys(['_id', 'username', 'name', 'avatar_url', '__v']);
          expect(comment.votes).to.equal(commentDocs[0].votes + 1);
        });
    });
    it('PATCH should decrease the votes of an comment by 1', () => {
      return request
        .patch(`/api/comments/${commentDocs[0]._id}?vote=down`)
        .expect(200)
        .then(({ body }) => {
          const { comment } = body;
          expect(body).to.have.all.keys('comment');
          expect(comment).to.be.an('object');
          expect(comment).to.have.all.keys(['_id', 'body', 'belongs_to', 'created_by', 'created_at', 'votes', '__v']);
          expect(comment.created_by).to.be.an('object');
          expect(comment.created_by).to.have.all.keys(['_id', 'username', 'name', 'avatar_url', '__v']);
          expect(comment.votes).to.equal(commentDocs[0].votes - 1);
        });
    });
    it('PATCH should return a 400 as comment mongoid is invalid', () => {
      return request
        .patch(`/api/comments/something-terrible-geeky?vote=up`)
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('PATCH should return a 404 as comment mongoid is valid, but data with mongoid doesnt exist in collection', () => {
      return request
        .patch(`/api/comments/${topicDocs[0]._id}?vote=up`)
        .expect(404)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(404);
          expect(body.msg).to.equal('Page Not Found');
        });
    });
    it('PATCH should return a 400 as vote key is missing in query', () => {
      return request
        .patch(`/api/comments/${commentDocs[0]._id}`)
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
    it('PATCH should return a 400 as vote key is in query but unexpected value', () => {
      return request
        .patch(`/api/comments/${commentDocs[0]._id}?vote=test`)
        .expect(400)
        .then(({ body }) => {
          expect(body).to.be.an('object');
          expect(body).to.have.all.keys(['msg', 'status']);
          expect(body.status).to.equal(400);
          expect(body.msg).to.equal('Bad Request');
        });
    });
  });

  describe('/api/stats', () => {
    it('Should return comment and article counts per user', () => {
      return request
        .get('/api/stats')
        .expect(200)
        .then(({ body }) => {
          const { comments, articles } = body;
          expect(comments).to.be.an('array');
          expect(articles).to.be.an('array');
          userDocs.forEach(user => {
            const filteredComments = commentDocs.filter(comment => comment.created_by === user._id);
            const filteredArticles = articleDocs.filter(article => article.created_by === user._id);
            const userInComments = comments.filter(comment => `${comment._id}` === `${user._id}`);
            const userInArticles = articles.filter(article => `${article._id}` === `${user._id}`);
            expect(userInComments[0].comment_count).to.equal(filteredComments.length);
            expect(userInArticles[0].article_count).to.equal(filteredArticles.length);
          });
        });
    });
  });
});
package com.honyum.elevatorMan.utils; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.LinearLayout; import android.widget.NumberPicker; import java.util.ArrayList; import java.util.List; import static com.baidu.navisdk.util.common.ScreenUtil.dip2px; import static com.baidu.navisdk.util.common.ScreenUtil.getScreenWidth; /** * Created by star on 2018/3/31. */ public class ViewUtils { /** * 调整FrameLayout大小 * * @param tp */ public static void resizePikcer(FrameLayout tp) { float[] size = null; //npList size==3 代表 datepicker 年月日宽度对应为 0.25f 0.2f 0.2f //npList size==2 代表 timepicker 时分宽度对应为 0.175f 0.175f List npList = findNumberPicker(tp); if (npList.size() == 3) { size = new float[]{0.25f, 0.2f, 0.2f}; } else if (npList.size() == 2) { size = new float[]{0.175f, 0.175f}; } for (int i = 0; i < npList.size(); i++) { NumberPicker np = (NumberPicker) npList.get(i); resizeNumberPicker(np, size[i]); } } /** * 得到viewGroup里面的numberpicker组件 * * @param viewGroup * @return */ private static List findNumberPicker(ViewGroup viewGroup) { List npList = new ArrayList(); View child = null; if (null != viewGroup) { for (int i = 0; i < viewGroup.getChildCount(); i++) { child = viewGroup.getChildAt(i); if (child instanceof NumberPicker) { npList.add((NumberPicker) child); } else if (child instanceof LinearLayout) { List result = findNumberPicker((ViewGroup) child); if (result.size() > 0) { return result; } } } } return npList; } /** * 调整numberpicker大小 * @param np * @param size 每个numberPicker对应分得屏幕宽度 */ private static void resizeNumberPicker(NumberPicker np, float size) { int dp5 = dip2px(np.getContext(), 5); int dp2 = dip2px(np.getContext(), 2); //timepicker 时 分 左右各自有8dp空白 int dp32 = dip2px(np.getContext(), 32); //屏幕宽度 - timepicker左右空白 -自设周边5dp空白 LinearLayout.LayoutParams params = new LinearLayout.LayoutParams( (int) ((getScreenWidth(np.getContext()) - dp32 - dp5 * 10) *size), 
ViewGroup.LayoutParams.WRAP_CONTENT); params.setMargins(0,0,0,0); np.setLayoutParams(params); } }
import Frisbee from 'frisbee' const URL = 'http://localhost:4000/api/' export const endpoints = { data: (id) => id ? `/data${id}` : 'data' } export const api = new Frisbee({ baseURI: URL, headers: { 'Accept': 'application/json', 'Content-Type': 'application/json' } });
import React from 'react'; import { FamilieSelect } from '../select'; import { SelectProps } from 'nav-frontend-skjema'; export interface IMånedProps extends Omit<SelectProps, 'children'>{ value: string | undefined; erLesevisning?: boolean; } const månedValg = [ { mndNr: '01', verdi: 'Januar' }, { mndNr: '02', verdi: 'Februar' }, { mndNr: '03', verdi: 'Mars' }, { mndNr: '04', verdi: 'April' }, { mndNr: '05', verdi: 'Mai' }, { mndNr: '06', verdi: 'Juni' }, { mndNr: '07', verdi: 'Juli' }, { mndNr: '08', verdi: 'August' }, { mndNr: '09', verdi: 'September' }, { mndNr: '10', verdi: 'Oktober' }, { mndNr: '11', verdi: 'November' }, { mndNr: '12', verdi: 'Desember' }, ]; const månedsnavnForNummer = (value: string | undefined) => { return value ? månedValg.find((mnd) => mnd.mndNr === value)?.verdi : ''; } export const MånedVelger: React.FC<IMånedProps> = ({ value, erLesevisning = false, ...props }) => { return ( <FamilieSelect erLesevisning={erLesevisning} lesevisningVerdi={månedsnavnForNummer(value)} value={value} {...props} > <option value="">Måned</option> {månedValg.map((mnd) => ( <option value={mnd.mndNr} key={mnd.mndNr}> {mnd.verdi} </option> ))} </FamilieSelect> ); };
package aufgabe10_8; public class False extends Condition { @Override public void accept(Visitor visitor) { visitor.visit(this); } }
<gh_stars>0 package aufgabe12_9; // utf8: "Köpfchen in das Wasser, Schwänzchen in die Höh." -CIA-Verhörmethode public class Constructor implements Visitable { private String name; private SingleDeclaration[] parmeters; private Declaration[] declarations; private Statement[] statements; public Constructor(String name, SingleDeclaration[] parmeters, Declaration[] declarations, Statement[] statements) { this.name = name; this.parmeters = parmeters; this.declarations = declarations; this.statements = statements; } public String getName() { return name; } public SingleDeclaration[] getParameters() { return parmeters; } public Declaration[] getDeclarations() { return declarations; } public Statement[] getStatements() { return statements; } @Override public void accept(Visitor visitor) { visitor.visit(this); } @Override public String toString() { FormatVisitor f = new FormatVisitor(); f.visit(this); return f.getResult(); } }
@extends('layouts.app') @section('content') <div class="container"> <h1>Consolidated Exchanges</h1> <ul> @foreach ($exchanges as $exchange) <li> <h3>{{ $exchange->name }}</h3> <p>Date: {{ $exchange->date }}</p> <!-- Add any other relevant exchange details here --> </li> @endforeach </ul> {{ $exchanges->links() }} <!-- Pagination links --> </div> @endsection
<filename>Lab11/src/academy/pocu/comp2500/lab11/pocu/WarehouseType.java package academy.pocu.comp2500.lab11.pocu; public enum WarehouseType { APPLE, MICROSOFT, SAMSUNG }
<reponame>murphybytes/saml package saml import ( "bytes" "encoding/xml" "testing" "github.com/murphybytes/saml/generated" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestEntityDescriptor(t *testing.T) { buff, err := generated.Asset("test_data/metadata.xml") require.Nil(t, err) require.NotNil(t, buff) var descriptor EntityDescriptor err = xml.Unmarshal(buff, &descriptor) require.Nil(t, err) require.Len(t, descriptor.IDPSSODescriptor.KeyDescriptors, 1) keyDescriptor := descriptor.IDPSSODescriptor.KeyDescriptors[0] assert.Equal(t, "signing", keyDescriptor.Use) expected := "MIIEFDCCAvygAw" require.True(t, len(keyDescriptor.KeyInfo.X509Data.X509Certificate.Data) > len(expected)) assert.Equal(t, "MIIEFDCCAvygAw", keyDescriptor.KeyInfo.X509Data.X509Certificate.Data[:len(expected)]) assert.Len(t, descriptor.IDPSSODescriptor.SingleSignOnService, 3) assert.Len(t, descriptor.IDPSSODescriptor.SingleLogoutService, 1) } func TestEntityDescriptorNoSingleLogout(t *testing.T) { buff, err := generated.Asset("test_data/metadata_noslo.xml") require.Nil(t, err) require.NotNil(t, buff) var descriptor EntityDescriptor err = xml.Unmarshal(buff, &descriptor) require.Nil(t, err) require.Len(t, descriptor.IDPSSODescriptor.KeyDescriptors, 1) keyDescriptor := descriptor.IDPSSODescriptor.KeyDescriptors[0] assert.Equal(t, "signing", keyDescriptor.Use) expected := "MIIEFDCCAvygAw" require.True(t, len(keyDescriptor.KeyInfo.X509Data.X509Certificate.Data) > len(expected)) assert.Equal(t, "MIIEFDCCAvygAw", keyDescriptor.KeyInfo.X509Data.X509Certificate.Data[:len(expected)]) assert.Len(t, descriptor.IDPSSODescriptor.SingleSignOnService, 3) assert.Len(t, descriptor.IDPSSODescriptor.SingleLogoutService, 0) } func TestLogoutRequest(t *testing.T) { var lr LogoutRequest lr.XMLName.Local = "samlp:LogoutRequest" lr.XMLName.Space = samlNamespace lr.SAMLP = samlProtocalNamespace lr.ID = "234" lr.NameID.Format = NameIDEmail lr.NameID.Value = "<EMAIL>" lr.Version = 
samlVersion var buff bytes.Buffer err := xml.NewEncoder(&buff).Encode(&lr) require.Nil(t, err) var decoded LogoutRequest err = xml.NewDecoder(&buff).Decode(&decoded) require.Nil(t, err) assert.Equal(t, lr.ID, decoded.ID) }
An optimized version of the Bubble Sort algorithm can stop early: if a full pass makes no swaps, the list is already sorted and the outer loop can terminate. Note that this improves only the best-case time complexity — from O(n^2) to O(n) for input that is already (or nearly) sorted — while the average-case and worst-case complexity remain O(n^2).
#!/usr/bin/env bash PATH_TO_SCRIPT=$(realpath $0) PATH_TO_BIN_DIRECTORY=$(dirname $PATH_TO_SCRIPT) PROJECT_ROOT=$(dirname $PATH_TO_BIN_DIRECTORY) PROJECTS_DIRECTORY=$(dirname $PROJECT_ROOT) set -e #COLLATERAL_KEY= #COLLATERAL_ADDRESS= # PLEASE PUT YOUR FAUCET KEY HERE FAUCET_PRIVATE_KEY= FAUCET_ADDRESS= MINING_INTERVAL_IN_SECONDS=20 # PLEASE SET THIS VARIABLES TO YOUR LOCAL DIRECTORIES WITH THE CODE IF YOU WISH TO COMPILE DAPI AND DRIVE # Current value are assuming that dashmate and all other components are under that same parent dir DAPI_REPO_PATH=${PROJECTS_DIRECTORY}/dapi/ DRIVE_REPO_PATH=${PROJECTS_DIRECTORY}/js-drive/ BUILD_DAPI_BEFORE_SETUP=true BUILD_DAPI_AFTER_SETUP=false BUILD_DRIVE=true CONFIG_NAME="local" MASTERNODES_COUNT=3 echo "Removing all docker containers and volumes..." docker rm -f -v $(docker ps -a -q) || true docker system prune -f --volumes echo "Remove dashmate configuration..." rm -rf ~/.dashmate/ if [ $BUILD_DRIVE == true ] then echo "Setting drive build directory" ./bin/dashmate config:set --config=${CONFIG_NAME} platform.drive.abci.docker.build.path $DRIVE_REPO_PATH fi if [ $BUILD_DAPI_BEFORE_SETUP == true ] then echo "Setting dapi build directory before the setup" ./bin/dashmate config:set --config=${CONFIG_NAME} platform.dapi.api.docker.build.path $DAPI_REPO_PATH fi ./bin/dashmate setup ${CONFIG_NAME} --verbose --debug-logs --miner-interval="${MINING_INTERVAL_IN_SECONDS}s" --node-count=${MASTERNODES_COUNT} | tee ${PROJECT_ROOT}/setup.log echo "Sending 1000 tDash to the ${FAUCET_ADDRESS} for tests" ./bin/dashmate wallet:mint 1000 --config=${CONFIG_NAME}_seed --address=${FAUCET_ADDRESS} --verbose if [ $BUILD_DAPI_AFTER_SETUP == true ] then echo "Setting dapi build directory after the setup" for (( NODE_INDEX=1; NODE_INDEX<=MASTERNODES_COUNT; NODE_INDEX++ )) do ./bin/dashmate config:set --config=${CONFIG_NAME}_${NODE_INDEX} platform.dapi.api.docker.build.path $DAPI_REPO_PATH done fi ./bin/dashmate group:start --wait-for-readiness --verbose
import * as React from "react"; import { RouteComponentProps } from "react-router"; import { gql, graphql, QueryProps, DefaultChildProps } from "react-apollo"; import * as VetListQueryGql from "./VetListQuery.graphql"; import { VetsQuery, OwnerSummaryFragment } from "../../types"; import withLoadingHandler from "../../../components/withLoadingHandler"; type VetListPageOwnProps = RouteComponentProps<{}>; type VetListPageProps = VetListPageOwnProps & { data: QueryProps & VetsQuery; }; const VetListPage = ({ data: { vets } }: VetListPageProps) => <section> <h2>Veterinarians</h2> <table className="table table-striped"> <thead> <tr> <th>Name</th> <th>Specialties</th> </tr> </thead> <tbody> {vets.map(vet => <tr key={vet.id}> <td> {vet.firstName} {vet.lastName} </td> <td> {vet.specialties.length > 0 ? vet.specialties.map(specialty => specialty.name).join(", ") : "none"} </td> </tr> )} </tbody> </table> </section>; export default graphql<VetsQuery, VetListPageOwnProps>(VetListQueryGql)(withLoadingHandler(VetListPage));
class DeviceMonitor: def __init__(self, device_info): self.device_info = device_info self.device_active = True def ShouldWaitForDevice(self): # Implement logic to check device availability, e.g., ping the device # Return True if the device is offline and should be waited for, False otherwise return not self.device_active def monitor_device_status(self): while True: try: if self.ShouldWaitForDevice(): if self.device_active: logging.info('Device %s is offline. Waiting for it to come back.', self.device_info.DEVICE_SN) self.device_active = False # Add logic to handle device becoming active again # For example, if the device becomes active, set self.device_active = True and log a message else: # Device is active again logging.info('Device %s is online.', self.device_info.DEVICE_SN) self.device_active = True time.sleep(10) except Exception as e: logging.error('An error occurred while monitoring device status: %s', str(e)) # Usage device_info = DeviceInfo(DEVICE_SN="ABC123") monitor = DeviceMonitor(device_info) monitor.monitor_device_status()
package k8sutils import ( "k8s.io/client-go/rest" "k8s.io/client-go/tools/clientcmd" ) func BuildConfig(kubeconfig string) (*rest.Config, error) { if kubeconfig != "" { return clientcmd.BuildConfigFromFlags("", kubeconfig) } return rest.InClusterConfig() }
<gh_stars>100-1000 package subcmd import ( "encoding/json" "fmt" "os" "github.com/segmentio/kubeapply/pkg/config" "github.com/segmentio/kubeapply/pkg/star/expand" "github.com/spf13/cobra" ) var star2yamlCmd = &cobra.Command{ Use: "star2yaml [star path]", Short: "star2yaml expands a kube starlark file to YAML", Args: cobra.ExactArgs(1), RunE: star2yamlRun, } type star2yamlFlags struct { clusterConfig string varsStr string } var star2yamlFlagValues star2yamlFlags func init() { star2yamlCmd.Flags().StringVar( &star2yamlFlagValues.clusterConfig, "cluster-config", "", "Path to a kubeapply-formatted YAML cluster config; used to set vars in ctx object", ) star2yamlCmd.Flags().StringVar( &star2yamlFlagValues.varsStr, "vars", "", "Extra JSON-formatted vars to insert in ctx object", ) RootCmd.AddCommand(star2yamlCmd) } func star2yamlRun(cmd *cobra.Command, args []string) error { var starParams map[string]interface{} if star2yamlFlagValues.clusterConfig != "" { clusterConfig, err := config.LoadClusterConfig( star2yamlFlagValues.clusterConfig, "", ) if err != nil { return err } starParams = clusterConfig.StarParams() } else { starParams = map[string]interface{}{} } if star2yamlFlagValues.varsStr != "" { extraParams := map[string]interface{}{} if err := json.Unmarshal( []byte(star2yamlFlagValues.varsStr), &extraParams, ); err != nil { return err } for key, value := range extraParams { starParams[key] = value } } cwd, err := os.Getwd() if err != nil { return err } result, err := expand.StarToYaml(args[0], cwd, starParams) if err != nil { return err } fmt.Println(result) return nil }
<gh_stars>10-100
#!/usr/bin/env python
#
# This file is part of libigl, a simple c++ geometry processing library.
#
# Copyright (C) 2017 <NAME> <<EMAIL>> and <NAME> <<EMAIL>>
#
# This Source Code Form is subject to the terms of the Mozilla Public License
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
#
# Tutorial: load a quad mesh generated from a conjugate field, planarize it,
# and visualize planarity of the original ('1') vs planarized ('2') mesh.
import sys, os

# Add the igl library to the modules search path
sys.path.insert(0, os.getcwd() + "/../")
import pyigl as igl

from shared import TUTORIAL_SHARED_PATH, check_dependencies

dependencies = ["glfw"]
check_dependencies(dependencies)

viewer = igl.glfw.Viewer()

# Quad mesh generated from conjugate field
VQC = igl.eigen.MatrixXd()
FQC = igl.eigen.MatrixXi()
FQCtri = igl.eigen.MatrixXi()
PQC0 = igl.eigen.MatrixXd()
PQC1 = igl.eigen.MatrixXd()
PQC2 = igl.eigen.MatrixXd()
PQC3 = igl.eigen.MatrixXd()

# Planarized quad mesh
VQCplan = igl.eigen.MatrixXd()
# NOTE(review): FQCtriplan is declared but never assigned or used below —
# the planarized mesh is drawn with the original triangulation FQCtri.
FQCtriplan = igl.eigen.MatrixXi()
PQC0plan = igl.eigen.MatrixXd()
PQC1plan = igl.eigen.MatrixXd()
PQC2plan = igl.eigen.MatrixXd()
PQC3plan = igl.eigen.MatrixXd()


def key_down(viewer, key, modifier):
    """Keyboard handler: '1' shows the original quad mesh, '2' the planarized
    one; both are colored by per-quad planarity (jet map over [0, 0.01]).
    Returns True when the key was handled, False otherwise."""
    if key == ord('1'):
        # Draw the triangulated quad mesh
        viewer.data().set_mesh(VQC, FQCtri)

        # Assign a color to each quad that corresponds to its planarity
        planarity = igl.eigen.MatrixXd()
        igl.quad_planarity(VQC, FQC, planarity)
        Ct = igl.eigen.MatrixXd()
        igl.jet(planarity, 0, 0.01, Ct)
        # Each quad became two triangles, so the quad colors are stacked twice.
        C = igl.eigen.MatrixXd(FQCtri.rows(), 3)
        C.setTopRows(Ct.rows(), Ct)
        C.setBottomRows(Ct.rows(), Ct)
        viewer.data().set_colors(C)

        # Plot a line for each edge of the quad mesh
        viewer.data().add_edges(PQC0, PQC1, igl.eigen.MatrixXd([[0, 0, 0]]))
        viewer.data().add_edges(PQC1, PQC2, igl.eigen.MatrixXd([[0, 0, 0]]))
        viewer.data().add_edges(PQC2, PQC3, igl.eigen.MatrixXd([[0, 0, 0]]))
        viewer.data().add_edges(PQC3, PQC0, igl.eigen.MatrixXd([[0, 0, 0]]))

    elif key == ord('2'):
        # Draw the planar quad mesh
        viewer.data().set_mesh(VQCplan, FQCtri)

        # Assign a color to each quad that corresponds to its planarity
        planarity = igl.eigen.MatrixXd()
        igl.quad_planarity(VQCplan, FQC, planarity)
        Ct = igl.eigen.MatrixXd()
        igl.jet(planarity, 0, 0.01, Ct)
        C = igl.eigen.MatrixXd(FQCtri.rows(), 3)
        C.setTopRows(Ct.rows(), Ct)
        C.setBottomRows(Ct.rows(), Ct)
        viewer.data().set_colors(C)

        # Plot a line for each edge of the quad mesh
        viewer.data().add_edges(PQC0plan, PQC1plan, igl.eigen.MatrixXd([[0, 0, 0]]))
        viewer.data().add_edges(PQC1plan, PQC2plan, igl.eigen.MatrixXd([[0, 0, 0]]))
        viewer.data().add_edges(PQC2plan, PQC3plan, igl.eigen.MatrixXd([[0, 0, 0]]))
        viewer.data().add_edges(PQC3plan, PQC0plan, igl.eigen.MatrixXd([[0, 0, 0]]))
    else:
        return False

    return True

# Load a quad mesh generated by a conjugate field
igl.readOFF(TUTORIAL_SHARED_PATH + "inspired_mesh_quads_Conjugate.off", VQC, FQC)

# Convert it to a triangle mesh: each quad (0,1,2,3) is split into
# triangles (0,1,2) and (2,3,0), stacked upper-then-lower in FQCtri.
FQCtri.resize(2 * FQC.rows(), 3)
FQCtriUpper = igl.eigen.MatrixXi(FQC.rows(), 3)
FQCtriLower = igl.eigen.MatrixXi(FQC.rows(), 3)
FQCtriUpper.setCol(0, FQC.col(0))
FQCtriUpper.setCol(1, FQC.col(1))
FQCtriUpper.setCol(2, FQC.col(2))
FQCtriLower.setCol(0, FQC.col(2))
FQCtriLower.setCol(1, FQC.col(3))
FQCtriLower.setCol(2, FQC.col(0))
FQCtri.setTopRows(FQCtriUpper.rows(), FQCtriUpper)
FQCtri.setBottomRows(FQCtriLower.rows(), FQCtriLower)

# Gather the four corner positions of every quad (for edge drawing).
igl.slice(VQC, FQC.col(0), 1, PQC0)
igl.slice(VQC, FQC.col(1), 1, PQC1)
igl.slice(VQC, FQC.col(2), 1, PQC2)
igl.slice(VQC, FQC.col(3), 1, PQC3)

# Planarize it (100 iterations, 0.005 threshold)
igl.planarize_quad_mesh(VQC, FQC, 100, 0.005, VQCplan)

# Convert the planarized mesh to triangles
igl.slice(VQCplan, FQC.col(0), 1, PQC0plan)
igl.slice(VQCplan, FQC.col(1), 1, PQC1plan)
igl.slice(VQCplan, FQC.col(2), 1, PQC2plan)
igl.slice(VQCplan, FQC.col(3), 1, PQC3plan)

# Launch the viewer showing the planarized mesh first.
key_down(viewer, ord('2'), 0)
viewer.data().invert_normals = True
viewer.data().show_lines = False
viewer.callback_key_down = key_down
viewer.launch()
#!/bin/bash
# easyCDN: interactive installer for a Tengine-based CDN edge on RedHat/CentOS.
# Prompts for server IP, CDN domain and origin IP, then compiles PCRE and
# Tengine (+ngx_cache_purge), installs configs, opens port 80 and adds a
# hit-rate cron job. Must run as root from the easyCDN checkout directory.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH

# Root is required for yum, iptables, /etc writes, etc.
if [ $(id -u) != "0" ]; then
    printf "Error: You must be root to run this script!"
    exit 1
fi

# Locate the easyCDN directory; ask the user when not started from it.
CDN_PATH=`pwd`
if [ `echo $CDN_PATH | awk -F/ '{print $NF}'` != "easyCDN" ]; then
    clear && echo "Please enter easyCDN script path:"
    read -p "(Default path: ${CDN_PATH}):" CDN_PATH
    [ -z "$CDN_PATH" ] && CDN_PATH=$(pwd)
    cd $CDN_PATH/
fi

clear
echo "#############################################################"
echo "# Linux + Tengine CDN server Auto Install Script"
echo "# Env: Redhat/CentOS"
echo "# Version: $(awk '/version/{print $2}' $CDN_PATH/Changelog)"
echo "#"
echo "# All rights reserved."
echo "# Distributed under the GNU General Public License, version 3.0."
echo "#"
echo "#############################################################"
echo ""

# Guess the primary IPv4 address as the prompt default.
echo "Please enter the server IP address:"
TEMP_IP=`ifconfig |grep 'inet' | grep -Evi '(inet6|127.0.0.1)' | awk '{print $2}' | cut -d: -f2 | tail -1`
read -p "(e.g: $TEMP_IP):" IP_ADDRESS
if [ -z $IP_ADDRESS ]; then
    IP_ADDRESS="$TEMP_IP"
fi
echo "---------------------------"
echo "IP address = $IP_ADDRESS"
echo "---------------------------"
echo ""

echo "Please enter the CDN domain:"
read -p "(Default domain: cache.so):" DOMAIN
if [ -z $DOMAIN ]; then
    DOMAIN="cache.so"
fi
echo "---------------------------"
echo "CDN domain = $DOMAIN"
echo "---------------------------"
echo ""

# Origin (upstream) server the CDN pulls content from.
echo "Please enter the CDN original IP address(源站IP):"
read -p "(Default original IP address: 42.121.81.67):" ORIGIN_IP
if [ -z $ORIGIN_IP ]; then
    ORIGIN_IP="42.121.81.67"
fi
echo "---------------------------"
echo "CDN original IP address = $ORIGIN_IP"
echo "---------------------------"
echo ""

# Read one raw keypress without echo (restores tty settings afterwards).
get_char() {
    SAVEDSTTY=`stty -g`
    stty -echo
    stty cbreak
    dd if=/dev/tty bs=1 count=1 2> /dev/null
    stty -raw
    stty echo
    stty $SAVEDSTTY
}
echo "Press any key to start install..."
echo "Or Ctrl+C cancel and exit ?"
echo ""
char=`get_char`

echo "---------- Network Check ----------"
ping -c 1 www.google.com &>/dev/null && PING=1 || PING=0
# Re-run support: tarballs saved to src/ by a previous run are moved back.
if [ -d "$CDN_PATH/src" ];then
    \mv $CDN_PATH/src/* $CDN_PATH
fi
if [ "$PING" = 0 ];then
    echo "Network Failed!"
    exit
else
    echo "Network OK"
fi

echo "---------- Update System ----------"
yum -y update
if [ ! -s /etc/yum.conf.bak ]; then
    cp /etc/yum.conf /etc/yum.conf.bak
fi
sed -i 's:exclude=.*:exclude=:g' /etc/yum.conf

echo "---------- Set timezone ----------"
rm -rf /etc/localtime
ln -s /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
yum -y install ntp
[ "$PING" = 1 ] && ntpdate -d tw.pool.ntp.org

echo "---------- Disable SeLinux ----------"
if [ -s /etc/selinux/config ]; then
    sed -i 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/selinux/config
fi

echo "---------- Set Library ----------"
# Ensure common library paths are present in the dynamic linker config.
if [ ! `grep -iqw /lib /etc/ld.so.conf` ]; then
    echo "/lib" >> /etc/ld.so.conf
fi
if [ ! `grep -iqw /usr/lib /etc/ld.so.conf` ]; then
    echo "/usr/lib" >> /etc/ld.so.conf
fi
if [ -d "/usr/lib64" ] && [ ! `grep -iqw /usr/lib64 /etc/ld.so.conf` ]; then
    echo "/usr/lib64" >> /etc/ld.so.conf
fi
if [ ! `grep -iqw /usr/local/lib /etc/ld.so.conf` ]; then
    echo "/usr/local/lib" >> /etc/ld.so.conf
fi
ldconfig

echo "---------- Set Environment ----------"
# Raise process/file-descriptor limits for the web server workload.
cat >>/etc/security/limits.conf<<-EOF
* soft nproc 65535
* hard nproc 65535
* soft nofile 65535
* hard nofile 65535
EOF
ulimit -v unlimited

cat >>/etc/sysctl.conf<<-EOF
fs.file-max=65535
EOF
sysctl -p

echo "---------- Dependent Packages ----------"
yum -y install make autoconf autoconf213 gcc gcc-c++ libtool
yum -y install wget tar curl curl-devel bc
yum -y install openssl openssl-devel vixie-cron crontabs

echo "===================== Tengine Install ===================="
echo "---------- Pcre ----------"
cd $CDN_PATH/
if [ ! -s pcre-*.tar.gz ]; then
    wget -c "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-8.33.tar.gz"
fi
tar -zxf pcre-*.tar.gz
cd pcre-*/
./configure
make && make install && ldconfig

# Dedicated unprivileged user for the nginx workers.
groupadd www
useradd -g www -M -s /bin/false www

echo "---------- Tengine ----------"
cd $CDN_PATH/
mkdir -p /tmp/nginx
if [ ! -s tengine-*.tar.gz ]; then
    wget -c "http://json.so/download/tengine-latest.tar.gz"
fi
if [ ! -s ngx_cache_purge-*.tar.gz ]; then
    wget -c "http://labs.frickle.com/files/ngx_cache_purge-2.1.tar.gz"
fi
tar -zxf ngx_cache_purge-*.tar.gz
tar -zxf tengine-*.tar.gz
cd tengine-*/
./configure \
--pid-path=/var/run/nginx.pid \
--lock-path=/var/lock/nginx.lock \
--user=www \
--group=www \
--with-http_ssl_module \
--with-http_dav_module \
--with-http_flv_module \
--with-http_realip_module \
--with-http_gzip_static_module \
--with-http_stub_status_module \
--with-mail \
--with-mail_ssl_module \
--with-pcre \
--with-debug \
--with-ipv6 \
--with-http_concat_module \
--http-client-body-temp-path=/tmp/nginx/client \
--http-proxy-temp-path=/tmp/nginx/proxy \
--http-fastcgi-temp-path=/tmp/nginx/fastcgi \
--http-uwsgi-temp-path=/tmp/nginx/uwsgi \
--http-scgi-temp-path=/tmp/nginx/scgi \
--add-module=../ngx_cache_purge-*/
make && make install

echo "---------- Tengine Config ----------"
cd $CDN_PATH/
mv /usr/local/nginx/conf/nginx.conf /usr/local/nginx/conf/nginx.conf.bak
cp conf/nginx.conf /usr/local/nginx/conf/nginx.conf
chmod 644 /usr/local/nginx/conf/nginx.conf
mkdir /usr/local/nginx/conf/vhosts
chmod 711 /usr/local/nginx/conf/vhosts
cp conf/cdn.conf /usr/local/nginx/conf/vhosts
chmod 644 /usr/local/nginx/conf/vhosts/cdn.conf
cp conf/proxy_cache.inc /usr/local/nginx/conf/proxy_cache.inc
chmod 644 /usr/local/nginx/conf/proxy_cache.inc
# Substitute the placeholder DOMAIN in the vhost template.
sed -i 's,DOMAIN,'$DOMAIN',g' /usr/local/nginx/conf/vhosts/cdn.conf
cp conf/init.d.nginx /etc/init.d/nginx
chmod 755 /etc/init.d/nginx
chkconfig nginx on
ln -s /usr/local/nginx/sbin/nginx /usr/sbin/nginx
/etc/init.d/nginx restart

# Stash downloaded tarballs and build dirs for possible re-runs.
if [ ! -d "src/" ];then
    mkdir -p src/
fi
\mv ./{*gz,*-*/} ./src >/dev/null 2>&1

/sbin/iptables -I INPUT -p tcp --dport 80 -j ACCEPT
/etc/rc.d/init.d/iptables save
/etc/rc.d/init.d/iptables restart

echo "===================== System Config ===================="
echo "---------- add hosts ----------"
# Point the CDN domain at the origin so proxy_pass resolves to it.
cat >>/etc/hosts<<-EOF
$ORIGIN_IP $DOMAIN
EOF

echo "---------- add crontab ----------"
cd $CDN_PATH/
cp conf/hit_rate.sh /usr/local/nginx/hit_rate.sh
chmod 755 /usr/local/nginx/hit_rate.sh
# Compute cache hit rate from the access log every 5 minutes.
cat >>/var/spool/cron/root<<-EOF
*/5 * * * * /bin/bash /usr/local/nginx/hit_rate.sh /usr/local/nginx/logs/cdn/access.log > /dev/null 2>&1
EOF

clear
echo ""
echo "===================== Install completed ====================="
echo ""
echo "easyCDN install completed!"
echo ""
echo "Server ip address: $IP_ADDRESS"
echo "CDN domain: $DOMAIN"
echo "CDN original IP: $ORIGIN_IP"
echo ""
echo "tengine config file at: /usr/local/nginx/conf/nginx.conf"
echo ""
echo "============================================================="
echo ""
const path = require("path"); const webpack = require("webpack"); const UglifyJSPlugin = require("uglifyjs-webpack-plugin"); const HtmlWebpackPlugin = require("html-webpack-plugin"); const CopyWebpackPlugin = require("copy-webpack-plugin"); const BundleAnalyzerPlugin = require("webpack-bundle-analyzer").BundleAnalyzerPlugin; const common = require("./webpack.common.js"); let externChunks = require("./extern-chunks").map(obj => { const chunk = Object.keys(obj)[0]; return { chunk, regexp: obj[chunk] }; }); let chunkPlugins = []; for(let i = 0; i < externChunks.length; i++) { const name = (externChunks[i + 1] || {chunk: "entry"}).chunk; const regexp = externChunks[i].regexp; chunkPlugins.push( new webpack.optimize.CommonsChunkPlugin({ name, minChunks: module => !regexp.test(module.resource || "") }) ); } module.exports = [ common( { main: ["babel-polyfill", "./main.js"] }, [ new HtmlWebpackPlugin({ title: "ZPlace", template: "./index.html", seo: { keywords: "zeronet,place,rplace", description: "ZPlace (aka /r/place)" } }), new CopyWebpackPlugin([ { from: "./dbschema.json", to: "./dbschema.json" } ]), new CopyWebpackPlugin([ { from: "./content.json", to: "./content.json" } ]), new CopyWebpackPlugin([ { from: "./p2p.json", to: "./p2p.json" } ]), new CopyWebpackPlugin([ { from: "./data", to: "./data" } ]), new BundleAnalyzerPlugin({ analyzerPort: 8275 }) ].concat(chunkPlugins), { extern: false }, undefined, path.resolve(__dirname, "./dist") ) ];
#!/usr/bin/env bash
# Set up the DSRNN crowd-navigation development environment: clone the
# required repos, create a conda environment, and install RVO2, PyTorch
# and OpenAI baselines into it.
set -e

# main folder where all repos will be cloned into
REPOS_PATH="$HOME/repos/fyp_repos"
# conda environment name
ENV_NAME="dsrnn"
# conda python version, minimum version 3.6
PYTHON_VER="3.8"
CWD="$(pwd)"

# miniconda 4.5.4 to prevent breaking on Ubuntu 18.04
# NOTE: use latest miniconda if you find that resolving environment takes 4ever
# sh ./install_conda.sh
# sh ./install_cmake.sh

source $HOME/miniconda3/etc/profile.d/conda.sh

# $REPOS_PATH/
# ├── baselines
# ├── CrowdNav_DSRNN
# ├── PythonRobotics
# ├── Python-RVO2
# └── socialforce

if [ -d "$REPOS_PATH" ]; then
    echo "$REPOS_PATH already exists!"
else
    mkdir "$REPOS_PATH"
fi

# clone all repos (each clone is skipped when the checkout already exists)
cd "$REPOS_PATH"
if [ -d "$REPOS_PATH/CrowdNav_DSRNN" ]; then
    echo "$REPOS_PATH/CrowdNav_DSRNN already exists!"
else
    git clone git@github.com:evan-tan/CrowdNav_DSRNN.git
fi
if [ -d "$REPOS_PATH/Python-RVO2" ]; then
    echo "$REPOS_PATH/Python-RVO2 already exists!"
else
    git clone https://github.com/sybrenstuvel/Python-RVO2.git
fi
if [ -d "$REPOS_PATH/baselines" ]; then
    echo "$REPOS_PATH/baselines already exists!"
else
    git clone https://github.com/openai/baselines.git
fi
if [ -d "$REPOS_PATH/socialforce" ]; then
    echo "$REPOS_PATH/socialforce already exists!"
else
    git clone https://github.com/ChanganVR/socialforce.git
fi

sudo apt-get install -y python3-tk
# Fix: `alias pip3="python3 -m pip"` has no effect here — aliases are not
# expanded in non-interactive shells, so the pip3 calls below silently used
# whatever pip3 binary was first on PATH. A shell function IS expanded in
# scripts and guarantees the active (conda) interpreter's pip is used.
pip3() { python3 -m pip "$@"; }

#### Create conda environment ####
conda create -n $ENV_NAME python=$PYTHON_VER -y &&
#### ACTIVATE CONDA ENVIRONMENT ####
conda activate $ENV_NAME

# install rvo2
cd $REPOS_PATH/Python-RVO2 &&
# DO NOT FOLLOW README and install old cython, will break
conda install -y -c conda-forge cython &&
python3 setup.py build &&
python3 setup.py install &&
pip3 install -e .

# install everything else for DSRNN
cd $REPOS_PATH/CrowdNav_DSRNN
# split install into multiple commands so
# solving environment doesn't take forever
conda install -y -c conda-forge gym numpy pandas matplotlib
conda install -y pytorch==1.7.1 torchvision==0.8.2 torchaudio==0.7.2 cudatoolkit=11.0 -c pytorch

# install openai baselines
cd $CWD &&
# NOTE(review): bare `pip` here (not pip3) — presumably resolves to the conda
# env's pip after activation; confirm.
pip install -r requirements.txt
cd $REPOS_PATH/baselines &&
sudo apt-get update &&
sudo apt-get install cmake libopenmpi-dev python3-dev zlib1g-dev &&
pip3 install -e .

# # install socialforce (UNUSED)
# cd $REPOS_PATH/socialforce &&
# pip3 install -e '.[test,plot]'

cd $REPOS_PATH/CrowdNav_DSRNN &&
pip install -e .

# go back to cwd
cd $CWD
#!/bin/bash echo "Waiting jenkins to launch on $1..." while ! curl localhost:$1; do sleep 1 done echo "$1 port accessible"
<reponame>ThiagoLDF/nlw2-vue import Vue from 'vue' import Vuex from 'vuex' import axios from 'axios' Vue.use(Vuex) export default new Vuex.Store({ state: { proffy: [], schedules: [], valid: true, currentIndex: 0, weekdays: [ "Domingo", "Segunda-feira", "Terça-feira", "Quarta-feira", "Quinta-feira", "Sexta-feira", "Sábado", ], subjects: [ "Artes", "Biologia", "Ciências", "Educação física", "Física", "Geografia", "História", "Matemática", "Português", "Química" ], search: { subject: '', weekday: '', time: '' }, filteredProffys: [] }, getters: { proffy(state) { return state.proffy }, subjects(state) { return state.subjects }, currentSchedules(state) { return state.schedules }, validSchedule(state) { return state.valid }, currentIndex(state) { return state.currentIndex }, weekdays(state) { return state.weekdays }, filteredProffys(state) { return state.filteredProffys } }, mutations: { addSchedule(state) { state.schedules.push(new Object({ id: state.currentIndex, weekday: null, timeFrom: null, timeTo: null, })) }, validSchedule(state, payload) { state.valid = payload }, incrementIndex(state) { state.currentIndex++ }, removeSchedule(state, id) { const index = state.schedules.findIndex(i => i.id === id) state.schedules.splice(index, 1); }, resetStore(state) { state.schedules = [] state.currentIndex = 0 state.proffy = {} state.search = { subject: '', weekday: '', time: '' }, state.filteredProffys = [] }, filterProffys(state, payload) { state.filteredProffys = payload; }, saveClasses(state, payload) { state.search = payload } }, actions: { addSchedule(context) { context.commit('addSchedule') context.commit('incrementIndex') }, removeSchedule(context, id) { context.commit('removeSchedule', id) }, resetStore(context) { context.commit('resetStore') }, async filterProffys({commit}, payload) { const filter = await axios .post("http://localhost:3000/search", payload) commit('filterProffys', filter.data) }, async saveClasses({commit, dispatch, state}) { const save = await 
axios.post("http://localhost:3000/save", state.proffy) commit('saveClasses', save.data) dispatch('filterProffys', save.data) } } })
#!/bin/bash # Shutdown installation and clean environment ./actions/prepare-environment.sh || exit 1 ./actions/clean-previous-installation.sh || exit 1
# Code-generation TEMPLATE: the $Name / $name placeholders are substituted by a
# generator before this file becomes valid Python. It defines a capnp-backed
# model class and its collection, persisted through a Tarantool client.
from js9 import j
import os
import capnp
# import msgpack
import base64

ModelBaseCollection = j.data.capnp.getModelBaseClassCollection()
ModelBase = j.data.capnp.getModelBaseClass()

# from JumpScale9.clients.tarantool.KVSInterface import KVSTarantool


class $NameModel(ModelBase):
    '''
    Single $Name record serialized with capnp and stored via the collection's
    Tarantool client under key "model_$name_*".
    '''

    def __init__(self):
        ModelBase.__init__(self)

    def index(self):
        #no need to put indexes because will be done by capnp
        pass

    def save(self):
        # Serialize the capnp object and store (key, bytes) in Tarantool.
        self.reSerialize()
        self._pre_save()
        buff = self.dbobj.to_bytes()
        key=self.key
        # key=msgpack.dumps(self.key)
        # key=base64.b64encode(self.key.encode())
        return self.collection.client.call("model_$name_set",(key,buff))

    def delete(self):
        key=self.key
        # key=base64.b64encode(self.key.encode())
        # NOTE(review): "(key)" is NOT a tuple (save() passes "(key,buff)");
        # confirm the client.call signature accepts a bare value here.
        return self.collection.client.call("model_$name_del",(key))


class $NameCollection(ModelBaseCollection):
    '''
    This class represent a collection of $Names
    It's used to list/find/create new Instance of $Name Model object
    '''

    def __init__(self):
        category = '$name'
        namespace = ""
        # instanciate the KVS interface on top of tarantool
        # cl = j.clients.tarantool.client_get()  # will get the tarantool from the config file, the main connection
        # db = KVSTarantool(cl, category)
        # mpath = j.sal.fs.getDirName(os.path.abspath(__file__)) + "/model.capnp"
        # SchemaCapnp = j.data.capnp.getSchemaFromPath(mpath, name='$Name')
        self.client = j.clients.tarantool.client_get() #will get the tarantool from the config file, the main connection
        # Load the capnp schema that sits next to this file.
        mpath=j.sal.fs.getDirName(os.path.abspath(__file__))+"/model.capnp"
        SchemaCapnp=j.data.capnp.getSchemaFromPath(mpath,name='$Name')
        super().__init__(SchemaCapnp, category=category, namespace=namespace, modelBaseClass=$NameModel, db=self.client, indexDb=self.client)
        # Raw bytes in/out: disable automatic (de)coding on the DB connection.
        self.client.db.encoding=None

    def new(self):
        # Fresh, unsaved model bound to this collection.
        return $NameModel(collection=self, new=True)

    def get(self,key):
        resp=self.client.call("model_$name_get",key)
        # NOTE(review): this condition looks inverted/garbled — "<= 1 and ... > 2"
        # raises for a seemingly well-formed single-row response; confirm against
        # the Tarantool response shape before relying on it.
        if len(resp.data) <= 1 and len(resp.data[0]) > 2:
            raise KeyError("value for %s not found" % key)
        value = resp.data[0][1]
        return $NameModel(key=key,collection=self, new=False,data=value)

    # BELOW IS ALL EXAMPLE CODE WHICH NEEDS TO BE REPLACED
    def list(self):
        resp=self.client.call("model_$name_list")
        return [item.decode() for item in resp[0]]

    # def list(self, actor="", service="", action="", state="", serviceKey="", fromEpoch=0, toEpoch=9999999999999,tags=[]):
    #     raise NotImplementedError()
    #     return res

    # def find(self, actor="", service="", action="", state="", serviceKey="", fromEpoch=0, toEpoch=9999999999999, tags=[]):
    #     raise NotImplementedError()
    #     res = []
    #     for key in self.list(actor, service, action, state, serviceKey, fromEpoch, toEpoch, tags):
    #         if self.get(key):
    #             res.append(self.get(key))
    #     return res
package com.mycom.app.service;

import com.mycom.app.domain.dto.StageDto;
import com.mycom.app.domain.entity.Stage;

import java.util.List;

/**
 * Service contract for reading and updating a user's per-game stage progress.
 */
public interface StageService {

    /** Returns the stage info for the given user and game. */
    StageDto getStageInfoByUserIdAndGameCode(String userId, int gameCode);

    /** Returns the list of stages the user has cleared. */
    List<Stage> getClearStageListByUserId(String userId);

    /** Updates an existing stage record; returns a count (presumably affected rows — confirm with implementation). */
    int updateStageInfo(StageDto stageDto);

    /**
     * Inserts a new stage record for the user.
     * NOTE(review): gameCode is a String here but an int in
     * getStageInfoByUserIdAndGameCode — confirm which type is intended.
     */
    int insertStageInfo(String userId, String gameCode);
}
package com.qht.entity;

import java.io.Serializable;
import java.util.Date;
import javax.persistence.*;

/**
 * Course package entity mapped to table {@code course_pkg}.
 *
 * @author yangtonggan
 * @email <EMAIL>
 * @date 2018-11-05 18:55:41
 */
@Table(name = "course_pkg")
public class CoursePkg implements Serializable {
    private static final long serialVersionUID = 1L;

    // Primary key
    @Id
    private String uid;

    // Operator (tenant) id
    @Column(name = "tenant_id")
    private String tenantId;

    // Package type id
    @Column(name = "pkt_type_id")
    private String pktTypeId;

    // Package subject id
    @Column(name = "pkg_subject_id")
    private String pkgSubjectId;

    // Package edition id
    @Column(name = "pkg_edition_id")
    private String pkgEditionId;

    // Package grade id
    @Column(name = "pkg_grade_id")
    private String pkgGradeId;

    // Package name
    @Column(name = "pkg_name")
    private String pkgName;

    // Playback type id
    @Column(name = "play_type_id")
    private String playTypeId;

    // Minimum number of redeemers
    @Column(name = "min")
    private String min;

    // Maximum number of redeemers
    @Column(name = "max")
    private String max;

    // Course open-range id.
    // FIX: column name was "open_ range_id" (embedded space — invalid SQL
    // identifier, would break generated queries). Corrected to "open_range_id";
    // verify against the actual DB schema.
    @Column(name = "open_range_id")
    private String openRangeId;

    // Main package content
    @Column(name = "content")
    private String content;

    // Main package cover image
    @Column(name = "cover")
    private String cover;

    /** Sets the primary key. */
    public void setUid(String uid) {
        this.uid = uid;
    }

    /** Returns the primary key. */
    public String getUid() {
        return uid;
    }

    /** Sets the operator (tenant) id. */
    public void setTenantId(String tenantId) {
        this.tenantId = tenantId;
    }

    /** Returns the operator (tenant) id. */
    public String getTenantId() {
        return tenantId;
    }

    /** Sets the package type id. */
    public void setPktTypeId(String pktTypeId) {
        this.pktTypeId = pktTypeId;
    }

    /** Returns the package type id. */
    public String getPktTypeId() {
        return pktTypeId;
    }

    /** Sets the package subject id. */
    public void setPkgSubjectId(String pkgSubjectId) {
        this.pkgSubjectId = pkgSubjectId;
    }

    /** Returns the package subject id. */
    public String getPkgSubjectId() {
        return pkgSubjectId;
    }

    /** Sets the package edition id. */
    public void setPkgEditionId(String pkgEditionId) {
        this.pkgEditionId = pkgEditionId;
    }

    /** Returns the package edition id. */
    public String getPkgEditionId() {
        return pkgEditionId;
    }

    /** Sets the package grade id. */
    public void setPkgGradeId(String pkgGradeId) {
        this.pkgGradeId = pkgGradeId;
    }

    /** Returns the package grade id. */
    public String getPkgGradeId() {
        return pkgGradeId;
    }

    /** Sets the package name. */
    public void setPkgName(String pkgName) {
        this.pkgName = pkgName;
    }

    /** Returns the package name. */
    public String getPkgName() {
        return pkgName;
    }

    /** Sets the playback type id. */
    public void setPlayTypeId(String playTypeId) {
        this.playTypeId = playTypeId;
    }

    /** Returns the playback type id. */
    public String getPlayTypeId() {
        return playTypeId;
    }

    /** Sets the minimum number of redeemers. */
    public void setMin(String min) {
        this.min = min;
    }

    /** Returns the minimum number of redeemers. */
    public String getMin() {
        return min;
    }

    /** Sets the maximum number of redeemers. */
    public void setMax(String max) {
        this.max = max;
    }

    /** Returns the maximum number of redeemers. */
    public String getMax() {
        return max;
    }

    /** Sets the course open-range id. */
    public void setOpenRangeId(String openRangeId) {
        this.openRangeId = openRangeId;
    }

    /** Returns the course open-range id. */
    public String getOpenRangeId() {
        return openRangeId;
    }

    /** Sets the main package content. */
    public void setContent(String content) {
        this.content = content;
    }

    /** Returns the main package content. */
    public String getContent() {
        return content;
    }

    /** Sets the main package cover image. */
    public void setCover(String cover) {
        this.cover = cover;
    }

    /** Returns the main package cover image. */
    public String getCover() {
        return cover;
    }
}
from typing import List


def mergeSortedRightHalf(a: List[int], m: int, k: int, j: int) -> List[int]:
    """Merge a snapshot of a[m:] back into ``a`` in place, starting at ``k``.

    ``j`` is the starting cursor into the snapshot of the right half and
    ``m - 1`` indexes the last element of the left portion, which is consumed
    backwards. Writes go forward from ``k``. Note that later reads of ``a``
    may observe earlier writes (the left portion is not snapshotted) — this
    mirrors the original behavior exactly. Returns ``a`` for convenience.
    """
    right = a[m:]  # snapshot of the right half; immune to in-place writes
    left_idx = m - 1  # consume the left portion from its tail

    while left_idx >= 0 and j < len(right):
        if right[j] < a[left_idx]:
            a[k] = right[j]
            j += 1
        else:
            a[k] = a[left_idx]
            left_idx -= 1
        k += 1

    # Drain whatever remains of the right-half snapshot.
    for value in right[j:]:
        a[k] = value
        k += 1

    return a
#!/bin/bash

# test.sh
# Smoke-runs the test_pr_nibble binary twice:
#  1) a small on-disk Matrix Market graph, single iteration;
#  2) the built-in "jhu" graph identifier, 20 iterations.
# The glob matches whatever versioned binary the build produced.

./bin/test_pr_nibble* \
    --graph-type market \
    --graph-file ../../dataset/small/chesapeake.mtx \
    --src 0 \
    --max-iter 1

./bin/test_pr_nibble* \
    --graph-type market \
    --graph-file jhu \
    --src 0 \
    --max-iter 20
<reponame>mpollicito/controlled-confusion import React, { Component } from "react"; import ReactCardFlip from "react-card-flip"; class Card extends Component { constructor(props) { super(props); this.state = { isFlipped: props.flipped, }; this.handleClick = this.handleClick.bind(this); } handleClick(event) { event.preventDefault(); this.setState((prevState, props) => ({ isFlipped: !prevState.isFlipped })); const id = event.target.getAttribute("data-id"); if (id) this.props.inspectCard(id); console.log(this.props.clickedCardTwo, "line18 clickcard2") } render() { console.log(this.state.isFlipped, "isFlipped"); // setTimeout(function ) return ( <ReactCardFlip isFlipped={this.state.isFlipped} flipDirection="horizontal" flipSpeedBackToFront="0.00001" flipSpeedFrontToBack="0.00001"> <div> <div className="col s12 m3"> <div onClick={this.handleClick} className="card-back card-panel" src={this.props.img} data-id={this.props.id} > {/* <span class="white-text">Back Card Image</span> */} </div> </div> </div> <div> <div className="col s12 m3"> <div onClick={this.handleClick} className="card-front card-panel"> <img src={this.props.img} /> {/* <span class="white-text">Front Card image</span> */} </div> </div> </div> </ReactCardFlip> ); } } export default Card;
package br.com.zupacademy.gabrielf.casadocodigo.validation;

import br.com.zupacademy.gabrielf.casadocodigo.modelo.Estado;
import br.com.zupacademy.gabrielf.casadocodigo.modelo.dto.EstadoDtoEntrada;
import org.springframework.beans.factory.annotation.Autowired;

import javax.persistence.EntityManager;
import javax.persistence.Query;
import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
import java.util.List;

/**
 * Bean-validation constraint: an Estado name must be unique within its Pais.
 * Valid only when no Estado row already exists with the same (pais_id, nome).
 */
public class UniqueEstadoNameValidator implements ConstraintValidator<UniqueEstadoName, EstadoDtoEntrada> {

    @Autowired
    EntityManager em;

    @Override
    public void initialize(UniqueEstadoName constraintAnnotation) {
    }

    @Override
    public boolean isValid(EstadoDtoEntrada estado, ConstraintValidatorContext context) {
        Query query = em.createNativeQuery(
                "Select * from Estado where pais_id=:valueId and nome =:valueNome ", Estado.class);
        query.setParameter("valueId", estado.getIdPais());
        query.setParameter("valueNome", estado.getNome());

        // Removed leftover debug print (System.out.printf) from the duplicate branch.
        List<?> lista = query.getResultList();
        if (!lista.isEmpty()) {
            // Replace the default message with a field-scoped violation on "nome".
            context.disableDefaultConstraintViolation();
            context.buildConstraintViolationWithTemplate(
                    "Nome Inválido: Não pode ser igual a um nome presente no mesmo Pais")
                    .addPropertyNode("nome")
                    .addConstraintViolation();
            return false;
        }
        return true;
    }
}
"use strict";

import { Set, OrderedMap, OrderedSet } from "immutable";

import { Type, StringType, UnionType, combineTypeAttributesOfTypes } from "./Type";
import { TypeGraph } from "./TypeGraph";
import { GraphRewriteBuilder, TypeRef, StringTypeMapping } from "./TypeBuilder";
import { assert, defined } from "./Support";
import { combineTypeAttributes } from "./TypeAttributes";

// Minimum total number of observed string values before enum inference applies.
const MIN_LENGTH_FOR_ENUM = 10;

// Returns the case-count map when the string type should become an enum:
// enough observed values overall, and few enough distinct cases relative to
// the total (fewer than sqrt of the value count). Otherwise undefined.
function shouldBeEnum(t: StringType): OrderedMap<string, number> | undefined {
    const enumCases = t.enumCases;
    if (enumCases !== undefined) {
        assert(enumCases.size > 0, "How did we end up with zero enum cases?");
        const numValues = enumCases.map(n => n).reduce<number>((a, b) => a + b);
        if (numValues >= MIN_LENGTH_FOR_ENUM && enumCases.size < Math.sqrt(numValues)) {
            return t.enumCases;
        }
    }
    return undefined;
}

// Rewrite a single string type: emit an enum type when shouldBeEnum says so,
// otherwise reconstitute it as a plain string type (attributes preserved).
function replaceString(
    group: Set<StringType>,
    builder: GraphRewriteBuilder<StringType>,
    forwardingRef: TypeRef
): TypeRef {
    assert(group.size === 1);
    const t = defined(group.first());
    const attributes = t.getAttributes();
    const maybeEnumCases = shouldBeEnum(t);
    if (maybeEnumCases !== undefined) {
        return builder.getEnumType(attributes, maybeEnumCases.keySeq().toOrderedSet(), forwardingRef);
    }
    return builder.getStringType(attributes, undefined, forwardingRef);
}

// A union needs replacing if it contains more than one string type, one of them being
// a basic string type.
function unionNeedsReplacing(u: UnionType): OrderedSet<Type> | undefined {
    const stringMembers = u.stringTypeMembers;
    if (stringMembers.size <= 1) return undefined;
    if (u.findMember("string") === undefined) return undefined;
    return stringMembers;
}

// Replaces all string types in a union with the basic string type.
// Collapse a union's string-ish members into one basic string type, merging
// their attributes. If nothing but string members remain, the whole union
// becomes a string type.
function replaceUnion(group: Set<UnionType>, builder: GraphRewriteBuilder<UnionType>, forwardingRef: TypeRef): TypeRef {
    assert(group.size === 1);
    const u = defined(group.first());
    const stringMembers = defined(unionNeedsReplacing(u));
    const stringAttributes = combineTypeAttributesOfTypes(stringMembers);
    // Reconstitute every non-string member first, preserving member order.
    const types: TypeRef[] = [];
    u.members.forEach(t => {
        if (stringMembers.has(t)) return;
        types.push(builder.reconstituteType(t));
    });
    if (types.length === 0) {
        // Union was entirely string-ish: replace it by a single string type.
        return builder.getStringType(
            combineTypeAttributes(stringAttributes, u.getAttributes()),
            undefined,
            forwardingRef
        );
    }
    // The merged string type is appended last, after the other members.
    types.push(builder.getStringType(stringAttributes, undefined));
    return builder.getUnionType(u.getAttributes(), OrderedSet(types), forwardingRef);
}

// Graph pass: visit every string type and turn qualifying ones into enums.
export function inferEnums(graph: TypeGraph, stringTypeMapping: StringTypeMapping): TypeGraph {
    const allStrings = graph
        .allTypesUnordered()
        .filter(t => t instanceof StringType)
        .map(t => [t])
        .toArray() as StringType[][];
    return graph.rewrite("infer enums", stringTypeMapping, false, allStrings, replaceString);
}

// Graph pass: collapse unions that mix multiple string-ish members (one of
// them being the basic string type) into a single string member.
export function flattenStrings(graph: TypeGraph, stringTypeMapping: StringTypeMapping): TypeGraph {
    const allUnions = graph.allNamedTypesSeparated().unions;
    const unionsToReplace = allUnions
        .filter(unionNeedsReplacing)
        .map(t => [t])
        .toArray();
    return graph.rewrite("flatten strings", stringTypeMapping, false, unionsToReplace, replaceUnion);
}
#ifndef INCLUDED_ENGINE_REMOVE_COMPONENTS_ON_DEATH_SYSTEM_H
#define INCLUDED_ENGINE_REMOVE_COMPONENTS_ON_DEATH_SYSTEM_H

#include "core/scene.h"
#include "engine/system.h"

namespace engine {

// Engine System hooked into the per-frame Update loop. The name suggests it
// strips components from entities when they die; the actual logic lives in
// the corresponding .cpp — confirm there.
class RemoveComponentsOnDeathSystem : public System
{
public:
    DEFINE_SYSTEM_BASE( RemoveComponentsOnDeathSystem )
    RemoveComponentsOnDeathSystem();
protected:
    virtual void Init();
    virtual void Update( double DeltaTime );
private:
    // Scene reference cached at construction; the system does not own it.
    Scene& mScene;
};

} // namespace engine

#endif//INCLUDED_ENGINE_REMOVE_COMPONENTS_ON_DEATH_SYSTEM_H

//command:  "classgenerator" -g "system" -c "remove_components_on_death_system" -t "remove_components_on_death"
#!/bin/bash
# CI driver: dispatches on ${TASK} to run one of several build/test pipelines
# (Python sdist, Python wheel + S3 deploy, JVM, or s390x native tests).

# Build the lz4 dependency via dmlc-core's package makefile.
make -f dmlc-core/scripts/packages.mk lz4

source $HOME/miniconda/bin/activate

# --- TASK: build an sdist and verify it pip-installs and imports ---
if [ ${TASK} == "python_sdist_test" ]; then
    set -e
    conda activate python3
    python --version
    conda install numpy scipy

    make pippack
    python -m pip install xgboost-*.tar.gz -v --user
    python -c 'import xgboost' || exit -1
fi

# --- TASK: full native build, wheel build, pytest, and nightly S3 upload ---
if [ ${TASK} == "python_test" ]; then
    # Lint gate: forbid raw CUDA <<<blocks, threads>>> launch syntax.
    if grep -n -R '<<<.*>>>\(.*\)' src include | grep --invert "NOLINT"; then
        echo 'Do not use raw CUDA execution configuration syntax with <<<blocks, threads>>>.' \
             'try `dh::LaunchKernel`'
        exit -1
    fi
    set -e
    # Build/test
    rm -rf build
    mkdir build && cd build
    cmake .. -DUSE_OPENMP=ON -DCMAKE_VERBOSE_MAKEFILE=ON
    make -j$(nproc)
    echo "-------------------------------"
    conda activate python3
    conda --version
    python --version

    # Build binary wheel and retag it for the supported macOS platforms.
    cd ../python-package
    python setup.py bdist_wheel
    TAG=macosx_10_13_x86_64.macosx_10_14_x86_64.macosx_10_15_x86_64
    python ../tests/ci_build/rename_whl.py dist/*.whl ${TRAVIS_COMMIT} ${TAG}
    python -m pip install ./dist/xgboost-*-py3-none-${TAG}.whl

    # Run unit tests
    cd ..
    python -m pip install graphviz pytest pytest-cov codecov
    python -m pip install datatable hypothesis
    python -m pip install numpy scipy pandas matplotlib scikit-learn dask[complete]
    python -m pytest -v --fulltrace -s tests/python --cov=python-package/xgboost || exit -1
    codecov

    # Deploy binary wheel to S3: PR builds, master nightlies, or branch builds.
    python -m pip install awscli
    if [ "${TRAVIS_PULL_REQUEST}" != "false" ]
    then
        S3_DEST="s3://xgboost-nightly-builds/PR-${TRAVIS_PULL_REQUEST}/"
    else
        if [ "${TRAVIS_BRANCH}" == "master" ]
        then
            S3_DEST="s3://xgboost-nightly-builds/"
        elif [ -z "${TRAVIS_TAG}" ]
        then
            S3_DEST="s3://xgboost-nightly-builds/${TRAVIS_BRANCH}/"
        fi
    fi
    # Best-effort upload: "|| true" keeps the build green if S3 is unavailable.
    python -m awscli s3 cp python-package/dist/*.whl "${S3_DEST}" --acl public-read || true
fi

# --- TASK: JVM package build and tests (rabit mocked out) ---
if [ ${TASK} == "java_test" ]; then
    export RABIT_MOCK=ON
    conda activate python3
    cd jvm-packages
    mvn -q clean install -DskipTests -Dmaven.test.skip
    mvn -q test
fi

# --- TASK: s390x native build + gtest + model-compatibility pytest subset ---
if [ ${TASK} == "s390x_test" ]; then
    set -e
    # Build and run C++ tests
    rm -rf build
    mkdir build && cd build
    cmake .. -DCMAKE_VERBOSE_MAKEFILE=ON -DGOOGLE_TEST=ON -DUSE_OPENMP=ON -DUSE_DMLC_GTEST=ON -GNinja
    time ninja -v
    ./testxgboost

    # Run model compatibility tests
    cd ..
    python3 -m pip install --user pytest hypothesis
    PYTHONPATH=./python-package python3 -m pytest --fulltrace -v -rxXs tests/python/ -k 'test_model'
fi
#!/bin/bash
# Run composer inside the PHP 7.2 construction-kit container, forwarding all
# CLI arguments. FIX: "$@" is quoted so arguments containing spaces survive
# word splitting (bare $@ would re-split them).
docker exec -w /application anycontent-cms-construction-kit-php72 php /composer.phar "$@"
import SwiftProtobuf

/// Returns the unknown-field storage carried by the given protobuf response
/// message (fields present on the wire that this schema version does not know).
func extractUnknownFields(from message: Akash_Audit_V1beta2_MsgDeleteProviderAttributesResponse) -> SwiftProtobuf.UnknownStorage {
    return message.unknownFields
}
#!/bin/bash
# Run a single dieharder randomness test:
#   -d 11          select test number 11
#   -g 7           select generator number 7 (see `dieharder -g -1` for the list)
#   -S 3308943447  fixed seed, so the run is reproducible
dieharder -d 11 -g 7 -S 3308943447
#!/bin/bash
# Launcher for bashbot: verifies prerequisites, loads optional env vars,
# installs vendor dependencies, then starts the bot.
# shellcheck disable=SC1090
if ! command -v bashbot > /dev/null; then
    echo "bashbot is not installed. Please install bashbot and try again."
    exit 1
fi

# If .env file is present, load it.
if [ -f "$BASHBOT_ENV_VARS_FILEPATH" ]; then
    . "$BASHBOT_ENV_VARS_FILEPATH"
fi

if ! [ -f "$BASHBOT_CONFIG_FILEPATH" ]; then
    echo "bashbot config file not found. Please create one and try again."
    exit 1
fi

if [ -z "$SLACK_TOKEN" ]; then
    echo "SLACK_TOKEN is not set. Please set it and try again."
    exit 1
fi

mkdir -p vendor

# If the log-level doesn't exist, set it to 'info'
LOG_LEVEL=${LOG_LEVEL:-info}

# If the log-format doesn't exist, set it to 'text'
LOG_FORMAT=${LOG_FORMAT:-text}

# Run install-vendor-dependencies path
bashbot --install-vendor-dependencies \
    --log-level "$LOG_LEVEL" \
    --log-format "$LOG_FORMAT"

# Start the bot.
# NOTE(review): the original comment said "passing the config file and the
# Slack token", but no --config/--slack-token flags are passed below —
# presumably bashbot reads BASHBOT_CONFIG_FILEPATH / SLACK_TOKEN from the
# environment; confirm against the bashbot CLI.
bashbot \
    --log-level "$LOG_LEVEL" \
    --log-format "$LOG_FORMAT"
import React from 'react'; import { Pie } from 'react-chartjs-2'; const PieChart = ({ data }) => { const chartData = { labels: data.map((e, i) => `dataSet${i+1}`), datasets: [{ data, backgroundColor: [ '#FF6384', '#36A2EB', '#FFCE56', '#008080', '#FF0000', '#800080' ] }] }; const options = { responsive: true, maintainAspectRatio: false } return ( <div> <Pie data={chartData} options={options} /> </div> ); } export default PieChart;
package com.josycom.mayorjay.flowoverstack.di.module;

import androidx.lifecycle.ViewModelProvider;

import com.josycom.mayorjay.flowoverstack.ui.viewmodel.CustomAnswerViewModelFactory;

import dagger.Binds;
import dagger.Module;

/**
 * Dagger module that binds {@link CustomAnswerViewModelFactory} as the
 * {@link ViewModelProvider.Factory} implementation for the answer feature.
 * Pulls in {@link AnswerViewModelModule} for the ViewModel bindings themselves.
 */
@Module(includes = AnswerViewModelModule.class)
public abstract class AnswerViewModelFactoryModule {

    @Binds
    abstract ViewModelProvider.Factory bindViewModelFactory(CustomAnswerViewModelFactory customAnswerViewModelFactory);
}
#!/bin/bash
# Build script: ensure output folders exist, fetch Google Test on first run,
# then configure with CMake and build.
# FIX: added `set -e` so a failed clone/cmake/make aborts instead of
# continuing, and `mkdir -p` so re-runs don't error on existing folders.
set -e

# Create output folders if they do not exist yet.
mkdir -p bin build

cd build

# If Google Test has not been cloned yet, clone from Github.
if [ ! -d "googletest" ]; then git clone https://github.com/google/googletest.git; fi

# Run CMake
cmake ..

# Run makefiles
make
#!/bin/bash
# Copyright
#                2018   Johns Hopkins University (Author: Jesus Villalba)
#                2017   David Snyder
#                2017   Johns Hopkins University (Author: Daniel Garcia-Romero)
#                2017   Johns Hopkins University (Author: Daniel Povey)
# Apache 2.0.
#
# Makes MFCC features and energy-based VAD for the voxceleb and SITW datasets,
# optionally distributing the feature storage across cluster nodes.
. ./cmd.sh
. ./path.sh
set -e

nodes=fs01 #by default it puts mfcc in /export/fs01/jsalt19
storage_name=$(date +'%m_%d_%H_%M')
mfccdir=`pwd`/mfcc
vaddir=`pwd`/mfcc # energy VAD

stage=1
config_file=default_config.sh

. parse_options.sh || exit 1;
. $config_file

# Make filterbanks and compute the energy-based VAD for each dataset
if [ $stage -le 1 ]; then
    # Prepare to distribute data over multiple machines
    if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $mfccdir/storage ]; then
	dir_name=$USER/hyp-data/sitw_noisy/v1/$storage_name/mfcc/storage
	if [ "$nodes" == "b0" ];then
	    # FIX: a duplicated "utils/create_split_dir.pl \" line here made the
	    # first invocation pass the script's own path as an argument.
	    utils/create_split_dir.pl \
		/export/b{04,05,06,07}/$dir_name $mfccdir/storage
	elif [ "$nodes" == "b1" ];then
	    utils/create_split_dir.pl \
		/export/b{14,15,16,17}/$dir_name $mfccdir/storage
	else
	    utils/create_split_dir.pl \
		/export/fs01/jsalt19/$dir_name $mfccdir/storage
	fi
    fi
fi

#Train datasets
if [ $stage -le 2 ];then
    for name in voxceleb1 voxceleb2_train
    do
	steps/make_mfcc.sh --write-utt2num-frames true --mfcc-config conf/mfcc_16k.conf --nj 40 --cmd "$train_cmd" \
	    data/${name} exp/make_mfcc $mfccdir
	utils/fix_data_dir.sh data/${name}
	steps_fe/compute_vad_decision.sh --nj 30 --cmd "$train_cmd" \
	    data/${name} exp/make_vad $vaddir
	utils/fix_data_dir.sh data/${name}
    done
fi

# Combine voxceleb
if [ $stage -le 3 ];then
    utils/combine_data.sh --extra-files "utt2num_frames" data/voxceleb data/voxceleb1 data/voxceleb2_train
    utils/fix_data_dir.sh data/voxceleb
    if [ "$nnet_data" == "voxceleb_div2" ] || [ "$plda_data" == "voxceleb_div2" ];then
	#divide the size of voxceleb
	utils/subset_data_dir.sh data/voxceleb $(echo "1236567/2" | bc) data/voxceleb_div2
    fi
fi

#SITW
if [ $stage -le 4 ];then
    for name in sitw_dev_enroll sitw_dev_test sitw_eval_enroll sitw_eval_test
    do
	steps/make_mfcc.sh --write-utt2num-frames true --mfcc-config conf/mfcc_16k.conf --nj 40 --cmd "$train_cmd" \
	    data/${name} exp/make_mfcc $mfccdir
	utils/fix_data_dir.sh data/${name}
	steps_fe/compute_vad_decision.sh --nj 40 --cmd "$train_cmd" \
	    data/${name} exp/make_vad $vaddir
	utils/fix_data_dir.sh data/${name}
    done
fi
const mongoose = require('mongoose');

// Open the shared MongoDB Atlas connection used by the whole app.
// SECURITY NOTE(review): the connection string embeds credentials directly in
// source (the password portion was redacted by the dataset as <EMAIL>); move
// the URI into an environment variable instead of committing it.
mongoose.connect("mongodb+srv://Wafiq16:<EMAIL>/cobacoba?retryWrites=true&w=majority", {
    useNewUrlParser: true
}, (err) => {
    if (!err) {
        console.log('koneksi berhasil');
    } else {
        console.log(err);
        console.log('Koneksi gagal');
    }
});

// Export the configured mongoose singleton for model definitions elsewhere.
module.exports = mongoose;

// 'mongodb+srv://Wafiq16:@Wqarba123@' +
//     'ciheras-shard-00-00-clv3h.mongodb.net:27017,' +
//     'ciheras-shard-00-01-clv3h.mongodb.net:27017,' +
//     'ciheras-shard-00-02-clv3h.mongodb.net:27017/cobacoba' +
//     'ssl=true';
// Doxygen-generated navigation data for the armnn EqualQueueDescriptor struct
// page (member name, anchor fragment, brief). Do not edit by hand.
var structarmnn_1_1_equal_queue_descriptor =
[
    [ "Validate", "structarmnn_1_1_equal_queue_descriptor.xhtml#a041e495449e22774a34d92b0904c10bf", null ]
];
require 'minitest/autorun'
require 'murmurhash_jruby'

# Regression tests pinning MurmurHash64a digests for two fixed inputs.
class MurmurHash64aTest < Minitest::Test
  def setup
    @hasher = ::MurmurHash::MurmurHash64a.new
  end

  # 64-bit hash that lands in the positive half of the signed range.
  def test_positive_64_bit_murmur_hash_representation
    string = 'USNJMARLTON504616285639.8786-74.896934005EST'
    murmur_64 = @hasher.rawdigest(string)
    assert_equal 9222850300915790864, murmur_64
  end

  # FIX: renamed from test_negative_32_bit_... — the asserted value is a
  # 64-bit signed hash from MurmurHash64a, not a 32-bit one.
  def test_negative_64_bit_murmur_hash_representation
    string = 'USVARESTON511887270338.9627-77.337351059EST'
    murmur_64 = @hasher.rawdigest(string)
    assert_equal(-1954510100242067951, murmur_64)
  end
end
package SimJoins.SimJoinsSingleNode.Experiments

import java.io.{File, PrintWriter}
import java.util.Calendar

import SimJoins.SimJoinsSingleNode.Commons.CommonFunctions
import SimJoins.SimJoinsSingleNode.Commons.ED.CommonEdFunctions
import SimJoins.DataStructure.Profile
import SimJoins.SimJoinsSingleNode.SimJoins.{EDJoin, GraphJoinAND, PPJoin}

// Experiment driver: runs a multi-condition similarity join over a profile
// dataset with one of several algorithms (edit distance, PPJoin, graph join,
// or a mixed ED+PP strategy) and logs timings/result counts.
object MainAll {

  /**
    * Algorithm type codes accepted as the first CLI argument.
    **/
  object algTypes {
    val editDistance = "E"
    val graphJoin = "G"
    val ppJoin = "PP"
    val editPP = "EPP"
  }

  // Write matching id pairs, one "id1,id2" line per pair.
  def saveResults(pairs: List[(Long, Long)], outpath: String): Unit = {
    val p = new PrintWriter(new File(outpath))
    pairs.foreach { case (d1, d2) =>
      p.println(d1 + "," + d2)
    }
    p.close()
  }

  /**
    * Parses the join conditions.
    * Each condition has the form:
    * attribute|type|threshold
    * and conditions are comma-separated. Returns attribute -> (threshold, type).
    **/
  def loadConditions(conditions: String): Map[String, (Double, String)] = {
    conditions.split(",").map { condition =>
      val c = condition.split("\\|")
      (c.head, (c.last.toDouble, c(1)))
    }.toMap
  }

  // Per-condition candidate result: which algorithm produced it, the candidate
  // pairs, the threshold, and the per-document data needed for verification.
  case class Results(algType: String, candidates: List[(Long, Long)], threshold: Double, EDJoinDocumentMap: Map[Long, String] = Map.empty[Long, String], PPJoinDocumentMap: Map[Long, Array[Int]] = Map.empty[Long, Array[Int]])

  /**
    * Runs the conditions using PPJoin and EDJoin candidate generation,
    * intersects the candidate sets, then verifies each surviving pair
    * against every condition (Jaccard for PP, edit distance for ED).
    **/
  def getMatchesMultiMixedAdv(profiles: List[Profile], conditions: Map[String, (Double, String)], log: PrintWriter, qgramsLen: Int): List[(Long, Long)] = {
    var candidates: List[(Long, Long)] = Nil

    // Candidate generation, one Results entry per condition.
    val data = conditions.map { case (attribute, (threshold, thresholdType)) =>
      log.println("[EPP] Algoritmo " + thresholdType + ", soglia " + threshold + ", attributo " + attribute)
      if (thresholdType == GraphJoinAND.thresholdTypes.JS) {
        val tmp = PPJoin.getCandidates(CommonFunctions.extractField(profiles, attribute), threshold, log)
        Results(algTypes.ppJoin, tmp._2, threshold, PPJoinDocumentMap = tmp._1)
      }
      else {
        val tmp = EDJoin.getCandidates(CommonFunctions.extractField(profiles, attribute), qgramsLen, threshold.toInt, log)
        Results(algTypes.editDistance, tmp._2, threshold, EDJoinDocumentMap = tmp._1)
      }
    }.toArray

    // Intersect candidates across all conditions (AND semantics).
    data.foreach { result =>
      if (candidates == Nil) {
        candidates = result.candidates
      }
      else {
        candidates = candidates.intersect(result.candidates)
      }
    }

    // Verification: a pair passes only if it satisfies every condition.
    candidates.filter { case (doc1Id, doc2Id) =>
      var pass = true
      var i = 0
      while (pass && i < data.length) {
        if (data(i).algType == algTypes.ppJoin) {
          val d1 = data(i).PPJoinDocumentMap.get(doc1Id)
          val d2 = data(i).PPJoinDocumentMap.get(doc2Id)
          pass = false
          if (d1.isDefined && d2.isDefined) {
            val doc1 = d1.get
            val doc2 = d2.get
            // Jaccard similarity on token arrays.
            val common = doc1.intersect(doc2).length
            pass = (common.toDouble / (doc1.length + doc2.length - common)) >= data(i).threshold
          }
        }
        else {
          pass = CommonEdFunctions.editDist(data(i).EDJoinDocumentMap(doc1Id), data(i).EDJoinDocumentMap(doc2Id)) <= data(i).threshold
        }
        i += 1
      }
      pass
    }
  }

  /**
    * Runs the conditions using full PPJoin / EDJoin matching per condition and
    * intersects the verified results. (Simpler, non-"Adv" variant; currently
    * not invoked from main — getMatchesMultiMixedAdv is used instead.)
    **/
  def getMatchesMultiMixed(profiles: List[Profile], conditions: Map[String, (Double, String)], log: PrintWriter, qgramsLen: Int): List[(Long, Long)] = {
    var results: List[(Long, Long)] = Nil
    conditions.foreach { case (attribute, (threshold, thresholdType)) =>
      log.println("[EPP] Algoritmo " + thresholdType + ", soglia " + threshold + ", attributo " + attribute)
      val pairs = {
        if (thresholdType == GraphJoinAND.thresholdTypes.JS) {
          PPJoin.getMatches(CommonFunctions.extractField(profiles, attribute), threshold, log)
        }
        else {
          EDJoin.getMatches(CommonFunctions.extractField(profiles, attribute), qgramsLen, threshold.toInt, log)
        }
      }
      if (results == Nil) {
        results = pairs
      }
      else {
        results = pairs.intersect(results)
      }
    }
    results
  }

  // CLI: algType logPath conditions dataPath [sortCode]
  def main(args: Array[String]): Unit = {
    val algType = args(0)
    val logPath = args(1)
    val conditionsStr = args(2).toString
    val dataPath = args(3)
    // Optional 5th argument selects the graph-join sort strategy (default 1).
    val sort = {
      if (args.length >= 5) {
        args(4).toInt
      }
      else {
        1
      }
    }
    val log = new PrintWriter(new File(logPath))
    log.println("AlgType " + algType)
    log.println("Conditions " + conditionsStr)
    log.println("Dataset " + dataPath)
    log.println("Sort " + sort)
    log.flush()
    val conditions = loadConditions(conditionsStr)
    val profiles = CommonFunctions.loadData(dataPath)
    log.println("Numero profili " + profiles.length)
    log.flush()
    val startTime = Calendar.getInstance()
    val pairs = {
      if (algType == algTypes.editDistance) {
        EDJoin.getMatchesMulti(profiles, conditions.map(c => (c._1, c._2._1)), log, 2)
      }
      else if (algType == algTypes.ppJoin) {
        PPJoin.getMatchesMulti(profiles, conditions.map(c => (c._1, c._2._1)), log)
      }
      else if (algType == algTypes.graphJoin) {
        // Map the numeric CLI sort code onto a GraphJoinAND sort strategy.
        val sortType = {
          if (sort == 1) {
            GraphJoinAND.sortTypes.thresholdAsc
          }
          else if (sort == 2) {
            GraphJoinAND.sortTypes.thresholdDesc
          }
          else if (sort == 3) {
            GraphJoinAND.sortTypes.avgBlockSizeAsc
          }
          else if (sort == 4) {
            GraphJoinAND.sortTypes.avgBlockSizeDesc
          }
          else if (sort == 5) {
            GraphJoinAND.sortTypes.entroAsc
          }
          else {
            GraphJoinAND.sortTypes.entroDesc
          }
        }
        log.println("Sort " + sortType)
        GraphJoinAND.getMatchesMultiSort(profiles, conditions, log, sortType)
      }
      else {
        // Fallback (e.g. "EPP"): mixed ED+PP strategy.
        getMatchesMultiMixedAdv(profiles, conditions, log, 2)
      }
    }
    //saveResults(pairs, "/data2/res"+algType+".txt")
    log.println("Numero risultati " + pairs.length)
    val endTime = Calendar.getInstance()
    log.println("Tempo esecuzione totale (min) " + CommonFunctions.msToMin(endTime.getTimeInMillis - startTime.getTimeInMillis))
    log.close()
  }
}
# Test-framework (TTRO) case definition. The #-- directives are parsed by the
# framework, not by bash: two variants, run exclusively.
#--variantCount=2
#--exclusive=true

setVar 'TTRO_stepsCase' 'myStep'

# Per-variant CPU burn durations, indexed by TTRO_variantCase (0 -> 15, 1 -> 14).
declare -a durations=(15 14)

function myStep {
	# useCpu appears to be a framework helper; third arg "false" semantics
	# are defined by the framework — confirm there.
	useCpu ${durations[$TTRO_variantCase]} $TTRO_variantCase "false"
}
#!/bin/bash
# Generates and submits one SLURM array job (one task per fold) per
# (classifier, dataset) combination, throttling by the number of jobs the
# user already has queued/running on the target partition.

max_folds=30
start_fold=1

# Submission throttle: never have more than this many of our jobs on the queue.
maxNumSubmitted=800
queue="compute-16-64"
username="tjd17qcu"
mail="NONE"
mailto="tjd17qcu@uea.ac.uk"
max_memory=8000
max_time="168:00:00"

# Skip combinations with a counter below this (resume support).
start_point=1

data_dir="/gpfs/home/tjd17qcu/experiments/sktime/datasets/"
results_dir="/gpfs/home/tjd17qcu/experiments/sktime/results"
# NOTE(review): out_dir has no trailing "/" but is concatenated directly with
# ${classifier} below, producing ".../outROCKET/..." — confirm intended path.
out_dir="/gpfs/home/tjd17qcu/experiments/out"
datasets="/gpfs/home/tjd17qcu/experiments/sktime/datasets.txt"
script_file_path="/gpfs/home/tjd17qcu/sktime/sktime/contrib/experiments.py"
env_name="sktime"
generate_train_files="false"

count=0
while read dataset; do
for classifier in ROCKET
do

# Count our pending/running jobs on the partition and wait below the cap.
numPending=$(squeue -u ${username} --format="%10i %15P %20j %10u %10t %10M %10D %20R" -r | awk '{print $5, $2}' | grep "PD ${queue}" | wc -l)
numRunning=$(squeue -u ${username} --format="%10i %15P %20j %10u %10t %10M %10D %20R" -r | awk '{print $5, $2}' | grep "R ${queue}" | wc -l)
while [ "$((numPending+numRunning))" -ge "${maxNumSubmitted}" ]
do
	echo Waiting 30s, $((numPending+numRunning)) currently submitted on ${queue}, user-defined max is ${maxNumSubmitted}
	sleep 30
	numPending=$(squeue -u ${username} --format="%10i %15P %20j %10u %10t %10M %10D %20R" -r | awk '{print $5, $2}' | grep "PD ${queue}" | wc -l)
	numRunning=$(squeue -u ${username} --format="%10i %15P %20j %10u %10t %10M %10D %20R" -r | awk '{print $5, $2}' | grep "R ${queue}" | wc -l)
done

((count++))
if ((count>=start_point)); then

mkdir -p ${out_dir}${classifier}/${dataset}/

# Emit the batch file. Inside the quoted string, ${...} expands NOW (job
# parameters baked in) while \$SLURM_ARRAY_TASK_ID is deferred to runtime.
echo "#!/bin/bash
#SBATCH --mail-type=${mail}
#SBATCH --mail-user=${mailto}
#SBATCH -p ${queue}
#SBATCH -t ${max_time}
#SBATCH --job-name=${classifier}${dataset}
#SBATCH --array=1-${max_folds}
#SBATCH --mem=${max_memory}M
#SBATCH --ntasks-per-node=1
#SBATCH -o ${out_dir}${classifier}/${dataset}/%A-%a.out
#SBATCH -e ${out_dir}${classifier}/${dataset}/%A-%a.err

. /etc/profile

module add python/anaconda/2020.11/3.8
source /gpfs/software/ada/python/anaconda/2020.11/3.8/etc/profile.d/conda.sh
conda activate $env_name
export PYTHONPATH=$(pwd)

python3 ${script_file_path} ${data_dir} ${results_dir} ${classifier} ${dataset} \$SLURM_ARRAY_TASK_ID ${generate_train_files}" > generatedFile.sub

echo ${count} ${classifier}/${dataset}

sbatch < generatedFile.sub --qos=ht

fi
done
done < ${datasets}

echo Finished submitting jobs
package net.community.chest.io.encode.base64;

import net.community.chest.io.encode.DecodingException;

/**
 * <P>Copyright 2007 as per GPLv2</P>
 *
 * Thrown when a character that is not valid Base64 is encountered while
 * decoding; carries the offending character via the {@link DecodingException}
 * superclass.
 *
 * @author <NAME>.
 * @since Aug 22, 2007 9:04:35 AM
 */
public class Base64DecodingException extends DecodingException {
    /**
     *
     */
    private static final long serialVersionUID = 8567268499457794972L;

    public Base64DecodingException (String message, char c)
    {
        super(message, c);
    }
}
# Extracts phonetic posteriorgrams (PPGs) for a data folder by forwarding
# fMLLR features through a trained DNN.
# Usage: <script> <extract_folder> <mono|tri>
. ./cmd.sh ## You'll want to change cmd.sh to something that will work on your system.
           ## This relates to the queue.
. ./path.sh ## Source the tools/utils (import the queue.pl)

extract_folder=$1
TYPE=$2

# Config:
# FIX: $TYPE is quoted in the tests below — unquoted, an empty/missing second
# argument made `[ == "mono" ]` a syntax error instead of a clean branch.
if [ "$TYPE" == "mono" ]; then
    gmmdir=exp/mono
    data_fmllr=data
    # data_fmllr=data-fmllr-mono2
else
    gmmdir=exp/tri3
    data_fmllr=data-fmllr-tri3
fi
stage=0 # resume training with --stage=N
# End of config.
. utils/parse_options.sh || exit 1;
#

if [ "$TYPE" == "tri" ]; then
    if [ $stage -le 0 ]; then
	# Store fMLLR features, so we can train on them easily,
	# extract
	dir=$data_fmllr/$extract_folder
	steps/nnet/make_fmllr_feats.sh --nj 20 --cmd "$train_cmd" \
	    --transform-dir $gmmdir/decode_$extract_folder \
	    $dir data/$extract_folder $gmmdir $dir/log $dir/data || exit 1
    fi
fi

dir=exp/dnn4_pretrain-dbn_dnn_smbr
srcdir=exp/dnn4_pretrain-dbn_dnn
acwt=0.2

if [ $stage -le 4 ]; then
    for ITER in 6; do
	# Forward features through the sMBR-trained DNN (no softmax) to get PPGs.
	copy-feats scp:$data_fmllr/$extract_folder/feats.scp ark:feature.ark
	nnet-forward --no-softmax=true --prior-scale=1.0 --feature-transform=exp/dnn4_pretrain-dbn_dnn/final.feature_transform --class-frame-counts=exp/dnn4_pretrain-dbn_dnn/ali_train_pdf.counts --use-gpu=no $dir/${ITER}.nnet ark:feature.ark ark:ppg.ark
	# FIX: mkdir -p so a pre-existing output folder does not error.
	mkdir -p $extract_folder
	copy-feats ark:ppg.ark ark,scp,t:$extract_folder/ppg.ark,$extract_folder/ppg.scp
    done
fi

echo Success
exit 0
# frozen_string_literal: true

module RuboCop
  # The CLI is a class responsible of handling all the command line interface
  # logic.
  class CLI
    STATUS_SUCCESS = 0
    STATUS_OFFENSES = 1
    STATUS_ERROR = 2
    STATUS_INTERRUPTED = 128 + Signal.list['INT']

    # Control-flow exception: raised by options that print and exit (e.g.
    # --version); rescued in #run to return STATUS_SUCCESS.
    class Finished < RuntimeError; end

    attr_reader :options, :config_store

    def initialize
      @options = {}
      @config_store = ConfigStore.new
    end

    # @api public
    #
    # Entry point for the application logic. Here we
    # do the command line arguments processing and inspect
    # the target files.
    #
    # @param args [Array<String>] command line arguments
    # @return [Integer] UNIX exit code
    #
    # rubocop:disable Metrics/MethodLength, Metrics/AbcSize
    def run(args = ARGV)
      @options, paths = Options.new.parse(args)
      @env = Environment.new(@options, @config_store, paths)

      if @options[:init]
        run_command(:init)
      else
        act_on_options
        validate_options_vs_config
        apply_default_formatter
        execute_runners
      end
    rescue ConfigNotFoundError, IncorrectCopNameError, OptionArgumentError => e
      warn e.message
      STATUS_ERROR
    rescue RuboCop::Error => e
      warn Rainbow("Error: #{e.message}").red
      STATUS_ERROR
    rescue Finished
      STATUS_SUCCESS
    rescue OptionParser::InvalidOption => e
      warn e.message
      warn 'For usage information, use --help'
      STATUS_ERROR
    rescue StandardError, SyntaxError, LoadError => e
      warn e.message
      warn e.backtrace
      STATUS_ERROR
    end
    # rubocop:enable Metrics/MethodLength, Metrics/AbcSize

    private

    # Delegate a named command to the Environment built in #run.
    def run_command(name)
      @env.run(name)
    end

    def execute_runners
      if @options[:auto_gen_config]
        run_command(:auto_gen_config)
      else
        run_command(:execute_runner).tap { suggest_extensions }
      end
    end

    def suggest_extensions
      run_command(:suggest_extensions)
    end

    # --parallel relies on the result cache; reject the combination with
    # AllCops: UseCache: false up front.
    def validate_options_vs_config
      if @options[:parallel] &&
         !@config_store.for_pwd.for_all_cops['UseCache']
        raise OptionArgumentError, '-P/--parallel uses caching to speed up ' \
                                   'execution, so combining with AllCops: ' \
                                   'UseCache: false is not allowed.'
      end
    end

    def act_on_options
      set_options_to_config_loader

      @config_store.options_config = @options[:config] if @options[:config]
      @config_store.force_default_config! if @options[:force_default_config]

      handle_exiting_options

      if @options[:color]
        # color output explicitly forced on
        Rainbow.enabled = true
      elsif @options[:color] == false
        # color output explicitly forced off
        Rainbow.enabled = false
      end
    end

    # Mirror the relevant CLI flags onto the global ConfigLoader.
    def set_options_to_config_loader
      ConfigLoader.debug = @options[:debug]
      ConfigLoader.disable_pending_cops = @options[:disable_pending_cops]
      ConfigLoader.enable_pending_cops = @options[:enable_pending_cops]
      ConfigLoader.ignore_parent_exclusion = @options[:ignore_parent_exclusion]
    end

    # Handle flags like --version/--show-cops that print and then terminate
    # the run via the Finished exception.
    def handle_exiting_options
      return unless Options::EXITING_OPTIONS.any? { |o| @options.key? o }

      run_command(:version) if @options[:version] || @options[:verbose_version]
      run_command(:show_cops) if @options[:show_cops]
      raise Finished
    end

    def apply_default_formatter
      # This must be done after the options have already been processed,
      # because they can affect how ConfigStore behaves
      @options[:formatters] ||= begin
        if @options[:auto_gen_config]
          formatter = 'autogenconf'
        else
          cfg = @config_store.for_pwd.for_all_cops
          formatter = cfg['DefaultFormatter'] || 'progress'
        end

        [[formatter, @options[:output_path]]]
      end
    end
  end
end
<gh_stars>1-10 package vg.civcraft.mc.citadel.playerstate; import java.util.Map; import java.util.TreeMap; import java.util.UUID; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.entity.Player; import vg.civcraft.mc.citadel.CitadelUtility; import vg.civcraft.mc.citadel.events.ReinforcementModeSwitchEvent; public class PlayerStateManager { private Map<UUID, AbstractPlayerState> playerStateMap; public PlayerStateManager() { this.playerStateMap = new TreeMap<>(); } public AbstractPlayerState getState(Player player) { if (player == null) { throw new IllegalArgumentException("Can not get state for null player"); } AbstractPlayerState state = playerStateMap.get(player.getUniqueId()); if (state == null) { state = new NormalState(player); playerStateMap.put(player.getUniqueId(), state); } return state; } public void setState(Player player, AbstractPlayerState state) { if (player == null) { throw new IllegalArgumentException("Can not set state for null player"); } AbstractPlayerState existingState = getState(player); if (existingState == null) { existingState = new NormalState(player); } // null state is allowed, it just resets the state if (state == null) { state = new NormalState(player); } if (existingState.equals(state)) { CitadelUtility.sendAndLog(player, ChatColor.GOLD, "You are still in " + ChatColor.YELLOW + state.getName()); return; } ReinforcementModeSwitchEvent switchEvent = new ReinforcementModeSwitchEvent(player, existingState, state); Bukkit.getPluginManager().callEvent(switchEvent); if (switchEvent.isCancelled()) { return; } playerStateMap.put(player.getUniqueId(), state); CitadelUtility.sendAndLog(player, ChatColor.GOLD, "Switched Citadel mode to " + ChatColor.YELLOW + state.getName() + ChatColor.GOLD + " from " + ChatColor.YELLOW + existingState.getName()); } }
# Echo the requested usage mode verbatim.
# BUG FIX: $mode is now quoted so modes containing spaces or glob
# characters are not word-split or expanded by echo.
function usage_mode() {
  local mode="$1"
  echo "$mode"
}

# Pick the colour for a description line: an explicitly supplied colour wins;
# otherwise default to CYAN except in plain "description" mode (no colour).
function usage_description_color() {
  local mode="$1"
  local color="$2"
  if [ -z "$color" ] && [ "$mode" != "description" ]; then
    echo "${CYAN}"
  else
    echo "$color"
  fi
}

# Succeed (exit status 0) when the description section should be printed.
function usage_show_description() {
  local mode="$1"
  [ "$mode" = "help" ] || [ "$mode" = "description" ]
  return $?
}

# Succeed when the usage synopsis should be printed (all modes but "description").
function usage_show_usage() {
  local mode="$1"
  [ "$mode" != "description" ]
  return $?
}

# Succeed when the detailed help section should be printed ("help" mode only).
function usage_show_detailed() {
  local mode="$1"
  [ "$mode" = "help" ]
  return $?
}
#!/bin/sh

# Update the project's git submodules and vendor bundles.
# The project root is resolved as the parent of this script's directory.
DIR=$(php -r "echo dirname(dirname(realpath('$0')));")
VENDOR="$DIR/vendor"
VERSION=$(cat "$DIR/VERSION")
BUNDLES=$VENDOR/bundles

echo "> Atualizando Submódulos "
git submodule update --init

# Update submodules
$DIR/bin/vendors update
package reports

import (
	"fmt"
	"os"
	"time"

	"github.com/koki-develop/qiita-lgtm-ranking/src/adapters/controllers"
	itemsrepo "github.com/koki-develop/qiita-lgtm-ranking/src/adapters/gateways/items"
	rptsrepo "github.com/koki-develop/qiita-lgtm-ranking/src/adapters/gateways/reports"
	"github.com/koki-develop/qiita-lgtm-ranking/src/infrastructures"
	"github.com/pkg/errors"
)

// Controller drives the ranking-report use cases: it searches Qiita items
// with a query built from a reference time and hands the results to the
// reports repository, which rewrites the report article identified by rptID.
type Controller struct {
	itemsRepository   controllers.ItemsRepository
	reportsRepository controllers.ReportsRepository
}

// New wires a Controller to the Qiita API (authenticated via the
// QIITA_ACCESS_TOKEN environment variable) and the report builder.
func New() *Controller {
	qapi := infrastructures.NewQiitaClient(os.Getenv("QIITA_ACCESS_TOKEN"))
	rptb := infrastructures.NewReportBuilder()

	return &Controller{
		itemsRepository: itemsrepo.New(&itemsrepo.Config{
			QiitaAPI: qapi,
		}),
		reportsRepository: rptsrepo.New(&rptsrepo.Config{
			QiitaAPI:      qapi,
			ReportBuilder: rptb,
		}),
	}
}

// UpdateDaily rebuilds the daily ranking report from items created within
// the 24 hours before t.
func (ctrl *Controller) UpdateDaily(t time.Time, rptID string) error {
	from := t.AddDate(0, 0, -1)
	query := fmt.Sprintf("created:>=%s", from.Format("2006-01-02"))

	items, err := ctrl.itemsRepository.FindAll(query)
	if err != nil {
		return errors.WithStack(err)
	}

	if err := ctrl.reportsRepository.UpdateDaily(from, rptID, items); err != nil {
		return errors.WithStack(err)
	}

	return nil
}

// UpdateDailyByTag is UpdateDaily restricted to items carrying the given tag.
func (ctrl *Controller) UpdateDailyByTag(t time.Time, rptID, tag string) error {
	from := t.AddDate(0, 0, -1)
	query := fmt.Sprintf("created:>=%s tag:%s", from.Format("2006-01-02"), tag)

	items, err := ctrl.itemsRepository.FindAll(query)
	if err != nil {
		return errors.WithStack(err)
	}

	if err := ctrl.reportsRepository.UpdateDailyByTag(from, rptID, items, tag); err != nil {
		return errors.WithStack(err)
	}

	return nil
}

// UpdateWeekly rebuilds the weekly ranking report from items created within
// the 7 days before t that have at least 10 stocks.
func (ctrl *Controller) UpdateWeekly(t time.Time, rptID string) error {
	from := t.AddDate(0, 0, -7)
	query := fmt.Sprintf("created:>=%s stocks:>=10", from.Format("2006-01-02"))

	items, err := ctrl.itemsRepository.FindAll(query)
	if err != nil {
		return errors.WithStack(err)
	}

	if err := ctrl.reportsRepository.UpdateWeekly(from, rptID, items); err != nil {
		return errors.WithStack(err)
	}

	return nil
}

// UpdateWeeklyByTag is UpdateWeekly restricted to the given tag, with a lower
// stock threshold (>= 2) since tag-scoped result sets are smaller.
func (ctrl *Controller) UpdateWeeklyByTag(t time.Time, rptID, tag string) error {
	from := t.AddDate(0, 0, -7)
	query := fmt.Sprintf("created:>=%s stocks:>=2 tag:%s", from.Format("2006-01-02"), tag)

	items, err := ctrl.itemsRepository.FindAll(query)
	if err != nil {
		return errors.WithStack(err)
	}

	if err := ctrl.reportsRepository.UpdateWeeklyByTag(from, rptID, items, tag); err != nil {
		return errors.WithStack(err)
	}

	return nil
}
import numpy as np

# Simple genetic-algorithm driver. Relies on externally supplied globals:
# objective_function, population_size, num_params, num_select,
# crossover_single, mutate, stop_condition.


# Define the fitness function
def fitness(params):
    """Score one individual (a 1-D parameter vector of length num_params)
    using the externally supplied objective_function."""
    return objective_function(params)


# Select the parameters to optimize (random initial population)
params = np.random.random(size=(population_size, num_params))

# implement genetic algorithm
generation = 0
while True:
    # calculate fitness of each individual (one row per individual)
    fitness_values = np.apply_along_axis(fitness, 1, params)

    # select fittest individuals (indices of the num_select highest scores)
    fittest_idx = np.argsort(fitness_values)[-num_select:]

    # recombine (crossover): each child gets two parents drawn from the elite
    new_params = np.empty((num_select, num_params))
    for i in range(0, num_select):
        parent_1_idx = np.random.choice(fittest_idx)
        parent_2_idx = np.random.choice(fittest_idx)
        new_params[i] = crossover_single(params[parent_1_idx], params[parent_2_idx])

    # mutate
    new_params = mutate(new_params)

    # replace old population: elite survivors + mutated offspring.
    # BUG FIX: previously this concatenated the 1-D fitness_values array with
    # the 2-D offspring matrix, destroying the population's shape and content.
    params = np.concatenate((params[fittest_idx], new_params))

    generation += 1
    if stop_condition(fitness_values, generation):
        break

# Return the result.
# BUG FIX: fitness_values was computed for the *previous* population (and has a
# different length than the post-selection population), so re-evaluate the
# final population before taking the argmax.
fitness_values = np.apply_along_axis(fitness, 1, params)
index = np.argmax(fitness_values)
optimized_params = params[index]
const _ = require('underscore');
const DrawCard = require('../../../drawcard.js');

/**
 * Ser Robar Royce (04103).
 * Reaction: when a plot with the Summer trait is revealed, gain 1 power on
 * this card (once per phase). Forced Reaction: when a plot with the Winter
 * trait is revealed, kneel this card.
 */
class SerRobarRoyce extends DrawCard {
    setupCardAbilities(ability) {
        this.reaction({
            when: {
                onPlotsRevealed: event => _.any(event.plots, plot => plot.hasTrait('Summer'))
            },
            limit: ability.limit.perPhase(1),
            handler: () => {
                // BUG FIX: these were joined with the comma operator
                // ("a(), b();"), which works but hides the second call;
                // they are now separate statements.
                this.modifyPower(1);
                this.game.addMessage('{0} uses {1} to gain a power on {1}', this.controller, this);
            }
        });
        this.forcedReaction({
            when: {
                onPlotsRevealed: event => _.any(event.plots, plot => plot.hasTrait('Winter'))
            },
            handler: () => {
                this.controller.kneelCard(this);
                this.game.addMessage('{0} is forced by {1} to kneel {1}', this.controller, this);
            }
        });
    }
}

SerRobarRoyce.code = '04103';

module.exports = SerRobarRoyce;
#!/bin/sh docker run --name salesloft-"$3" -d -p "$1":"$2" salesloft:"$3"
// sjefvanleeuwen/identity
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { AppComponent } from './app.component';
import { AssistantComponent } from './views/assistant/assistant.component';
import { CredentialsComponent } from './views/credentials/credentials.component';
import { ContactsComponent } from './views/contacts/contacts.component';
import { SettingsComponent } from './views/settings/settings.component';
import { NotificationsComponent } from './views/notifications/notifications.component';
import { IdentityComponent } from './views/identity/identity.component';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { WelcomeComponent } from './views/welcome/welcome.component';
import {
  MatCardModule,
  MatRippleModule,
  MatButtonModule,
  MatIconModule,
  MatMenuModule,
  MatToolbarModule,
  MatBadgeModule,
  MatListModule,
  MatTableModule,
} from '@angular/material';
import { ServiceWorkerModule } from '@angular/service-worker';
import { environment } from '../environments/environment';

// Application routes. Note both 'alerts' and 'notifications' map to
// NotificationsComponent; unknown paths fall back to the welcome view.
const appRoutes: Routes = [
  { path: 'identity', component: IdentityComponent },
  { path: 'credentials', component: CredentialsComponent },
  { path: 'contacts', component: ContactsComponent },
  { path: 'alerts', component: NotificationsComponent },
  { path: 'settings', component: SettingsComponent },
  { path: 'notifications', component: NotificationsComponent },
  { path: 'welcome', component: WelcomeComponent },
  { path: 'assistant', component: AssistantComponent },
  { path: '', redirectTo: '/welcome', pathMatch: 'full' },
  { path: '**', redirectTo: '/welcome', pathMatch: 'full' }
];

// Root Angular module: declares all view components, wires the router,
// Material modules and the (production-only) service worker.
@NgModule({
  declarations: [
    AppComponent,
    AssistantComponent,
    CredentialsComponent,
    ContactsComponent,
    SettingsComponent,
    NotificationsComponent,
    IdentityComponent,
    WelcomeComponent,
    // SeraphIDWallet
  ],
  imports: [
    BrowserModule,
    BrowserAnimationsModule,
    RouterModule.forRoot(
      appRoutes,
      { enableTracing: false } // <-- debugging purposes only
    ),
    MatCardModule,
    MatRippleModule,
    MatButtonModule,
    MatIconModule,
    MatMenuModule,
    MatToolbarModule,
    MatBadgeModule,
    MatListModule,
    MatTableModule,
    // SeraphIDWallet,
    ServiceWorkerModule.register('ngsw-worker.js', { enabled: environment.production })
  ],
  exports: [
    MatButtonModule,
    MatIconModule,
    MatMenuModule,
    MatToolbarModule,
    MatBadgeModule,
    AppComponent,
    AssistantComponent,
    CredentialsComponent,
    ContactsComponent,
    SettingsComponent,
    NotificationsComponent,
    IdentityComponent,
    // SeraphIDWallet,
  ],
  providers: [],
  bootstrap: [AppComponent]
})
export class AppModule { }
import React from 'react';
import { combineValidators, isRequired } from 'revalidate';
import { reduxForm, Field } from 'redux-form';
import { connect } from 'react-redux';
import { Button } from '@material-ui/core';
import Textinput from '../Comman/TextInput/TextInput';
import { Register as register } from '../../Store/Actions/UserActions';
import { Redirect } from 'react-router-dom';

// redux-form field-level validation: all three registration fields are required.
const validate = combineValidators({
    name: isRequired({ message: 'PLEASE ENTER NAME' }),
    email: isRequired({ message: 'PLEASE ENTER A VALID EMAIL ADDRESS' }),
    password: isRequired({ message: 'PLEASE ENTER A PASSWORD' })
})

// Registration form. Dispatches the register action on submit; already
// authenticated users (firebase auth uid present) are redirected home.
const Register = (props) => {

    // Called by redux-form's handleSubmit with the validated form values.
    const submitVal = val => {
        props.Register(val);
    }

    if (props.auth) {
        return <Redirect to="/" />
    }

    return (
        <div className="bg-primary Register">
            <div className="s008">
                <form onSubmit={props.handleSubmit(submitVal)}>
                    <div className="inner-form">
                        <div className="basic-search">
                            <div className="input-field">
                                {/* <input id="search" type="text" placeholder="NAME" autoComplete="off"/> */}
                                <Field name="name" component={Textinput} placeholder="NAME" type="text" />
                                <div className="icon-wrap">
                                    <i className="fa fa-user" aria-hidden="true"></i>
                                </div>
                            </div>
                            <div className="input-field">
                                <Field name="email" component={Textinput} placeholder="EMAIL" type="email" />
                                <div className="icon-wrap">
                                    <i className="fa fa-envelope" aria-hidden="true"></i>
                                </div>
                            </div>
                            <div className="input-field">
                                <Field name="password" component={Textinput} placeholder="PASSWORD" type="password" />
                                <div className="icon-wrap">
                                    <i className="fa fa-lock" aria-hidden="true"></i>
                                </div>
                            </div>
                        </div>
                    </div>
                    <div className="btnCon">
                        <Button color="primary" type="submit">Register</Button>
                    </div>
                </form>
            </div>
        </div>
    )
}

// Map the register action creator into props as "Register".
const mapDispatch = dispatch => {
    return {
        Register: cred => dispatch(register(cred))
    }
}

// Expose the firebase auth uid so the component can redirect when signed in.
const mapState = state => {
    return {
        auth: state.firebase.auth.uid
    }
}

export default connect(mapState, mapDispatch)(reduxForm({ form: 'RegisterForm', validate })((Register)));
/**
 * @author ooooo
 * @date 2020/12/24 17:12
 */
#ifndef CPP_0135__SOLUTION2_H_
#define CPP_0135__SOLUTION2_H_

#include <iostream>
#include <map>
#include <vector>
#include <numeric>

using namespace std;

// LeetCode 135 "Candy": every child gets at least one candy, and a child with
// a higher rating than an adjacent child must get more candy than that
// neighbour. Two greedy sweeps (left-to-right, then right-to-left) satisfy
// both directional constraints; the answer is the total handed out.
class Solution {
 public:
  int candy(vector<int> &ratings) {
    const int count = static_cast<int>(ratings.size());
    vector<int> handed(count, 1);
    // Left-to-right: enforce "higher than left neighbour => more candy".
    for (int idx = 1; idx < count; ++idx) {
      if (ratings[idx] > ratings[idx - 1] && handed[idx] <= handed[idx - 1]) {
        handed[idx] = handed[idx - 1] + 1;
      }
    }
    // Right-to-left: enforce the mirrored constraint without breaking the
    // first pass (only ever raise a count, never lower it).
    for (int idx = count - 2; idx >= 0; --idx) {
      if (ratings[idx] > ratings[idx + 1] && handed[idx] <= handed[idx + 1]) {
        handed[idx] = handed[idx + 1] + 1;
      }
    }
    int total = 0;
    for (int piece : handed) {
      total += piece;
    }
    return total;
  }
};

#endif //CPP_0135__SOLUTION2_H_
def linear_search(arr, target):
    """Return the index of the first element equal to target, or -1 if absent."""
    for position, value in enumerate(arr):
        if value == target:
            return position
    return -1


# Demo: locate 40 in a small list and report the outcome.
arr = [10, 40, 600, 7]
target = 40
result = linear_search(arr, target)
if result == -1:
    print("Element is not present in the array")
else:
    print("Element is present at index", result)
# attributes/default.rb
#
# Copyright:: 2015 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Inherit the base push-jobs cookbook attributes, then override for this wrapper.
include_attribute 'push-jobs'

# Allow the push-jobs client to connect without encryption.
override['push_jobs']['allow_unencrypted'] = true

# On Windows, install the push-jobs client from the packaged MSI instead of
# the default install method; pin version 2.1.4 and verify its checksum.
if platform_family?('windows')
  default['push_jobs']['package_url'] = 'https://packages.chef.io/files/stable/push-jobs-client/2.1.4/windows/2012/push-jobs-client-2.1.4-1-x86.msi'
  default['push_jobs']['package_checksum'] = '3b979f8d362738c8ac126ace0e80122a4cbc53425d5f8cf9653cdd79eca16d62'
end
// xidongc/mongo_ebenchmark — model/payment/service/payment.go
/*
 * mongodb_ebenchmark - Mongodb grpc proxy benchmark for e-commerce workload (still in dev)
 * Copyright (c) 2020 - <NAME> <<EMAIL>>
 *
 * All rights reserved.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 */

package service

import (
	"context"
	"errors"

	log "github.com/sirupsen/logrus"
	"github.com/xidongc/mongo_ebenchmark/model/payment/paymentpb"
	"github.com/xidongc/mongo_ebenchmark/model/payment/service/provider"
	"github.com/xidongc/mongo_ebenchmark/pkg/cfg"
	"github.com/xidongc/mongo_ebenchmark/pkg/proxy"
)

// ns is the storage namespace (collection name) used for payment documents.
const ns = "payment"

// Service implements the payment operations: it charges via an external
// payment provider and persists the resulting charge through the storage
// proxy, amplified per the benchmark configuration.
type Service struct {
	Storage   proxy.Client
	Amplifier cfg.Amplifier
}

// NewCharge charges the request through the provider selected by
// PaymentProviderId (currently only AliPay is implemented, and it is also
// the fallback) and inserts the resulting charge document into storage.
func (s Service) NewCharge(ctx context.Context, req *paymentpb.ChargeRequest) (*paymentpb.Charge, error) {
	providerId := req.GetPaymentProviderId()
	var provide Provider
	// TODO add more
	switch providerId {
	case paymentpb.PaymentProviderId_AliPay:
		provide = &provider.AliPay{}
	default:
		provide = &provider.AliPay{}
	}
	charge, err := provide.Charge(req)
	if err != nil {
		// NOTE(review): the charge error is only logged here; the refund is
		// still a TODO and execution falls through to the nil check below.
		log.Warning("do refund")
		// TODO do refund
	}
	if charge == nil {
		return nil, errors.New("error")
	}
	var docs []interface{}
	docs = append(docs, *charge)
	param := &proxy.InsertParam{
		Docs: docs,
		Amp:  s.Amplifier,
	}
	if err := s.Storage.Insert(ctx, param); err != nil {
		log.Error(err)
		return nil, err
	}
	return charge, nil
}

// RefundCharge refunds a previous charge. Not implemented yet: always
// returns a nil charge and nil error.
func (s Service) RefundCharge(ctx context.Context, req *paymentpb.RefundRequest) (charge *paymentpb.Charge, err error) {
	return
}

// Get fetches a charge. Not implemented yet: always returns nil, nil.
func (s Service) Get(ctx context.Context, req *paymentpb.GetRequest) (charge *paymentpb.Charge, err error) {
	return
}

// NewClient creates a payment service storage client bound to the payment
// namespace. NOTE(review): the error from proxy.NewClient is discarded, so a
// failed connection yields a possibly nil/unusable client.
func NewClient(config *cfg.ProxyConfig, cancel context.CancelFunc) (client *proxy.Client) {
	client, _ = proxy.NewClient(config, ns, cancel)
	return
}
package com.comandulli.engine.panoramic.playback.entity.focus;

import com.comandulli.engine.panoramic.playback.engine.core.Engine;
import com.comandulli.engine.panoramic.playback.engine.core.Entity;
import com.comandulli.engine.panoramic.playback.engine.core.Time;
import com.comandulli.engine.panoramic.playback.engine.math.Color;
import com.comandulli.engine.panoramic.playback.engine.math.Vector3;
import com.comandulli.engine.panoramic.playback.engine.physics.Physics;
import com.comandulli.engine.panoramic.playback.engine.physics.Ray;
import com.comandulli.engine.panoramic.playback.engine.render.camera.Camera;
import com.comandulli.engine.panoramic.playback.engine.render.material.Material;
import com.comandulli.engine.panoramic.playback.engine.render.renderer.MeshRenderer;

import java.util.List;

/**
 * Entity implementing gaze-based ("dwell") interaction: every frame it casts
 * a ray forward from the camera and drives the focus life cycle of any
 * {@link InteractiveObject} it hits. An object first becomes a "potential"
 * focus (FocusStarted); once the gaze has rested on it for the object's
 * getTimeToFocus() seconds it becomes the focused object (FocusIn) and then
 * receives FocusUpdate each frame. Looking away triggers FocusOut /
 * FocusCanceled. Also renders a small crosshair in front of the camera.
 */
public class FocusInteraction extends Entity {

    private final Camera camera;
    // Object currently focused (dwell time completed), if any.
    private InteractiveObject focusedObject;
    // Object the gaze is resting on but whose dwell time has not elapsed yet.
    private InteractiveObject potentialFocus;
    // Time.time at which the gaze started resting on potentialFocus.
    private float timemark;
    // Mesh and shader handles used for the crosshair renderer.
    private final int mesh;
    private final int shader;

    public FocusInteraction(String name, Camera camera, int mesh, int shader) {
        super(name);
        this.camera = camera;
        this.mesh = mesh;
        this.shader = shader;
    }

    /**
     * Creates the crosshair entity: a small white mesh parented to the
     * camera, placed 2 units in front of it.
     */
    @Override
    public void start() {
        Entity smallBox = new Entity("Crosshair");
        smallBox.addComponent(new MeshRenderer(mesh, new Material(shader, Color.WHITE)));
        smallBox.transform.translate(new Vector3(0.0f, 0.0f, 2.0f));
        smallBox.transform.parent = camera.entity.transform;
        smallBox.transform.scale(new Vector3(0.01f, 0.01f, 0.01f));
        Engine.getScene().addEntity(smallBox);
    }

    /**
     * Per-frame focus state machine. Raycasts from the camera; an empty hit
     * list clears both focused and potential objects, otherwise the first
     * InteractiveObject hit advances the focus life cycle.
     */
    @Override
    public void update() {
        Vector3 origin = camera.entity.transform.position;
        Vector3 direction = camera.entity.transform.getForward();
        Ray ray = new Ray(origin, direction);
        List<Entity> entityList = Physics.raycast(ray);
        if (entityList.isEmpty()) {
            // Gaze hit nothing: abort any focus in progress.
            if (focusedObject != null) {
                focusedObject.FocusOut();
            }
            if (potentialFocus != null) {
                potentialFocus.FocusCanceled();
            }
            focusedObject = null;
            potentialFocus = null;
        }
        for (Entity entity : entityList) {
            if (entity instanceof InteractiveObject) {
                if (focusedObject != null) {
                    if (entity != focusedObject) {
                        // Gaze moved to a different object: leave the old one
                        // and start dwelling on the new one.
                        focusedObject.FocusOut();
                        timemark = Time.time;
                        potentialFocus = (InteractiveObject) entity;
                        potentialFocus.FocusStarted();
                    } else {
                        // Still looking at the focused object: report time
                        // elapsed since the dwell completed.
                        focusedObject.FocusUpdate(Time.time - (timemark + focusedObject.getTimeToFocus()));
                    }
                } else {
                    if (potentialFocus != null) {
                        if (entity == potentialFocus) {
                            // Dwell continues; promote once the required
                            // focus time has elapsed.
                            if (Time.time > timemark + potentialFocus.getTimeToFocus()) {
                                ((InteractiveObject) entity).FocusIn();
                                focusedObject = potentialFocus;
                                potentialFocus = null;
                            }
                        } else {
                            // Dwell switched targets: cancel and restart.
                            potentialFocus.FocusCanceled();
                            potentialFocus = (InteractiveObject) entity;
                            potentialFocus.FocusStarted();
                        }
                    } else {
                        // Fresh gaze contact: begin dwelling.
                        timemark = Time.time;
                        potentialFocus = (InteractiveObject) entity;
                        potentialFocus.FocusStarted();
                    }
                }
                // Only the first InteractiveObject along the ray is considered.
                break;
            }
        }
        super.update();
    }
}
// src/main/java/org/olat/modules/bigbluebutton/ui/BigBlueButtonMeetingDataSource.java
/**
 * <a href="http://www.openolat.org">
 * OpenOLAT - Online Learning and Training</a><br>
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at the
 * <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Initial code contributed and copyrighted by<br>
 * frentix GmbH, http://www.frentix.com
 * <p>
 */
package org.olat.modules.bigbluebutton.ui;

import java.util.Collections;
import java.util.List;

import org.olat.core.CoreSpringFactory;
import org.olat.core.commons.persistence.DefaultResultInfos;
import org.olat.core.commons.persistence.ResultInfos;
import org.olat.core.commons.persistence.SortKey;
import org.olat.core.gui.components.form.flexible.elements.FlexiTableFilter;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableDataSourceDelegate;
import org.olat.core.util.StringHelper;
import org.olat.modules.bigbluebutton.BigBlueButtonManager;
import org.olat.modules.bigbluebutton.model.BigBlueButtonMeetingAdminInfos;
import org.olat.modules.bigbluebutton.model.BigBlueButtonMeetingsSearchParameters;
import org.olat.modules.bigbluebutton.model.BigBlueButtonMeetingsSearchParameters.OrderBy;

/**
 * Paged/filtered/sorted data source feeding the BigBlueButton meetings admin
 * table. Translates table state (query string, recording filter, sort key)
 * into {@link BigBlueButtonMeetingsSearchParameters} and delegates the actual
 * queries to {@link BigBlueButtonManager}.
 *
 * Initial date: 2 déc. 2020<br>
 * @author srosse, stephane.rosse<EMAIL>, http://www.frentix.com
 */
public class BigBlueButtonMeetingDataSource implements FlexiTableDataSourceDelegate<BigBlueButtonMeetingAdminInfos> {

	// Cached row count; invalidated via resetCount() and refreshed lazily.
	private Integer count;
	private final BigBlueButtonManager bigBlueButtonManager;
	private BigBlueButtonMeetingsSearchParameters searchParams = new BigBlueButtonMeetingsSearchParameters();

	public BigBlueButtonMeetingDataSource() {
		bigBlueButtonManager = CoreSpringFactory.getImpl(BigBlueButtonManager.class);
	}

	// Invalidates the cached row count so getRowCount() queries again.
	public void resetCount() {
		count = null;
	}

	@Override
	public int getRowCount() {
		if(count == null) {
			count = bigBlueButtonManager.countMeetings(searchParams);
		}
		return count;
	}

	@Override
	public List<BigBlueButtonMeetingAdminInfos> reload(List<BigBlueButtonMeetingAdminInfos> rows) {
		// Row-level reload is not supported by this data source.
		return Collections.emptyList();
	}

	@Override
	public ResultInfos<BigBlueButtonMeetingAdminInfos> getRows(String query, List<FlexiTableFilter> filters,
			int firstResult, int maxResults, SortKey... orderBy) {

		if(StringHelper.containsNonWhitespace(query)) {
			searchParams.setSearchString(query);
		} else {
			searchParams.setSearchString(null);
		}

		// Only the first filter is evaluated: with/without recordings.
		Boolean recordings = null;
		if(filters != null && !filters.isEmpty()) {
			String filter = filters.get(0).getFilter();
			if("with-recordings".equals(filter)) {
				recordings = Boolean.TRUE;
			} else if("no-recordings".equals(filter)) {
				recordings = Boolean.FALSE;
			}
		}
		searchParams.setRecordings(recordings);

		if(orderBy != null && orderBy.length > 0 && orderBy[0] != null) {
			searchParams.setOrder(OrderBy.secureValueOf(orderBy[0].getKey()));
			searchParams.setOrderAsc(orderBy[0].isAsc());
		} else {
			searchParams.setOrder(null);
		}

		List<BigBlueButtonMeetingAdminInfos> viewResults = bigBlueButtonManager.searchMeetings(searchParams, firstResult, maxResults);
		// First page smaller than a full page => total size is known exactly.
		if(firstResult == 0 && viewResults.size() < maxResults) {
			count = Integer.valueOf(viewResults.size());
		}
		return new DefaultResultInfos<>(firstResult + viewResults.size(), -1, viewResults);
	}
}
// Test support for the graphql proxy HTTP handler: declares the declarative
// ProxyTestCase structure consumed by RunTestCase.
package http

import (
	"context"
	"errors"
	hackmiddleware "github.com/jensneuse/graphql-go-tools/hack/middleware"
	"github.com/jensneuse/graphql-go-tools/pkg/middleware"
	"github.com/jensneuse/graphql-go-tools/pkg/proxy"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
	"net/url"
	"strings"
	"testing"
)

// ProxyTestCase is a human understandable proxy test
type ProxyTestCase struct {
	// Schema is the schema exposed to the client
	Schema string
	// ClientRequest is the request from the client in front of the proxy
	ClientRequest string
	// ClientHeaders are the additional headers to be set on the client request
	ClientHeaders map[string]string
	// ExpectedProxiedRequest is the rewritten request that is proxied to the backend (origin graphql server)
	ExpectedProxiedRequest string
	// MiddleWares are the proxy middlewares to test
	MiddleWares []middleware.GraphqlMiddleware
	// BackendStatusCode is the status code returned by the backend
	BackendStatusCode int
	// BackendResponse is the response from the backend to the proxy
	BackendResponse string
	// WantClientResponseStatusCode is the http status code we expect the proxy to return
	WantClientResponseStatusCode int
	// WantClientResponseBody is the body we're expecting the proxy to return to the proxy
	WantClientResponseBody string
	// WantProxyErrorHandlerInvocation indicates if the proxy error handler should be invoced during the test
	WantProxyErrorHandlerInvocation bool
	// ProxyOnBeforeRequestMiddleware is the middleWares invoked before the proxy http handler
	ProxyOnBeforeRequestMiddleware HttpMiddleware
	// BackendOnBeforeRequestMiddleware is the middleware invoked before the backend handler
	BackendOnBeforeRequestMiddleware HttpMiddleware
	// BackendHeaders are the headers that should be statically set on requests to the backend
	BackendHeaders map[string][]string
	// RequestConfigProviderFactory if set (optional) could override the request config provider
	RequestConfigProviderFactory func(config proxy.RequestConfig) proxy.RequestConfigProvider
}
/*
HttpMiddleware wraps a http handler to add additional logic for certain tests

Minimum Example:

	f := func(w http.ResponseWriter, r *http.Request) {
		handler.ServeHTTP(w, r)
	}
	return http.HandlerFunc(f)
*/
type HttpMiddleware func(handler http.Handler) http.Handler

// TestProxy exercises the proxy end to end through declarative test cases.
func TestProxy(t *testing.T) {
	t.Run("asset middleware", func(t *testing.T) {
		RunTestCase(t, ProxyTestCase{
			Schema: assetSchema,
			MiddleWares: []middleware.GraphqlMiddleware{
				&hackmiddleware.AssetUrlMiddleware{},
			},
			ClientRequest:                   assetInput,
			ExpectedProxiedRequest:          assetOutput,
			BackendStatusCode:               http.StatusOK,
			WantClientResponseStatusCode:    http.StatusOK,
			WantProxyErrorHandlerInvocation: false,
		})
	})
	t.Run("with backend response", func(t *testing.T) {
		RunTestCase(t, ProxyTestCase{
			Schema:                          assetSchema,
			ClientRequest:                   assetInput,
			ExpectedProxiedRequest:          assetInput,
			BackendStatusCode:               http.StatusOK,
			WantClientResponseStatusCode:    http.StatusOK,
			WantProxyErrorHandlerInvocation: false,
			BackendResponse:                 "testPayload",
			WantClientResponseBody:          "testPayload",
		})
	})
	t.Run("handle backend error correctly", func(t *testing.T) {
		RunTestCase(t, ProxyTestCase{
			Schema: assetSchema,
			MiddleWares: []middleware.GraphqlMiddleware{
				&hackmiddleware.AssetUrlMiddleware{},
			},
			ClientRequest:                   assetInput,
			ExpectedProxiedRequest:          assetOutput,
			BackendStatusCode:               http.StatusInternalServerError,
			WantClientResponseStatusCode:    http.StatusOK,
			WantProxyErrorHandlerInvocation: true,
		})
	})
	t.Run("handle request with variables", func(t *testing.T) {
		RunTestCase(t, ProxyTestCase{
			Schema: assetSchema,
			MiddleWares: []middleware.GraphqlMiddleware{
				&hackmiddleware.AssetUrlMiddleware{},
			},
			ClientRequest:                   variableAssetInput,
			ExpectedProxiedRequest:          variableAssetOutput,
			BackendStatusCode:               http.StatusOK,
			WantClientResponseStatusCode:    http.StatusOK,
			WantProxyErrorHandlerInvocation: false,
		})
	})
	t.Run("failing request config provider", func(t *testing.T) {
		RunTestCase(t, ProxyTestCase{
			Schema:                          assetSchema,
			ClientRequest:                   variableAssetInput,
			ExpectedProxiedRequest:          variableAssetOutput,
			WantClientResponseStatusCode:    http.StatusOK,
			WantProxyErrorHandlerInvocation: true,
			RequestConfigProviderFactory: func(config proxy.RequestConfig) proxy.RequestConfigProvider {
				return failingRequestConfigProvider{}
			},
		})
	})
	t.Run("handle request response e2e", func(t *testing.T) {
		RunTestCase(t, ProxyTestCase{
			Schema: publicSchema,
			MiddleWares: []middleware.GraphqlMiddleware{
				&middleware.ContextMiddleware{},
			},
			ClientRequest:                   publicQuery,
			ExpectedProxiedRequest:          privateQuery,
			BackendStatusCode:               http.StatusOK,
			WantClientResponseStatusCode:    http.StatusOK,
			WantProxyErrorHandlerInvocation: false,
			ClientHeaders: map[string]string{
				userKey: userValue,
			},
			BackendHeaders: map[string][]string{
				Authorization: {privateAuthHeader},
			},
			BackendResponse:        backendResponse,
			WantClientResponseBody: backendResponse,
			// Copies the "user" client header into the request context so the
			// ContextMiddleware can rewrite the query with it.
			ProxyOnBeforeRequestMiddleware: func(handler http.Handler) http.Handler {
				f := func(w http.ResponseWriter, r *http.Request) {
					headerUserValue := r.Header.Get(userKey)
					if len(headerUserValue) == 0 {
						t.Fatal("want value for header key 'user', missing!")
					}
					ctx := context.WithValue(r.Context(), "user", headerUserValue)
					handler.ServeHTTP(w, r.WithContext(ctx))
				}
				return http.HandlerFunc(f)
			},
			// Verifies the statically configured Authorization header reached
			// the backend.
			BackendOnBeforeRequestMiddleware: func(handler http.Handler) http.Handler {
				f := func(w http.ResponseWriter, r *http.Request) {
					authHeader := r.Header.Get(Authorization)
					if authHeader != privateAuthHeader {
						t.Fatalf("want header for key 'Authorization': '%s', got: '%s'", privateAuthHeader, authHeader)
					}
					handler.ServeHTTP(w, r)
				}
				return http.HandlerFunc(f)
			},
		})
	})
}

// RunTestCase starts a backend server + a proxy and tests a client request against it
func RunTestCase(t *testing.T, testCase ProxyTestCase) {
	// Fake origin graphql server: asserts the proxied request body matches the
	// expectation, then replies with the configured status/body.
	backendHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		body, err := ioutil.ReadAll(r.Body)
		if err != nil {
			t.Fatal(err)
		}
		if strings.TrimSpace(string(body)) != testCase.ExpectedProxiedRequest {
			t.Fatalf("Expected:\n%s\ngot\n%s", testCase.ExpectedProxiedRequest, strings.TrimSpace(string(body)))
		}
		w.WriteHeader(testCase.BackendStatusCode)
		if len(testCase.BackendResponse) != 0 {
			_, err = w.Write([]byte(testCase.BackendResponse))
			if err != nil {
				t.Fatal(err)
			}
		}
	})

	var backendGraphqlServer *httptest.Server
	if testCase.BackendOnBeforeRequestMiddleware != nil {
		backendGraphqlServer = httptest.NewServer(testCase.BackendOnBeforeRequestMiddleware(backendHandler))
	} else {
		backendGraphqlServer = httptest.NewServer(backendHandler)
	}
	defer backendGraphqlServer.Close()

	backendURL, err := url.Parse(backendGraphqlServer.URL)
	if err != nil {
		t.Fatal(err)
	}

	schema := []byte(testCase.Schema)
	requestConfig := proxy.RequestConfig{
		Schema:         &schema,
		BackendURL:     *backendURL,
		BackendHeaders: testCase.BackendHeaders,
	}

	// The factory hook lets a test substitute a (e.g. failing) provider.
	var requestConfigProvider proxy.RequestConfigProvider
	if testCase.RequestConfigProviderFactory != nil {
		requestConfigProvider = testCase.RequestConfigProviderFactory(requestConfig)
	} else {
		requestConfigProvider = proxy.NewStaticRequestConfigProvider(requestConfig)
	}

	graphqlProxy := NewDefaultProxy(requestConfigProvider, testCase.MiddleWares...)

	// Record error-handler invocations so the test can assert on them.
	errorHandlerInvoked := false
	graphqlProxy.HandleError = func(err error, w http.ResponseWriter) {
		errorHandlerInvoked = true
		w.WriteHeader(http.StatusOK)
		if !testCase.WantProxyErrorHandlerInvocation {
			t.Fatal(err)
		}
	}

	var graphqlProxyHttpServer *httptest.Server
	if testCase.ProxyOnBeforeRequestMiddleware != nil {
		graphqlProxyHttpServer = httptest.NewServer(testCase.ProxyOnBeforeRequestMiddleware(graphqlProxy))
	} else {
		graphqlProxyHttpServer = httptest.NewServer(graphqlProxy)
	}
	defer graphqlProxyHttpServer.Close()

	request, err := http.NewRequest(http.MethodPost, graphqlProxyHttpServer.URL, strings.NewReader(testCase.ClientRequest))
	if err != nil {
		t.Error(err)
	}
	request.Header.Set("Content-Type", "application/graphql")
	if testCase.ClientHeaders != nil {
		for key, value := range testCase.ClientHeaders {
			request.Header.Set(key, value)
		}
	}

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		t.Fatal(err)
	}
	if testCase.WantProxyErrorHandlerInvocation != errorHandlerInvoked {
		t.Fatalf("want proxy error handler invocation: %t, got: %t", testCase.WantProxyErrorHandlerInvocation, errorHandlerInvoked)
	}
	if response == nil {
		t.Fatal("response must not be nil")
	}
	if response.StatusCode != testCase.WantClientResponseStatusCode {
		t.Fatalf("want proxy status code: %d, got: %d", testCase.WantClientResponseStatusCode, response.StatusCode)
	}
	responseBody, _ := ioutil.ReadAll(response.Body)
	actualClientResponseBody := string(responseBody)
	if testCase.WantClientResponseBody != actualClientResponseBody {
		t.Errorf("want response body:\n'%s'\ngot:\n'%s'", testCase.WantClientResponseBody, actualClientResponseBody)
	}
}

const assetSchema = `
schema {
	query: Query
}

type Query {
	assets(first: Int): [Asset]
}

type Asset implements Node {
	status: Status!
	updatedAt: DateTime!
	createdAt: DateTime!
	id: ID!
	handle: String!
	fileName: String!
	height: Float
	width: Float
	size: Float
	mimeType: String
	url: String!
}`

const assetInput = `{"query":"query testQueryWithoutHandle {assets(first:1) {id fileName url(transformation:{image:{resize:{width:100,height:100}}})}}"}`
const assetOutput = `{"query":"query testQueryWithoutHandle {assets(first:1) {id fileName handle}}"}`

const variableAssetInput = `{"query":"query testQueryWithoutHandle {assets(first: 1) { id fileName url(transformation: {image: {resize: {width: 100, height: 100}}})}}","variables":{"id":1}}`
const variableAssetOutput = `{"query":"query testQueryWithoutHandle {assets(first:1) {id fileName handle}}","variables":{"id":1}}`

/* the public schema for reference
schema {
	query: Query
}

type Query {
	documents(user: String!): [Document]
}

type Document implements Node {
	owner: String
	sensitiveInformation: String
}
*/

// e2e test data
const (
	publicSchema = `
directive @addArgumentFromContext(
	name: String!
	contextKey: String!
) on FIELD_DEFINITION

scalar String

schema {
	query: Query
}

type Query {
	documents: [Document] @addArgumentFromContext(name: "user",contextKey: "user")
}

type Document implements Node {
	owner: String
	sensitiveInformation: String
}
`
	publicQuery       = `{"query":"query myDocuments {documents {sensitiveInformation}}"}`
	privateQuery      = `{"query":"query myDocuments {documents(user:\"<EMAIL>\") {sensitiveInformation}}"}`
	privateAuthHeader = "testAuth"
	backendResponse   = `{"data":{"documents":[{"sensitiveInformation":"jsmith"},{"sensitiveInformation":"got proxied"}]}}`
	Authorization     = "Authorization"
	userKey           = "user"
	userValue         = "<EMAIL>"
)

// failing request config provider
type failingRequestConfigProvider struct{}

func (failingRequestConfigProvider) GetRequestConfig(ctx context.Context) (*proxy.RequestConfig, error) {
	return nil, errors.New("failing")
}
class AnimationManager:
    """Tracks active animations, keyed by their animation type."""

    def __init__(self):
        # Maps animation_type -> duration for every registered animation.
        self.animations = {}

    def cancel_all(self, animation_type):
        """Remove the animation registered under ``animation_type``.

        A no-op when no such animation exists (pop with a default is
        equivalent to the guarded ``del``).
        """
        self.animations.pop(animation_type, None)

    def create_animation(self, animation_type, duration):
        """Register (or replace) the animation of the given type."""
        self.animations[animation_type] = duration
require 'json'
require 'i18n'
require 'dkdeploy/i18n'
require 'capistrano/dsl'

include Capistrano::DSL
include Dkdeploy::RollbackManager

# Capistrano tasks that toggle an application maintenance mode by writing,
# inspecting, and deleting a shared JSON config file on the remote hosts.
# The file carries a single flag, `enabled_permanent`, which decides whether
# the regular `maintenance:disable` task is allowed to lift the mode.
namespace :maintenance do
  # Remote maintenance config file path
  #
  # @return [String]
  def maintenance_config_file_path
    File.join shared_path, 'config/maintenance.json'
  end

  desc 'Enables the maintenance mode. In a regular case it should be disabled after deployment.'
  task :enable do
    # Define rollback behaviour
    add_rollback_task 'maintenance:disable'
    on release_roles :app, :web do
      # create remote maintenance config file (and directory if not exists)
      execute :mkdir, '-p', File.dirname(maintenance_config_file_path)
      content_json = JSON.pretty_generate enabled_permanent: false
      # scp-net::upload! expects StringIO object
      content = StringIO.new content_json
      upload! content, maintenance_config_file_path
      info I18n.t('tasks.maintenance.enabled', mode: 'regular', scope: :dkdeploy)
    end
  end

  desc "Enables the maintenance permanent mode. The 'maintenance:disable' will require 'maintenance:disable_permanent'."
  task :enable_permanent do
    # Reuse the regular enable task (rollback registration, directory
    # creation), then overwrite the config with the permanent flag set.
    invoke 'maintenance:enable'
    on release_roles :app, :web do
      content_json = JSON.pretty_generate enabled_permanent: true
      # scp-net::upload! expects StringIO object
      content = StringIO.new content_json
      upload! content, maintenance_config_file_path
      info I18n.t('tasks.maintenance.enabled', mode: 'permanent', scope: :dkdeploy)
    end
  end

  desc "Disables the maintenance mode, if the 'maintenance:enabled_permanent' has not been enabled."
  task :disable do
    on release_roles :app, :web do
      # noinspection RubyArgCount
      if test %([ -f "#{maintenance_config_file_path}" ])
        # Only parse the config when the remote file is non-empty ([ -s ]).
        if test "[ -s #{maintenance_config_file_path} ]"
          config_file_content = download! maintenance_config_file_path
          config_file_content = JSON.parse config_file_content
          # Permanent mode may only be lifted via maintenance:disable_permanent,
          # so warn and skip this host instead of removing the file.
          if config_file_content.fetch 'enabled_permanent'
            warn I18n.t('tasks.maintenance.can_not_disable_by_reason_of_permanent', scope: :dkdeploy)
            next
          end
        end
        execute :rm, '-f', maintenance_config_file_path
      end
      info I18n.t('tasks.maintenance.disabled', mode: 'regular', scope: :dkdeploy)
    end
  end

  desc "Disables the maintenance permanent mode. The 'maintenance:disable' will work in a regular way again."
  task :disable_permanent do
    on release_roles :app, :web do
      # Unconditional removal: works for both regular and permanent mode.
      execute :rm, '-f', maintenance_config_file_path
      info I18n.t('tasks.maintenance.disabled', mode: 'permanent', scope: :dkdeploy)
    end
  end
end
# Aliases in this file are bash and zsh compatible # Don't change. The following determines where YADR is installed. yadr=$HOME/.yadr # YADR support alias yav='yadr vim-add-plugin' alias ydv='yadr vim-delete-plugin' alias ylv='yadr vim-list-plugin' alias yup='yadr update-plugins' alias yip='yadr init-plugins' # PS alias psa="ps aux" alias psg="ps aux | grep " alias psr='ps aux | grep ruby' # Moving around alias cdb='cd -' # Show human friendly numbers and colors alias df='df -h' alias ll='ls -alGh' alias ls='ls -Gh' alias du='du -h -d 2' # show me files matching "ls grep" alias lsg='ll | grep' # Alias Editing alias ae='vim $yadr/zsh/aliases.zsh' #alias edit alias ar='source $yadr/zsh/aliases.zsh' #alias reload # vim using mvim --version > /dev/null 2>&1 MACVIM_INSTALLED=$? if [ $MACVIM_INSTALLED -eq 0 ]; then alias vim="mvim -v" fi # vimrc editing alias ve='vim ~/.vimrc' # zsh profile editing alias ze='vim ~/.zshrc' alias zr='source ~/.zshrc' # Git Aliases alias gs='git status' alias gstsh='git stash' alias gst='git stash' alias gsp='git stash pop' alias gsa='git stash apply' alias gsh='git show' alias gshw='git show' alias gshow='git show' alias gcm='git ci -m' alias gcim='git ci -m' alias gci='git ci' alias gco='git co' alias gcp='git cp' alias ga='git add -A' alias guns='git unstage' alias gunc='git uncommit' alias gm='git merge' alias gms='git merge --squash' alias gam='git amend --reset-author' alias grv='git remote -v' alias grr='git remote rm' alias grad='git remote add' alias gr='git rebase' alias gra='git rebase --abort' alias ggrc='git rebase --continue' alias gbi='git rebase --interactive' alias gl='git l' alias glg='git l' alias glog='git l' alias co='git co' alias gf='git fetch' alias gfch='git fetch' alias gd='git diff' alias gb='git b' alias gbd='git b -D -w' alias gdc='git diff --cached -w' alias gpub='grb publish' alias gtr='grb track' alias gpl='git pull' alias gplr='git pull --rebase' alias gps='git push' alias gpsh='git push' alias gnb='git nb' 
# new branch aka checkout -b alias grs='git reset' alias grsh='git reset --hard' alias gcln='git clean' alias gclndf='git clean -df' alias gclndfx='git clean -dfx' alias gsm='git submodule' alias gsmi='git submodule init' alias gsmu='git submodule update' alias gt='git t' alias gbg='git bisect good' alias gbb='git bisect bad' # Common shell functions alias less='less -r' alias tf='tail -f' alias lh='ls -alt | head' # see the last modified files alias screen='TERM=screen screen' alias cl='clear' # Zippin alias gz='tar -zcvf' # Ruby alias ts='thin start' alias ms='mongrel_rails start' alias tfdl='tail -f log/development.log' alias tftl='tail -f log/test.log' # Vim/ctags "mctags = make ctags", using the ruby specific version # to save some time alias mctags=~/.bin/run_tags.rb #'/opt/local/bin/ctags -Rf ./tags *' alias ka9='killall -9' alias k9='kill -9' # Gem install alias sgi='sudo gem install --no-ri --no-rdoc' # TODOS # This uses NValt (NotationalVelocity alt fork) - http://brettterpstra.com/project/nvalt/ # to find the note called 'todo' alias todo='open nvalt://find/todo' # Forward port 80 to 3000 alias portforward='sudo ipfw add 1000 forward 127.0.0.1,3000 ip from any to any 80 in' alias rdm='rake db:migrate' alias rdmr='rake db:migrate:redo' # Zeus alias zs='zeus server' alias zc='zeus console' # Rspec alias rs='rspec spec' # Sprintly - https://github.com/nextbigsoundinc/Sprintly-GitHub alias sp='sprintly' # spb = sprintly branch - create a branch automatically based on the bug you're working on alias spb="git checkout -b \`sp | tail -2 | grep '#' | sed 's/^ //' | sed 's/[^A-Za-z0-9 ]//g' | sed 's/ /-/g' | cut -d"-" -f1,2,3,4,5\`" for file in ~/.yadr/zsh/aliases/*; do source $file; done source ~/.yadr/zsh/zzzz_after.zsh export SHELL=/bin/zsh
#!/bin/bash
# Copyright © 2021 Pittsburgh Supercomputing Center.
# All Rights Reserved.
#
# Build the FLAC Singularity image from the local definition file using a
# remote builder.  Exit status 0 iff the image file exists afterwards.

IMAGE=singularity-flac-1.3.3.sif
DEFINITION=Singularity

# Remove any stale image first so the existence check below reflects
# THIS build, not a leftover from a previous run.
# (Variable expansions are quoted to be safe against spaces/globbing.)
if [ -f "$IMAGE" ]; then
    rm -fv "$IMAGE"
fi

singularity build --remote "$IMAGE" "$DEFINITION"

# Success is defined by the presence of the freshly built image file.
if [ -f "$IMAGE" ]; then
    exit 0
else
    exit 1
fi
def find_missing_elements(arr):
    """Return the sorted list of integers absent from ``arr`` between its
    minimum and maximum values (inclusive).

    Fixes over the previous version:
    - does not mutate the caller's list (old code called ``arr.sort()``);
    - O(range + n) via set membership instead of O(range * n) list scans;
    - returns [] for empty input instead of raising IndexError.
    """
    if not arr:
        return []
    # Set lookup is O(1); scanning the value range against a list was O(n) per probe.
    present = set(arr)
    return [i for i in range(min(arr), max(arr) + 1) if i not in present]


arr = [9, 6, 4, 2, 3, 5, 7, 0, 1]
print(find_missing_elements(arr))
# Configuration for the ASCI_Purple benchmark: derived artifact names,
# source list, compiler flags, and size-dependent run options.
bnc_name="ASCI_Purple" ;
lnk_name="$bnc_name.rbc" ;
prf_name="$bnc_name.ibc" ;
obj_name="$bnc_name.o" ;
exe_name="$bnc_name.exe" ;
# All .c files in the current directory are treated as benchmark sources.
# NOTE(review): word-splits on whitespace in filenames — presumably the
# benchmark tree never contains such names; confirm before reuse.
source_files=($(ls *.c)) ;
CXXFLAGS=" -I. -lm -D_POSIX_SOURCE -DHYPRE_TIMING -DHYPRE_SEQUENTIAL" ;
# Choose run options by problem size: the LARGE_PROBLEM_SIZE environment
# variable (any non-empty value) selects the larger grid.
if [[ -n $LARGE_PROBLEM_SIZE ]]; then
RUN_OPTIONS=" -n 100 40 100 -c 0.1 1.0 10.0 " ;
else
RUN_OPTIONS=" -n 30 15 30 -c 0.1 1.0 10.0 " ;
fi
<gh_stars>1-10 /** * Bolt * MiniSqlParserTest * * Copyright (c) 2017 <NAME> * * This software is released under the MIT License. * http://opensource.org/licenses/mit-license.php * * @author <NAME> */ package com.sopranoworks.bolt import java.io.ByteArrayInputStream import java.util import com.google.cloud.spanner.{DatabaseClient, ResultSet} import com.sopranoworks.bolt.Bolt.Nut import com.sopranoworks.bolt.statements._ import com.sopranoworks.bolt.values._ import org.antlr.v4.runtime.{ANTLRInputStream, BailErrorStrategy, CommonTokenStream} import org.specs2.mutable.Specification import scala.collection.JavaConversions._ /** * Created by takahashi on 2017/07/17. */ class MiniSqlParserTest extends Specification { class DummyDatabase extends Database { var tables = Map.empty[String,Table] override def table(name: String): Option[Table] = tables.get(name) } class DummyNut(dbClient:DatabaseClient = null) extends Nut(dbClient) { private val _database = new DummyDatabase override def database: Database = _database var queryString = "" override def executeNativeQuery(sql: String): ResultSet = { queryString = sql null } override def executeNativeAdminQuery(admin: Admin, sql: String): Unit = { queryString = sql } override def execute(stmt: NoResult): Unit = { stmt match { case SimpleInsert(_,_,_,_,values) => queryString = values.map(_.text).mkString(",") case InsertSelect(_,_,_,_,subquery) => queryString = subquery.text case SimpleUpdate(_,_,_,keysAndValues,_,_) => queryString = keysAndValues.get(0).value.eval.asValue.text case Delete(_,_,_,w,_) => queryString = w.whereStmt } } } private def _createParser(sql:String) = { val source = new ByteArrayInputStream(sql.getBytes("UTF8")) val input = new ANTLRInputStream(source) val lexer = new MiniSqlLexer(input) val tokenStream = new CommonTokenStream(lexer) val parser = new MiniSqlParser(tokenStream) val nut = new DummyNut() parser.setErrorHandler(new BailErrorStrategy()) parser.nut = nut (parser,nut) } "SELECT" should { "SELECT * 
FROM (SELECT \"apple\" AS fruit, \"carrot\" AS vegetable)" in { val sql = "SELECT * FROM (SELECT \"apple\" AS fruit, \"carrot\" AS vegetable)" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT STRUCT(1, 2) FROM Users" in { val sql = "SELECT STRUCT(1, 2) FROM Users" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT ARRAY(SELECT STRUCT(1, 2)) FROM Users" in { val sql = "SELECT ARRAY(SELECT STRUCT(1, 2)) FROM Users" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster" in { val sql = "SELECT * FROM Roster" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM db.Roster" in { val sql = "SELECT * FROM db.Roster" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT s.SingerId, s.FirstName, s.LastName, s.SingerInfo\nFROM Singers@{FORCE_INDEX=SingersByFirstLastName} AS s\nWHERE s.FirstName = \"Catalina\" AND s.LastName > \"M\"" in { val sql = "SELECT s.SingerId, s.FirstName, s.LastName, s.SingerInfo FROM Singers@{FORCE_INDEX=SingersByFirstLastName} AS s WHERE s.FirstName = \"Catalina\" AND s.LastName > \"M\"" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT s.SingerId, s.FirstName, s.LastName, s.SingerInfo, c.ConcertDate\nFROM Singers@{FORCE_INDEX=SingersByFirstLastName} AS s JOIN\n Concerts@{FORCE_INDEX=ConcertsBySingerId} AS c ON s.SingerId = c.SingerId\nWHERE s.FirstName = \"Catalina\" AND s.LastName > \"M\"" in { val sql = "SELECT s.SingerId, s.FirstName, s.LastName, s.SingerInfo, c.ConcertDate FROM Singers@{FORCE_INDEX=SingersByFirstLastName} AS s JOIN Concerts@{FORCE_INDEX=ConcertsBySingerId} AS c ON s.SingerId = c.SingerId WHERE s.FirstName = \"Catalina\" AND s.LastName > \"M\"" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM T1 t1, 
t1.array_column" in { val sql = "SELECT * FROM T1 t1, t1.array_column" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM T1 t1, t1.struct_column.array_field" in { val sql = "SELECT * FROM T1 t1, t1.struct_column.array_field" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT (SELECT ARRAY_AGG(c) FROM t1.array_column c) FROM T1 t1" in { val sql = "SELECT (SELECT ARRAY_AGG(c) FROM t1.array_column c) FROM T1 t1" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT a.struct_field1 FROM T1 t1, t1.array_of_structs a" in { val sql = "SELECT a.struct_field1 FROM T1 t1, t1.array_of_structs a" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT (SELECT STRING_AGG(a.struct_field1) FROM t1.array_of_structs a) FROM T1 t1" in { val sql = "SELECT (SELECT STRING_AGG(a.struct_field1) FROM t1.array_of_structs a) FROM T1 t1" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM UNNEST ([1, 2, 3])" in { val sql = "SELECT * FROM UNNEST ([1, 2, 3])" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT x\nFROM mytable AS t,\n t.struct_typed_column.array_typed_field1 AS x" in { val sql = "SELECT x FROM mytable AS t, t.struct_typed_column.array_typed_field1 AS x" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM UNNEST ( ) WITH OFFSET AS num" in { val sql = "SELECT * FROM UNNEST ( ) WITH OFFSET AS num" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT AVG ( PointsScored )\nFROM\n( SELECT PointsScored\n FROM Stats\n WHERE SchoolID = 77 )" in { val sql = "SELECT AVG ( PointsScored ) FROM ( SELECT PointsScored FROM Stats WHERE SchoolID = 77 )" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT 
r.LastName\nFROM\n( SELECT * FROM Roster) AS r" in { val sql = "SELECT r.LastName FROM ( SELECT * FROM Roster) AS r" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT r.LastName, r.SchoolId,\n ARRAY(SELECT AS STRUCT p.OpponentID, p.PointsScored\n FROM PlayerStats AS p\n WHERE p.LastName = r.LastName) AS PlayerStats\nFROM Roster AS r" in { val sql = "SELECT r.LastName, r.SchoolId, ARRAY(SELECT AS STRUCT p.OpponentID, p.PointsScored FROM PlayerStats AS p WHERE p.LastName = r.LastName) AS PlayerStats FROM Roster AS r" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster, TeamMascot" in { val sql = "SELECT * FROM Roster, TeamMascot" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster CROSS JOIN TeamMascot" in { val sql = "SELECT * FROM Roster CROSS JOIN TeamMascot" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster INNER JOIN PlayerStats\nON Roster.LastName = PlayerStats.LastName" in { val sql = "SELECT * FROM Roster INNER JOIN PlayerStats ON Roster.LastName = PlayerStats.LastName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT FirstName\nFROM Roster INNER JOIN PlayerStats\nUSING (LastName)" in { val sql = "SELECT FirstName FROM Roster INNER JOIN PlayerStats USING (LastName)" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT FirstName\nFROM Roster INNER JOIN PlayerStats\nON Roster.LastName = PlayerStats.LastName" in { val sql = "SELECT FirstName FROM Roster INNER JOIN PlayerStats ON Roster.LastName = PlayerStats.LastName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster INNER JOIN PlayerStats\nUSING (LastName)" in { val sql = "SELECT * FROM Roster INNER JOIN PlayerStats USING (LastName)" val (parser, nut) = 
_createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster INNER JOIN PlayerStats\nON Roster.LastName = PlayerStats.LastName" in { val sql = "SELECT * FROM Roster INNER JOIN PlayerStats ON Roster.LastName = PlayerStats.LastName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM a LEFT JOIN b ON TRUE LEFT JOIN c ON TRUE" in { val sql = "SELECT * FROM a LEFT JOIN b ON TRUE LEFT JOIN c ON TRUE" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster FULL JOIN TeamMascot USING (SchoolID)\nFULL JOIN PlayerStats USING (LastName)" in { val sql = "SELECT * FROM Roster FULL JOIN TeamMascot USING (SchoolID) FULL JOIN PlayerStats USING (LastName)" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM ( (Roster FULL JOIN TeamMascot USING (SchoolID))\nFULL JOIN PlayerStats USING (LastName))" in { val sql = "SELECT * FROM ( (Roster FULL JOIN TeamMascot USING (SchoolID)) FULL JOIN PlayerStats USING (LastName))" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM (Roster FULL JOIN (TeamMascot FULL JOIN PlayerStats USING\n(LastName)) USING (SchoolID))" in { val sql = "SELECT * FROM (Roster FULL JOIN (TeamMascot FULL JOIN PlayerStats USING (LastName)) USING (SchoolID))" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM a JOIN b ON TRUE, b JOIN c ON TRUE" in { val sql = "SELECT * FROM a JOIN b ON TRUE, b JOIN c ON TRUE" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM ((a JOIN b ON TRUE) CROSS JOIN b) JOIN c ON TRUE" in { val sql = "SELECT * FROM ((a JOIN b ON TRUE) CROSS JOIN b) JOIN c ON TRUE" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster\nWHERE SchoolID = 52" in { val sql = "SELECT * FROM Roster 
WHERE SchoolID = 52" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster\nWHERE STARTS_WITH(LastName, \"Mc\") OR STARTS_WITH(LastName, \"Mac\")" in { val sql = "SELECT * FROM Roster WHERE STARTS_WITH(LastName, \"Mc\") OR STARTS_WITH(LastName, \"Mac\")" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster INNER JOIN TeamMascot\nON Roster.SchoolID = TeamMascot.SchoolID" in { val sql = "SELECT * FROM Roster INNER JOIN TeamMascot ON Roster.SchoolID = TeamMascot.SchoolID" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster CROSS JOIN TeamMascot\nWHERE Roster.SchoolID = TeamMascot.SchoolID" in { val sql = "SELECT * FROM Roster CROSS JOIN TeamMascot WHERE Roster.SchoolID = TeamMascot.SchoolID" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT SUM(PointsScored), LastName\nFROM PlayerStats\nGROUP BY LastName" in { val sql = "SELECT SUM(PointsScored), LastName FROM PlayerStats GROUP BY LastName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT SUM(PointsScored), LastName, FirstName\nFROM PlayerStats\nGROUP BY LastName, FirstName" in { val sql = "SELECT SUM(PointsScored), LastName, FirstName FROM PlayerStats GROUP BY LastName, FirstName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT SUM(PointsScored), LastName, FirstName\nFROM PlayerStats\nGROUP BY 2, FirstName" in { val sql = "SELECT SUM(PointsScored), LastName, FirstName FROM PlayerStats GROUP BY 2, FirstName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT SUM(PointsScored), LastName as last_name\nFROM PlayerStats\nGROUP BY last_name" in { val sql = "SELECT SUM(PointsScored), LastName as last_name FROM PlayerStats GROUP BY last_name" val (parser, nut) = _createParser(sql) 
parser.minisql() nut.queryString must_== sql } "SELECT LastName\nFROM Roster\nGROUP BY LastName\nHAVING SUM(PointsScored) > 15" in { val sql = "SELECT LastName FROM Roster GROUP BY LastName HAVING SUM(PointsScored) > 15" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT LastName, SUM(PointsScored) AS ps\nFROM Roster\nGROUP BY LastName\nHAVING ps > 0" in { val sql = "SELECT LastName, SUM(PointsScored) AS ps FROM Roster GROUP BY LastName HAVING ps > 0" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT LastName, SUM(PointsScored) AS total\nFROM PlayerStats\nGROUP BY LastName\nHAVING total > 15" in { val sql = "SELECT LastName, SUM(PointsScored) AS total FROM PlayerStats GROUP BY LastName HAVING total > 15" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT LastName\nFROM PlayerStats\nGROUP BY LastName\nHAVING SUM(PointsScored) > 15" in { val sql = "SELECT LastName FROM PlayerStats GROUP BY LastName HAVING SUM(PointsScored) > 15" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT LastName, COUNT(*)\nFROM PlayerStats\nGROUP BY LastName\nHAVING SUM(PointsScored) > 15" in { val sql = "SELECT LastName, COUNT(*) FROM PlayerStats GROUP BY LastName HAVING SUM(PointsScored) > 15" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT LastName, PointsScored, OpponentID\nFROM PlayerStats\nORDER BY SchoolID, LastName" in { val sql = "SELECT LastName, PointsScored, OpponentID FROM PlayerStats ORDER BY SchoolID, LastName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster\nUNION ALL\nSELECT * FROM TeamMascot\nORDER BY SchoolID" in { val sql = "SELECT * FROM Roster UNION ALL SELECT * FROM TeamMascot ORDER BY SchoolID" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "( SELECT * 
FROM Roster\n UNION ALL\n SELECT * FROM TeamMascot )\nORDER BY SchoolID" in { val sql = "( SELECT * FROM Roster UNION ALL SELECT * FROM TeamMascot ) ORDER BY SchoolID" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster\nUNION ALL\n( SELECT * FROM TeamMascot\n ORDER BY SchoolID )" in { val sql = "SELECT * FROM Roster UNION ALL ( SELECT * FROM TeamMascot ORDER BY SchoolID )" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT SUM(PointsScored), LastName\nFROM PlayerStats\nORDER BY LastName" in { val sql = "SELECT SUM(PointsScored), LastName FROM PlayerStats ORDER BY LastName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT SUM(PointsScored), LastName\nFROM PlayerStats\nORDER BY 2" in { val sql = "SELECT SUM(PointsScored), LastName FROM PlayerStats ORDER BY 2" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT s.FirstName, s2.SongName\nFROM Singers AS s, (SELECT * FROM Songs) AS s2" in { val sql = "SELECT s.FirstName, s2.SongName FROM Singers AS s, (SELECT * FROM Songs) AS s2" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT s.FirstName AS name, LOWER(s.FirstName) AS lname\nFROM Singers s" in { val sql = "SELECT s.FirstName AS name, LOWER(s.FirstName) AS lname FROM Singers s" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT FirstName\nFROM Singers AS s, s.Concerts" in { val sql = "SELECT FirstName FROM Singers AS s, s.Concerts" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT FirstName, s.ReleaseDate\nFROM Singers s WHERE ReleaseDate = 1975" in { val sql = "SELECT FirstName, s.ReleaseDate FROM Singers s WHERE ReleaseDate = 1975" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Singers as s, Songs as 
s2\nORDER BY s.LastName" in { val sql = "SELECT * FROM Singers as s, Songs as s2 ORDER BY s.LastName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT LastName AS last, SingerID\nFROM Singers\nORDER BY last" in { val sql = "SELECT LastName AS last, SingerID FROM Singers ORDER BY last" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT SingerID AS sid, COUNT(Songid) AS s2id\nFROM Songs\nGROUP BY 1\nORDER BY 2 DESC" in { val sql = "SELECT SingerID AS sid, COUNT(Songid) AS s2id FROM Songs GROUP BY 1 ORDER BY 2 DESC" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT SingerID AS sid, COUNT(Songid) AS s2id\nFROM Songs\nGROUP BY sid\nORDER BY s2id DESC" in { val sql = "SELECT SingerID AS sid, COUNT(Songid) AS s2id FROM Songs GROUP BY sid ORDER BY s2id DESC" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT SingerID FROM Singers, Songs" in { val sql = "SELECT SingerID FROM Singers, Songs" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT FirstName AS name, LastName AS name,\nFROM Singers\nGROUP BY name" in { val sql = "SELECT FirstName AS name, LastName AS name FROM Singers GROUP BY name" val (parser, nut) = _createParser(sql) parser.minisql() must throwA[RuntimeException] // duplicate alias } "SELECT UPPER(LastName) AS LastName\nFROM Singers\nGROUP BY LastName" in { val sql = "SELECT UPPER(LastName) AS LastName FROM Singers GROUP BY LastName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT x, z AS T\nFROM some_table T\nGROUP BY T.x" in { // NOTICE: This test will occur error only after resolving references val sql = "SELECT x, z AS T FROM some_table T GROUP BY T.x" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT LastName, BirthYear AS BirthYear\nFROM Singers\nGROUP 
BY BirthYear" in { val sql = "SELECT LastName, BirthYear AS BirthYear FROM Singers GROUP BY BirthYear" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster JOIN TeamMascot\nON Roster.SchoolID = TeamMascot.SchoolID" in { val sql = "SELECT * FROM Roster JOIN TeamMascot ON Roster.SchoolID = TeamMascot.SchoolID" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster CROSS JOIN TeamMascot\nON Roster.SchoolID = TeamMascot.SchoolID" in { val sql = "SELECT * FROM Roster CROSS JOIN TeamMascot ON Roster.SchoolID = TeamMascot.SchoolID" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster FULL JOIN TeamMascot\nON Roster.SchoolID = TeamMascot.SchoolID" in { val sql = "SELECT * FROM Roster FULL JOIN TeamMascot ON Roster.SchoolID = TeamMascot.SchoolID" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster LEFT JOIN TeamMascot\nON Roster.SchoolID = TeamMascot.SchoolID" in { val sql = "SELECT * FROM Roster LEFT JOIN TeamMascot ON Roster.SchoolID = TeamMascot.SchoolID" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT * FROM Roster RIGHT JOIN TeamMascot\nON Roster.SchoolID = TeamMascot.SchoolID" in { val sql = "SELECT * FROM Roster RIGHT JOIN TeamMascot ON Roster.SchoolID = TeamMascot.SchoolID" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT LastName, SUM(PointsScored)\nFROM PlayerStats\nGROUP BY LastName" in { val sql = "SELECT LastName, SUM(PointsScored) FROM PlayerStats GROUP BY LastName" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT Mascot AS X, SchoolID AS Y\nFROM TeamMascot\nUNION ALL\nSELECT LastName, PointsScored\nFROM PlayerStats" in { val sql = "SELECT Mascot AS X, SchoolID AS Y FROM TeamMascot UNION ALL SELECT LastName, 
PointsScored FROM PlayerStats" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } // Array "SELECT [1, 2, 3] as numbers" in { val sql = "SELECT [1, 2, 3] as numbers" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT [\"apple\", \"pear\", \"orange\"] as fruit" in { val sql = "SELECT [\"apple\", \"pear\", \"orange\"] as fruit" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT [true, false, true] as booleans" in { val sql = "SELECT [true, false, true] as booleans" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT [a, b, c]\nFROM\n (SELECT 5 AS a,\n 37 AS b,\n 406 AS c)" in { val sql = "SELECT [a, b, c] FROM (SELECT 5 AS a, 37 AS b, 406 AS c)" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT [a, b, c]\nFROM\n (SELECT CAST(5 AS INT64) AS a,\n CAST(37 AS FLOAT64) AS b,\n 406 AS c)" in { val sql = "SELECT [a, b, c] FROM (SELECT CAST(5 AS INT64) AS a, CAST(37 AS FLOAT64) AS b, 406 AS c)" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT ARRAY<FLOAT64>[1, 2, 3] as floats" in { val sql = "SELECT ARRAY<FLOAT64>[1, 2, 3] as floats" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT [1, 2, 3] as numbers" in { val sql = "SELECT [1, 2, 3] as numbers" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT some_numbers,\n some_numbers[OFFSET(1)] AS offset_1,\n some_numbers[ORDINAL(1)] AS ordinal_1\nFROM sequences" in { val sql = "SELECT some_numbers, some_numbers[OFFSET(1)] AS offset_1, some_numbers[ORDINAL(1)] AS ordinal_1 FROM sequences" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT some_numbers,\n ARRAY_LENGTH(some_numbers) AS len\nFROM sequences" in { val sql = "SELECT some_numbers, 
ARRAY_LENGTH(some_numbers) AS len FROM sequences" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT race,\n participant.name,\n participant.splits\nFROM\n (SELECT \"800M\" AS race,\n [STRUCT(\"Rudisha\" as name, [23.4, 26.3, 26.4, 26.1] AS splits),\n STRUCT(\"Makhloufi\" AS name, [24.5, 25.4, 26.6, 26.1] AS splits),\n STRUCT(\"Murphy\" AS name, [23.9, 26.0, 27.0, 26.0] AS splits),\n STRUCT(\"Bosse\" AS name, [23.6, 26.2, 26.5, 27.1] AS splits),\n STRUCT(\"Rotich\" AS name, [24.7, 25.6, 26.9, 26.4] AS splits),\n STRUCT(\"Lewandowski\" AS name, [25.0, 25.7, 26.3, 27.2] AS splits),\n STRUCT(\"Kipketer\" AS name, [23.2, 26.1, 27.3, 29.4] AS splits),\n STRUCT(\"Berian\" AS name, [23.7, 26.1, 27.0, 29.3] as splits)]\n AS participants\n ) AS r\nCROSS JOIN UNNEST(r.participants) AS participant" in { val sql = "SELECT race, participant.name, participant.splits FROM (SELECT \"800M\" AS race, [STRUCT(\"Rudisha\" as name, [23.4, 26.3, 26.4, 26.1] AS splits), STRUCT(\"Makhloufi\" AS name, [24.5, 25.4, 26.6, 26.1] AS splits), STRUCT(\"Murphy\" AS name, [23.9, 26.0, 27.0, 26.0] AS splits), STRUCT(\"Bosse\" AS name, [23.6, 26.2, 26.5, 27.1] AS splits), STRUCT(\"Rotich\" AS name, [24.7, 25.6, 26.9, 26.4] AS splits), STRUCT(\"Lewandowski\" AS name, [25.0, 25.7, 26.3, 27.2] AS splits), STRUCT(\"Kipketer\" AS name, [23.2, 26.1, 27.3, 29.4] AS splits), STRUCT(\"Berian\" AS name, [23.7, 26.1, 27.0, 29.3] as splits)] AS participants) AS r CROSS JOIN UNNEST(r.participants) AS participant" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT race,\n (SELECT name\n FROM UNNEST(participants)\n ORDER BY (\n SELECT SUM(duration)\n FROM UNNEST(splits) AS duration) ASC\n LIMIT 1) AS fastest_racer\nFROM\n (SELECT \"800M\" AS race,\n [STRUCT(\"Rudisha\" as name, [23.4, 26.3, 26.4, 26.1] AS splits),\n STRUCT(\"Makhloufi\" AS name, [24.5, 25.4, 26.6, 26.1] AS splits),\n STRUCT(\"Murphy\" AS name, [23.9, 26.0, 27.0, 
26.0] AS splits),\n STRUCT(\"Bosse\" AS name, [23.6, 26.2, 26.5, 27.1] AS splits),\n STRUCT(\"Rotich\" AS name, [24.7, 25.6, 26.9, 26.4] AS splits),\n STRUCT(\"Lewandowski\" AS name, [25.0, 25.7, 26.3, 27.2] AS splits),\n STRUCT(\"Kipketer\" AS name, [23.2, 26.1, 27.3, 29.4] AS splits),\n STRUCT(\"Berian\" AS name, [23.7, 26.1, 27.0, 29.3] as splits)]\n AS participants\n ) AS r" in { val sql = "SELECT race, (SELECT name FROM UNNEST(participants) ORDER BY ( SELECT SUM(duration) FROM UNNEST(splits) AS duration) ASC LIMIT 1) AS fastest_racer FROM (SELECT \"800M\" AS race, [STRUCT(\"Rudisha\" as name, [23.4, 26.3, 26.4, 26.1] AS splits), STRUCT(\"Makhloufi\" AS name, [24.5, 25.4, 26.6, 26.1] AS splits), STRUCT(\"Murphy\" AS name, [23.9, 26.0, 27.0, 26.0] AS splits), STRUCT(\"Bosse\" AS name, [23.6, 26.2, 26.5, 27.1] AS splits), STRUCT(\"Rotich\" AS name, [24.7, 25.6, 26.9, 26.4] AS splits), STRUCT(\"Lewandowski\" AS name, [25.0, 25.7, 26.3, 27.2] AS splits), STRUCT(\"Kipketer\" AS name, [23.2, 26.1, 27.3, 29.4] AS splits), STRUCT(\"Berian\" AS name, [23.7, 26.1, 27.0, 29.3] as splits)] AS participants) AS r" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT race,\n (SELECT name\n FROM UNNEST(participants),\n UNNEST(splits) AS duration\n ORDER BY duration ASC LIMIT 1) AS runner_with_fastest_lap\nFROM\n (SELECT \"800M\" AS race,\n [STRUCT(\"Rudisha\" as name, [23.4, 26.3, 26.4, 26.1] AS splits),\n STRUCT(\"Makhloufi\" AS name, [24.5, 25.4, 26.6, 26.1] AS splits),\n STRUCT(\"Murphy\" AS name, [23.9, 26.0, 27.0, 26.0] AS splits),\n STRUCT(\"Bosse\" AS name, [23.6, 26.2, 26.5, 27.1] AS splits),\n STRUCT(\"Rotich\" AS name, [24.7, 25.6, 26.9, 26.4] AS splits),\n STRUCT(\"Lewandowski\" AS name, [25.0, 25.7, 26.3, 27.2] AS splits),\n STRUCT(\"Kipketer\" AS name, [23.2, 26.1, 27.3, 29.4] AS splits),\n STRUCT(\"Berian\" AS name, [23.7, 26.1, 27.0, 29.3] as splits)]\n AS participants\n ) AS r" in { val sql = "SELECT race, (SELECT name 
FROM UNNEST(participants), UNNEST(splits) AS duration ORDER BY duration ASC LIMIT 1) AS runner_with_fastest_lap FROM (SELECT \"800M\" AS race, [STRUCT(\"Rudisha\" as name, [23.4, 26.3, 26.4, 26.1] AS splits), STRUCT(\"Makhloufi\" AS name, [24.5, 25.4, 26.6, 26.1] AS splits), STRUCT(\"Murphy\" AS name, [23.9, 26.0, 27.0, 26.0] AS splits), STRUCT(\"Bosse\" AS name, [23.6, 26.2, 26.5, 27.1] AS splits), STRUCT(\"Rotich\" AS name, [24.7, 25.6, 26.9, 26.4] AS splits), STRUCT(\"Lewandowski\" AS name, [25.0, 25.7, 26.3, 27.2] AS splits), STRUCT(\"Kipketer\" AS name, [23.2, 26.1, 27.3, 29.4] AS splits), STRUCT(\"Berian\" AS name, [23.7, 26.1, 27.0, 29.3] as splits)] AS participants) AS r" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT race,\n (SELECT name\n FROM UNNEST(participants)\n CROSS JOIN UNNEST(splits) AS duration\n ORDER BY duration ASC LIMIT 1) AS runner_with_fastest_lap\nFROM\n (SELECT \"800M\" AS race,\n [STRUCT(\"Rudisha\" as name, [23.4, 26.3, 26.4, 26.1] AS splits),\n STRUCT(\"Makhloufi\" AS name, [24.5, 25.4, 26.6, 26.1] AS splits),\n STRUCT(\"Murphy\" AS name, [23.9, 26.0, 27.0, 26.0] AS splits),\n STRUCT(\"Bosse\" AS name, [23.6, 26.2, 26.5, 27.1] AS splits),\n STRUCT(\"Rotich\" AS name, [24.7, 25.6, 26.9, 26.4] AS splits),\n STRUCT(\"Lewandowski\" AS name, [25.0, 25.7, 26.3, 27.2] AS splits),\n STRUCT(\"Kipketer\" AS name, [23.2, 26.1, 27.3, 29.4] AS splits),\n STRUCT(\"Berian\" AS name, [23.7, 26.1, 27.0, 29.3] as splits)]\n AS participants\n ) AS r" in { val sql = "SELECT race, (SELECT name FROM UNNEST(participants) CROSS JOIN UNNEST(splits) AS duration ORDER BY duration ASC LIMIT 1) AS runner_with_fastest_lap FROM (SELECT \"800M\" AS race, [STRUCT(\"Rudisha\" as name, [23.4, 26.3, 26.4, 26.1] AS splits), STRUCT(\"Makhloufi\" AS name, [24.5, 25.4, 26.6, 26.1] AS splits), STRUCT(\"Murphy\" AS name, [23.9, 26.0, 27.0, 26.0] AS splits), STRUCT(\"Bosse\" AS name, [23.6, 26.2, 26.5, 27.1] AS splits), 
STRUCT(\"Rotich\" AS name, [24.7, 25.6, 26.9, 26.4] AS splits), STRUCT(\"Lewandowski\" AS name, [25.0, 25.7, 26.3, 27.2] AS splits), STRUCT(\"Kipketer\" AS name, [23.2, 26.1, 27.3, 29.4] AS splits), STRUCT(\"Berian\" AS name, [23.7, 26.1, 27.0, 29.3] as splits)] AS participants) AS r" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT\n name, sum(duration) as duration\nFROM\n (SELECT \"800M\" AS race,\n [STRUCT(\"Rudisha\" AS name, [23.4, 26.3, 26.4, 26.1] AS splits),\n STRUCT(\"Makhloufi\" AS name, [24.5, 25.4, 26.6, 26.1] AS splits),\n STRUCT(\"Murphy\" AS name, [23.9, 26.0, 27.0, 26.0] AS splits),\n STRUCT(\"Bosse\" AS name, [23.6, 26.2, 26.5, 27.1] AS splits),\n STRUCT(\"Rotich\" AS name, [24.7, 25.6, 26.9, 26.4] AS splits),\n STRUCT(\"Lewandowski\" AS name, [25.0, 25.7, 26.3, 27.2] AS splits),\n STRUCT(\"Kipketer\" AS name, [23.2, 26.1, 27.3, 29.4] AS splits),\n STRUCT(\"Nathan\" as name, ARRAY<FLOAT64>[] as splits),\n STRUCT(\"David\" as name, NULL as splits)]\n AS participants) AS races,\n races.participants LEFT JOIN participants.splits duration\nGROUP BY name" in { val sql = "SELECT name, sum(duration) as duration FROM (SELECT \"800M\" AS race, [STRUCT(\"Rudisha\" AS name, [23.4, 26.3, 26.4, 26.1] AS splits), STRUCT(\"Makhloufi\" AS name, [24.5, 25.4, 26.6, 26.1] AS splits), STRUCT(\"Murphy\" AS name, [23.9, 26.0, 27.0, 26.0] AS splits), STRUCT(\"Bosse\" AS name, [23.6, 26.2, 26.5, 27.1] AS splits), STRUCT(\"Rotich\" AS name, [24.7, 25.6, 26.9, 26.4] AS splits), STRUCT(\"Lewandowski\" AS name, [25.0, 25.7, 26.3, 27.2] AS splits), STRUCT(\"Kipketer\" AS name, [23.2, 26.1, 27.3, 29.4] AS splits), STRUCT(\"Nathan\" as name, ARRAY<FLOAT64>[] as splits), STRUCT(\"David\" as name, NULL as splits)] AS participants) AS races, races.participants LEFT JOIN participants.splits duration GROUP BY name" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT some_numbers,\n ARRAY(SELECT x * 
2\n FROM UNNEST(some_numbers) AS x) AS doubled\nFROM sequences" in { val sql = "SELECT some_numbers, ARRAY(SELECT x * 2 FROM UNNEST(some_numbers) AS x) AS doubled FROM sequences" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT\n ARRAY(SELECT x * 2\n FROM UNNEST(some_numbers) AS x\n WHERE x < 5) AS doubled_less_than_five\nFROM sequences" in { val sql = "SELECT ARRAY(SELECT x * 2 FROM UNNEST(some_numbers) AS x WHERE x < 5) AS doubled_less_than_five FROM sequences" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT ARRAY(SELECT DISTINCT x\n FROM UNNEST(some_numbers) AS x) AS unique_numbers\nFROM sequences\nWHERE id = 1" in { val sql = "SELECT ARRAY(SELECT DISTINCT x FROM UNNEST(some_numbers) AS x) AS unique_numbers FROM sequences WHERE id = 1" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT\n ARRAY(SELECT x\n FROM UNNEST(some_numbers) AS x\n WHERE 2 IN UNNEST(some_numbers)) AS contains_two\nFROM sequences" in { val sql = "SELECT ARRAY(SELECT x FROM UNNEST(some_numbers) AS x WHERE 2 IN UNNEST(some_numbers)) AS contains_two FROM sequences" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT ARRAY_AGG(fruit) AS fruit_basket\nFROM fruits" in { val sql = "SELECT ARRAY_AGG(fruit) AS fruit_basket FROM fruits" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT some_numbers,\n (SELECT SUM(x)\n FROM UNNEST(s.some_numbers) x) AS sums\nFROM sequences s" in { val sql = "SELECT some_numbers, (SELECT SUM(x) FROM UNNEST(s.some_numbers) x) AS sums FROM sequences s" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT ARRAY(\n SELECT STRUCT(point)\n FROM points)\n AS coordinates" in { val sql = "SELECT ARRAY( SELECT STRUCT(point) FROM points) AS coordinates" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString 
must_== sql } // Information schema "SELECT\n t.table_name\nFROM\n information_schema.tables AS t\nWHERE\n t.table_catalog = '' and t.table_schema = ''" in { val sql = "SELECT t.table_name FROM information_schema.tables AS t WHERE t.table_catalog = '' and t.table_schema = ''" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT\n t.table_name,\n t.parent_table_name\nFROM\n information_schema.tables AS t\nWHERE\n t.table_catalog = ''\n AND\n t.table_schema = ''\nORDER BY\n t.table_catalog,\n t.table_schema,\n t.table_name" in { val sql = "SELECT t.table_name, t.parent_table_name FROM information_schema.tables AS t WHERE t.table_catalog = '' AND t.table_schema = '' ORDER BY t.table_catalog, t.table_schema, t.table_name" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT\n t.column_name,\n t.spanner_type,\n t.is_nullable\nFROM\n information_schema.columns AS t\nWHERE\n t.table_catalog = ''\n AND\n t.table_schema = ''\n AND\n t.table_name = 'MyTable'\nORDER BY\n t.table_catalog,\n t.table_schema,\n t.table_name,\n t.ordinal_position" in { val sql = "SELECT t.column_name, t.spanner_type, t.is_nullable FROM information_schema.columns AS t WHERE t.table_catalog = '' AND t.table_schema = '' AND t.table_name = 'MyTable' ORDER BY t.table_catalog, t.table_schema, t.table_name, t.ordinal_position" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT\n t.table_name,\n t.index_name,\n t.parent_table_name\nFROM\n information_schema.indexes AS t\nWHERE\n t.table_catalog = ''\n AND\n t.table_schema = ''\n AND\n t.index_type != 'PRIMARY_KEY'\nORDER BY\n t.table_catalog,\n t.table_schema,\n t.table_name,\n t.index_name" in { val sql = "SELECT t.table_name, t.index_name, t.parent_table_name FROM information_schema.indexes AS t WHERE t.table_catalog = '' AND t.table_schema = '' AND t.index_type != 'PRIMARY_KEY' ORDER BY t.table_catalog, t.table_schema, 
t.table_name, t.index_name" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } // "SELECT 1+1" in { val sql = "SELECT 1+1" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT 1+-1" in { val sql = "SELECT 1+-1" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT 1++1" in { val sql = "SELECT 1++1" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.INDEX_COLUMNS WHERE TABLE_NAME='ACCOUNT_TABLE' AND INDEX_NAME='DEVICE_ID_INDEX' AND COLUMN_ORDERING IS NULL" in { val sql = "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.INDEX_COLUMNS WHERE TABLE_NAME='ACCOUNT_TABLE' AND INDEX_NAME='DEVICE_ID_INDEX' AND COLUMN_ORDERING IS NULL" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT (SELECT * FROM UNNEST([0,2,3])) AS x" in { val sql = "SELECT (SELECT * FROM UNNEST([0,2,3])) AS x" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT MAX(x) AS max FROM UNNEST([8, NULL, 37, 4, NULL, 55]) AS x" in { val sql = "SELECT MAX(x) AS max FROM UNNEST([8, NULL, 37, 4, NULL, 55]) AS x" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } "SELECT item_id, SUM(CAST (price AS FLOAT64)), item_type FROM item_table WHERE item_sub_id IS NOT NULL AND (item_count > 0 OR back_order IS NOT NULL) AND item_id IN (101,102,103,104) GROUP BY item_id, item_type" in { val sql = "SELECT item_id, SUM(CAST (price AS FLOAT64)), item_type FROM item_table WHERE item_sub_id IS NOT NULL AND (item_count > 0 OR back_order IS NOT NULL) AND item_id IN (101,102,103,104)"// GROUP BY item_id, item_type" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } } "INSERT" should { "INSERT INTO test_tbl VALUES(3, IF( (select count from test_tbl where id=2) > 0, 1, 0))" in { val sql = "insert 
into test_tbl values(3, IF( (select count from test_tbl where id=2) > 0, 1, 0))" val (parser,nut) = _createParser(sql) parser.minisql() println(nut.queryString) nut.queryString must_== "3,IF((select count from test_tbl where id=2 > 0),1,0)" } "INSERT INTO TEST_TABLE SELECT x,y FROM UNNEST([0,1,2,3]) AS x, UNNEST(['A','B','C','D']) AS y" in { val sql = "SELECT x,y FROM UNNEST([0,1,2,3]) as x, UNNEST(['A','B','C','D']) as y" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } } "DELETE" should { "DELETE FROM test_tbl WHERE id > 0" in { val sql = "DELETE FROM test_tbl WHERE id > 0" val (parser,nut) = _createParser(sql) parser.minisql() nut.queryString must_== "WHERE id > 0" } } "array_path" should { "OFFSET" in { val sql = "UPDATE test_tbl SET count=ARRAY<INT64>[0,1,2][OFFSET(0)] WHERE ID=0" val (parser,nut) = _createParser(sql) parser.minisql() nut.queryString must_== "0" } "OFFSET out of range" in { val sql = "UPDATE test_tbl SET count=ARRAY<INT64>[0,1,2][OFFSET(3)] WHERE ID=0" val (parser,nut) = _createParser(sql) parser.minisql() must throwA[RuntimeException] } "ORDINAL" in { val sql = "UPDATE test_tbl SET count=ARRAY<INT64>[0,1,2][ORDINAL(1)] WHERE ID=0" val (parser,nut) = _createParser(sql) parser.minisql() nut.queryString must_== "0" } "ORDINAL out of range" in { val sql = "UPDATE test_tbl SET count=ARRAY<INT64>[0,1,2][ORDINAL(0)] WHERE ID=0" val (parser,nut) = _createParser(sql) parser.minisql() must throwA[RuntimeException] } } "escaped string" should { "normally success" in { val sql = "INSERT INTO TEST_TABLE VALUES(\"\\\"[\\u0085]\\n'\")" val (parser,nut) = _createParser(sql) parser.minisql() nut.queryString must_== "\"[\u0085]\n'" } } "create" should { "create table with string(MAX)" in { val sql = "CREATE TABLE TEST_TABLE ( str STRING(MAX) NOT NULL) PRIMARY KEY (str)" val (parser, nut) = _createParser(sql) parser.minisql() nut.queryString must_== sql } } }
//
//  IRExpenseHeaderTableViewCell.h
//  ExpenseManager
//
//  Created by <NAME> on 08/09/14.
//  Copyright (c) 2014 Shibin. All rights reserved.
//

#import <UIKit/UIKit.h>

// Table view cell used as an expense header row. Declares two labels,
// wired up in Interface Builder (IBOutlet): one for the category name
// and one for the total expense of that category.
@interface IRExpenseHeaderTableViewCell : UITableViewCell

// Label showing the expense category's name.
@property (weak, nonatomic) IBOutlet UILabel *categoryName;

// Label showing the total expense for the category.
@property (weak, nonatomic) IBOutlet UILabel *totalExpense;

@end
<reponame>hugleMr/Eazax-cases<gh_stars>0 /** * 扩展 cc 模块,声明一些 creator.d.ts 中没有声明(但实际上有)的东西~ * @author 陈皮皮(ifaswind) * @see https://gitee.com/ifaswind/eazax-ccc/blob/master/declarations/cc.d.ts * @version 20210603 */ declare module cc { interface Node { _renderFlag: number; _touchListener?: TouchOneByOne; setLocalDirty(flag: Node._LocalDirtyFlag): void; } interface TouchOneByOne { _claimedTouches: Array; swallowTouches: boolean; onTouchBegan: Function; onTouchMoved: Function; onTouchEnded: Function; onTouchCancelled: Function; setSwallowTouches(needSwallow: boolean): void; isSwallowTouches(): boolean; clone(): TouchOneByOne; checkAvailable(): boolean; } interface Color { _val: number; } interface RenderTexture { updateSize(width?: number, height?: number): void; } interface RenderComponent { _materials: Material[], _assembler: Assembler; _vertsDirty: any; _resetAssembler(): void; __preload(): void; setVertsDirty(): void; _on3DNodeChanged(): void; _validateRender(): void; markForValidate(): void; markForRender(enable): void; disableRender(): void; _getDefaultMaterial(): Material; _activateMaterial(): void; _updateMaterial(): void; _updateColor(): void; _checkBacth(renderer, cullingMask): void; } class Assembler { _renderData: RenderData; _renderComp: RenderComponent; register(renderCompCtor, assembler): void; init(renderComp): void; updateRenderData(comp): void; fillBuffers(comp, renderer): void; getVfmt(): gfx.VertexFormat; } class Assembler2D extends Assembler { /** 每个顶点的数据数量 */ floatsPerVert: number = 5; /** 顶点数量 */ verticesCount: number = 4; /** 索引数量 */ indicesCount: number = 6; /** UV 偏移 */ uvOffset: number = 2; /** 颜色偏移 */ colorOffset: number = 4; /** 所有顶点的数据总数量 */ get verticesFloats(): number; initData(): void; initLocal(): void; updateColor(comp: RenderComponent, color: Color): void; getBuffer(): MeshBuffer; updateWorldVerts(comp: RenderComponent): void; packToDynamicAtlas(comp: RenderComponent, frame: SpriteFrame): void; } class RenderData { vDatas: 
Float32Array[]; uintVDatas: Uint32Array[]; iDatas: Uint16Array[]; meshCount: number; _infos: any[]; _flexBuffer: any; init(assembler: Assembler): void; clear(): void; updateMesh(index: number, vertices: number, indices: number): void; updateMeshRange(verticesCount: number, indicesCount: number): void; createData(index: number, verticesFloats: number, indicesCount: number): void; createQuadData(index: number, verticesFloats: number, indicesCount: number): void; createFlexData(index: number, verticesFloats: number, indicesCount: number, vfmt: gfx.VertexFormat): void; initQuadIndices(indices): void; } class FlexBuffer { _handler: any; _index: any; _vfmt: gfx.VertexFormat; _verticesBytes: any; _initVerticesCount: number; _initIndicesCount: number; _reallocVData(floatsCount, oldData): void; _reallocIData(indicesCount, oldData): void; reserve(verticesCount, indicesCount): void; used(verticesCount, indicesCount): void; reset(): void; } class MeshBuffer { byteOffset: number; indiceOffset: number; indiceStart: number; vertexOffset: number; _arrOffset: number; _offsetInfo: { byteOffset: number, vertexOffset: number, indiceOffset: number }; _vData: Float32Array; _iData: Uint16Array; _uintVData: Uint32Array; _batcher: any; _vb: gfx.VertexBuffer; _vbArr: []; _ib: gfx.IndexBuffer; _ibArr: []; _vertexFormat: gfx.VertexFormat; constructor(batcher, vertexFormat): MeshBuffer; init(batcher, vertexFormat): void; request(vertexCount, indiceCount): { byteOffset: number, vertexOffset: number, indiceOffset: number }; } class QuadBuffer extends MeshBuffer { } module gfx { const ATTR_POSITION: any; const ATTR_UV0: any; const ATTR_COLOR: any; const ATTR_TYPE_UINT8: any; const ATTR_TYPE_FLOAT32: any; class VertexFormat { constructor(infos): VertexFormat; element(attrName): string; getHash(): string; } class VertexBuffer { constructor(device, format, usage, data): VertexBuffer; } class IndexBuffer { constructor(device, format, usage, data): IndexBuffer; } } module renderer { const canvas: any; 
const device: any; const scene: any; const drawCalls: any; const InputAssembler: any; const _handle: ModelBatcher; const _cameraNode: any; const _camera: any; const _forward: any; const _flow: any; function render(ecScene, dt): void; function clear(): void; } class RenderFlow { static FLAG_DONOTHING: number; static FLAG_BREAK_FLOW: number; static FLAG_LOCAL_TRANSFORM: number; static FLAG_WORLD_TRANSFORM: number; static FLAG_TRANSFORM: number; static FLAG_OPACITY: number; static FLAG_COLOR: number; static FLAG_OPACITY_COLOR: number; static FLAG_UPDATE_RENDER_DATA: number; static FLAG_RENDER: number; static FLAG_CHILDREN: number; static FLAG_POST_RENDER: number; static FLAG_FINAL: number; render(rootNode, dt): void; renderCamera(camera, rootNode): void; getBachther(): any; } /** * 资源库模块 * @deprecated cc.AssetLibrary 已在 v2.4 中被移除 */ module AssetLibrary { function loadAsset(uuid: string, callback: (err, result) => void, options?: { existingAsset?: any }): void; function queryAssetInfo(uuid: string, callback: (err, result) => void): void; function getAssetByUuid(uuid: string): any; } } interface ModelBatcher { _quadBuffer: cc.QuadBuffer; _meshBuffer: cc.MeshBuffer; }
package com.biu.wiki.filter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import javax.servlet.*; import javax.servlet.http.HttpServletRequest; import java.io.IOException; /** * @Class_name LogFilter * @Description …… * @Author longbiu * @Date 5/4/2022 10:39 PM **/ @Component public class LogFilter implements Filter { private static final Logger LOG = LoggerFactory.getLogger(LogFilter.class); @Override public void init(FilterConfig filterConfig) throws ServletException { // TODO document why this method is empty } @Override public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException { // 打印请求信息 HttpServletRequest request = (HttpServletRequest) servletRequest; LOG.info("------------- LogFilter 开始 -------------"); LOG.info("请求地址: {} {}", request.getRequestURI().toString(), request.getMethod()); LOG.info("远程地址: {}", request.getRemoteAddr()); long startTime = System.currentTimeMillis(); filterChain.doFilter(servletRequest, servletResponse); LOG.info("------------- LogFilter 结束 耗时:{} ms -------------", System.currentTimeMillis() - startTime); } }
#!/bin/bash

# Smoke test: install k3s, deploy a FreeIPA server pod ($1 = image, optional
# $2 = image tarball to import), wait for it to become ready, then verify the
# IPA web UI answers over HTTPS.

set -e
set -x

# Install k3s with a world-readable kubeconfig and point kubectl at it.
curl -sfL https://get.k3s.io | sh -s - --write-kubeconfig-mode 644
export KUBECONFIG=/etc/rancher/k3s/k3s.yaml

# Poll (quietly) until the node reports Ready.
( set +x ; while true ; do if kubectl get nodes | tee /dev/stderr | grep -q '\bReady\b' ; then break ; else sleep 5 ; fi ; done )

# Optionally pre-load a locally built image tarball into containerd.
if [ -n "$2" ] ; then
sudo k3s ctr images import "$2"
fi

kubectl get pods --all-namespaces

# Deploy the FreeIPA manifest, substituting the image under test ($1).
kubectl create -f <( sed "s#image:.*#image: $1#" tests/freeipa-k3s.yaml )

# Wait until the pod leaves the Pending/ContainerCreating phases.
( set +x ; while kubectl get pod/freeipa-server | tee /dev/stderr | grep -Eq '\bPending\b|\bContainerCreating\b' ; do sleep 5 ; done )

# If the pod is not Running at this point, dump diagnostics and fail.
if ! kubectl get pod/freeipa-server | grep -q '\bRunning\b' ; then
kubectl describe pod/freeipa-server
kubectl logs pod/freeipa-server
exit 1
fi

# Stream the pod logs in the background while waiting for readiness (1/1);
# $! below refers to this background job and stops it once the pod is ready.
kubectl logs -f pod/freeipa-server &
( set +x ; while true ; do if kubectl get pod/freeipa-server | grep -q '\b1/1\b' ; then kill $! ; break ; else sleep 5 ; fi ; done )

kubectl describe pod/freeipa-server
ls -la /var/lib/rancher/k3s/storage/pvc-*

# Read the configured IPA hostname from the pod's environment.
IPA_SERVER_HOSTNAME=$( kubectl get -o=jsonpath='{.spec.containers[0].env[?(@.name=="IPA_SERVER_HOSTNAME")].value}' pod freeipa-server )
# echo $( kubectl get -o=jsonpath='{.spec.clusterIP}' service freeipa-server-service ) $IPA_SERVER_HOSTNAME >> /etc/hosts

# Point the host's DNS at the in-cluster service (backing up resolv.conf once),
# so the IPA hostname resolves via the cluster DNS entry.
if ! test -f /etc/resolv.conf.backup ; then
sudo mv /etc/resolv.conf /etc/resolv.conf.backup
fi
sudo systemctl stop systemd-resolved.service || :
echo nameserver $( kubectl get -o=jsonpath='{.spec.clusterIP}' service freeipa-server-service ) | sudo tee /etc/resolv.conf

# Final check: the IPA web UI must respond with a recognizable title.
curl -Lk https://$IPA_SERVER_HOSTNAME/ | grep -E 'IPA: Identity Policy Audit|Identity Management'