text
stringlengths
1
1.05M
#!/bin/sh set -e set -u set -o pipefail function on_error { echo "$(realpath -mq "${0}"):$1: error: Unexpected failure" } trap 'on_error $LINENO' ERR if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy # frameworks to, so exit 0 (signalling the script phase was successful). exit 0 fi echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}" SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" BCSYMBOLMAP_DIR="BCSymbolMaps" # This protects against multiple targets copying the same framework dependency at the same time. The solution # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????") # Copies and strips a vendored framework install_framework() { if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then local source="${BUILT_PRODUCTS_DIR}/$1" elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")" elif [ -r "$1" ]; then local source="$1" fi local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" if [ -L "${source}" ]; then echo "Symlinked..." source="$(readlink "${source}")" fi if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do echo "Installing $f" install_bcsymbolmap "$f" "$destination" rm "$f" done rmdir "${source}/${BCSYMBOLMAP_DIR}" fi # Use filter instead of exclude so missing patterns don't throw errors. 
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}" local basename basename="$(basename -s .framework "$1")" binary="${destination}/${basename}.framework/${basename}" if ! [ -r "$binary" ]; then binary="${destination}/${basename}" elif [ -L "${binary}" ]; then echo "Destination binary is symlinked..." dirname="$(dirname "${binary}")" binary="${dirname}/$(readlink "${binary}")" fi # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then strip_invalid_archs "$binary" fi # Resign the code if required by the build settings to avoid unstable apps code_sign_if_enabled "${destination}/$(basename "$1")" # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7. if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then local swift_runtime_libs swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u) for lib in $swift_runtime_libs; do echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\"" rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}" code_sign_if_enabled "${destination}/${lib}" done fi } # Copies and strips a vendored dSYM install_dsym() { local source="$1" warn_missing_arch=${2:-true} if [ -r "$source" ]; then # Copy the dSYM into the targets temp dir. 
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}" local basename basename="$(basename -s .dSYM "$source")" binary_name="$(ls "$source/Contents/Resources/DWARF")" binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}" # Strip invalid architectures from the dSYM. if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then strip_invalid_archs "$binary" "$warn_missing_arch" fi if [[ $STRIP_BINARY_RETVAL == 0 ]]; then # Move the stripped file into its final destination. echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}" else # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing. touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM" fi fi } # Used as a return value for each invocation of `strip_invalid_archs` function. 
STRIP_BINARY_RETVAL=0 # Strip invalid architectures strip_invalid_archs() { binary="$1" warn_missing_arch=${2:-true} # Get architectures for current target binary binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)" # Intersect them with the architectures we are building for intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)" # If there are no archs supported by this binary then warn the user if [[ -z "$intersected_archs" ]]; then if [[ "$warn_missing_arch" == "true" ]]; then echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)." fi STRIP_BINARY_RETVAL=1 return fi stripped="" for arch in $binary_archs; do if ! [[ "${ARCHS}" == *"$arch"* ]]; then # Strip non-valid architectures in-place lipo -remove "$arch" -output "$binary" "$binary" stripped="$stripped $arch" fi done if [[ "$stripped" ]]; then echo "Stripped $binary of architectures:$stripped" fi STRIP_BINARY_RETVAL=0 } # Copies the bcsymbolmap files of a vendored framework install_bcsymbolmap() { local bcsymbolmap_path="$1" local destination="${BUILT_PRODUCTS_DIR}" echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}" } # Signs a framework with the provided identity code_sign_if_enabled() { if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then # Use the current code_sign_identity echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}" local 
code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'" if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then code_sign_cmd="$code_sign_cmd &" fi echo "$code_sign_cmd" eval "$code_sign_cmd" fi } if [[ "$CONFIGURATION" == "Debug" ]]; then install_framework "${BUILT_PRODUCTS_DIR}/AYEmptyDataView/AYEmptyDataView.framework" install_framework "${BUILT_PRODUCTS_DIR}/RxCocoa/RxCocoa.framework" install_framework "${BUILT_PRODUCTS_DIR}/RxSwift/RxSwift.framework" install_framework "${BUILT_PRODUCTS_DIR}/SnapKit/SnapKit.framework" fi if [[ "$CONFIGURATION" == "Release" ]]; then install_framework "${BUILT_PRODUCTS_DIR}/AYEmptyDataView/AYEmptyDataView.framework" install_framework "${BUILT_PRODUCTS_DIR}/RxCocoa/RxCocoa.framework" install_framework "${BUILT_PRODUCTS_DIR}/RxSwift/RxSwift.framework" install_framework "${BUILT_PRODUCTS_DIR}/SnapKit/SnapKit.framework" fi if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then wait fi
package com.knavic.nehakakkar; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.drawable.Drawable; import android.net.Uri; import android.os.Environment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.Toast; import androidx.recyclerview.widget.RecyclerView; import com.google.android.gms.ads.AdListener; import com.google.android.gms.ads.AdRequest; import com.google.android.gms.ads.InterstitialAd; import com.squareup.picasso.Picasso; import com.squareup.picasso.Target; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Date; public class DataAdapter extends RecyclerView.Adapter<DataAdapter.ViewHolder> { private ArrayList<AndroidVersion> android_versions; private Context context; private InterstitialAd mInterstitialAd; public DataAdapter(Context context, ArrayList<AndroidVersion> android_versions) { this.context = context; this.android_versions = android_versions; } @Override public DataAdapter.ViewHolder onCreateViewHolder(ViewGroup viewGroup, int i) { View view = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.raw_layout, viewGroup, false); return new ViewHolder(view); } @Override public void onBindViewHolder(ViewHolder viewHolder, final int i) { Picasso.with(context).load(android_versions.get(i).getAndroid_image_url()).into(viewHolder.img_android); viewHolder.sharebutton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Picasso.with(context.getApplicationContext()).load(android_versions.get(i).getAndroid_image_url()).into(new Target() { @Override public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) { Intent intent = new Intent("android.intent.action.SEND"); intent.setType("image/*"); 
intent.putExtra("android.intent.extra.STREAM", getlocalBitmapUri(bitmap)); final String packageName = context.getPackageName(); intent.putExtra(Intent.EXTRA_TEXT," Share This App: http://play.google.com/store/apps/details?id="+packageName); context.startActivity(Intent.createChooser(intent, "share")); } @Override public void onBitmapFailed(Drawable errorDrawable) { } @Override public void onPrepareLoad(Drawable placeHolderDrawable) { } }); } }); mInterstitialAd = new InterstitialAd(context); mInterstitialAd.setAdUnitId("ca-app-pub-9382057421484176/4482281355"); mInterstitialAd.loadAd(new AdRequest.Builder().build()); viewHolder.savebutton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Picasso.with(context.getApplicationContext()) .load(android_versions.get(i).getAndroid_image_url()) .into(new Target() { @Override public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) { try { File mydie = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).toString() + "/Neha Kakkar"); if (!mydie.exists()) { mydie.mkdirs(); } FileOutputStream fileOutputStream = new FileOutputStream(new File(mydie, new Date().toString() + ".jpg")); bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fileOutputStream); fileOutputStream.flush(); fileOutputStream.close(); if (mInterstitialAd.isLoaded()) { mInterstitialAd.show(); } Toast.makeText(context.getApplicationContext(), "Saved in Picture/Neha Kakkar", Toast.LENGTH_LONG).show(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e2) { e2.printStackTrace(); } } @Override public void onBitmapFailed(Drawable errorDrawable) { } @Override public void onPrepareLoad(Drawable placeHolderDrawable) { } }); } }); } @Override public int getItemCount() { return android_versions.size(); } public class ViewHolder extends RecyclerView.ViewHolder{ ImageButton savebutton; ImageButton sharebutton; ImageView img_android; public ViewHolder(View view) { 
super(view); savebutton =(ImageButton)view.findViewById(R.id.imageButton); sharebutton=(ImageButton)view.findViewById(R.id.imageButton2); img_android = (ImageView)view.findViewById(R.id.img_android); } } private Uri getlocalBitmapUri(Bitmap bitmap) { Uri bmuri = null; try { File file = new File(Environment.getExternalStorageDirectory() + File.separator + "image.jpg"); FileOutputStream fileOutputStream = new FileOutputStream(file); bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fileOutputStream); fileOutputStream.close(); bmuri = Uri.fromFile(file); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e2) { e2.printStackTrace(); } return bmuri; } }
#!/bin/bash
# Exercise main.lua against a fixed set of parenthesis strings:
# balanced, unbalanced, and mixed cases, in the original order.
cases=(
  '('
  '(())'
  '()()'
  '((('
  '(()(()('
  '))((((('
  '())'
  '))('
  ')))'
  ')())())'
)

for input in "${cases[@]}"; do
  lua main.lua "$input"
done
import tensorflow as tf

# Model hyperparameters.
batch_size = 128
embedding_dim = 16
hidden_dim = 32
# Fix: the original passed `batch_size` as the Embedding vocabulary size,
# conflating two unrelated quantities. The value (128) is kept so existing
# token ids < 128 still work, but name it for what it is.
# NOTE(review): this should really be the tokenizer's vocabulary size — confirm.
vocab_size = 128

# Inputs: a variable-length token-id sequence and a 10-dim category vector.
text_input = tf.keras.layers.Input(shape=(None,))
category_input = tf.keras.layers.Input(shape=(10,))

# Embed token ids: (batch, seq) -> (batch, seq, embedding_dim).
embedding_layer = tf.keras.layers.Embedding(vocab_size, embedding_dim)(text_input)

# Collapse the sequence dimension so the dense stack sees one vector per
# example, and actually use category_input — the original declared it as a
# model input but never connected it to the graph.
pooled = tf.keras.layers.GlobalAveragePooling1D()(embedding_layer)
features = tf.keras.layers.Concatenate()([pooled, category_input])

# Two hidden layers.
h1 = tf.keras.layers.Dense(hidden_dim, activation='relu')(features)
h2 = tf.keras.layers.Dense(hidden_dim, activation='relu')(h1)

# 10-way softmax output.
output = tf.keras.layers.Dense(10, activation='softmax')(h2)

model = tf.keras.Model(inputs=[text_input, category_input], outputs=output)

# Compile model.
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
<gh_stars>0 import {Pipe, PipeTransform} from '@angular/core' @Pipe({ name: 'sort' }) export class SortPipe implements PipeTransform { transform<A>(array: Array<A>, f: (a: A, b: A) => number): Array<A> { if (!Array.isArray(array)) { return [] } array.sort(f) return array } }
package graph // ShortestPath computes a shortest path from v to w. // Only edges with non-negative costs are included. // The number dist is the length of the path, or -1 if w cannot be reached. // // The time complexity is O((|E| + |V|)⋅log|V|), where |E| is the number of edges // and |V| the number of vertices in the graph. func ShortestPath(g Iterator, v, w int) (path []int, dist int64) { q := &dijkstraQueue{} return ShortestPathWithQueue(g, q, v, w) } // ShortestPaths computes the shortest paths from v to all other vertices. // Only edges with non-negative costs are included. // The number parent[w] is the predecessor of w on a shortest path from v to w, // or -1 if none exists. // The number dist[w] equals the length of a shortest path from v to w, // or is -1 if w cannot be reached. // // The time complexity is O((|E| + |V|)⋅log|V|), where |E| is the number of edges // and |V| the number of vertices in the graph. func ShortestPaths(g Iterator, v int) (parent []int, dist []int64) { n := g.Order() dist = make([]int64, n) parent = make([]int, n) for i := range dist { dist[i], parent[i] = -1, -1 } q := &dijkstraQueue{} q.SetDist(dist) q.Push(v, 0) p := &pathFinder{dist: dist, parent: parent, q: q} do := p.Do for q.Len() > 0 { v = q.Pop() p.v = v g.Visit(v, do) } return } type DistQueue interface { // SetDist sets the dist slice to the queue // as the priority slice. The queue should // use the dist slice as a shared slice, it // should not be copied. SetDist(dist []int64) // Push push v to the queue with // cost priority. Push(v int, cost int64) // Fix changes the cost of v to the // new cost priority. Fix(v int, cost int64) // Pop removes the first element of // queue and return it. Pop() int // Len is the queue's length. 
Len() int } func ShortestPathWithQueue(g Iterator, q DistQueue, v, w int) (path []int, dist int64) { parent, distances := shortestPathWithQueue(g, q, v, w) path, dist = []int{}, distances[w] if dist == -1 { return } for v := w; v != -1; v = parent[v] { path = append(path, v) } for i, j := 0, len(path)-1; i < j; i, j = i+1, j-1 { path[i], path[j] = path[j], path[i] } return } func shortestPathWithQueue(g Iterator, q DistQueue, v, w int) (parent []int, dist []int64) { n := g.Order() dist = make([]int64, n) parent = make([]int, n) for i := range dist { dist[i], parent[i] = -1, -1 } q.SetDist(dist) q.Push(v, 0) p := &pathFinder{dist: dist, parent: parent, q: q} do := p.Do for q.Len() > 0 { v = q.Pop() if v == w { return } p.v = v g.Visit(v, do) } return } type pathFinder struct { dist []int64 parent []int q DistQueue v int } func (p *pathFinder) Do(w int, d int64) (skip bool) { if d < 0 { return } alt := p.dist[p.v] + d switch { case p.dist[w] == -1: p.parent[w] = p.v p.q.Push(w, alt) case alt < p.dist[w]: p.parent[w] = p.v p.q.Fix(w, alt) } return }
#! /bin/sh # Copyright (C) 2010-2017 Free Software Foundation, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # TAGS_DEPENDENCIES only make sense if other tag-worthy things (such as # sources) exist. . test-init.sh cat >> configure.ac << 'END' AC_PROG_CC AC_OUTPUT END cat >Makefile.am << 'END' TAGS_DEPENDENCIES = foo END $ACLOCAL AUTOMAKE_fails grep 'define.*TAGS_DEPENDENCIES.*without' stderr cat >>Makefile.am << 'END' bin_PROGRAMS = bar END AUTOMAKE_run grep 'define.*TAGS_DEPENDENCIES.*without' stderr && exit 1 :
<reponame>foxdog-studios/pitch-shifter-chrome-extension function sendMessageToActiveTab(message, callback) { chrome.tabs.query({active: true, currentWindow: true}, function(tabs) { tab = tabs[0]; chrome.tabs.sendMessage(tab.id, message, callback); }); }; function init() { var transpose = false; var enabled = document.getElementById('enabled'); var pitch = document.getElementById('pitch'); var pitchValue = document.getElementById('pitch-value'); var pitchShiftTypeSelect = document.getElementById('pitch-shift-type'); var pitchReset = document.getElementById('pitch-reset'); var playbackRate = document.getElementById('playback-rate'); var playbackRateValue = document.getElementById('playback-rate-value'); var playbackRateReset = document.getElementById('playback-rate-reset'); function setPitchValue(_pitchValue) { pitch.value = _pitchValue; pitchValue.textContent = _pitchValue; } function setPlaybackRate(_playbackRate) { playbackRate.value = _playbackRate; playbackRateValue.textContent = _playbackRate; } function setPitchShiftTypeSmooth() { pitch.max = 1; pitch.min = -1; pitch.step = 0.01; pitchShiftTypeSelect.selectedIndex = 0; transpose = false; } function setPitchShiftTypeSemiTone() { pitch.max = 24; pitch.min = -24; pitch.step = 1; pitchShiftTypeSelect.selectedIndex = 1; transpose = true; } sendMessageToActiveTab({type: 'get'}, function (values) { if (values === undefined) { return; } if (values.transpose !== undefined && values.transpose !== null) { if (values.transpose) { setPitchShiftTypeSemiTone(); } else { setPitchShiftTypeSmooth(); } } if (values.pitch !== undefined && values.pitch !== null) { setPitchValue(values.pitch); } if (values.playbackRate !== undefined && values.playbackRate !== null) { setPlaybackRate(values.playbackRate); } if (values.enabled !== undefined && values.enabled !== null) { enabled.checked = values.enabled; } }); enabled.addEventListener('change', function(event) { sendMessageToActiveTab({enabled: enabled.checked}); }, false); 
pitch.addEventListener('input', function(event) { sendMessageToActiveTab({pitch: pitch.value}); setPitchValue(pitch.value); }, false); pitchShiftTypeSelect.addEventListener('change', function(event) { var opt = pitchShiftTypeSelect.options[pitchShiftTypeSelect.selectedIndex] if (opt.value == 'smooth') { setPitchShiftTypeSmooth(); setPitchValue(0); } else if (opt.value == 'semi-tone') { setPitchShiftTypeSemiTone(); setPitchValue(0); } sendMessageToActiveTab({transpose: transpose, pitch: pitch.value}); }, false); pitchReset.addEventListener('click', function(event) { sendMessageToActiveTab({pitch: 0}); setPitchValue(0); }, false); playbackRate.addEventListener('input', function(event) { sendMessageToActiveTab({playbackRate: playbackRate.value}); setPlaybackRate(playbackRate.value); }, false); playbackRateReset.addEventListener('click', function(event) { sendMessageToActiveTab({playbackRate: 1}); setPlaybackRate(1); }, false); } var readyStateCheckInterval = setInterval(function() { if (document.readyState === "complete") { clearInterval(readyStateCheckInterval); init(); } }, 10);
import sys
import time


def fib_recursion_memo(n, memo=None):
    """Nth Fibonacci number via recursion with memoization. O(n) time.

    Fixes: the original used a mutable default ``memo={}``, silently shared
    across all calls; it also recurses ~n frames deep, so large n overflows
    the default recursion limit (the caller below raises it).
    """
    if memo is None:
        memo = {}
    if n in memo:
        return memo[n]
    if n <= 2:
        return 1
    memo[n] = fib_recursion_memo(n - 1, memo) + fib_recursion_memo(n - 2, memo)
    return memo[n]


def fib_memo(n):
    """Nth Fibonacci number via an explicit table. O(n) time, O(n) space."""
    if n <= 2:
        return 1
    memo = [0] * (n + 1)
    memo[1] = memo[2] = 1
    for i in range(3, n + 1):
        memo[i] = memo[i - 1] + memo[i - 2]
    return memo[n]


def fib_bup(n):
    """Nth Fibonacci number bottom-up with two rolling values. O(n) time, O(1) space."""
    if n <= 2:
        return 1
    a, b = 1, 1
    for _ in range(3, n + 1):
        a, b = b, a + b
    return b


if __name__ == "__main__":
    N = 1000
    # The recursive version needs ~N stack frames; the CPython default limit
    # is 1000, so raise it before timing (original crashed here for N=1000).
    sys.setrecursionlimit(max(sys.getrecursionlimit(), N + 100))
    print("\n%dth value of the Fibonacci sequence :" % N)
    # Fix: the printed complexity labels were wrong — memoized recursion and
    # the table version are both O(n), not O(2^n) / "O(2n)".
    t = time.time() * 10e6
    print("{:<15}{:<8}{:>12}{:>12} µs".format(
        "Recursion memo", "O(n)", fib_recursion_memo(N), time.time() * 10e6 - t))
    t = time.time() * 10e6
    print("{:<15}{:<8}{:>12}{:>12} µs".format(
        "Memoization", "O(n)", fib_memo(N), time.time() * 10e6 - t))
    t = time.time() * 10e6
    print("{:<15}{:<8}{:>12}{:>12} µs".format(
        "Bottom-up", "O(n)", fib_bup(N), time.time() * 10e6 - t))
#!/bin/bash
# Poll the frontmost window's name once per second and publish it as a
# JavaScript variable assignment in name.js (consumed elsewhere, e.g. by
# a browser overlay).
cd "$(dirname "$0")"

while :
do
  name=$(osascript GetNameAndTitleOfActiveWindow.scpt)
  # Fix: the original interpolated $name unescaped inside double quotes,
  # emitting invalid JavaScript whenever the title contained `"` or `\`.
  escaped=${name//\\/\\\\}
  escaped=${escaped//\"/\\\"}
  printf 'name="%s";\n' "$escaped" > name.js
  sleep 1
done
#!/bin/bash dieharder -d 4 -g 30 -S 980071245
//
//  FTImageTableViewCell.h
//  FuzzTest
//
//  Created by <NAME> on 3/4/15.
//  Copyright (c) 2015 <NAME>. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "FTObject.h"

// Table-view cell that displays an image-bearing FTObject row.
@interface FTImageTableViewCell : UITableViewCell

// Model object backing this cell. NOTE(review): presumably the setter
// triggers (re)loading the image — confirm in the .m implementation.
@property (strong, nonatomic) FTObject *item;

@end
#!/bin/bash
# Mirror data files listed by the GINA NRT status service into OUTPUT_PATH.

NRT_SITE="http://nrt-status.gina.alaska.edu/products.txt?"
QUERY=""

usage() {
cat << EOF
usage: $0 options

This script mirrors data from the NRT Status available products

OPTIONS:
   -h      Show this message
   -s      Fetch data for SATELLITE
   -i      Fetch data for SENSOR
   -f      Fetch data for FACILITY
   -p      Fetch data for PROCESSING_LEVEL
   -n      Namespace the data (Place in sub-directorys for each pass)
   -o      Path to write data to (Default: .)
   -z      Create done files.
EOF
}

# Fix: the original spec string "h:s:i:f:p:o:d:n:z" made -h and -n require
# an argument, although both are plain flags per the usage text.
while getopts "hs:i:f:p:o:d:nz" OPTION; do
  case $OPTION in
    h)
      usage
      exit 1
      ;;
    s)
      QUERY="${QUERY}satellites\[\]=${OPTARG}&"
      ;;
    i)
      QUERY="${QUERY}sensors\[\]=${OPTARG}&"
      ;;
    f)
      QUERY="${QUERY}facilities\[\]=${OPTARG}&"
      ;;
    p)
      QUERY="${QUERY}processing_levels\[\]=${OPTARG}&"
      PLEVEL=${OPTARG}
      ;;
    d)
      DURATION=$OPTARG
      ;;
    o)
      OUTPUT_PATH=$OPTARG
      ;;
    n)
      NRT_NAMESPACE="yes"
      ;;
    z)
      DONE_FILE="yes"
      ;;
    ?)
      usage
      exit
      ;;
  esac
done

OUTPUT_PATH="${OUTPUT_PATH:-.}"
DURATION="${DURATION:-1}"

# Portable date arithmetic: GNU date (-d) on Linux, BSD date (-v) on macOS.
case $(uname -s) in
  Linux)
    START_DATE=$(date +"%Y-%m-%d" -d "${DURATION} days ago")
    END_DATE=$(date +"%Y-%m-%d" -d "tomorrow")
    ;;
  Darwin)
    START_DATE=$(date -j -v-"${DURATION}"d +"%Y-%m-%d")
    END_DATE=$(date -j -v+1d +"%Y-%m-%d")
    ;;
  ?)
    echo "UNKNOWN Platform"
    exit 1
    ;;
esac

# Use mktemp instead of a hand-rolled "random" name in /tmp.
TMPFILE=$(mktemp "/tmp/nrt-mirror-$(whoami)-XXXXXX") || exit 1

# Get the products list.
curl -s "${NRT_SITE}start_date=${START_DATE}&end_date=${END_DATE}&${QUERY}" -o "$TMPFILE"

# pass_output_path FILE — echoes the directory FILE should land in,
# honoring -n (namespace by the pass id, i.e. the file's parent directory).
pass_output_path() {
  if [ ! -z "${NRT_NAMESPACE+x}" ]; then
    echo "${OUTPUT_PATH}/$(basename "$(dirname "$1")")"
  else
    echo "${OUTPUT_PATH}"
  fi
}

for file in $(cat "$TMPFILE"); do
  filename=$(basename "$file")
  # Fix: the original tested "$(unknown)" — a nonexistent command — instead
  # of the filename, so leapsec.dat was never actually skipped.
  if [ "$filename" = "leapsec.dat" ]; then
    continue
  fi
  # Get the file if it doesn't exist (-nc: no-clobber).
  wget -nc -q -P "$(pass_output_path "$file")" "$file"
done

# Fix: the original used [ -z ${DONE_FILE+x} ], which created done files
# only when -z was NOT given; -z sets DONE_FILE, so test for "set".
if [ ! -z "${DONE_FILE+x}" ]; then
  for file in $(cat "$TMPFILE"); do
    filename=$(basename "$file")
    if [ "$filename" = "leapsec.dat" ]; then
      continue
    fi
    dir=$(pass_output_path "$file")
    if [ ! -e "${dir}/done_${PLEVEL}" ]; then
      echo "Making done file ${dir}/done_${PLEVEL}"
      touch "${dir}/done_${PLEVEL}"
    fi
  done
fi

rm "$TMPFILE"
<filename>components/Form.tsx import React, { useState } from "react"; type Props = { contactId: string; createComment: Function; }; const Form: React.FC<Props> = (props) => { const [title, setTitle] = useState(''); const [content, setContent] = useState(''); const reset = () => { setContent(''); setTitle(''); } return ( <div className="flex items-center justify-center shadow-lg mt-10 mx-8 mb-4 max-w-lg"> <form className="w-full max-w-xl bg-white rounded-lg px-4 pt-2" onSubmit={(e) => props.createComment(e, props.contactId, reset)}> <div className="flex flex-wrap -mx-3 "> <label className="px-4 pt-1 pb-2 text-gray-800 text-lg" htmlFor="title">Title</label> <input id="title" className="leading-none md:w-full text-gray-900 p-3 focus:outline-none focus:border-blue-700 m-4 mt-2 bg-gray-100 border rounded border-gray-200" name="title" type="text" placeholder="Enter a title" value={title} onChange={e => setTitle(e.target.value)} required /> </div> <div className="flex flex-wrap -mx-3 mb-6"> <label className="px-4 pt-3 pb-2 text-gray-800 text-lg" htmlFor="content">Comment</label> <textarea id="content" name="content" placeholder="Enter a comment" className="h-40 md:w-full mx-4 text-base leading-none text-gray-900 p-3 focus:oultine-none focus:border-blue-700 bg-gray-100 border rounded border-gray-200" value={content} onChange={e => setContent(e.target.value)} required> </textarea> </div> <div className="w-fullflex items-start md:w-full px-3"> <div className=""> <button className="bg-purple-500 text-white active:bg-purple-600 font-bold uppercase text-xs px-4 py-2 rounded-full shadow hover:shadow-md outline-none focus:outline-none mr-1 mb-4 ease-linear transition-all duration-150" type="submit"> Add </button> </div> </div> </form> </div> ) } export default Form;
# Remove the installed ideone binaries. Destructive and requires root;
# -f suppresses errors if they are already gone.
sudo rm -fr /bin/ideone /bin/ideoneSearcher
<reponame>kallsave/vue-router-cache<filename>examples/base/src/plugins/vi-ui/common/helpers/dom.js import { camelize } from './utils.js' export function hasClass(el, className) { const reg = new RegExp('(^|\\s)' + className + '(\\s|$)') return reg.test(el.className) } export function addClass(el, className) { /* istanbul ignore if */ if (hasClass(el, className)) { return } const newClass = el.className.split(' ') newClass.push(className) el.className = newClass.join(' ') } export function removeClass(el, className) { /* istanbul ignore if */ if (!hasClass(el, className)) { return } const reg = new RegExp('(^|\\s)' + className + '(\\s|$)', 'g') el.className = el.className.replace(reg, ' ') } export function getData(el, name) { const prefix = 'data-' return el.getAttribute(prefix + name) } export function setData(el, name, value) { const prefix = 'data-' el.setAttribute(prefix + name, value) } // getRect是获取相对父元素的位置,如果想获取相对页面的位置 // 请使用getBoundingClientRect export function getRect(el) { return { top: el.offsetTop, left: el.offsetLeft, width: el.offsetWidth, height: el.offsetHeight } } const elementStyle = document.createElement('div').style const endEventListenerList = ['transitionend', 'animationend'] const browserPrefix = { standard: '', webkit: 'webkit', Moz: 'Moz', O: 'O', ms: 'ms', } const endEventListenerPrefixList = { transition: { transition: 'transitionend', webkitTransition: 'webkitTransitionEnd', MozTransition: 'transitionend', OTransition: 'oTransitionEnd', msTransition: 'msTransitionEnd' }, animation: { animation: 'animationend', webkitAnimation: 'webkitAnimationEnd', MozAnimation: 'animationend', OAnimation: 'oAnimationEnd', msAnimation: 'msAnimationEnd' } } export function prefixStyle(style) { let baseStyle = '' if (endEventListenerList.indexOf(style) !== -1) { baseStyle = style.replace(/end/i, '') } for (let key in browserPrefix) { if (baseStyle) { let cssPrefixStyle = browserPrefix[key] ? 
browserPrefix[key] + '-' + baseStyle : baseStyle let keyName = camelize(cssPrefixStyle) if (elementStyle[keyName] !== undefined) { return endEventListenerPrefixList[baseStyle][keyName] } } else { let cssPrefixStyle = browserPrefix[key] ? browserPrefix[key] + '-' + style : style let keyName = camelize(cssPrefixStyle) if (elementStyle[keyName] !== undefined) { return keyName } } } return '' } export function getMatchedTarget(e, targetClassName) { let el = e.target while (el && !hasClass(el, targetClassName)) { if (el === e.currentTarget) return null el = el.parentNode } return el } export function dispatchEvent(el, name, { type = 'Event', bubbles = true, cancelable = true } = {}) { const e = document.createEvent(type) e.initEvent(name, bubbles, cancelable) el.dispatchEvent(e) } // 得到transform上的rotate,其他值不准确 export function getTransformAngle(dom) { const transform = prefixStyle('transform') let matrix = getComputedStyle(dom).getPropertyValue(transform) let angle = 0 if (matrix && matrix !== 'none') { let values = matrix.split('(')[1].split(')')[0].split(',') let a = values[0] let b = values[1] angle = Math.round(Math.atan2(b, a) * (180 / Math.PI)) } return angle }
<reponame>Akad1070/ipl_gerasmus package domaine.bizz; import core.exceptions.AppException; import core.exceptions.BizzException; import dal.dao.core.DbEntity; import dal.dao.core.DbEntityColumn; import dal.dao.core.DbEntityColumnTransient; import dal.dao.core.DbEntityFk; import domaine.bizz.interfaces.UserBizz; import domaine.dto.DepartementDto; import util.AppUtil; import java.time.LocalDate; @DbEntity(schema = "gerasmus", table = "utilisateurs") public class User extends BaseEntiteImpl implements UserBizz { private String nom; @DbEntityColumnTransient private String identifiant; private String prenom; private String pseudo; private String mdp; private String email; @DbEntityColumn("prof") private boolean estProf; @DbEntityColumn("departement") private Integer fkDepartement; @DbEntityFk private DepartementDto departement; @DbEntityColumn("date_inscription") private LocalDate dateInscription; @Override public String getNom() { return nom; } @Override public String getPrenom() { return prenom; } @Override public String getPseudo() { return pseudo; } @Override public DepartementDto getDepartement() { return departement; } @Override public String getMdp() { return mdp; } @Override public void setMdp(String mdp) { AppUtil.checkString(mdp, "Il faut un mot de passe."); this.mdp = mdp; } @Override public String getMail() { return email; } @Override public LocalDate getDateInscription() { return dateInscription; } @Override public void setNom(String nom) { AppUtil.checkString(nom, "Le nom ne peut pas être null."); this.nom = nom; } @Override public void setPrenom(String prenom) { AppUtil.checkString(prenom, "Le prénom ne peut pas être null."); this.prenom = prenom; } @Override public void setPseudo(String pseudo) { AppUtil.checkString(pseudo, "Le pseudo ne peut pas être null."); this.pseudo = pseudo; } @Override public void setMail(String mail) { AppUtil.checkString(mail, "L'e-mail ne peut pas être null."); email = mail; } @Override public void setProf(boolean estProf) { 
this.estProf = estProf; } @Override public boolean isProf() { return estProf; } @Override public void setDepartement(DepartementDto dep) { AppUtil.checkObject(dep, "Le departement ne peut pas être null."); departement = dep; } @Override public void setDateInscription(LocalDate date) { AppUtil.checkObject(date, "La date d'inscription ne peut pas être nulle."); dateInscription = date; } @Override public String getIdentifiant() { return identifiant; } @Override public void setIdentifiant(String identifiant) { AppUtil.checkString(identifiant, "L'identifiant ne peut pas être null."); this.identifiant = identifiant; } @Override public void checkBeforeInscription() { AppUtil.checkPseudo(pseudo); AppUtil.checkMail(email); AppUtil.checkMdp(mdp); AppUtil.checkNomOuPrenom(nom); AppUtil.checkNomOuPrenom(prenom); AppUtil.checkObject(departement); } @Override public boolean verifierMail() { try { return AppUtil.checkMail(identifiant); } catch (AppException exception) { return false; } } @Override public void verifierIdentifiants() { AppUtil.checkString(identifiant, "L'identifiant ne peut pas être null."); AppUtil.checkString(mdp, "Le mot de passe ne peut pas être null."); } @Override public void determinerType() { boolean estMailDeProf = false; try { estMailDeProf = AppUtil.checkMailProf(getMail()); } catch (AppException excep) { try { estMailDeProf = !AppUtil.checkMailEtud(getMail()); } catch (AppException exception) { throw new BizzException("Le mail introduit ne correspond pas au format de la Haute-Ecole."); } } setProf(estMailDeProf); } }
#!/bin/sh getRamMb() { ram_kb=$(grep MemTotal /proc/meminfo | awk '{print $2}') ram_mb=$(( ram_kb / 1024 )) echo "${ram_mb}" } getMaxConcurrentProc() { ram_mb=$(getRamMb) proc_count=$(( (ram_mb - 400) / 200 )) proc_count=$(( proc_count > 0 ? proc_count : 1 )) echo "${proc_count}" } # first arg is `-f` or `--some-option` or no argument provided if [ "${1#-}" != "$1" ] || [ "${1}" = "" ]; then c_opt_found="false" for opt in "$@"; do if [ "${opt}" != "${opt#-c}" ]; then c_opt_found="true" fi done if [ "${c_opt_found}" = "false" ]; then max_proc=$(getMaxConcurrentProc) if [ "${1}" = "" ]; then set -- "-c${max_proc}" else set -- "-c${max_proc}" "$@" fi fi set -- /usr/local/bin/php /app/bin/console app:client:run "$@" fi exec "$@"
import { Field, ID, InputType } from '@nestjs/graphql';

// GraphQL input type for updating an existing Location.
@InputType()
export class UpdateLocationInput {
  // Primary key of the location to update.
  @Field(() => ID)
  id: number;

  // Display name.
  @Field()
  name: string;

  // Free-text description.
  @Field()
  desc: string;

  // Latitude. NOTE(review): exposed with the default GraphQL scalar for
  // `number` (Float) — confirm that matches the schema's expectations.
  @Field()
  lat: number;

  // Longitude (same scalar note as lat).
  @Field()
  lng: number;
}
from typing import List, Dict


class IoTComponentManager:
    """Registry of IoT components and the components each one interacts with."""

    def __init__(self):
        # Maps component name -> set of names it interacts with.
        self.components = {}

    def add_component(self, component_name: str) -> None:
        """Register a component with an empty interaction set (no-op if present)."""
        self.components.setdefault(component_name, set())

    def remove_component(self, component_name: str) -> None:
        """Drop a component and scrub it from every other interaction set."""
        if component_name not in self.components:
            return
        del self.components[component_name]
        for interactions in self.components.values():
            interactions.discard(component_name)

    def get_all_components(self) -> List[str]:
        """Return the names of all registered components."""
        return [name for name in self.components]

    def get_interacting_components(self, component_name: str) -> List[str]:
        """Return names interacting with ``component_name`` (empty if unknown)."""
        return list(self.components.get(component_name, ()))
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { LoginFormComponent } from './login-form.component';
import { ReactiveFormsModule, FormsModule } from '@angular/forms';
import { AngularFireModule } from '@angular/fire';
import { AngularFireAuthModule } from '@angular/fire/auth';
import { environment } from 'src/environments/environment';
import { HttpClientModule } from '@angular/common/http';
import { RouterTestingModule } from '@angular/router/testing';
import { RouterModule } from '@angular/router';
import { StoreModule } from '@ngrx/store';

// Smoke test for LoginFormComponent: wires up forms, Firebase auth, router
// and an empty NgRx store so the component can be constructed in isolation.
describe('LoginFormComponent', () => {
  let component: LoginFormComponent;
  let fixture: ComponentFixture<LoginFormComponent>;

  // `async` waits for compileComponents() before any spec runs.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      imports: [
        FormsModule,
        ReactiveFormsModule,
        // Real Firebase module initialized from the environment config.
        AngularFireModule.initializeApp(environment.firebase),
        AngularFireAuthModule,
        HttpClientModule,
        // Empty route table satisfies router injections without navigating.
        RouterModule.forRoot([]),
        RouterTestingModule,
        // Empty root store satisfies any Store dependency in the component.
        StoreModule.forRoot({}),
      ],
      declarations: [LoginFormComponent]
    }).compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(LoginFormComponent);
    component = fixture.componentInstance;
    // Run initial change detection so lifecycle hooks fire.
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
public class BMICalculator {

    /**
     * Computes the body-mass index.
     *
     * @param weightKg weight in kilograms
     * @param heightCm height in centimetres; the {@code * 10000} factor
     *                 converts cm^2 into the m^2 the BMI formula expects
     * @return BMI = kg / m^2
     */
    static double calculateBmi(double weightKg, double heightCm) {
        return (weightKg / (heightCm * heightCm)) * 10000;
    }

    public static void main(String[] args) {
        double height = 180.0; // in cm
        double weight = 75.0;  // in kg
        // Same computation and output as before, now via a reusable helper.
        System.out.println("Your BMI is: " + calculateBmi(weight, height));
    }
}
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" build phase: copies vendored/built
# frameworks into the app bundle, strips architectures Xcode considers
# invalid, and re-signs the result. All ${...} vars come from Xcode's
# build-setting environment.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

# Location of the Swift runtime dylibs for the current platform.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Copies and strips a single framework ($1: path relative to the built
# products dir, an absolute path, or a bare framework name).
install_framework() {
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Resolve a symlinked framework to its real path before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  # Fall back to a bare dylib layout when no .framework bundle exists.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # NOTE(review): the trailing `&& exit ${PIPESTATUS[0]}` inside $( )
    # aborts only the subshell with the otool stage's status; verify intent.
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
    # Optionally background the signing so multiple frameworks sign in
    # parallel; the trailing `wait` at the bottom of the script reaps them.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file (lipo output ends "...: arch1 arch2").
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}

# Pod-generated per-configuration embed calls.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/segment-ios-integration-webengage-test/segment_ios_integration_webengage_test.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/segment-ios-integration-webengage-test/segment_ios_integration_webengage_test.framework"
fi
# Reap any backgrounded codesign jobs before the build phase ends.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
// Copyright 2020-2021 <NAME>, <NAME>, and other contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Custom element implementing a tab bar: elements with `data-tab` act as tab
// links, elements with `data-tab-page` are the pages they reveal, and the
// location hash is kept in sync with the active tab.
export class SPITabBarElement extends HTMLElement {
  constructor() {
    super()

    const tabLinkElements = this.querySelectorAll('[data-tab]')
    tabLinkElements.forEach((tabLinkElement) => {
      tabLinkElement.addEventListener('click', () => {
        // Assign a new tab to be active and switch to it. Use the element
        // captured by this closure rather than the deprecated, non-standard
        // `event.srcElement`, which could also point at a descendant node
        // that carries no `data-tab` attribute.
        this.activateTab(tabLinkElement, tabLinkElements)
        this.showPage(tabLinkElement.dataset.tab)

        // Only when explicitly clicked, change the page anchor.
        const currentLocationUrl = new URL(window.location)
        currentLocationUrl.hash = `#${tabLinkElement.dataset.tab}`
        window.history.pushState({}, '', currentLocationUrl)
      })
    })

    this.syncTabs()
  }

  // Align the active tab with the location hash, then show its page.
  syncTabs() {
    const locationUrlHash = new URL(window.location).hash
    const tabLinkElements = this.querySelectorAll('[data-tab]')
    tabLinkElements.forEach((tabLinkElement) => {
      // Make any tab active where the identifier matches an anchor in the location.
      if (locationUrlHash === `#${tabLinkElement.dataset.tab}`) {
        this.activateTab(tabLinkElement, tabLinkElements)
      }
    })

    // Show the page which has the active class. Guard against markup that
    // declares no active tab — the original dereferenced null here.
    const activeTabLinkElement = this.querySelector('[data-tab].active')
    if (activeTabLinkElement) {
      this.showPage(activeTabLinkElement.dataset.tab)
    }
  }

  // Reveal the page whose `data-tab-page` matches tabId; hide all others.
  showPage(tabId) {
    const tabPageElements = document.querySelectorAll('[data-tab-page]')
    tabPageElements.forEach((tabPageElement) => {
      if (tabPageElement.dataset.tabPage == tabId) {
        tabPageElement.classList.remove('hidden')
      } else {
        tabPageElement.classList.add('hidden')
      }
    })
  }

  // Mark one tab link active and clear the flag from all the rest.
  activateTab(tabLinkElement, tabLinkElements) {
    tabLinkElements.forEach((tabLinkElement) => {
      tabLinkElement.classList.remove('active')
    })
    tabLinkElement.classList.add('active')
  }
}
<?php

/**
 * Validates and normalises raw domain-registration input.
 */
class DomainRegistrar {
    /**
     * Returns a trimmed registration record, or an empty array when any of
     * the four required fields is missing or domain/owner are blank after
     * trimming (note: a value of "0" counts as blank, matching empty()).
     *
     * @param array $inputData raw request data
     * @return array normalised record, or array() on invalid input
     */
    public function processRegistrationData($inputData) {
        $record = array();
        foreach (array('domain', 'owner', 'registrar', 'referrer') as $field) {
            if (!isset($inputData[$field])) {
                return array(); // a required field is absent
            }
            $record[$field] = trim($inputData[$field]);
        }

        // Domain and owner must be non-empty after trimming.
        if (empty($record['domain']) || empty($record['owner'])) {
            return array();
        }

        return $record;
    }
}

// Example usage
$registrar = new DomainRegistrar();
$inputData = array(
    'domain' => 'example.com',
    'owner' => 'John Doe',
    'registrar' => 'IT-Nic',
    'referrer' => 'http://www.nic.it/'
);
$result = $registrar->processRegistrationData($inputData);
print_r($result);
?>
def find_divisors(num):
    """Return every divisor of ``num`` from 2 up to ``num``, ascending.

    Note: 1 is deliberately excluded; ``num`` itself is included.
    """
    return [candidate for candidate in range(2, num + 1) if num % candidate == 0]


num = 48
divisors = find_divisors(num)
print("The divisors of {} are: {}".format(num, divisors))
def evaluate_expressions(n, expressions):
    """Evaluate arithmetic expression strings, treating ``^`` as power.

    Args:
        n: Declared number of expressions (kept for interface compatibility;
           the list's own length drives iteration).
        expressions: Iterable of expression strings, e.g. ``'2^3'``.

    Returns:
        List of result strings; a malformed entry yields ``'Invalid expression'``.
    """
    results = []
    for exp in expressions:
        try:
            # '^' means exponentiation here, not Python's bitwise XOR.
            translated = exp.replace('^', '**')
            # SECURITY NOTE: eval() executes arbitrary code — only ever feed
            # this function trusted input.
            result = eval(translated)
            results.append(str(result))
        except Exception:
            # Narrowed from a bare `except` so KeyboardInterrupt/SystemExit
            # still propagate.
            results.append("Invalid expression")
    return results


# Test the function
n = 3
expressions = ['2^3', '5^2', '4*3']
# The original comment claimed '4*3' was invalid; it actually evaluates to 12.
print(evaluate_expressions(n, expressions))  # Output: ['8', '25', '12']
//
// Copyright 2020 <NAME>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

#ifndef AGE_DOWNSAMPLER_HPP
#define AGE_DOWNSAMPLER_HPP

//!
//! \file
//! Audio downsampling: base class plus linear-interpolation and
//! windowed-sinc low-pass (incl. Kaiser window) implementations.
//!

#include <functional>
#include <vector>

#include <age_types.hpp>
#include <pcm/age_pcm_frame.hpp>

namespace age
{
    //!
    //! \brief Abstract base: converts samples at an input rate into samples
    //! at a (lower) output rate, collecting results in an output buffer.
    //!
    class downsampler
    {
        AGE_DISABLE_COPY(downsampler);
        AGE_DISABLE_MOVE(downsampler);

    public:
        downsampler(int input_sampling_rate, int output_sampling_rate);
        virtual ~downsampler() = default;

        //! Access the samples produced so far.
        [[nodiscard]] const pcm_vector& get_output_samples() const;
        //! Discard the collected output samples.
        void clear_output_samples();
        //! Scale factor applied to samples appended after this call.
        void set_volume(float volume);

        //! Feed input samples; implementations produce output samples.
        virtual void add_input_samples(const pcm_vector& samples) = 0;

    protected:
        void add_output_samples(int16_t left_sample, int16_t right_sample);
        void add_output_samples(pcm_frame frame);

        //!
        //! This variable contains the number of input samples required for
        //! a single output sample. The unsigned integer is treated as fixed
        //! point value with the lower 16 bits treated as fraction.
        //!
        const int m_input_output_ratio;

    private:
        static int calculate_ratio(int input_sampling_rate, int output_sampling_rate);

        pcm_vector m_output_samples;
        float m_volume = 1;
    };

    //!
    //! \brief A downsampler_linear resamples audio data by interpolating an output
    //! sample from the two nearest input samples.
    //!
    class downsampler_linear : public downsampler
    {
        AGE_DISABLE_COPY(downsampler_linear);
        AGE_DISABLE_MOVE(downsampler_linear);

    public:
        using downsampler::downsampler;
        ~downsampler_linear() override = default;

        void add_input_samples(const pcm_vector& samples) override;

    private:
        void add_output_sample(const pcm_frame& left_frame, const pcm_frame& right_frame);

        int m_right_sample_index = 1; // 0 = use last sample as left, 1 = use first sample of new samples as left
        int m_right_sample_fraction = 0;
        pcm_frame m_last_input_sample;
    };

    //!
    //! \brief Downsampler applying a FIR low-pass filter (windowed sinc)
    //! before decimation.
    //!
    class downsampler_low_pass : public downsampler
    {
        AGE_DISABLE_COPY(downsampler_low_pass);
        AGE_DISABLE_MOVE(downsampler_low_pass);

    public:
        using downsampler::downsampler;
        ~downsampler_low_pass() override = default;

        void add_input_samples(const pcm_vector& samples) override;

        //! Number of FIR coefficients in use.
        [[nodiscard]] size_t get_fir_size() const;

    protected:
        //! Build the FIR from a sinc windowed by the supplied weight function.
        void create_windowed_sinc(double transition_frequency, int filter_order, const std::function<double(double, int)>& window_weight);

    private:
        double calculate_sinc(double n, int filter_order, double transition_frequency);

        std::vector<int32_t> m_fir_values;
        pcm_vector m_prev_samples; // history needed to convolve across calls
        int m_next_output_index = 0;
        int m_next_output_fraction = 0;
    };

    //!
    //! \brief Low-pass downsampler whose FIR uses a Kaiser window derived
    //! from the requested ripple.
    //!
    class downsampler_kaiser_low_pass : public downsampler_low_pass
    {
        AGE_DISABLE_COPY(downsampler_kaiser_low_pass);
        AGE_DISABLE_MOVE(downsampler_kaiser_low_pass);

    public:
        downsampler_kaiser_low_pass(int input_sampling_rate, int output_sampling_rate, double ripple);
        ~downsampler_kaiser_low_pass() override = default;

    private:
        static int calculate_filter_order(double A, double tw);
        static double calculate_beta(double A);
        static double calculate_bessel(double value);
    };

} // namespace age

#endif // AGE_DOWNSAMPLER_HPP
#!/bin/bash
# Runs the benchmark suite once for every superpage size.
#
# Usage:
#   ./launch-suite.sh <time> <ids> <numa_nodes>
# Example:
#   ./launch-suite.sh 1m 06:00.0,07:00.0,85:00.0,86:00.0 0,0,1,1
#
# NOTE(review): the original usage example listed the device ids before the
# duration, contradicting the assignments below ($1 is the duration). The
# example above matches the code.

# Arguments
time=$1       # benchmark duration, e.g. 1m
ids=$2        # comma-separated device ids, e.g. 05:00.0,06:00.0,55:00.0,56:00.0
numa_nodes=$3 # comma-separated NUMA node per device, e.g. 0,0,1,1

superpage_sizes=(32Ki 64Ki 128Ki 256Ki 512Ki 1Mi 2Mi 4Mi 8Mi 16Mi 32Mi 64Mi 128Mi 256Mi 512Mi 1Gi)

for superpage_size in "${superpage_sizes[@]}"; do
    echo "Benchmarking with superpage size ${superpage_size}"
    # Quote all expansions so empty/odd arguments don't word-split.
    ./launch-multi-bench.sh "${superpage_size}" "${time}" "${ids}" "${numa_nodes}"
done
<filename>sshd-common/src/main/java/org/apache/sshd/client/auth/AuthenticationIdentitiesProvider.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sshd.client.auth;

import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.KeyPair;
import java.util.Comparator;
import java.util.List;

import org.apache.sshd.client.auth.password.PasswordIdentityProvider;
import org.apache.sshd.common.config.keys.KeyUtils;
import org.apache.sshd.common.keyprovider.KeyIdentityProvider;
import org.apache.sshd.common.session.SessionContext;
import org.apache.sshd.common.util.helper.LazyMatchingTypeIterable;

/**
 * Aggregates both password and key-pair identities for client authentication.
 *
 * @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a>
 */
public interface AuthenticationIdentitiesProvider extends KeyIdentityProvider, PasswordIdentityProvider {

    /**
     * Compares 2 password identities - returns zero ONLY if <U>both</U> compared objects are {@link String}s and equal
     * to each other
     */
    // Note: returns -1 for non-String operands, so this is a membership test
    // rather than a total ordering.
    Comparator<Object> PASSWORD_IDENTITY_COMPARATOR = (o1, o2) -> {
        if (!(o1 instanceof String) || !(o2 instanceof String)) {
            return -1;
        } else {
            return ((String) o1).compareTo((String) o2);
        }
    };

    /**
     * Compares 2 {@link KeyPair} identities - returns zero ONLY if <U>both</U> compared objects are {@link KeyPair}s
     * and equal to each other
     */
    // Equality is delegated to KeyUtils.compareKeyPairs; unequal/foreign
    // operands yield a non-zero result (again a membership test, not an order).
    Comparator<Object> KEYPAIR_IDENTITY_COMPARATOR = (o1, o2) -> {
        if ((!(o1 instanceof KeyPair)) || (!(o2 instanceof KeyPair))) {
            return -1;
        } else if (KeyUtils.compareKeyPairs((KeyPair) o1, (KeyPair) o2)) {
            return 0;
        } else {
            return 1;
        }
    };

    /**
     * @param  session                  The {@link SessionContext} for invoking this load command - may be {@code null}
     *                                  if not invoked within a session context (e.g., offline tool).
     * @return                          All the currently available identities - passwords, keys, etc...
     * @throws IOException              If failed to load the identities
     * @throws GeneralSecurityException If some security issue with the identities (e.g., keys)
     */
    Iterable<?> loadIdentities(SessionContext session) throws IOException, GeneralSecurityException;

    /**
     * Linear scan for the first element that the comparator deems equal to
     * {@code target}.
     *
     * @param  identities list to search (must be non-null)
     * @param  comp       comparator whose zero result means "match"
     * @param  target     value to look for
     * @return            index of the first match, or -1 if none
     */
    static int findIdentityIndex(List<?> identities, Comparator<? super Object> comp, Object target) {
        for (int index = 0; index < identities.size(); index++) {
            Object value = identities.get(index);
            if (comp.compare(value, target) == 0) {
                return index;
            }
        }

        return -1;
    }

    /**
     * @param  identities The {@link Iterable} identities - OK if {@code null}/empty
     * @return            An {@link AuthenticationIdentitiesProvider} wrapping the identities
     */
    static AuthenticationIdentitiesProvider wrapIdentities(Iterable<?> identities) {
        return new AuthenticationIdentitiesProvider() {
            @Override
            public Iterable<KeyPair> loadKeys(SessionContext session) {
                // Lazily filters the mixed iterable down to KeyPair elements.
                return LazyMatchingTypeIterable.lazySelectMatchingTypes(identities, KeyPair.class);
            }

            @Override
            public Iterable<String> loadPasswords(SessionContext session) {
                // Lazily filters the mixed iterable down to String elements.
                return LazyMatchingTypeIterable.lazySelectMatchingTypes(identities, String.class);
            }

            @Override
            public Iterable<?> loadIdentities(SessionContext session) {
                // Object.class matches everything, i.e. all identities.
                return LazyMatchingTypeIterable.lazySelectMatchingTypes(identities, Object.class);
            }
        };
    }
}
#!/bin/bash
# Copies the test-service binary and its configuration onto every public IP
# of the VM(s) behind the first configured Spider connection.

source ../setup.env

CONNECT_NAME="${CONNECT_NAMES[0]}"
num=0

echo "========================== ${CONNECT_NAME}"

# Query the Spider REST server for the public IP(s); the json_pp/grep/awk/sed
# chain extracts the bare address, e.g. 137.135.167.9.
# Fixes: $() instead of backticks; the URL is quoted because its '?' would
# otherwise be subject to glob expansion.
PUBLIC_IPS=$(curl -sX GET "http://${RESTSERVER}:1024/spider/publicip/publicipt${num}-powerkim?connection_name=${CONNECT_NAME}" \
    | json_pp | grep '"PublicIP"' | awk '{print $3}' | sed 's/"//g' | sed 's/,//g')

for PUBLIC_IP in ${PUBLIC_IPS}; do
    echo "${CONNECT_NAME} : copy testsvc into ${PUBLIC_IP} ..."
    # Drop any stale host key so scp does not abort on a reprovisioned VM.
    ssh-keygen -f "/root/.ssh/known_hosts" -R "${PUBLIC_IP}"
    scp -i "../keypair/${CONNECT_NAME}.key" -o "StrictHostKeyChecking no" ./testsvc/TESTSvc ./testsvc/setup.env "cb-user@${PUBLIC_IP}:/tmp"
    scp -i "../keypair/${CONNECT_NAME}.key" -o "StrictHostKeyChecking no" -r ./testsvc/conf "cb-user@${PUBLIC_IP}:/tmp"
done
package io.opensphere.core.help.data;

import java.util.ArrayList;
import java.util.List;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;

/**
 * This class holds information pertaining to a table of contents entry for a
 * java help file. Entries nest: each entry may carry a list of sub entries,
 * which JAXB (de)serializes via the field-level annotation below.
 */
@XmlAccessorType(XmlAccessType.NONE)
@XmlType(name = "tocitem")
public class HelpTOCEntry extends AbstractHelpEntry
{
    /** The list of sub entries. */
    @XmlElement(name = "tocitem", required = false)
    private List<HelpTOCEntry> myTOCEntries;

    /**
     * Default Constructor (for JAXB).
     */
    public HelpTOCEntry()
    {
        myTOCEntries = new ArrayList<>();
    }

    /**
     * Constructor.
     *
     * @param title The title attribute value.
     * @param targetID The target attribute value.
     */
    public HelpTOCEntry(String title, String targetID)
    {
        setTitle(title);
        setTarget(targetID);
        myTOCEntries = new ArrayList<>();
    }

    /**
     * Add a table of contents entry to my list of sub entries.
     *
     * @param item The table of contents entry to add.
     */
    public void addTOCItem(HelpTOCEntry item)
    {
        // Defensive re-initialization: the list may be null after JAXB
        // unmarshalling or setTocItems(null).
        if (myTOCEntries == null)
        {
            myTOCEntries = new ArrayList<>();
        }
        myTOCEntries.add(item);
    }

    /**
     * Get table of contents sub entry at specific index.
     *
     * @param index The index.
     * @return The sub entry.
     */
    public HelpTOCEntry getEntry(int index)
    {
        return myTOCEntries.get(index);
    }

    /**
     * Accessor for the last table of contents sub entry.
     * Note: throws IndexOutOfBoundsException when there are no sub entries.
     *
     * @return The last sub entry.
     */
    public HelpTOCEntry getLastEntry()
    {
        return myTOCEntries.get(myTOCEntries.size() - 1);
    }

    /**
     * Accessor for my list of sub entries.
     *
     * @return The list of table of contents sub entries.
     */
    public List<HelpTOCEntry> getTocItems()
    {
        return myTOCEntries;
    }

    /**
     * Mutator for my list of sub entries.
     *
     * @param tocItems The list of table of contents sub entries.
     */
    public void setTocItems(List<HelpTOCEntry> tocItems)
    {
        myTOCEntries = tocItems;
    }
}
<reponame>mannyatico/urban-octo-carnival
import React from 'react';

import { BookCover } from './BookCover'
import { BookInfo } from './BookInfo'

// Presentational component: renders one book as a media row composed of its
// cover image and its textual info (title, author, rating).
export const Book = (props) => {
    return (
        <div className="media">
            <BookCover
                cover={props.cover}
                title={props.title}
            />
            <BookInfo
                title={props.title}
                author={props.author}
                rating={props.rating}
            />
        </div>
    )
}

// NOTE(review): `React.PropTypes` was removed from the React package in
// newer versions; migrating requires the standalone `prop-types` package —
// confirm the React version pinned by this project before upgrading.
Book.propTypes = {
    cover: React.PropTypes.string.isRequired,
    title: React.PropTypes.string.isRequired,
    author: React.PropTypes.string.isRequired,
    rating: React.PropTypes.number.isRequired
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package arq;

import java.util.Iterator ;

import jena.cmd.ArgDecl;
import jena.cmd.CmdLineArgs;

import org.apache.jena.atlas.junit.BaseTest ;
import org.junit.Test ;

/** Unit tests for command-line parsing: flags (no value), valued args, and
 *  repeated args, via CmdLineArgs/ArgDecl. */
public class TestCmdLine extends BaseTest
{
    // An empty argument string parses without error.
    @Test public void test_Simple1()
    {
        String args[] = new String[]{""} ;
        CmdLineArgs cl = new CmdLineArgs(args) ;
        cl.process() ;
    }

    // A declared flag that is not supplied must not be reported present.
    @Test public void test_Flag1()
    {
        String args[] = new String[]{ ""} ;
        CmdLineArgs cl = new CmdLineArgs(args) ;
        ArgDecl argA = new ArgDecl(false, "-a") ;
        cl.add(argA) ;
        cl.process() ;
        assertTrue("-a argument found" , ! cl.contains(argA) ) ;
    }

    // A supplied flag is detected.
    @Test public void test_Flag2()
    {
        String args[] = new String[]{ "-a"} ;
        CmdLineArgs cl = new CmdLineArgs(args) ;
        ArgDecl argA = new ArgDecl(false, "-a") ;
        cl.add(argA) ;
        cl.process() ;
        assertTrue("No -a argument found" , cl.contains(argA) ) ;
    }

    // A flag followed by a positional item is still detected.
    @Test public void test_Flag3()
    {
        String args[] = new String[]{ "-a", "filename"} ;
        CmdLineArgs cl = new CmdLineArgs(args) ;
        ArgDecl argA = new ArgDecl(false, "-a") ;
        cl.add(argA) ;
        cl.process() ;
        assertTrue("No -a argument found" , cl.contains(argA) ) ;
    }

    // A declared valued argument that is absent must not be reported present.
    @Test public void test_Arg1()
    {
        String args[] = new String[]{ ""} ;
        CmdLineArgs cl = new CmdLineArgs(args) ;
        ArgDecl argA = new ArgDecl(true, "-arg") ;
        cl.add(argA) ;
        cl.process() ;
        assertTrue("-arg argument found" , ! cl.contains(argA) ) ;
    }

    // -arg=ARG form: value retrievable both via the decl and by name.
    // Note: arguments to assertEquals are (message, value, expected) here,
    // reversed relative to the JUnit convention — harmless for equality.
    @Test public void test_Arg2()
    {
        String args[] = new String[]{ "-arg=ARG", "filename"} ;
        CmdLineArgs cl = new CmdLineArgs(args) ;
        ArgDecl argA = new ArgDecl(true, "arg") ;
        cl.add(argA) ;
        cl.process() ;
        assertTrue("No -arg= argument found" , cl.contains(argA) ) ;
        assertEquals("", cl.getValue(argA) , "ARG") ;
        assertEquals("", cl.getArg("arg").getValue() , "ARG") ;
    }

    // Repeated argument (single- and double-dash forms) collects both values
    // in order of appearance.
    @Test public void test_nArg1()
    {
        String args[] = new String[]{ "-arg=V1", "--arg=V2", "-v"} ;
        CmdLineArgs cl = new CmdLineArgs(args) ;
        ArgDecl argA = new ArgDecl(true, "-arg") ;
        cl.add(argA) ;
        ArgDecl argV = new ArgDecl(false, "-v") ;
        cl.add(argV) ;
        cl.process() ;
        assertTrue("No -arg= argument found" , cl.contains(argA) ) ;
        Iterator<String> iter = cl.getValues("arg").iterator() ;
        assertEquals("Argument 1", iter.next() , "V1") ;
        assertEquals("Argument 2", iter.next() , "V2") ;
    }
}
<reponame>tombartkowski/phonestreamer-server
import { Repository } from '../../core/repository';
import { Result, unwrap } from '../../core/resolve';
import { Session } from './session';
import { MongooseQueryParser, QueryOptions } from 'mongoose-query-parser';
import { HTTP } from '../../networking/http';
import { Api } from '../../networking/api.virtuaMachine';
import { Device } from '../devices/device';
import { Simulator } from '../simulators/simulator';
import { VirtualMachine } from '../virtualMachines/virtualMachine';
import { AppError, ErrorCode, ErrorMessage, ErrorType } from '../../core/AppError';
import { RequestInput } from '../../core/controller';

// CRUD + orchestration service for streaming sessions. Creating a session
// allocates a VM, provisions a simulator on it, and starts both remotely.
export interface SessionService {
  createSession: (body: any) => Promise<Result<Session>>;
  findSessions: (input: RequestInput) => Promise<Result<Session[]>>;
  findSessionById: (id: string, input: RequestInput) => Promise<Result<Session>>;
  deleteSession: (id: string) => Promise<Result<Session>>;
  patchSession: (id: string, body: any) => Promise<Result<Session>>;
}

// Factory wiring the service to its repositories and HTTP client.
export default (
  sessionRepository: Repository<Session>,
  deviceRepository: Repository<Device>,
  simulatorRepository: Repository<Simulator>,
  virtualMachineRepository: Repository<VirtualMachine>,
  httpClient: HTTP.Client
): SessionService => {
  const parser = new MongooseQueryParser();

  // Picks the first VM returned by the repository.
  // NOTE(review): no load/health criteria are applied here — "available"
  // simply means "exists"; confirm whether filtering is done elsewhere.
  const findAvailableVirtualMachine = async (): Promise<Result<VirtualMachine>> => {
    const virtualMachines = await unwrap(
      virtualMachineRepository.find({
        filter: {},
        limit: 1,
      })
    );
    const [virtualMachine, ..._rest] = virtualMachines!;
    if (!virtualMachine)
      return [
        null,
        new AppError(
          ErrorMessage.NoVirtualMachinesAvailable,
          ErrorType.INTERNAL,
          ErrorCode.NO_VIRTUAL_MACHINES_AVAILABLE,
          503
        ),
      ];
    return [virtualMachine, null];
  };

  // Creates and persists a Simulator entity bound to the given VM and device.
  const createSimulator = async (
    vm: VirtualMachine,
    device: Device
  ): Promise<Result<Simulator>> => {
    const [simulatorCandidate, simulatorCandidateError] = Simulator.create({
      virtualMachine: vm.id,
      device: device.id,
    });
    if (simulatorCandidateError) return [null, simulatorCandidateError];
    return simulatorRepository.save(simulatorCandidate!);
  };

  // Creates and persists the Session entity linking device, VM and simulator.
  const createSession = async (
    device: Device,
    virtualMachine: VirtualMachine,
    simulator: Simulator
  ): Promise<Result<Session>> => {
    const [sessionCandidate, sessionCandidateError] = Session.create({
      device: device.id,
      virtualMachine: virtualMachine.id,
      simulator: simulator.id,
    });
    if (sessionCandidateError) {
      return [null, sessionCandidateError];
    }
    return sessionRepository.save(sessionCandidate!);
  };

  return {
    // Full provisioning flow; each `unwrap` throws on error and the catch
    // converts the failure back into a Result tuple.
    // NOTE(review): no rollback — a simulator/VM started before a later
    // failure stays allocated; confirm cleanup happens elsewhere.
    createSession: async (body: any) => {
      try {
        const device = await unwrap(deviceRepository.findById(body.deviceId));
        if (!device) {
          throw new AppError(
            ErrorMessage.InvalidField('deviceId'),
            ErrorType.USER,
            ErrorCode.VALIDATION_ERROR,
            400
          );
        }
        const virtualMachine = await unwrap(findAvailableVirtualMachine());
        const simulator = await unwrap(createSimulator(virtualMachine, device!));
        await unwrap(
          httpClient.request(
            Api.VirtualMachine.startSimulator(virtualMachine.remoteUrl, {
              simulatorIdentifier: simulator.identifier,
              deviceTypeIdentifier: device!.typeIdentifier,
            })
          )
        );
        await unwrap(
          httpClient.request(
            Api.VirtualMachine.startSession(virtualMachine.remoteUrl, {
              simulatorIdentifier: simulator.identifier,
            })
          )
        );
        const session = await unwrap(
          createSession(device!, virtualMachine, simulator)
        );
        return [session, null];
      } catch (error) {
        return [null, error];
      }
    },
    // Plain repository pass-throughs; query strings are parsed into
    // Mongoose query options.
    findSessions: async (input: RequestInput) =>
      sessionRepository.find(parser.parse(input.query)),
    findSessionById: async (id: string, input: RequestInput) =>
      sessionRepository.findById(id, parser.parse(input.query)),
    deleteSession: async (id: string) => sessionRepository.delete(id),
    patchSession: async (id: string, body: any) =>
      sessionRepository.updateOne(id, body),
  };
};
<filename>src/main/java/io/github/rcarlosdasilva/weixin/core/cache/Lookup.java<gh_stars>1-10
package io.github.rcarlosdasilva.weixin.core.cache;

/**
 * Predicate applied to cached entries when searching the cache.
 *
 * @param <V> cached value type
 */
public interface Lookup<V extends Cacheable> {

  /**
   * Decides whether the given cache entry is the one being looked for.
   *
   * @param key the entry's cache key
   * @param obj the cached value
   * @return {@code true} if this entry matches
   */
  boolean isYou(String key, V obj);

}
/**
 * Created by <NAME> on 05.08.2014.
 */
import { IPersistable } from '../base/IPersistable';
import { IDataType } from '../data/datatype';
import { Range } from '../range/Range';
import { IEventHandler, EventHandler } from '../base/event';
import { ITransform } from './ITransform';
import { ILocateAble } from './ILocateAble';

// Construction-time options for a visualization instance.
export interface IVisInstanceOptions {
  // rotation in degrees — NOTE(review): unit not stated here; confirm.
  rotate?: number;
  // scale factors as [x, y]
  scale?: [number, number];
}

/**
 * basic interface of an visualization instance
 */
export interface IVisInstance extends IPersistable, IEventHandler, ILocateAble {
  /**
   * the unique id of this vis instance
   */
  readonly id: number;

  /**
   * the base element of this vis
   */
  readonly node: Element;

  /**
   * the represented data
   */
  readonly data: IDataType;

  /**
   * current size of this vis
   * @returns [width, height]
   */
  readonly size: [number, number];

  /**
   * the size without transformation applied
   */
  readonly rawSize: [number, number];

  /**
   * flag whether the vis if fully built, if not wait for the 'ready' event
   */
  readonly isBuilt: boolean;

  /**
   * returns the current transformation
   */
  transform(): ITransform;

  /**
   * sets the transformation
   * @param scale [w,h]
   * @param rotate
   */
  transform(scale: [number, number], rotate: number): ITransform;

  /**
   * option getter
   * @param name
   */
  option(name: string): any;

  /**
   * option setter
   * @param name
   * @param value
   */
  option(name: string, value: any): any;

  /**
   * updates this vis
   */
  update(): void;

  /**
   * destroy this vis and deregister handlers,...
   */
  destroy(): void;
}

/**
 * base class for an visualization
 */
export declare class AVisInstance extends EventHandler {
  readonly id: number;
  // internal "fully built" flag backing isBuilt/markReady
  private _built;
  option(name: string, value?: any): any;
  persist(): any;
  get isBuilt(): boolean;
  protected markReady(built?: boolean): void;
  locate(...range: Range[]): Promise<any>;
  locateById(...range: Range[]): Promise<any>;
  locateImpl(range: Range): Promise<any>;
  restore(persisted: any): Promise<AVisInstance>;
  update(): void;
  destroy(): void;
  transform(): ITransform;
  get rawSize(): number[];
  get size(): [number, number];
}
<filename>src/main/java/chylex/hee/mechanics/enhancements/types/TNTEnhancements.java<gh_stars>10-100
package chylex.hee.mechanics.enhancements.types;

import net.minecraft.block.Block;
import net.minecraft.init.Blocks;
import net.minecraft.item.Item;
import chylex.hee.init.BlockList;
import chylex.hee.mechanics.enhancements.EnhancementData;
import chylex.hee.mechanics.enhancements.EnhancementRegistry;

/** Enhancement variants applicable to TNT (vanilla and enhanced) blocks. */
public enum TNTEnhancements{
	NO_BLOCK_DAMAGE, NO_ENTITY_DAMAGE, EXTRA_POWER, TRAP, NOCLIP, FIRE, NO_FUSE;

	/**
	 * Registers this enhancement set for both TNT blocks and sets the item
	 * the block transforms into when enhanced.
	 */
	public static void register(){
		EnhancementData<TNTEnhancements> data = EnhancementRegistry.registerEnhancement(new Block[]{ Blocks.tnt, BlockList.enhanced_tnt }, TNTEnhancements.class);
		data.setTransformationItem(Item.getItemFromBlock(BlockList.enhanced_tnt)); //
	}
}
<filename>TripPlannerApp/controllers/saveItinerary.js
/**
 * Created by rkapoor on 16/09/15.
 *
 * PUT /saveItinerary/:itineraryID — persists the itinerary cached in Redis
 * to the relational store, generating a permalink on first save and
 * updating the existing row on subsequent saves.
 */
var getClient = require('../lib/UtilityFunctions/redisConnection');
var conn = require('../lib/database');
var insertItinerary = require('../lib/insertItinerary');
var updateItinerary = require('../lib/updateItinerary');

module.exports=function (app){
    app.put('/saveItinerary/:itineraryID',function(req,res) {
        var itineraryData = req.body;
        var itineraryID = req.params.itineraryID;
        console.log(itineraryID);
        var redisClient = getClient.getClient();
        // The itinerary must already exist in Redis; otherwise the ID is
        // treated as invalid.
        redisClient.get(itineraryID, function(err, itinerary) {
            if (err || itinerary == null) {
                if (err) {
                    console.log('Error in getting itinerary:' + err);
                }
                res.status(400).json({error: 'Itinerary ID Invalid'})
            } else {
                var saveObject = JSON.parse(itinerary);
                // state 6 marks the itinerary as saved — NOTE(review): the
                // state-machine values are defined elsewhere; confirm.
                saveObject.state = 6;
                saveObject.itineraryData = itineraryData;
                // NOTE(review): this connection is opened per request and
                // never closed/released here — verify the lib handles it.
                var connection=conn.conn('UserInteraction');
                connection.connect();
                if(saveObject.permalinkID != undefined){
                    //Permalink has already been generated
                    updateItinerary.updateItinerary(connection, saveObject, saveObject.permalinkID, onInsert);
                } else {
                    //Permalink has not been generated
                    insertItinerary.insertItinerary(connection, saveObject, onInsert);
                }
                // Shared completion callback: write the permalink back into
                // Redis and answer the client.
                function onInsert(permalink, permalinkID){
                    saveObject.permalinkID = permalinkID;
                    redisClient.set(itineraryID, JSON.stringify(saveObject), function(err){
                        if (err){
                            console.log('Error in setting itinerary:'+err);
                        } else {
                            console.log('Successfully set');
                        }
                    });
                    // Response is sent without waiting for the Redis write
                    // (best-effort cache update).
                    res.json({
                        itineraryID: itineraryID,
                        permalink: permalink
                    });
                }
            }
        });
    });
};
/******************************************************************************* * Copyright (c) 2016 comtel inc. * * Licensed under the Apache License, version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. *******************************************************************************/ package org.jfxvnc.net.rfb.codec.security.vncauth; import java.util.Arrays; import org.jfxvnc.net.rfb.codec.security.RfbSecurityMessage; import org.jfxvnc.net.rfb.codec.security.SecurityType; import org.jfxvnc.net.rfb.render.ProtocolConfiguration; public class VncAuthSecurityMessage implements RfbSecurityMessage { private final byte[] challenge; private byte[] password; private ProtocolConfiguration config; public VncAuthSecurityMessage(byte[] challenge) { this.challenge = challenge; } public byte[] getChallenge() { return challenge; } public String getPassword() { return config.passwordProperty().get(); } @Override public SecurityType getSecurityType() { return SecurityType.VNC_Auth; } @Override public String toString() { return "VncAuthSecurityMessage [challenge=" + Arrays.toString(challenge) + ", password=" + Arrays.toString(password) + "]"; } @Override public void setCredentials(ProtocolConfiguration config) { this.config = config; } }
# Exception chaining is not supported, but check that basic # exception works as expected. try: raise Exception from None except Exception: print("Caught Exception")
#!/bin/bash #SBATCH --job-name=xlnet2-mask195 # create a short name for your job #SBATCH --output=log_xlnet2-mask195.txt #SBATCH --nodes=1 # node count #SBATCH --ntasks-per-node=1 # total number of tasks across all nodes #SBATCH --cpus-per-task=24 # cpu-cores per task (>1 if multi-threaded tasks) #SBATCH --mem=64G # total memory per node (4 GB per cpu-core is default) #SBATCH --gres=gpu:4 # number of gpus per node #SBATCH --time=202:00:00 # total run time limit (HH:MM:SS) module load nvidia/cuda/11.4 source /apps/local/conda_init.sh conda activate hao_vit rm -rf /l/users/hongyiwa/hao/vision_transformer/xlnet2-mask195 XLA_FLAGS=--xla_gpu_cuda_data_dir=/usr/lib/cuda python3 -m vit_jax.main --workdir=/l/users/hongyiwa/hao/vision_transformer/xlnet2-mask195 --config=/l/users/hongyiwa/hao/vision_transformer/vit_jax/configs/xlnet.py:b16 --config.dataset=/l/users/hongyiwa/datasets/ILSVRC2012 --config.model.encoder.predict_pos=True --config.out_dim=196 --config.sigma2=0.2 --config.batch=1024 --config.batch_eval=40 --config.num_mask=195 --config.flip=False
import importlib
import json
import logging


def load_modules_from_config(config_path='module_config.json'):
    """Import every module listed in a JSON config file.

    The file must contain a JSON array of module names. Modules that fail to
    import are logged and skipped; config-level problems (missing file, bad
    JSON) are logged and yield an empty result.

    Args:
        config_path: Path to the JSON config (default preserves the original
            hard-coded ``module_config.json`` behavior).

    Returns:
        Dict mapping each successfully imported module name to the module
        object (previously the imported modules were discarded).
    """
    loaded = {}
    try:
        with open(config_path, 'r') as config_file:
            module_list = json.load(config_file)
    except FileNotFoundError:
        logging.error("Module configuration file not found.")
        return loaded
    except json.JSONDecodeError as e:
        logging.error(f"Error parsing module configuration file: {e}")
        return loaded

    for module_name in module_list:
        try:
            loaded[module_name] = importlib.import_module(module_name)
            print(f"Successfully imported module: {module_name}")
        except ImportError as e:
            logging.error(f"Failed to import module {module_name}: {e}")
    return loaded


if __name__ == "__main__":
    load_modules_from_config()
class RideQueue:
    """Fixed-capacity FIFO queue of visitors waiting for a ride.

    ``capacity`` bounds both the number of waiting visitors and the size of
    the group dispatched by :meth:`next_group`.
    """

    def __init__(self, capacity):
        # Maximum number of visitors allowed to wait (and max group size).
        self.capacity = capacity
        # Visitors in arrival order; front of the list boards first.
        self.queue = []

    def join_queue(self, visitor_id):
        """Try to add a visitor to the queue.

        Returns:
            True if the visitor was admitted, False if the queue was full.
            (Previously a full queue dropped the visitor silently with no
            way for the caller to notice; returning a status is backward
            compatible since the old return value was always None.)
        """
        if len(self.queue) < self.capacity:
            self.queue.append(visitor_id)
            return True
        return False

    def next_group(self):
        """Pop and return up to ``capacity`` visitors in arrival order.

        Returns an empty list when nobody is waiting.
        """
        # Slicing an empty list yields [], so no special-case branch is needed.
        group = self.queue[:self.capacity]
        self.queue = self.queue[self.capacity:]
        return group
#!/bin/bash
#SBATCH --time=30:00:00
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=1
#SBATCH --mem-per-cpu=4gb
#SBATCH --partition=bluemoon
# Outputs ----------------------------------
#SBATCH --output=log/%x_%j.out
#SBATCH --error=log/%x_%j.err
# ------------------------------------------

pwd; hostname; date
set -e

# Submit the job using the variable DATA="data-name"
# sbatch --job-name="jsons-ALC134" --export=DATA="ALC134" metadata_job.sh

#==============Shell script==============#
#Load the software needed
module load python/python-miniconda3-rdchem-deepchem
source activate /gpfs1/home/m/r/mriedel/pace/env/env_bidsify

HOST_DIR="/gpfs1/home/m/r/mriedel"
PROJECT="pace"
DSETS_DIR="${HOST_DIR}/${PROJECT}/dsets"
CODE_DIR="${DSETS_DIR}/code"
BIDS_DIR="${DSETS_DIR}/dset-${DATA}"

ANAT_TEMPLATE="None"
FUNC_TEMPLATE="None"
DWI_TEMPLATE="None"
FMAP_TEMPLATE="None"
MAG_TEMPLATE="None"
MODE="default"
#MODE="sequential"
REF=1
MULTI_SES=False

# Fix json files
cmd="python ${CODE_DIR}/metadata_fix.py \
    --bids_dir ${BIDS_DIR} \
    --mode ${MODE} \
    --ref ${REF} \
    --templates ${ANAT_TEMPLATE} ${FUNC_TEMPLATE} ${DWI_TEMPLATE} ${FMAP_TEMPLATE} ${MAG_TEMPLATE} --multi_ses ${MULTI_SES}"

# Setup done, run the command
echo Commandline: $cmd
# BUG FIX: 'exitcode' was never assigned, so the final 'exit $exitcode' was a
# bare 'exit' and the log line printed an empty code. Capture the real status
# here; the '||' arm also keeps 'set -e' from aborting before we can log it.
exitcode=0
eval $cmd || exitcode=$?
echo "Fix json file for ${DATA} with exit code $exitcode"
date

exit $exitcode
<reponame>jianghw/DrugHousekeeper<gh_stars>0 package com.cjy.flb.activity; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.net.wifi.WifiConfiguration; import android.net.wifi.WifiInfo; import android.net.wifi.WifiManager; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.text.TextUtils; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.RelativeLayout; import android.widget.TextView; import com.cjy.flb.R; import com.cjy.flb.bean.BoxVisitTimeBean; import com.cjy.flb.customView.OsiEditText; import com.cjy.flb.utils.MHttpUtils; import com.cjy.flb.utils.SharedPreUtil; import com.cjy.flb.utils.ToastUtil; import com.cjy.flb.wifi.AiLinkWrapper; import com.cjy.flb.wifi.WifiAdmin; import com.espressif.iot.esptouch.EsptouchTask; import com.espressif.iot.esptouch.IEsptouchResult; import com.espressif.iot.esptouch.IEsptouchTask; import com.google.gson.Gson; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import com.lidroid.xutils.HttpUtils; import com.lidroid.xutils.exception.HttpException; import com.lidroid.xutils.http.RequestParams; import com.lidroid.xutils.http.ResponseInfo; import com.lidroid.xutils.http.callback.RequestCallBack; import com.lidroid.xutils.http.client.HttpRequest; import com.socks.library.KLog; import java.lang.ref.WeakReference; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.Locale; /** * 欢迎界面 */ public class WifiChoiceActivity extends BaseActivity implements View.OnClickListener { private RelativeLayout ssid; private OsiEditText password; private TextView ssidTv; //wifi管理 private WifiManager mWifiManager; private WifiInfo 
mWifiInfo; private Button next; //A-Linik工具 private boolean mSmartLinkStarted = false; private AiLinkWrapper mAiLinkWrapper; private IEsptouchTask mEsptouchTask; private int seconds = 60;//倒计时秒数 private Thread timeThread; private final WifiChoseHandle mHandler = new WifiChoseHandle(this); static class WifiChoseHandle extends Handler { WeakReference<Activity> weakReference; public WifiChoseHandle(Activity activity) { weakReference = new WeakReference<>(activity); } @Override public void handleMessage(Message msg) { WifiChoiceActivity activity = (WifiChoiceActivity) weakReference.get(); if (activity != null) { switch (msg.what) { case 100: activity.endForOpenWifi(); break; case 200: activity.countdownDisplay(); break; case 300: activity.conSucceed(); break; default: break; } } super.handleMessage(msg); } } private void conSucceed() { next.setEnabled(true); seconds = 60; next.setText(getString(R.string.submit_ok)); String string = getIntent().getExtras().getString("from", "no"); if (string.equals("main")) { finish(); } else { startActivity(new Intent(context, AddFlbActivity.class)); } } /* NetConRun netConRun = new NetConRun(this); static class NetConRun implements Runnable { WeakReference<Activity> weakReference; public NetConRun(Activity activity) { weakReference = new WeakReference<>(activity); } @Override public void run() { WifiChoiceActivity activity = (WifiChoiceActivity) weakReference.get(); if (activity != null) { activity.mAiLinkWrapper.stop(); activity.openWifiConnection(); } } }*/ private void countdownDisplay() { next.setText(getString(R.string.connecting) + seconds + "s"); seconds--; if (seconds % 5 == 0) { onBoxVisitTime(); } /* if (seconds == 49 || seconds == 9) {//wifi连接 Thread thread = new Thread(netConRun); thread.start(); }*/ } private void endForOpenWifi() { // mAiLinkWrapper.stop(); next.setEnabled(true); seconds = 60; next.setText(getString(R.string.retry)); /* Thread thread = new Thread(netConRun); thread.start();*/ } @Override public void 
initView() { setContentView(R.layout.activity_wifi_choice); initBar(getString(R.string.set_network_connection), false, true); tvMore.setVisibility(View.VISIBLE); tvMore.setText(getString(R.string.skip)); ssid = (RelativeLayout) findViewById(R.id.rl_patient); ssidTv = (TextView) findViewById(R.id.tv_wifi_ssid); password = (OsiEditText) findViewById(R.id.et_wifi_password); next = (Button) findViewById(R.id.btn_next); } @Override public void initData() { password.initTail(); Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.ic_wifi_password); password.setTitleBitmap(bitmap); String string = getIntent().getExtras().getString("from", "no"); if (string.equals("main")) { tvMore.setVisibility(View.GONE); } mWifiManager = (WifiManager) getSystemService(Context.WIFI_SERVICE); if (mWifiManager.isWifiEnabled()) { mWifiInfo = mWifiManager.getConnectionInfo(); ConnectivityManager mConnectivityManager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo mWifiNetworkInfo = mConnectivityManager.getNetworkInfo(ConnectivityManager.TYPE_WIFI); if (mWifiNetworkInfo.isConnected()) { String mSSID = mWifiInfo.getSSID(); if (mSSID.startsWith("\"") && mSSID.endsWith("\"")) { mSSID = mSSID.substring(1, mSSID.length() - 1); } ssidTv.setText((CharSequence) mSSID); password.getEtContentOnly().requestFocus(); } } } @Override public void initListener() { // ssid.setOnClickListener(this); next.setOnClickListener(this); tvMore.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { startActivity(new Intent(context, AddFlbActivity.class)); } }); } @Override public void onClick(View v) { switch (v.getId()) { case R.id.rl_patient: Intent openCameraIntent = new Intent(context, WifiListActivity.class); startActivityForResult(openCameraIntent, 100); break; case R.id.btn_next: if (TextUtils.isEmpty(password.getEditText())) { ToastUtil.showShort(getString(R.string.info_not_null)); } else { /* if 
(next.getText().toString().equals(getString(R.string.retry))) { onBoxVisitTime(); }*/ showEspTouchToCon(); next.setEnabled(false); } break; default: break; } } // get the wifi info which is "connected" in wifi-setting private String getConnectionInfo() { Log.d("JHW=",mWifiInfo.getBSSID()); return mWifiInfo.getBSSID(); } private void showEspTouchToCon() { new EsptouchAsyncTask3().execute(ssidTv.getText().toString(), getConnectionInfo(), password.getEditText(), "NO", String.valueOf(1)); } /* private IEsptouchListener myListener = new IEsptouchListener() { @Override public void onEsptouchResultAdded(final IEsptouchResult result) { onEsptoucResultAddedPerform(result); } }; private void onEsptoucResultAddedPerform(final IEsptouchResult result) { runOnUiThread(new Runnable() { @Override public void run() { String text = result.getBssid() + " is connected to the wifi"; } }); }*/ private class EsptouchAsyncTask3 extends AsyncTask<String, Void, List<IEsptouchResult>> { private IEsptouchTask mEsptouchTask; private final Object mLock = new Object(); @Override protected void onPreExecute() { timeThread = new Thread() { @Override public void run() { try { while (!isInterrupted() && seconds > 0) { mHandler.sendEmptyMessage(200); sleep(1000); } } catch (InterruptedException e) { e.printStackTrace(); } finally { mHandler.sendEmptyMessage(100); } } }; timeThread.start(); } @Override protected List<IEsptouchResult> doInBackground(String... 
params) { int taskResultCount = 0; synchronized (mLock) { String apSsid = params[0]; String apBssid = params[1]; String apPassword = params[2]; String isSsidHiddenStr = params[3]; String taskResultCountStr = params[4]; boolean isSsidHidden = false; if (isSsidHiddenStr.equals("YES")) { isSsidHidden = true; } taskResultCount = Integer.parseInt(taskResultCountStr); mEsptouchTask = new EsptouchTask(apSsid, apBssid, apPassword, isSsidHidden, WifiChoiceActivity.this); // mEsptouchTask.setEsptouchListener(myListener); } List<IEsptouchResult> resultList = mEsptouchTask.executeForResults(taskResultCount); for(IEsptouchResult iEsptouchResult:resultList){ KLog.json("xxxi"+new Gson().toJson(iEsptouchResult)); } return resultList; } @Override protected void onPostExecute(List<IEsptouchResult> result) { IEsptouchResult firstResult = result.get(0); if (!firstResult.isCancelled()) { int count = 0; final int maxDisplayCount = 5; if (firstResult.isSuc()) { StringBuilder sb = new StringBuilder(); for (IEsptouchResult resultInList : result) { sb.append("Esptouch success, bssid = " + resultInList.getBssid() + ",InetAddress = " + resultInList.getInetAddress().getHostAddress() + "\n"); count++; if (count >= maxDisplayCount) { break; } } if (count < result.size()) { sb.append("\nthere's " + (result.size() - count) + " more result(s) without showing\n"); } mHandler.sendEmptyMessage(300); } else { mHandler.sendEmptyMessage(100); } } } } private void showDialogToCon() { if (mSmartLinkStarted) { mSmartLinkStarted = false; if ("AI-LINK".equalsIgnoreCase("AI-LINK")) { mAiLinkWrapper.stop(); } else { if (mEsptouchTask != null) { mEsptouchTask.interrupt(); } } } else { if (mWifiManager.isWifiEnabled()) { if (ssidTv.getText().toString().isEmpty()) { ToastUtil.showShort(getString(R.string.select_wireless)); } else { if (!password.getEditText().isEmpty()) { if (password.getEditText().length() < 6) { ToastUtil.showShort(getString(R.string.regist_fail_password_long)); return; } } if (mAiLinkWrapper == 
null) { mAiLinkWrapper = new AiLinkWrapper( ssidTv.getText().toString().trim(), password.getEditText(), context); } else { mAiLinkWrapper.setSSID(ssidTv.getText().toString().trim()); mAiLinkWrapper.setPassword(password.getEditText()); } mAiLinkWrapper.start(); timeThread = new Thread() { @Override public void run() { try { while (!isInterrupted() && seconds > 0) { mHandler.sendEmptyMessage(200); sleep(1000); } } catch (InterruptedException e) { e.printStackTrace(); } finally { mHandler.sendEmptyMessage(100); } } }; timeThread.setPriority(10); timeThread.start(); } } else { mAiLinkWrapper.stop(); ToastUtil.showShort(getString(R.string.select_wireless)); } } } private void openWifiConnection() { Log.d("openWifi", Thread.currentThread().getName()); WifiAdmin.getInstance(context).openWifi(); WifiConfiguration wc = new WifiConfiguration(); String SSID = ssidTv.getText().toString(); wc.SSID = "\"" + SSID + "\""; //配置wifi的SSID,即该热点的名称,如:TP-link_xxx wc.preSharedKey = "\"" + <PASSWORD>() + "\""; wc.hiddenSSID = true; wc.status = WifiConfiguration.Status.ENABLED; wc.allowedGroupCiphers.set(WifiConfiguration.GroupCipher.TKIP); wc.allowedGroupCiphers.set(WifiConfiguration.GroupCipher.CCMP); wc.allowedKeyManagement.set(WifiConfiguration.KeyMgmt.WPA_PSK); wc.allowedPairwiseCiphers.set(WifiConfiguration.PairwiseCipher.TKIP); wc.allowedPairwiseCiphers.set(WifiConfiguration.PairwiseCipher.CCMP); wc.allowedProtocols.set(WifiConfiguration.Protocol.RSN); int res = mWifiManager.addNetwork(wc); Log.d("WifiPreference", "add Network returned " + res); boolean b = mWifiManager.enableNetwork(res, true); Log.d("WifiPreference", "enableNetwork returned " + b); } private void onBoxVisitTime() { RequestParams params = new RequestParams(); params.addHeader("Authorization", "Basic " + MHttpUtils.REGISTER_BASIC); String boxId = SharedPreUtil.getString("FlbID"); params.addBodyParameter("device_uid", boxId); HttpUtils httpUtils = new HttpUtils(); httpUtils.configDefaultHttpCacheExpiry(0); /* try { 
ResponseStream responseStream = httpUtils.sendSync(HttpRequest.HttpMethod.GET, MHttpUtils.VISIT_URL + boxId, params); String entity = responseStream.readString(); KLog.json(entity); whetherBoxConnected(entity); } catch (Exception e) { e.printStackTrace(); }*/ httpUtils.send(HttpRequest.HttpMethod.GET, MHttpUtils.VISIT_URL + boxId, params, new RequestCallBack<String>() { @Override public void onSuccess(ResponseInfo<String> responseInfo) { KLog.json(responseInfo.result); try { whetherBoxConnected(responseInfo); } catch (Exception e) { e.printStackTrace(); } } @Override public void onFailure(HttpException e, String s) { KLog.d(s); } }); } private void whetherBoxConnected(ResponseInfo<String> responseInfo) throws Exception { JsonElement jsonElement = new JsonParser().parse(responseInfo.result); if (jsonElement != null && jsonElement.isJsonObject()) { JsonObject jsonObject = jsonElement.getAsJsonObject(); if (jsonObject.has("failure_index")) { ToastUtil.showShort(jsonObject.get("failure_index").getAsString()); } else { // JsonElement e = jsonObject.get("response"); BoxVisitTimeBean bean = new Gson().fromJson(jsonElement, BoxVisitTimeBean.class); //2016-03-26T11:52:54.297+08:00 String lastVisit = bean.getResponse().getLast_visit(); KLog.i(lastVisit); String[] s = lastVisit.split("T"); String day = s[0]; String time = s[1].subSequence(0, 8).toString(); KLog.i(day + "====" + time); StringBuilder sb = new StringBuilder(); sb.append(day).append(time); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-ddHH:mm:ss", Locale.SIMPLIFIED_CHINESE); long myTime = sdf.parse(sdf.format(new Date())).getTime(); long boxTime = sdf.parse(sb.toString()).getTime(); if ((myTime - boxTime) <= 1000 * 60 * 12) {//时间间隔在40s内有效 mHandler.sendEmptyMessage(300); } } } } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); //处理扫描结果(在界面上显示) if (resultCode == RESULT_OK && requestCode == 100 && data != null) { 
Bundle bundle = data.getExtras(); String scanResult = bundle.getString("result", "no ~ ~"); ssidTv.setText(scanResult); } } @Override protected void onResume() { super.onResume(); } @Override protected void onPause() { super.onPause(); if (timeThread != null) { timeThread.interrupt(); } } @Override protected void onDestroy() { super.onDestroy(); if (timeThread != null) { timeThread.interrupt(); } } }
'use strict';


N.wire.once('navigate.done:' + module.apiPath, function page_once() {

  // Keep the "OK" button state in sync with the confirmation checkbox:
  // enabled only while the checkbox is ticked.
  //
  N.wire.on(module.apiPath + ':change', function toggle_ok_button(data) {
    let checked = data.$this.prop('checked');

    $('#dialogs-unsubscribe').prop('disabled', !checked);
  });


  // Disable dialog notifications on the server, then navigate to the
  // confirmation ("done") page.
  //
  N.wire.on(module.apiPath + ':confirm', async function disable_notifications() {
    await N.io.rpc('users.dialogs_root.unsubscribe.exec');
    await N.wire.emit('navigate.to', { apiPath: 'users.dialogs_root.unsubscribe.done' });
  });
});
<gh_stars>0 import helpers from typing import Iterable, Tuple import itertools import collections def sign(n: int) -> int: if n < 0: return -1 if n > 0: return 1 return 0 def trajectory(xv: int, yv: int) -> Iterable[Tuple[int, int, int]]: x, y = 0, 0 step = 0 while True: x += xv y += yv xv += -sign(xv) yv -= 1 step += 1 yield step, x, y def inside_box(min_x: int, x: int, max_x: int, min_y: int, y: int, max_y: int) -> bool: return (min_x <= x <= max_x) and (min_y <= y <= max_y) def viable( xv: int, min_x: int, x: int, max_x: int, yv: int, min_y: int, y: int, max_y: int ) -> bool: if xv == 0 and not (min_x <= x <= max_x): return False if x > max_x: return False if y < min_y: return False return True def main() -> None: min_x, max_x = 20, 30 min_y, max_y = -10, -5 min_x, max_x = 143, 177 min_y, max_y = -106, -71 candidates = set() for xv in range(0, 1000): for yv in range(-1000, 1000): for step, x, y in trajectory(xv, yv): if not viable(xv, min_x, x, max_x, yv, min_y, y, max_y): break if inside_box(min_x, x, max_x, min_y, y, max_y): candidates.add((step, x, y)) print(candidates) print(len(candidates)) main()
#!/bin/bash
#bdereims@vmware.com
#
# Install Kubeapps into its own namespace via the Bitnami Helm chart and
# create a cluster-admin service account for it.
#
# FIX: the script previously ran every step even after an earlier one failed
# (e.g. installing the chart after 'helm repo add' failed); fail fast instead.
set -euo pipefail

helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
kubectl create namespace kubeapps
helm install kubeapps --namespace kubeapps bitnami/kubeapps
kubectl -n kubeapps create serviceaccount kubeapps-operator
# NOTE: cluster-admin for the operator account mirrors the upstream Kubeapps
# quick-start; scope it down for anything beyond a demo cluster.
kubectl create clusterrolebinding kubeapps-operator --clusterrole=cluster-admin --serviceaccount=kubeapps:kubeapps-operator
#!/bin/sh # Copyright (c) 2015, Plume Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the Plume Design Inc. nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL Plume Design Inc. BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# FUT environment loading
# shellcheck disable=SC1091
# Bring in the FUT shell framework (provides 'log' and 'raise'), the optional
# per-run environment, the FSM helper library, and any platform/model overrides.
source /tmp/fut-base/shell/config/default_shell.sh
[ -e "/tmp/fut-base/fut_set_env.sh" ] && source /tmp/fut-base/fut_set_env.sh
source "${FUT_TOPDIR}/shell/lib/fsm_lib.sh"
[ -e "${PLATFORM_OVERRIDE_FILE}" ] && source "${PLATFORM_OVERRIDE_FILE}" || raise "${PLATFORM_OVERRIDE_FILE}" -ofm
[ -e "${MODEL_OVERRIDE_FILE}" ] && source "${MODEL_OVERRIDE_FILE}" || raise "${MODEL_OVERRIDE_FILE}" -ofm

# Test-case name used in log/raise output, derived from this script's filename.
tc_name="fsm/$(basename "$0")"
usage()
{
cat << usage_string
${tc_name} [-h] arguments
Description:
    - Script checks existence of FSM Plugin lib file
Arguments:
    -h  show this help message
    \$1 (expected_user_agent) : Plugin lib file path : (string)(required)
Script usage example:
   ./${tc_name} custom_user_agent
usage_string
}
# Only a leading help flag is recognized; any other first argument falls
# through to positional-argument handling below.
if [ -n "${1}" ]; then
    case "${1}" in
    help | \
    --help | \
    -h)
        usage && exit 1
        ;;
    *)
        ;;
    esac
fi

# INPUT ARGUMENTS:
NARGS=1
[ $# -lt ${NARGS} ] && raise "Requires at least '${NARGS}' input argument(s)" -arg
# $1: absolute path of the FSM plugin shared library to check.
plugin_lib_file_path=${1}

log "Checking if FSM Plugin lib file exists"
# Pass/fail verdict: 'raise' aborts the test case when the file is missing.
[ -f "${plugin_lib_file_path}" ] && log "FSM plugin lib ${plugin_lib_file_path} file exists - Success" || raise "Missing ${plugin_lib_file_path} FSM plugin lib file" -tc "${tc_name}" -s
/**
 * Safely parse a JSON string.
 *
 * Returns `defaultValue` when the input is empty/undefined or is not valid
 * JSON, instead of letting `JSON.parse` throw.
 */
function JSONParseSafe(jsonString: string, defaultValue: any = {}) {
  let result = defaultValue;

  if (jsonString) {
    try {
      result = JSON.parse(jsonString);
    } catch (error) {
      // Malformed JSON: fall back to the caller-supplied default.
      result = defaultValue;
    }
  }

  return result;
}

export { JSONParseSafe };
# Trackback failure response: error code '1' plus a human-readable message,
# rendered by the xml builder supplied by the framework.
xml.instruct!
xml.response do |response|
  response.error '1'
  # Typo fix: "occured" -> "occurred" in the client-facing error message.
  response.message "An error occurred"
end
<gh_stars>0 moreInfo New Window [ Name: "More Info"; Left: Get ( WindowDesktopWidth ) - (Get ( WindowDesktopWidth ) / 2 ) ] Go to Layout [ “moreInfo” (budget) ] Adjust Window [ Resize to Fit ] Pause/Resume Script [ Indefinitely ] February 6, 平成26 9:28:43 Budget Planner.fp7 - moreInfo -1-
/*
 * Copyright 2015 Textocat
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.textocat.textokit.commons.cas;

import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.TypeSystem;

import java.util.LinkedHashSet;
import java.util.List;

/**
 * Bunch of utility methods related to {@link Type} inspection.
 *
 * @author <NAME>
 */
public class FSTypeUtils {

    /** Fully qualified name of UIMA's base annotation type. */
    public static final String ANNOTATION_TYPE_NAME = "uima.tcas.Annotation";

    // Static utility class; not instantiable.
    private FSTypeUtils() {
    }

    /**
     * Returns the base annotation type from the given type system.
     *
     * @throws IllegalStateException if the type system has no
     *             {@code uima.tcas.Annotation} type (it is expected to always exist).
     */
    public static Type getAnnotationType(TypeSystem ts) {
        Type result = ts.getType(ANNOTATION_TYPE_NAME);
        if (result == null) {
            throw new IllegalStateException();
        }
        return result;
    }

    /**
     * Looks up a feature on a type by its base name.
     *
     * @param mustExist when true, a missing feature raises
     *            {@link IllegalArgumentException}; when false, null is returned.
     */
    public static Feature getFeature(Type type, String featName, boolean mustExist) {
        Feature feature = type.getFeatureByBaseName(featName);
        if (feature == null && mustExist) {
            throw new IllegalArgumentException(String.format(
                    "Feature %s#%s does not exist", type.getName(), featName));
        }
        return feature;
    }

    /**
     * Looks up a type by fully qualified name.
     *
     * @param mustExist when true, a missing type raises
     *            {@link IllegalArgumentException}; when false, null is returned.
     */
    public static Type getType(TypeSystem ts, String typeName, boolean mustExist) {
        Type result = ts.getType(typeName);
        if (result == null && mustExist) {
            throw new IllegalArgumentException(String.format(
                    "Type %s does not exist", typeName));
        }
        return result;
    }

    /**
     * Example:
     * <p>
     * Result for type 'org.test.internal.Foobar':
     * </p>
     * <p>
     * org, org.test, org.test.internal
     * </p>
     *
     * @param t
     * @return namespaces of the given type name, ordered from top-level to
     *         the lowest.
     */
    public static LinkedHashSet<String> getNamespaces(Type t) {
        if (t == null) {
            throw new NullPointerException("type");
        }
        LinkedHashSet<String> result = Sets.newLinkedHashSet();
        String[] nameSplit = t.getName().split("\\.");
        List<String> packageNames = Lists.newArrayList(nameSplit);
        // remove type short name
        packageNames.remove(packageNames.size() - 1);
        // generate namespaces
        Joiner nsJoiner = Joiner.on('.');
        // Each iteration joins one more package segment, producing the
        // progressively longer namespace prefixes in insertion order.
        for (int i = 0; i < packageNames.size(); i++) {
            String ns = nsJoiner.join(packageNames.subList(0, i + 1));
            result.add(ns);
        }
        assert result.size() == packageNames.size();
        return result;
    }
}
import java.util.HashSet;
import java.util.Set;

/**
 * Demonstrates generating the power set (all subsets) of a set using bitmask
 * enumeration: bit {@code j} of counter {@code i} decides whether the j-th
 * element (in iteration order) is included in the i-th subset.
 */
public class Subsets {

    /**
     * Returns the power set of {@code set} (2^n subsets for n elements).
     *
     * Extracted from {@code main} so it can be reused and tested. The old code
     * also pre-added the full input set before the loop; that was redundant —
     * the iteration with all bits set produces an equal set, and HashSet
     * de-duplicates — so the pre-add has been dropped.
     */
    public static Set<Set<Integer>> powerSet(Set<Integer> set) {
        Set<Set<Integer>> subsets = new HashSet<Set<Integer>>();
        int size = set.size();
        for (int i = 0; i < (1 << size); i++) {
            Set<Integer> subset = new HashSet<Integer>();
            int index = 0;
            for (Integer element : set) {
                // Include element iff its bit is set in the counter.
                if ((i & (1 << index)) > 0) {
                    subset.add(element);
                }
                index++;
            }
            subsets.add(subset);
        }
        return subsets;
    }

    public static void main(String[] args) {
        Set<Integer> set = new HashSet<Integer>();
        set.add(1);
        set.add(2);
        set.add(3);

        for (Set<Integer> subset : powerSet(set)) {
            System.out.println(subset);
        }
    }
}
// Save dialog filter settings // 'use strict'; module.exports = function (N, apiPath) { N.validate(apiPath, { hide_answered: { format: 'boolean', required: true } }); // Check auth // N.wire.before(apiPath, function check_auth(env) { if (!env.user_info.is_member) throw N.io.FORBIDDEN; }); // Store setting if specified, fetch it otherwise // N.wire.on(apiPath, async function save_filter(env) { let store = N.settings.getStore('user'); await store.set({ dialogs_hide_answered: { value: env.params.hide_answered } }, { user_id: env.user_info.user_id }); }); };
#!/usr/bin/env bash JDK_VERSION=110 JDK_PKG=openjdk-11-jdk PROJECT_PATH=. PATH_PROFILE=/etc/profile ANDROID_HOME=$HOME/Android/Sdk ANDROID_HOME_CACHE=$ANDROID_HOME/.cache # Get JDK Version javaVersion() { echo $(java -version 2>&1 | sed -n ';s/.* version "\(.*\)\.\(.*\)\..*".*/\1\2/p;') } # Check and install package with APT checkAndInstall() { echo "Checking $1 ..." if [ "$(command -v $1)" = "" ]; then echo "Installing $1 now!" apt -qq install -y $1 echo "$1 installed!" fi } initEnv() { echo "Init environment" if [ ! -f $PATH_PROFILE ]; then echo "Path profile '$PATH_PROFILE' not found!" exit 1 fi echo "Updating APT index ..." apt -qq update checkAndInstall curl checkAndInstall wget checkAndInstall unzip } checkJDK() { echo "Checking JDK ..." if [ "$(javaVersion)" != "$JDK_VERSION" ]; then echo "Installing $JDK_PKG now!" apt -qq install -y $JDK_PKG echo "$JDK_PKG installed!" fi } checkAndroidCmdLineTools() { echo "Checking Android command line tools ..." if [ "$(command -v sdkmanager)" = "" ]; then echo "Installing Android command line tools now!" DOWNLOAD_LINK=$(curl -s "https://developer.android.com/studio" | grep -Eo "https://dl.google.com/android/repository/commandlinetools-linux-.*_latest.zip") DOWNLOAD_FILE_PATH=$ANDROID_HOME_CACHE/commandlinetools.zip wget -q --no-check-certificate -O "$DOWNLOAD_FILE_PATH" "$DOWNLOAD_LINK" if [ ! -f $DOWNLOAD_FILE_PATH ]; then echo "Failed to download Android command line tools!" exit 1 fi if [ -d $ANDROID_HOME/cmdline-tools ]; then echo "Removing old Android command line tools ..." rm -rf $ANDROID_HOME/cmdline-tools fi unzip -qq -d $ANDROID_HOME_CACHE/ $DOWNLOAD_FILE_PATH rm -rf $DOWNLOAD_FILE_PATH mkdir -p $ANDROID_HOME/cmdline-tools mv $ANDROID_HOME_CACHE/cmdline-tools $ANDROID_HOME/cmdline-tools/latest echo "" >> /etc/profile echo "# Android command line tools" >> /etc/profile echo "export PATH=\$PATH:$ANDROID_HOME/cmdline-tools/latest/bin" >> $PATH_PROFILE . $PATH_PROFILE echo "Android command line tools installed!" 
fi } checkAndroidSdkPath() { echo "Checking Android sdk path ..." if [ ! -d $HOME/.android ]; then mkdir $HOME/.android fi if [ ! -f $HOME/.android/repositories.cfg ]; then touch $HOME/.android/repositories.cfg fi if [ ! -d $ANDROID_HOME ]; then mkdir -p $ANDROID_HOME fi if [ ! -d $ANDROID_HOME_CACHE ]; then mkdir -p $ANDROID_HOME_CACHE fi if [ "$ANDROID_SDK_ROOT" != "$ANDROID_HOME" ]; then echo "" >> /etc/profile echo "# Android SDK" >> /etc/profile echo "export ANDROID_SDK_ROOT=$ANDROID_HOME" >> $PATH_PROFILE . $PATH_PROFILE fi } checkAndroidSdk() { SDK_MANAGER=$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager echo "Checking Android SDK Licenses ..." yes | $SDK_MANAGER --licenses > /dev/null echo "Checking Android tools ..." $SDK_MANAGER "tools" > /dev/null echo "Checking Android platform-tools ..." $SDK_MANAGER "platform-tools" > /dev/null } checkLocalProperties() { echo "Checking project local.properties ..." if [ -f $PROJECT_PATH/local.properties ]; then echo "Removing old local.properties ..." rm -rf $PROJECT_PATH/local.properties fi echo "# This profile is generated by script." >> $PROJECT_PATH/local.properties echo "# Location of the SDK. This is only used by Gradle." >> $PROJECT_PATH/local.properties echo "# $(date)" >> $PROJECT_PATH/local.properties echo "sdk.dir=$ANDROID_HOME" >> $PROJECT_PATH/local.properties } initEnv checkJDK checkAndroidSdkPath checkAndroidCmdLineTools checkAndroidSdk checkLocalProperties echo "Setup complete!"
from enum import Enum


class ProjectTypeAPI(str, Enum):
    """String-valued enum of project categories accepted by the API."""

    all = "all"
    template = "template"
    user = "user"

    @classmethod
    def get_available_project_types(cls):
        """Return every project-type value as a plain list of strings."""
        values = []
        for member in cls:
            values.append(member.value)
        return values
<filename>orchestrate/orchestrate/handler/rendezvous_test.go package handler import ( "testing" "github.com/jmoiron/sqlx" "gopkg.in/DATA-DOG/go-sqlmock.v1" ) func TestGetTestHelpers(t *testing.T) { mockDB, mock, err := sqlmock.New() if err != nil { t.Fatalf("an error '%s' was not expected when opening a stub database connection", err) } defer mockDB.Close() db := sqlx.NewDb(mockDB, "sqlmock") rows := sqlmock.NewRows([]string{"name", "type", "address"}). AddRow("example", "https", "https://example.com") mock.ExpectQuery("^SELECT name, type, address"). WillReturnRows(rows) th, err := GetTestHelpers("onion", db) if err != nil { t.Errorf("error in calling GetTestHelpers: %s", err) } if len(th) != 1 { t.Errorf("inconsistent count: %d", len(th)) } if th[0].Address != "https://example.com" { t.Errorf("adress does not match: %s", th[0].Address) } if th[0].Type != "https" { t.Errorf("type does not match: %s", th[0].Type) } } func TestGetCollectors(t *testing.T) { mockDB, mock, err := sqlmock.New() if err != nil { t.Fatalf("an error '%s' was not expected when opening a stub database connection", err) } defer mockDB.Close() db := sqlx.NewDb(mockDB, "sqlmock") rows := sqlmock.NewRows([]string{"type", "address", "front_domain"}). AddRow("onion", "http://example.onion", ""). AddRow("https", "https://example.onion", ""). AddRow("domain_fronted", "domain.com", "cdn.com") mock.ExpectQuery("^SELECT type, address, front_domain"). WillReturnRows(rows) th, err := GetCollectors("", db) if err != nil { t.Errorf("error in calling GetTestHelpers: %s", err) } if len(th) != 3 { t.Errorf("inconsistent count: %d", len(th)) } if th[2].Address != "domain.com@cdn.com" { t.Errorf("wrong format of address: %s", th[2].Address) } }
package io.gridgo.bean;

import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;

/**
 * A read-only view of a {@link BObject}: every mutating default method throws
 * {@link UnsupportedOperationException}. Read operations are inherited
 * unchanged from {@link BObject}.
 */
public interface ImmutableBObject extends BObject {

    /**
     * @deprecated Kept only for backward compatibility. A Java exception
     *             captures its stack trace at construction time, so throwing
     *             this shared, pre-built instance reported the stack of class
     *             initialization instead of the offending call site. The
     *             methods below now throw a fresh instance on every call.
     */
    @Deprecated
    static final UnsupportedOperationException UNSUPPORTED = new UnsupportedOperationException(
            "Instance of ImmutableBObject cannot be modified");

    @Override
    default BElement put(String key, BElement value) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default BElement remove(Object key) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default void putAll(Map<? extends String, ? extends BElement> m) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default void replaceAll(BiFunction<? super String, ? super BElement, ? extends BElement> function) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default BElement putIfAbsent(String key, BElement value) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default boolean remove(Object key, Object value) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default boolean replace(String key, BElement oldValue, BElement newValue) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default BElement replace(String key, BElement value) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default void clear() {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default BElement computeIfAbsent(String key, Function<? super String, ? extends BElement> mappingFunction) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default BElement computeIfPresent(String key,
            BiFunction<? super String, ? super BElement, ? extends BElement> remappingFunction) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default BElement compute(String key,
            BiFunction<? super String, ? super BElement, ? extends BElement> remappingFunction) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }

    @Override
    default BElement merge(String key, BElement value,
            BiFunction<? super BElement, ? super BElement, ? extends BElement> remappingFunction) {
        throw new UnsupportedOperationException("Instance of ImmutableBObject cannot be modified");
    }
}
#!/bin/bash -eu

# Install and preconfigure the Geany editor (repo + AUR packages, default
# per-user config in /etc/skel). Expects /setup.dat (one value per line) and
# /arch_func.sh providing install_packs, install_packs_aur and retry.

# Read the bootstrap values. $(sed 'Nq;d' file) replaces the original
# backtick `cat | sed` pipeline (useless use of cat, legacy backticks).
BG=$(sed '1q;d' /setup.dat)
SETUP_SCRIPT_LOCATION=$(sed '2q;d' /setup.dat)
TESTSYSTEM=$(sed '3q;d' /setup.dat)
INSTALLHEAD=$(sed '4q;d' /setup.dat)

source /arch_func.sh

echo "       Geany"

# Official repository packages vs. AUR packages.
PKGS="geany geany-plugins"
AURPKGS="geany-themes-git"

install_packs "$PKGS"
install_packs_aur "$AURPKGS"

# Seed the default Geany configuration for newly created users.
mkdir -p /etc/skel/.config/geany/
retry "wget -q -O /etc/skel/.config/geany/geany.conf ${SETUP_SCRIPT_LOCATION}/02_apps/geany/geany.conf"

exit 0
def compute_average(list_of_numbers):
    """Return the arithmetic mean of ``list_of_numbers``.

    Uses the ``sum`` builtin instead of a manual accumulation loop.

    Args:
        list_of_numbers: a sized iterable of numbers (must support ``len``).

    Returns:
        The mean as a float (or the element type's division result).

    Raises:
        ZeroDivisionError: if ``list_of_numbers`` is empty (unchanged from
            the original behavior, so existing callers' error handling still
            works).
    """
    return sum(list_of_numbers) / len(list_of_numbers)
#!/bin/sh . bin/activate cd examples export FLASK_APP=auth_basic #export FLASK_APP=auth_database #export FLASK_APP=auth_withrole #export FLASK_APP=email_account #export FLASK_APP=starter_1 export FLASK_ENV=development flask run
#!/usr/bin/env bats load helpers/general export SCRIPT_LOCATION="scripts/roxe_build.sh" export TEST_LABEL="[roxe_build_centos]" [[ $ARCH == "Linux" ]] || exit 0 # Exit 0 is required for pipeline [[ $NAME == "CentOS Linux" ]] || exit 0 # Exit 0 is required for pipeline # A helper function is available to show output and status: `debug` # Testing Root user ./tests/bash-bats/modules/root-user.sh # Testing Options ./tests/bash-bats/modules/dep_script_options.sh # Testing CMAKE ./tests/bash-bats/modules/cmake.sh # Testing Clang ./tests/bash-bats/modules/clang.sh # Testing MongoDB ./tests/bash-bats/modules/mongodb.sh ## Needed to load roxe_build_ files properly; it can be empty @test "${TEST_LABEL} > General" { set_system_vars # Obtain current machine's resources and set the necessary variables (like JOBS, etc) execute-always yum -y --enablerepo=extras install centos-release-scl &>/dev/null install-package devtoolset-8 WETRUN &>/dev/null # Ensure SCL and devtoolset-8 for c++ binary installation run bash -c "printf \"y\n%.0s\" {1..100}| ./${SCRIPT_LOCATION} -i /NEWPATH" [[ ! -z $(echo "${output}" | grep "centos-release-.*centos.noarch found") ]] || exit [[ ! -z $(echo "${output}" | grep "devtoolset-8.* found") ]] || exit [[ ! -z $(echo "${output}" | grep "Executing: source /opt/rh/devtoolset-8/enable") ]] || exit [[ ! -z $(echo "${output}" | grep "Executing: make -j${JOBS}") ]] || exit [[ ! -z $(echo "${output}" | grep "Starting ROXE Dependency Install") ]] || exit [[ ! -z $(echo "${output}" | grep "Executing: eval /usr/bin/yum -y update") ]] || exit [[ ! -z $(echo "${output}" | grep "Python36 successfully enabled") ]] || exit [[ -z $(echo "${output}" | grep "- NOT found") ]] || exit [[ ! -z $(echo "${output}" | grep "Ensuring CMAKE") ]] || exit [[ ! -z $(echo "${output}" | grep /NEWPATH.*/src/boost) ]] || exit [[ ! -z $(echo "${output}" | grep "Starting ROXE Build") ]] || exit [[ ! -z $(echo "${output}" | grep "make -j${CPU_CORES}") ]] || exit [[ ! 
-z $(echo "${output}" | grep "ROXE has been successfully built") ]] || exit uninstall-package devtoolset-8* WETRUN &>/dev/null uninstall-package centos-release-scl WETRUN &>/dev/null }
# Oil Blocks #### cd with block shopt -s oil:all # OLDPWD is NOT defined cd / { echo $PWD; echo OLDPWD=${OLDPWD:-} }; echo done echo $(basename $PWD) # restored cd /tmp { echo PWD=$PWD echo -sep ' ' pwd builtin: $(pwd) } echo $(basename $PWD) # restored ## STDOUT: / OLDPWD= done oil-blocks.test.sh PWD=/tmp pwd builtin: /tmp oil-blocks.test.sh ## END #### cd with block: fatal error in block shopt -s oil:all cd / { echo one false echo two } ## status: 1 ## STDOUT: one ## END #### cd with block: return in block shopt -s oil:all f() { cd / { echo one return echo two } echo 'end func' } f ## STDOUT: one end func ## END #### cd with block: break in block shopt -s oil:all f() { cd / { echo one for i in 1 2; do echo $i break # break out of loop done break # break out of block isn't valid echo two } echo end func } f ## status: 1 ## STDOUT: one 1 ## END #### cd with block exits with status 0 shopt -s oil:all cd / { echo block # This return value is ignored. # Or maybe this should be a runtime error? return 1 } echo status=$? ## STDOUT: block status=0 ## END #### block has its own scope, e.g. shadows outer vars shopt -s oil:all var x = 1 cd / { #set y = 5 # This would be an error because set doesn't do dynamic lookup var x = 42 echo "inner x = $x" } echo "outer x = $x" ## STDOUT: inner x = 42 outer x = 1 ## END
class DataHandler:
    """Couples a value with its record format and length.

    A ``format`` string starting with "Fortran" is parsed into a
    ``FortranFormat`` object (defined elsewhere in this project); any other
    format string is stored verbatim.
    """

    def __init__(self, format, length, text=None, data=None):
        # Fortran-style formats get a dedicated parser object.
        if format.startswith("Fortran"):
            self.format = FortranFormat(format)
        else:
            self.format = format
        self.length = length
        # NOTE(review): when text and data are BOTH None, BOTH _output() and
        # _input() run; when both are supplied, neither runs. Confirm whether
        # the intent was "text supplied -> output mode, data supplied ->
        # input mode" (i.e. these `is None` conditions may be inverted).
        if text is None:
            self._output()
        if data is None:
            self._input()

    def _output(self):
        # Implementation for handling the output operation
        # This method should handle the output operation for the data
        pass  # Placeholder for the actual implementation

    def _input(self):
        # Implementation for handling the input operation
        # This method should handle the input operation for the data
        pass  # Placeholder for the actual implementation

    def __len__(self):
        # Reports the caller-supplied length; the stored payload itself is
        # never measured here.
        return self.length
// source: old-katas/btree-kata/btree-kata-day-2/src/main/java/kata/java/BTreeSet.java
package kata.java;

import java.util.Arrays;

/**
 * Kata implementation of an int set backed by a B-tree of fixed-size pages.
 * Day-2 practice code: several review notes below flag behavior that looks
 * unfinished or incorrect.
 */
public class BTreeSet {

    private final int pageSize;
    private Page root;
    private int height;

    public BTreeSet(int pageSize) {
        this.pageSize = pageSize;
        this.root = new Page();
        this.height = 1;
    }

    /** Returns true when {@code key} is stored in the tree. */
    public boolean contain(int key) {
        return contain(root, key, height);
    }

    private boolean contain(Page page, int key, int height) {
        if (height == 1) {
            // Leaf level: linear scan; a null slot ends the occupied prefix.
            for (Entity e : page.keys) {
                if (e == null) {
                    return false;
                }
                if (e.key == key) {
                    return true;
                }
            }
            return false;
        } else {
            // Internal level: descend into the child whose key range covers
            // the searched key.
            for (int i = 1; i < pageSize; i++) {
                if (page.keys[i - 1] == null) {
                    return false;
                }
                if (page.keys[i] == null || key < page.keys[i].key) {
                    return contain(page.keys[i - 1].next, key, height - 1);
                }
            }
            return false;
        }
    }

    /** Inserts {@code key}; duplicates are not filtered out here. */
    public void add(int key) {
        add(root, key, height);
    }

    private void add(Page page, int key, int height) {
        int i = 0;
        if (height == 1) {
            // Find the insertion point among the sorted occupied slots.
            while (i < pageSize && page.keys[i] != null && page.keys[i].key < key) {
                i++;
            }
        } else {
            // NOTE(review): with i starting at 0, keys[i - 1] reads keys[-1]
            // on the first iteration -> ArrayIndexOutOfBoundsException for
            // height > 1. The loop probably should start at 1 (as in
            // contain()). Confirm before relying on multi-level trees.
            while (i < pageSize) {
                if (page.keys[i - 1] == null) {
                    return;
                }
                if (page.keys[i] == null || key < page.keys[i].key) {
                    add(page.keys[i - 1].next, key, height - 1);
                }
                i++;
            }
        }
        if (i > pageSize - 1) {
            // Page is full: split it and grow the tree.
            // NOTE(review): height is bumped on EVERY split, not only on root
            // splits — verify this is intended.
            split(page, key);
            this.height += 1;
        } else {
            page.add(key);
        }
    }

    /**
     * Splits a full page into two children and turns {@code page} into a
     * two-entry internal node pointing at them; {@code key} goes into the
     * right child.
     */
    private void split(Page page, int key) {
        Page left = new Page();
        Page right = new Page();
        int leftLength = pageSize / 2 + 1;
        int j = 0;
        while (j < leftLength) {
            left.add(page.keys[j].key);
            j++;
        }
        while (j < pageSize) {
            right.add(page.keys[j].key);
            j++;
        }
        right.add(key);
        page.size = 2;
        page.keys[0].key = left.keys[0].key;
        page.keys[0].next = left;
        page.keys[1].key = right.keys[0].key;
        page.keys[1].next = right;
        for (int i = 2; i < pageSize; i++) {
            page.keys[i] = null;
        }
    }

    /** Current tree height (1 == a single leaf page). */
    public int height() {
        return height;
    }

    /** Fixed-capacity array of entries; null slots mark unused capacity. */
    private class Page {
        private Entity[] keys;
        private int size;

        private Page() {
            this.keys = new Entity[pageSize];
            this.size = 0;
        }

        @Override
        public String toString() {
            return "{ Page " + Arrays.toString(keys) + " }";
        }

        // Appends at the end; assumes the caller maintains sorted order.
        public void add(int key) {
            keys[size] = new Entity();
            keys[size].key = key;
            size += 1;
        }
    }

    /** A key plus (for internal pages) the child holding keys >= key. */
    private class Entity {
        private int key;
        private Page next;

        @Override
        public String toString() {
            return "[ Entity key - " + key + " ]";
        }
    }
}
#!/bin/bash # # Usage: # ./src/admission_control_sim.R [--] [--help] [--consider-mem] [--opts opts] [--cpu-capacity-factor factor] \ # [--mem-capacity-factor factor] [--cpu-load-factor factor] [--mem-load-factor factor] \ # [--slo-scenario scenario] [--output-file-prefix prefix] method # Input parameters for admission control simulation: # # mandatory arguments: # # method Name of the admission control method. # Options: <greedy-norejection, greedy-quota, forecast-mean-quota, forecast-ets-quota> # # flags: # # -h, --help Show this help message and exit # # -cmem, --consider-mem Flag that defines if memory is considered in admission control decisions. # [default: FALSE] # # optional arguments: # # -x, --opts OPTS RDS file containing argument values # # -ccf, --cpu-capacity-factor FACTOR Decimal factor applied to the original cloud CPU capacity. # A factor = 1 simulates the cloud with the same CPU capacity found in the # traces. [default: 1] # # -mcf, --mem-capacity-factor FACTOR Decimal factor applied to the original cloud memory capacity. # A factor = 1 simulates the cloud with the same memory capacity found in # the traces. [default: 1] # # -clf, --cpu-load-factor FACTOR Decimal factor applied to the original cloud CPU load. # A factor = 1 simulates the cloud with the same CPU load # (requested resources) found in the traces. [default: 1] # # -mlf, --mem-load-factor FACTOR Decimal factor applied to the original cloud Memory load. # A factor = 1 simulates the cloud with the same Memory load # (requested resources)found in the traces. [default: 1] # # -s, --slo-scenario SCENARIO Integer that identifies the availability SLO scenario. Possible values: # 1 (medium); 2 (very low); 3 (low); 4 (high); 5 (very high). [default: 1] # # -o, --output-file-prefix PREFIX Prefix for the CSV output file names resulted from simulations. 
#                                      [default: "res"]
#
#
# Sample execution of a base scenario:
#
# ./src/admission_control_sim.R forecast-ets-quota --cpu-capacity-factor 1 --mem-capacity-factor 1 --cpu-load-factor 1 \
#                               --mem-load-factor 1 --slo-scenario 1 --consider-mem
#
# For details, see paper:
# Prediction-Based Admission Control for IaaS Clouds with Multiple Service. CloudCom'2015.
#
###################################################################################################

SCRIPT_FILE="src/admission_control_sim.R"

# Forward the command line verbatim. "$@" keeps every argument as its own
# word, so quoted arguments containing spaces survive intact; the previous
# `ARGS=$*` + unquoted expansion flattened and re-split them.
Rscript "$SCRIPT_FILE" "$@"
#!/bin/bash
# Publish a .NET project to ./bin and create a wrapper launcher script.
# Usage: publish.sh <ProjectName>   (expects <ProjectName>/<ProjectName>.csproj)
set -euo pipefail

# publish to bin directory; quoting protects project names with spaces
dotnet publish "$1/$1.csproj" -o ../bin --framework netcoreapp2.0

# create an executable launcher; $PWD is expanded NOW so the script works
# from any directory, and "$@" inside it forwards the runtime arguments
echo "dotnet $PWD/bin/$1.dll \"\$@\"" > "bin/$1"
# 755 (owner-writable, world-executable) instead of the needlessly
# world-writable 777
chmod 755 "bin/$1"
#!/usr/bin/env bash set -x echo -e "\n\nInstalling a fresh version of Miniforge." if [[ ${CI} == "travis" ]]; then echo -en 'travis_fold:start:install_miniforge\\r' fi MINIFORGE_URL="https://github.com/conda-forge/miniforge/releases/latest/download" MINIFORGE_FILE="Miniforge3-MacOSX-x86_64.sh" curl -L -O "${MINIFORGE_URL}/${MINIFORGE_FILE}" bash $MINIFORGE_FILE -b if [[ ${CI} == "travis" ]]; then echo -en 'travis_fold:end:install_miniforge\\r' fi echo -e "\n\nConfiguring conda." if [[ ${CI} == "travis" ]]; then echo -en 'travis_fold:start:configure_conda\\r' fi source ${HOME}/miniforge3/etc/profile.d/conda.sh conda activate base echo -e "\n\nInstalling conda-forge-ci-setup=3 and conda-build." conda install -n base --quiet --yes conda-forge-ci-setup=3 conda-build pip echo -e "\n\nSetting up the condarc and mangling the compiler." setup_conda_rc ./ ./recipe ./.ci_support/${CONFIG}.yaml mangle_compiler ./ ./recipe .ci_support/${CONFIG}.yaml echo -e "\n\nMangling homebrew in the CI to avoid conflicts." /usr/bin/sudo mangle_homebrew /usr/bin/sudo -k echo -e "\n\nRunning the build setup script." source run_conda_forge_build_setup if [[ ${CI} == "travis" ]]; then echo -en 'travis_fold:end:configure_conda\\r' fi set -e echo -e "\n\nMaking the build clobber file and running the build." make_build_number ./ ./recipe ./.ci_support/${CONFIG}.yaml conda build ./recipe -m ./.ci_support/${CONFIG}.yaml --clobber-file ./.ci_support/clobber_${CONFIG}.yaml validate_recipe_outputs "biorbd-optim-feedstock" if [[ "${UPLOAD_PACKAGES}" != "False" ]]; then echo -e "\n\nUploading the packages." upload_package --validate --feedstock-name="biorbd-optim-feedstock" ./ ./recipe ./.ci_support/${CONFIG}.yaml fi
package com.treetasks.application.data.service;

import com.treetasks.application.data.entity.Category;
import org.springframework.data.jpa.repository.JpaRepository;

/**
 * Spring Data JPA repository for {@link Category} entities (Integer primary
 * key). All CRUD operations are inherited from {@link JpaRepository}; no
 * custom queries are declared.
 */
// Removed the unused java.time.LocalDate import.
public interface CategoryRepository extends JpaRepository<Category, Integer> {
}
public static boolean isPrime(int num) { //check if input is less than or equal to 1 if (num <= 1) { return false; } //check for all numbers below the given number for (int i = 2; i < num; i++) { if (num % i == 0) { return false; } } //if it passes all tests, it is a prime number return true; }
// repo: nanov/cqrs-examples-core
'use strict';

// Declares the "hr" bounded context for the cqrs-domain framework; aggregate
// definitions in this directory tree are grouped under it.
module.exports = require('cqrs-domain').defineContext({
  // optional, default is the directory name
  name: 'hr',
});
// gh_stars: 0
"use strict";

// Auto-generated icon definition ("file-code" octicon): an SVG viewBox plus a
// single <path> node, consumed by an icon renderer. Regenerate rather than
// hand-editing the path data.
Object.defineProperty(exports, "__esModule", { value: true });
exports.fileCode = void 0;
var fileCode = {
  "viewBox": "0 0 12 16",
  "children": [{
    "name": "path",
    "attribs": {
      "fill-rule": "evenodd",
      "d": "M8.5 1H1c-.55 0-1 .45-1 1v12c0 .55.45 1 1 1h10c.55 0 1-.45 1-1V4.5L8.5 1zM11 14H1V2h7l3 3v9zM5 6.98L3.5 8.5 5 10l-.5 1L2 8.5 4.5 6l.5.98zM7.5 6L10 8.5 7.5 11l-.5-.98L8.5 8.5 7 7l.5-1z"
    },
    "children": []
  }],
  "attribs": {}
};
exports.fileCode = fileCode;
// source: elements/lisk-elements/src/index.ts
/*
 * Copyright © 2019 Lisk Foundation
 *
 * See the LICENSE file at the top-level directory of this distribution
 * for licensing information.
 *
 * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
 * no part of this software, including this file, may be copied, modified,
 * propagated, or distributed except according to the terms contained in the
 * LICENSE file.
 *
 * Removal or modification of this copyright notice is prohibited.
 *
 */

// Barrel module: re-exports every @liskhq package under a stable namespace so
// consumers can `import { cryptography, transactions } from 'lisk-elements'`.
export * as apiClient from '@liskhq/lisk-api-client';
export * as cryptography from '@liskhq/lisk-cryptography';
export * as p2p from '@liskhq/lisk-p2p';
export * as passphrase from '@liskhq/lisk-passphrase';
export * as transactionPool from '@liskhq/lisk-transaction-pool';
export * as transactions from '@liskhq/lisk-transactions';
export * as utils from '@liskhq/lisk-utils';
export * as tree from '@liskhq/lisk-tree';
export * as validator from '@liskhq/lisk-validator';
export * as codec from '@liskhq/lisk-codec';
export * as db from '@liskhq/lisk-db';
export * as chain from '@liskhq/lisk-chain';
export * as bft from '@liskhq/lisk-bft';
export * as genesis from '@liskhq/lisk-genesis';
<gh_stars>0 package server /* * mimi * * Copyright (c) 2018 beito * * This software is released under the MIT License. * http://opensource.org/licenses/mit-license.php **/ import ( "errors" "path/filepath" "strings" "github.com/beito123/mimi/util" ) type LoaderManager struct { Loaders map[string]Loader } func (lm *LoaderManager) Get(name string) (Loader, bool) { loader, ok := lm.Loaders[name] if !ok { return nil, false } return loader.New(), true } func (lm *LoaderManager) Add(loader Loader) { lm.Loaders[loader.Name()] = loader } func (lm *LoaderManager) Remove(name string) { delete(lm.Loaders, name) } type Loader interface { Name() string Path() string Cmd() (string, []string) Init(path string, options map[string]string) error New() Loader } type PMMPLoader struct { path string PHPPath string MainPath string Args []string } func (PMMPLoader) Name() string { return "PMMP" } func (loader *PMMPLoader) Path() string { return loader.path } func (loader *PMMPLoader) Init(path string, options map[string]string) (err error) { loader.path, err = filepath.Abs(filepath.Clean(path)) if err != nil { return err } phpPath, ok := options["php"] if ok { loader.PHPPath = phpPath } else { if util.IsWin() { loader.PHPPath = loader.path + "/bin/php/php.exe" } else { loader.PHPPath = loader.path + "/bin/php/php" } } mainPath, ok := options["main"] if ok { loader.MainPath = mainPath } else { if util.ExistFile(loader.path + "/src/pocketmine/PocketMine.php") { loader.MainPath = loader.path + "/src/pocketmine/PocketMine.php" } else { loader.MainPath = loader.path + "/PocketMine-MP.phar" } } args, ok := options["args"] if ok { loader.Args = strings.Fields(args) } else { loader.Args = []string{"-c", "bin/php"} } // check if util.ExistFile(loader.Program()) { return errors.New("Couldn't find php program") } if util.ExistFile(loader.Target()) { return errors.New("Couldn't find PMMP program") } return nil } func (loader *PMMPLoader) Program() string { return loader.PHPPath } func (loader 
*PMMPLoader) Target() string { return loader.MainPath } func (loader *PMMPLoader) Cmd() (string, []string) { return loader.Program(), append(loader.Args, loader.Target()) } func (PMMPLoader) New() Loader { return new(PMMPLoader) }
import sqlite3

# Count the rows of the "table" table in example.db and print the count.
#
# Fixes vs. the original:
# * "table" is an SQL keyword, so the identifier must be quoted for the
#   statement to parse.
# * COUNT(*) lets SQLite do the counting instead of materialising every row
#   in memory via fetchall().
# * The connection is always closed, even when the query fails.
conn = sqlite3.connect('example.db')
try:
    cursor = conn.cursor()
    cursor.execute('SELECT COUNT(*) FROM "table"')
    num_records = cursor.fetchone()[0]
    print(num_records)
finally:
    conn.close()
#!/bin/bash # Copyright 2017 The Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This script will scan all md (markdown) files for bad references. # It will look for strings of the form [...](...) and make sure that # the (...) points to either a valid file in the source tree or, in the # case of it being an http url, it'll make sure we don't get a 404. # # Usage: verify-links.sh [ dir | file ... ] # default arg is root of our source tree set -o errexit set -o nounset set -o pipefail REPO_ROOT=$(dirname "${BASH_SOURCE}")/.. verbose="" debugFlag="" maxRetries="1" skipExternal="" skips="" stop="" tmp=/tmp/out${RANDOM} trap clean EXIT seenFiles=( ":" ) # just to prevent "undefined" errors # findPrevious will search for a file to see if we've seen it before. # If we have then return the matching "anchorFile". If we haven't # seen it then add it to "seenFiles" and create a new "anchorFile". # $1 == search file # Note we can't use a map because bash on a mac doesn't support it. 
foundAnchor=""
# Looks up $1 in seenFiles ("path:anchorFile" pairs). Returns 0 and sets
# foundAnchor when already seen; otherwise registers a new anchor file,
# sets foundAnchor to it and returns 1.
function findPreviousFile() {
  for f in "${seenFiles[@]}" ; do
    orig=${f%%:*}
    if [[ "${orig}" == "$1" ]]; then
      foundAnchor=${f#*:}
      return 0
    fi
  done

  # Didn't it so create a new anchorFile and save it for next time
  foundAnchor="${tmp}-anchors-${RANDOM}-${RANDOM}"
  seenFiles+=("$1:${foundAnchor}")
  return 1
}

# Prints its args to stderr, but only when -d was given.
function debug {
  if [[ "$debugFlag" != "" ]]; then
    (>&2 echo $*)
  fi
}

# Removes every temp file created by this run (also used by the EXIT trap).
function clean {
  rm -f ${tmp}*
}

# Hand-rolled option parser: supports bundled short flags (e.g. -dv) and
# "--" to stop option processing.
while [[ "$#" != "0" && "$1" == "-"* ]]; do
  opts="${1:1}"
  while [[ "$opts" != "" ]]; do
    case "${opts:0:1}" in
      d) debugFlag="1" ; verbose="1" ;;
      s) word=${opts:1}
         # -s takes its word either attached (-sWORD) or as the next arg.
         if [[ "${word}" == "" && "$2" != "" && "$2" != "-"* ]]; then
           word=$2
           shift
         fi
         if [[ "${word}" == "" ]]; then
           echo "Missing arg for -s flag"
           exit 1
         fi
         skips="${skips} ${word}"
         opts=""
         ;;
      t) maxRetries="5" ;;
      v) verbose="1" ;;
      x) skipExternal="1" ;;
      -) stop="1" ;;
      ?) echo "Usage: $0 [OPTION]... [DIR|FILE]..."
         echo "Verify all links in markdown files."
         echo
         echo "  -d       show each href as it is found"
         echo "  -sWORD   skip files with 'WORD' in them"
         echo "  -t       retry GETs to http(s) URLs 5 times"
         echo "  -v       show each file as it is checked"
         echo "  -x       skip checking non-local hrefs"
         echo "  -?       show this help text"
         echo "  --       treat remainder of args as dir/files"
         exit 0
         ;;
      *) echo "Unknown option '${opts:0:1}'"
         exit 1
         ;;
    esac
    opts="${opts:1}"
  done
  shift
  if [[ "$stop" == "1" ]]; then
    break
  fi
done

# echo verbose:$verbose
# echo debugFlag:$debugFlag
# echo args:$*

# With no dir/file args, scan the whole repo.
arg=""
if [ "$*" == "" ]; then
  arg="${REPO_ROOT}"
fi

# Default to skipping some well-known golang dirs
SKIPS="${SKIPS:=vendor glide} ${skips}"

# Collect all .md files, dropping any whose path matches a SKIPS pattern.
mdFiles=$(find $* $arg -name "*.md" | sort | ( while read line ; do
  skip=false
  for pattern in ${SKIPS:=}; do
    if [[ "${line}" == *"${pattern}"* ]]; then
      skip=true
      break
    fi
  done
  [[ "${skip}" == "true" ]] && continue
  echo $line
done ))

clean
for file in ${mdFiles}; do
  # echo scanning $file
  dir=$(dirname $file)

  [[ -n "$verbose" ]] && echo "> $file"

  # Replace ) with )\n so that each possible href is on its own line.
  # Then only grab lines that have [..](..) in them - put results in tmp file.
  # If the file doesn't have any lines with [..](..) then skip this file
  # Steps:
  # tr   - convert all \n to a space since newlines shouldn't change anything
  # sed  - add a \n after each ) since ) ends what we're looking for.
  #        This makes it so that each href is on a line by itself
  # sed  - prefix each line with a space so the grep can do [^\\]
  # grep - find all lines that match [...](...)
  # Macs require this funky newline stuff
  cat $file | \
    tr '\n' ' ' | \
    sed 's/)/)\
/g' | \
    sed "s/^/ /g" | \
    grep "[^\\]\[.*\](.*)" > ${tmp}1 || true

  # This sed will extract the href portion of the [..](..) - meaning
  # the stuff in the parens.
  sed "s/.*\[*\]\([^()]*\)/\1/" < ${tmp}1 > ${tmp}2 || true

  # Look for bookmark URLs
  # NOTE(review): this `>` overwrites the hrefs just written to ${tmp}2 by
  # the sed above — confirm whether append (`>>`) was intended.
  cat $file | sed -n "s/^ *\[.*\]: //p" > ${tmp}2 || true
  cat $file | sed -n "s/^ *\[.*\]: .*/&/p" > ${tmp}bks || true

  # Look for bookmarks
  cat $file | \
    tr '\n' ' ' | \
    sed -e 's/\[[^][]*\]\[[^][]*\]/&\
/g' | \
    sed -n -e 's/^.*\[.*\[\(.*\)\]$/\1/p' > "${tmp}links" || true

  # Every referenced bookmark must have a matching "[name]: ..." definition.
  cat ${tmp}links | while read bk ; do
    grep -q "^ *\\[${bk}\\]: " ${tmp}bks || echo "$file: Can't find bookmark '[$bk]'" | \
      tee -a ${tmp}3
  done

  # Skip file if there are no matches
  [ ! -s ${tmp}2 ] && continue

  cat ${tmp}2 | while read line ; do
    # Strip off the leading and trailing parens
    ref=${line#*(}
    ref=${ref%)*}

    # Strip off any "title" associated with the href
    ref=$(echo $ref | sed 's/ ".*//')

    # Strip off leading and trailing spaces
    ref=$(echo $ref | sed "s/^ *//" | sed "s/ *$//")

    # Show all hrefs - mainly for verifying in our tests
    debug "Checking: '$ref'"

    # An external href (ie. starts with http(s): )
    if [ "${ref:0:5}" == "http:" ] || [ "${ref:0:6}" == "https:" ]; then
      if [ "$skipExternal" == "1" ]; then
        continue
      fi
      # GET the URL, retrying up to maxRetries times before reporting it.
      try=0
      while true ; do
        if curl -f -s -k --connect-timeout 10 ${ref} > /dev/null 2>&1 ; then
          break
        fi
        sleep 3
        let try=try+1
        if [ ${try} -eq ${maxRetries} ]; then
          extra=""
          if [ ${try} -gt 1 ]; then
            extra="(tried ${try} times) "
          fi
          echo $file: Can\'t load url: ${ref} ${extra} | tee -a ${tmp}3
          break
        fi
        sleep 1
      done
      continue
    fi

    # Skip "mailto:" refs
    if [ "${ref:0:7}" == "mailto:" ]; then
      continue
    fi

    # Local file link (i.e. ref contains a #)
    if [[ "${ref/\#}" != "${ref}" ]]; then

      # If ref doesn't start with "#" then update filepath
      if [ "${ref:0:1}" != "#" ]; then
        # Split ref into filepath and the section link
        reffile=$(echo ${ref} | awk -F"#" '{print $1}')
        fullpath=${dir}/${reffile}
        ref=$(echo ${ref} | awk -F"#" '{$1=""; print $0}')
      else
        # Just a "#section" ref into the current file.
        fullpath=${file}
        ref=${ref:1}
      fi

      if [[ ! -e "${fullpath}" ]]; then
        echo "$file: Can't find referenced file '${fullpath}'" | \
          tee -a ${tmp}3
        continue
      fi

      # Remove leading and trailing spaces
      ref=$(echo ${ref} | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//')

      # If we've seen this file before then grab its processed tmp file
      if findPreviousFile "${fullpath}" ; then
        anchorFile="${foundAnchor}"
      else
        anchorFile="${foundAnchor}"

        # Search file for sections
        used=""  # anchors used, seen+twiddled ones

        # Find all section headers in the file.
        # Remove leading & trailing spaces.
        # Lower case it.
        # Convert spaces to "-".
        # Drop all non alphanumeric chars.
        # Twiddle section anchor if we've seen it before.
        grep "^[[:space:]]*#" < ${fullpath} | \
          sed 's/[[:space:]]*##*[[:space:]]*//' | \
          sed 's/[[:space:]]*$//' | \
          tr '[:upper:]' '[:lower:]' | \
          sed 's/\[\([^\[]*\)\](\([^()]*\))/\1/' | \
          sed "s/ */-/g" | \
          sed "s/[^-a-zA-Z0-9]//g" | while read section ; do
            # If we haven't used this exact anchor before just use it now
            if [[ "${used}" != *" ${section} "* ]]; then
              anchor=${section}
            else
              # We've used this anchor before so add "-#" to the end.
              # Keep adding 1 to "#" until we find a free spot.
              let num=1
              while true; do
                anchor="${section}-${num}"
                if [[ "${used}" != *" ${anchor} "* ]]; then
                  break
                fi
                let num+=1
              done
            fi
            echo "${anchor}"
            used="${used} ${anchor} "
            debug "Mapped section '${section}' to '${anchor}'"
          done > ${anchorFile} || true

        # Add sections of the form <a name="xxx">
        # Macs require this funky newline stuff
        grep "<a name=" <${fullpath} | \
          sed 's/<a name="/\
<a name="/g' | \
          sed 's/^.*<a name="\(.*\)">.*$/\1/' | \
          sort | uniq >> ${anchorFile} || true
        # echo sections ; cat ${tmp}sections1
      fi

      # Skip refs of the form #L<num> and assume its pointing to a line
      # number of a file and those don't have anchors
      if [[ "${ref}" =~ ^L([0-9])+$ ]]; then
        continue
      fi

      # Finally, look for the ref in the list of sections/anchors
      debug "Anchor file(${fullpath}): ${anchorFile}"
      if ! grep "^${ref}$" ${anchorFile} > /dev/null 2>&1 ; then
        echo $file: Can\'t find section \'\#${ref}\' in ${fullpath} | \
          tee -a ${tmp}3
      fi

      continue
    fi

    newPath=${dir}/${ref}

    # And finally make sure the file is there
    # debug line: echo ref: $ref "->" $newPath
    if [[ ! -e "${newPath}" ]]; then
      echo $file: Can\'t find: ${newPath} | tee -a ${tmp}3
    fi
  done
done

# ${tmp}3 only exists when at least one error was reported above.
rc=0
if [ -a ${tmp}3 ]; then
  rc=1
fi
exit $rc
/* Endianness helpers for lite_pack: byte-order conversion wrappers for
 * 16/32/64-bit integers plus a runtime big-endian test. */
#ifndef LITE_PACK_ENDIAN_H
#define LITE_PACK_ENDIAN_H

#include "lite_pack/compiler.h"
#include <arpa/inet.h>
#include <stdbool.h>

/* macOS keeps endian.h under machine/. */
#if __APPLE__
#include <machine/endian.h>
#else
#include <endian.h>
#endif

#if BYTE_ORDER != LITTLE_ENDIAN && BYTE_ORDER != BIG_ENDIAN
#error We are supporting little and big endian only for now.
#endif

/* Platforms without htonll (e.g. glibc) get the 64-bit conversions defined
 * here; the compiler builtins fall back on the GCC/Clang byte-order macros. */
#ifndef htonll

#ifndef BYTE_ORDER
#define BYTE_ORDER __BYTE_ORDER__
#endif

#ifndef BIG_ENDIAN
#define BIG_ENDIAN __BIG_ENDIAN__
#endif

#if BYTE_ORDER == BIG_ENDIAN
/* Network order IS big endian, so these are identities on BE hosts. */
static inline uint64_t htonll(uint64_t x) { return x; }
static inline uint64_t ntohll(uint64_t x) { return x; }
#else
static inline uint64_t htonll(uint64_t x) { return __builtin_bswap64(x); }
static inline uint64_t ntohll(uint64_t x) { return __builtin_bswap64(x); }
#endif

#endif

/* Named, width-specific wrappers so the _Generic dispatch below can select
 * a conversion by operand type. */
static inline uint16_t __lip_htons(uint16_t x) { return htons(x); }
static inline uint32_t __lip_htonl(uint32_t x) { return htonl(x); }
static inline uint64_t __lip_htonll(uint64_t x) { return htonll(x); }

static inline uint16_t __lip_ntohs(uint16_t x) { return ntohs(x); }
static inline uint32_t __lip_ntohl(uint32_t x) { return ntohl(x); }
static inline uint64_t __lip_ntohll(uint64_t x) { return ntohll(x); }

/* Convert to big (network) endian, dispatched on the operand's width;
 * signed operands reuse the unsigned conversion of the same width. */
#define __lip_big_endian(x)                                                    \
    _Generic((x), uint16_t                                                     \
             : __lip_htons, uint32_t                                           \
             : __lip_htonl, uint64_t                                           \
             : __lip_htonll, int16_t                                           \
             : __lip_htons, int32_t                                            \
             : __lip_htonl, int64_t                                            \
             : __lip_htonll)(x)

/* Convert from big (network) endian back to host order. */
#define __lip_host_endian(x)                                                   \
    _Generic((x), uint16_t                                                     \
             : __lip_ntohs, uint32_t                                           \
             : __lip_ntohl, uint64_t                                           \
             : __lip_ntohll, int16_t                                           \
             : __lip_ntohs, int32_t                                            \
             : __lip_ntohl, int64_t                                           \
             : __lip_ntohll)(x)

/* Compile-time constant folded to true on big-endian targets. */
static inline bool __lip_is_big_endian(void)
{
#if BYTE_ORDER == BIG_ENDIAN
    return true;
#else
    return false;
#endif
}

#endif
import xml.etree.ElementTree as ET


def parse_ofx_data(ofx_data: str) -> dict:
    """Extract the ledger balance from an OFX document (XML flavour).

    Args:
        ofx_data: OFX text containing a ``LEDGERBAL`` element with ``BALAMT``
            (balance) and ``DTASOF`` (as-of date) children.

    Returns:
        dict with keys ``'balance_amount'`` (float) and ``'date_as_of'`` (str).

    Raises:
        ValueError: if LEDGERBAL, BALAMT or DTASOF is missing (the original
            crashed with an opaque AttributeError on None instead).
        xml.etree.ElementTree.ParseError: if ``ofx_data`` is not well-formed.
    """
    root = ET.fromstring(ofx_data)
    ledger_bal = root.find('.//LEDGERBAL')
    if ledger_bal is None:
        raise ValueError("OFX data has no LEDGERBAL element")
    bal_amt = ledger_bal.find('BALAMT')
    date_as_of = ledger_bal.find('DTASOF')
    if bal_amt is None or date_as_of is None:
        raise ValueError("LEDGERBAL is missing BALAMT or DTASOF")
    return {
        'balance_amount': float(bal_amt.text),
        'date_as_of': date_as_of.text,
    }


# Demo fixture; guarded so importing this module has no side effects.
ofx_data = '''
<OFX>
  <BANKMSGSRSV1>
    <STMTTRNRS>
      <STMTRS>
        <BANKTRANLIST>
          ...
        </BANKTRANLIST>
        <LEDGERBAL>
          <BALAMT>17752.42</BALAMT>
          <DTASOF>20130930</DTASOF>
        </LEDGERBAL>
      </STMTRS>
    </STMTTRNRS>
  </BANKMSGSRSV1>
</OFX>
'''

if __name__ == '__main__':
    result = parse_ofx_data(ofx_data)
    print(result)  # {'balance_amount': 17752.42, 'date_as_of': '20130930'}
#!/bin/bash # TYPO3 Installation Backup Restore Script # written by Oliver Salzburg set -o nounset set -o errexit SELF=$(basename "$0") # Show the help for this script function showHelp() { cat << EOF Usage: $0 [OPTIONS] [--file=]<FILE> Core: --help Display this help and exit. --verbose Display more detailed messages. --quiet Do not display anything. --force Perform actions that would otherwise abort the script. --update Tries to update the script to the latest version. --update-check Checks if a newer version of the script is available. --export-config Prints the default configuration of this script. --extract-config Extracts configuration parameters from TYPO3. --base=PATH The name of the base path where TYPO3 is installed. If no base is supplied, "typo3" is used. Options: --file=FILE The file in which the backup is stored. Database: --hostname=HOST The name of the host where the TYPO3 database is running. --username=USER The username to use when connecting to the TYPO3 database. --password=PASSWORD The password to use when connecting to the TYPO3 database. --database=DB The name of the database in which TYPO3 is stored. EOF } # Print the default configuration to ease creation of a config file. function exportConfig() { # Spaces are escaped here to avoid sed matching this line when exporting the # configuration sed -n "/#\ Script\ Configuration\ start/,/# Script Configuration end/p" "$0" } # Extract all known (database related) parameters from the TYPO3 configuration. 
# Extract the database connection settings from the TYPO3 configuration and
# print them as NAME=value lines (HOST/USER/PASS/DB), suitable for pasting
# into a typo3scripts config file. Supports legacy localconf.php and, via the
# external configurationProxy.php helper, the newer LocalConfiguration.php.
function extractConfig() {
  LOCALCONF="$BASE/typo3conf/localconf.php"
  LOCALCONFIGURATION="$BASE/typo3conf/LocalConfiguration.php"

  if [[ -r "$LOCALCONF" ]]; then
    # 'tac' reverses the file so the value of the *last* assignment wins,
    # matching PHP semantics when a setting is defined more than once.
    echo HOST=$(tac "$LOCALCONF" | grep --perl-regexp --only-matching "(?<=typo_db_host = ')[^']*(?=';)")
    echo USER=$(tac "$LOCALCONF" | grep --perl-regexp --only-matching "(?<=typo_db_username = ')[^']*(?=';)")
    echo PASS=$(tac "$LOCALCONF" | grep --perl-regexp --only-matching "(?<=typo_db_password = ')[^']*(?=';)")
    echo DB=$(tac "$LOCALCONF" | grep --perl-regexp --only-matching "(?<=typo_db = ')[^']*(?=';)")
  elif [[ -r "$LOCALCONFIGURATION" ]]; then
    if [[ ! -e "./configurationProxy.php" ]]; then
      echo "Required 'configurationProxy.php' is missing."
      exit 1
    fi
    echo HOST=$(./configurationProxy.php --get=TYPO3_CONF_VARS.DB.host)
    echo USER=$(./configurationProxy.php --get=TYPO3_CONF_VARS.DB.username)
    echo PASS=$(./configurationProxy.php --get=TYPO3_CONF_VARS.DB.password)
    echo DB=$(./configurationProxy.php --get=TYPO3_CONF_VARS.DB.database)
  else
    # Fix: previously this branch only printed the error and returned success;
    # signal failure so --extract-config exits non-zero when nothing was found.
    echo "Unable to find readable configuration file." >&2
    return 1
  fi
}

# Check on minimal command line argument count
REQUIRED_ARGUMENT_COUNT=1
if [[ $# -lt $REQUIRED_ARGUMENT_COUNT ]]; then
  echo "Insufficient command line arguments!" >&2
  echo "Use $0 --help to get additional information." >&2
  exit 1
fi

# Script Configuration start
# Should the script give more detailed feedback?
VERBOSE=false
# Should the script suppress all feedback?
QUIET=false
# Should the script ignore reasons that would otherwise cause it to abort?
FORCE=false
# The base directory where TYPO3 is installed
BASE=typo3
# The file to restore the backup from
FILE=
# The hostname of the MySQL server that TYPO3 uses
HOST=localhost
# The username used to connecto to that MySQL server
USER=root
# The password for that user
PASS=*password*
# The name of the database in which TYPO3 is stored
DB=typo3
#Script Configuration end

# Write a message (no newline) to stderr unless --quiet is active.
function consoleWrite() {
  [ "false" == "$QUIET" ] && echo -n $* >&2
  return 0
}
# Write a message line to stderr unless --quiet is active.
function consoleWriteLine() {
  [ "false" == "$QUIET" ] && echo $* >&2
  return 0
}
# Like consoleWrite, but only when --verbose is active.
function consoleWriteVerbose() {
  $VERBOSE && consoleWrite $*
  return 0
}
# Like consoleWriteLine, but only when --verbose is active.
function consoleWriteLineVerbose() {
  $VERBOSE && consoleWriteLine $*
  return 0
}

# The base location from where to retrieve new versions of this script
UPDATE_BASE=https://raw.github.com/oliversalzburg/typo3scripts/master

# Update check: compare the md5 of this script (sans shebang) against the
# published hash list. Returns 0 = up to date, 1 = update available,
# 2 = check not possible.
function updateCheck() {
  if ! hash curl 2>&-; then
    consoleWriteLine "Update checking requires curl. Check skipped."
    return 2
  fi
  SUM_LATEST=$(curl $UPDATE_BASE/versions 2>&1 | grep $SELF | awk '{print $2}')
  SUM_SELF=$(tail --lines=+2 "$0" | md5sum | awk '{print $1}')
  consoleWriteLineVerbose "Remote hash source: '$UPDATE_BASE/versions'"
  consoleWriteLineVerbose "Own hash: '$SUM_SELF' Remote hash: '$SUM_LATEST'"
  if [[ "" == $SUM_LATEST ]]; then
    consoleWriteLine "No update information is available for '$SELF'"
    consoleWriteLine "Please check the project home page 'https://github.com/oliversalzburg/typo3scripts'."
    return 2
  elif [[ "$SUM_LATEST" != "$SUM_SELF" ]]; then
    consoleWriteLine "NOTE: New version available!"
    return 1
  fi
  return 0
}

# Self-update
function runSelfUpdate() {
  echo "Performing self-update..."
  _tempFileName="$0.tmp"
  _payloadName="$0.payload"

  # Download new version
  echo -n "Downloading latest version..."
  if ! wget --quiet --output-document="$_payloadName" $UPDATE_BASE/$SELF ; then
    echo "Failed: Error while trying to wget new version!"
    echo "File requested: $UPDATE_BASE/$SELF"
    exit 1
  fi
  echo "Done."

  # Restore shebang
  _interpreter=$(head --lines=1 "$0")
  echo $_interpreter > "$_tempFileName"
  tail --lines=+2 "$_payloadName" >> "$_tempFileName"
  rm "$_payloadName"

  # Copy over modes from old version
  OCTAL_MODE=$(stat -c '%a' $SELF)
  if ! chmod $OCTAL_MODE "$_tempFileName" ; then
    echo "Failed: Error while trying to set mode on $_tempFileName."
    exit 1
  fi

  # Spawn update script that replaces this file after we exit.
  cat > updateScript.sh << EOF
#!/bin/bash
# Overwrite old file with new
if mv "$_tempFileName" "$0"; then
  echo "Done."
  echo "Update complete."
  rm -- \$0
else
  echo "Failed!"
fi
EOF

  echo -n "Inserting update process..."
  exec /bin/bash updateScript.sh
}

# Make a quick run through the command line arguments to see if the user wants
# to print the help. This saves us a lot of headache with respecting the order
# in which configuration parameters have to be overwritten.
# Fix: iterate over "$@" (not $*) so arguments containing spaces stay intact.
for option in "$@"; do
  case "$option" in
    --help|-h)
      showHelp
      exit 0
      ;;
  esac
done

# Read external configuration - Stage 1 - typo3scripts.conf (overwrites default, hard-coded configuration)
BASE_CONFIG_FILENAME="typo3scripts.conf"
if [[ -e "$BASE_CONFIG_FILENAME" ]]; then
  if [[ ! -r $BASE_CONFIG_FILENAME ]]; then
    consoleWriteLine "Unable to read '$BASE_CONFIG_FILENAME'. Check permissions."
    exit 1
  fi
  consoleWriteVerbose "Sourcing script configuration from $BASE_CONFIG_FILENAME..."
  source $BASE_CONFIG_FILENAME
  consoleWriteLineVerbose "Done."
fi

# Read external configuration - Stage 2 - script-specific (overwrites default, hard-coded configuration)
CONFIG_FILENAME=${SELF:0:${#SELF}-3}.conf
if [[ -e "$CONFIG_FILENAME" ]]; then
  if [[ ! -r $CONFIG_FILENAME ]]; then
    consoleWriteLine "Unable to read '$CONFIG_FILENAME'. Check permissions."
    exit 1
  fi
  consoleWriteVerbose "Sourcing script configuration from $CONFIG_FILENAME..."
  source $CONFIG_FILENAME
  consoleWriteLineVerbose "Done."
fi

# Read command line arguments (overwrites config file)
for option in "$@"; do
  case "$option" in
    --verbose)
      VERBOSE=true
      ;;
    --quiet)
      QUIET=true
      ;;
    --force)
      FORCE=true
      ;;
    --update)
      runSelfUpdate
      ;;
    --update-check)
      updateCheck
      exit $?
      ;;
    --export-config)
      exportConfig
      exit 0
      ;;
    --extract-config)
      extractConfig
      exit 0
      ;;
    # Fix: use parameter expansion instead of 'cut -d=' so values that
    # themselves contain '=' are not truncated.
    --file=*)
      FILE=${option#*=}
      ;;
    --base=*)
      BASE=${option#*=}
      ;;
    --hostname=*)
      HOST=${option#*=}
      ;;
    --username=*)
      USER=${option#*=}
      ;;
    --password=*)
      PASS=${option#*=}
      ;;
    --database=*)
      DB=${option#*=}
      ;;
    *)
      FILE=$option
      ;;
  esac
done

# Check for dependencies. Aborts the script if the given tool is missing.
function checkDependency() {
  consoleWriteVerbose "Checking dependency '$1' => "
  if ! hash $1 2>&-; then
    consoleWriteLine "Failed!"
    consoleWriteLine "This script requires '$1' but it can not be found. Aborting."
    exit 1
  fi
  consoleWriteLineVerbose $(which $1)
  return 0
}
consoleWrite "Checking dependencies..."
consoleWriteLineVerbose
checkDependency wget
checkDependency curl
checkDependency md5sum
checkDependency grep
checkDependency awk
checkDependency find
checkDependency tar
checkDependency mysql
consoleWriteLine "Succeeded."

# Begin main operation

# Check default argument validity
if [[ $FILE == --* ]]; then
  consoleWriteLine "The given TYPO3 snapshot '$FILE' looks like a command line parameter."
  consoleWriteLine "Please use --help to see a list of available command line parameters."
  exit 1
fi

if [[ ! -e "$FILE" ]]; then
  consoleWriteLine "The given snapshot '$FILE' does not exist."
  exit 1
fi

# Does the base directory exist?
if [[ ! -d $BASE ]]; then
  if [[ "true" == $FORCE ]]; then
    # When --force was given, create the base directory
    if ! mkdir $BASE; then
      consoleWriteLine "Unable to create base directory '$BASE'!"
      exit 1
    fi
  else
    consoleWriteLine "The base directory '$BASE' does not seem to exist!"
    exit 1
  fi
fi
# Is the base directory writeable?
if [[ ! -w $BASE ]]; then
  consoleWriteLine "The base directory '$BASE' is not writeable!"
  exit 1
fi

# Check if we can delete the target base folder
consoleWrite "Testing write permissions in $BASE..."
if ! find $BASE -type f -o -type d \( -exec test -w {} \; -o \( -exec echo {} \; -quit \) \) | xargs -I {} bash -c "if [ -n "{}" ]; then echo Failed\! >&2; echo {} is not writable\! >&2; exit 1; fi"; then
  exit 1
fi
consoleWriteLine "Succeeded"

consoleWrite "Erasing current TYPO3 installation '$BASE'..."
if ! rm --recursive --force -- $BASE/* > /dev/null; then
  consoleWriteLine "Failed!"
  exit 1
fi
consoleWriteLine "Done."

consoleWrite "Extracting TYPO3 backup '$FILE'..."
# The archive contains a single folder, this folder is the original BASE.
# So for the extraction, we need to target the parent directory with /..
if ! tar --extract --gzip --file "$FILE" --directory "$BASE/.." > /dev/null; then
  consoleWriteLine "Failed!"
  exit 1
fi
consoleWriteLine "Done."

consoleWrite "Importing database dump..."
# Fix: 'set +e errexit' passed "errexit" as a positional parameter, clobbering
# "$@"; the correct form to suspend/restore errexit is 'set +o/-o errexit'.
set +o errexit
_errorMessage=$(mysql --host="$HOST" --user="$USER" --password="$PASS" --default-character-set=utf8 --database="$DB" < "$BASE/database.sql" 2>&1 >/dev/null)
_status=$?
set -o errexit
# Fix: '[[ 0 < $_status ]]' compared strings lexicographically; use -lt.
if [[ 0 -lt $_status ]]; then
  consoleWriteLine "Failed!"
  consoleWriteLine "Error: $_errorMessage"
  exit 1
fi
consoleWriteLine "Done."

consoleWriteVerbose "Deleting database dump..."
rm --force -- "$BASE/database.sql"
consoleWriteLineVerbose "Done!"

# vim:ts=2:sw=2:expandtab:
#!/bin/bash
# wait-for-grid.sh
# Block until the Selenium Grid at $SELENIUM_URL reports ready, then exec the
# command given on our command line.
set -e

# Fix: keep the command as an array. cmd="$@" flattened all arguments into one
# whitespace-joined string, which then got re-split (and glob-expanded) by the
# unquoted 'exec $cmd', breaking any argument containing spaces.
cmd=("$@")

# Poll the grid status endpoint once per second until .value.ready is true.
while ! curl -sSL "${SELENIUM_URL}/status" 2>&1 \
  | jq -r '.value.ready' 2>&1 | grep "true" >/dev/null; do
  echo "Waiting for the Grid ${SELENIUM_URL}"
  sleep 1
done >&2

>&2 echo "Selenium Grid is up - executing parser"
exec "${cmd[@]}"
#!/bin/bash
# Build the opensuse-osc-client image and push it to Docker Hub.
# Fix: abort on the first failure — without this, a failed build would still
# tag and push whatever stale image was left from a previous run.
set -euo pipefail

docker build --pull -t docker-opensuse-osc-client .
docker tag docker-opensuse-osc-client fbartels/docker-opensuse-osc-client
docker login
docker push fbartels/docker-opensuse-osc-client
#!../lib/test-in-container-environs.sh set -ex [ -d mirrorbrain ] ./environ.sh pg9-system2 ./environ.sh ap9-system2 ./environ.sh ap8-system2 ./environ.sh ap7-system2 ./environ.sh mb9 $(pwd)/mirrorbrain pg9*/start.sh mb9*/configure_db.sh pg9 mb9*/configure_apache.sh ap9 ap9=$(ls -d ap9*) # populate test data for x in ap7 ap8 ap9; do xx=$(ls -d $x*/) mkdir -p $xx/dt/downloads/{folder1,folder2,folder3} echo $xx/dt/downloads/{folder1,folder2,folder3}/{file1,file2}.dat | xargs -n 1 touch done mb9*/mb.sh makehashes -v $PWD/ap9-system2/dt ap9*/start.sh mb9*/mb.sh makehashes -v $PWD/ap9-system2/dt ap9*/curl.sh downloads/ | grep folder1 for x in ap7 ap8; do $x*/start.sh $x*/status.sh mb9*/mb.sh new $x --http http://"$($x-system2/print_address.sh)" --region NA --country us mb9*/mb.sh scan --enable $x $x-system2/curl.sh | grep downloads done ap9*/curl.sh /downloads/folder1/file1.dat tail ap9*/dt/error_log | grep 'Chose server '
package com.pucrs.sensores_plantas.model;

import static org.junit.Assert.assertEquals;

import org.junit.Test;

/** Unit tests for the {@code Sensor} model. */
public class SensorTest {

    /** A freshly created sensor keeps the id and humidity it was given. */
    @Test
    public void DeveCriarUmSensor() {
        Sensor sensor = new Sensor();
        sensor.setId("10");
        sensor.setHumidity(50);
        // NOTE(review): the id is set as the String "10" but asserted via an
        // (int) cast on getId(); this only compiles if getId() returns a
        // numeric/boxed type, which contradicts setId(String) — verify
        // against the Sensor class.
        assertEquals(10, (int)sensor.getId());
        assertEquals(50, sensor.getHumidity());
    }
}
-- Average salary per country, computed over all rows of 'employees'.
SELECT country, AVG(salary)
FROM employees
GROUP BY country;
// Source: kieranroneill/KRPulsingOverlayView —
// example/KRPulsingOverlayViewDemo/KRPulsingOverlayViewDemo/KRPulsingOverlayViewController.h
//
//  KRPulsingOverlayViewController.h
//  KRPulsingOverlayViewDemo
//
//  Created by <NAME> on 07/01/2015.
//  Copyright (c) 2015 <NAME>. All rights reserved.
//

#import <UIKit/UIKit.h>

// Demo view controller driving the pulsing-overlay examples.
@interface KRPulsingOverlayViewController : UIViewController

// Text field holding the user-entered pulse duration.
@property (weak, nonatomic) IBOutlet UITextField* durationTextField;

#pragma mark - IBActions.

// Triggered by the plain "pulse" demo button.
-(IBAction)onPulseButtonPress:(id)sender;

// Triggered by the "pulse with alert" demo button.
-(IBAction)onPulseAlertButtonPress:(id)sender;

@end
#!/bin/bash
# Start the BCM netconf server, bringing up netopeer2-server first if needed.
#set -x

# Run from the directory this script lives in; all paths below are relative.
START_DIR=$(dirname "$0")
cd "$START_DIR"
export SYSREPO_REPOSITORY_PATH=$(pwd)/sysrepo
export LIBYANG_EXTENSIONS_PLUGINS_DIR=$(pwd)/lib/libyang/extensions
export LIBYANG_USER_TYPES_PLUGINS_DIR=$(pwd)/lib/libyang/user_types
export LD_LIBRARY_PATH=$(pwd)/lib:$LD_LIBRARY_PATH

# busybox version of 'ps' doesn't support '-ef' operand
if ls -l "$(which ps)" | grep busybox > /dev/null; then
    PS="ps"
else
    PS="ps -ef"
fi

NETCONF_PARMS=""

# Kill the old netopeer2-server instance unless it is running in the
# foreground (ie, was started separately)
if $PS | grep netopeer2\-server | grep -v grep | grep '\-d' > /dev/null; then
    echo "netopeer2-server is running in the foreground. Keeping the running instance"
else
    # Kill stale netopeer2-server instance if any
    killall netopeer2-server 2> /dev/null
    echo "Starting netopeer2-server in the background"
    "$(pwd)"/bin/start_netopeer2_server.sh -v3
    sleep 2
fi

# Optional wrapper: 'gdb' or 'valgrind' as the first argument instruments the
# server process; remaining arguments are passed through to it.
if [ "$1" = "gdb" ]; then
    INSTRUMENT="gdb --args"
    shift
fi
if [ "$1" = "valgrind" ]; then
    INSTRUMENT="valgrind"
    shift
fi

# Fix: forward the remaining arguments as "$@" so arguments containing spaces
# reach the server intact ($* re-splits them). $INSTRUMENT is deliberately
# unquoted so "gdb --args" word-splits into command + option.
$INSTRUMENT ./bcmolt_netconf_server "$@"

# If the server exited (rather than being detached), take netopeer2 down too.
if ! $PS | grep bcmolt_netconf_server | grep -v grep > /dev/null; then
    echo Killing netopeer2-server
    killall netopeer2-server 2> /dev/null
fi
#!/bin/bash
# Prepend the contents of file $1 to file $2, in place.
set -eu

if [ $# -ne 2 ]; then
  echo "Usage: $0 <prefix-file> <target-file>" >&2
  exit 1
fi

# Fix: use mktemp instead of a fixed './tmpfile' (which collided between
# concurrent runs and was left behind on failure), quote the arguments, and
# clean the temp file up on any exit path.
tmp=$(mktemp)
trap 'rm -f -- "$tmp"' EXIT

cat -- "$1" "$2" > "$tmp"
mv -- "$tmp" "$2"
package mg.security.token; import io.jsonwebtoken.*; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.authority.SimpleGrantedAuthority; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.web.filter.OncePerRequestFilter; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.List; import java.util.stream.Collectors; public class JWTAuthorizationFilter extends OncePerRequestFilter { @Override protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws ServletException, IOException { try { if (isJWTTokenPresent(request, response)) { Claims claims = validateToken(request); if (claims.get(JWTCommon.AUTHORITIES_CLAIM_NAME) != null) { setUpSpringAuthentication(claims); } else { SecurityContextHolder.clearContext(); } } else { SecurityContextHolder.clearContext(); } filterChain.doFilter(request, response); } catch (ExpiredJwtException | UnsupportedJwtException | MalformedJwtException | SignatureException e) { response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); ((HttpServletResponse) response).sendError(HttpServletResponse.SC_UNAUTHORIZED, e.getMessage()); } } private Claims validateToken(HttpServletRequest request) { String jwtToken = request .getHeader(JWTCommon.AUTH_HTTP_HEADER_NAME) .replace(JWTCommon.AUTH_HTTP_HEADER_CONTENT_PREFIX, ""); return Jwts.parserBuilder() .setSigningKey(JWTCommon.SIGNING_KEY.getBytes()) .build() .parseClaimsJws(jwtToken).getBody(); } private void setUpSpringAuthentication(Claims claims) { @SuppressWarnings("unchecked") List<String> authoritiesStrings = claims.get(JWTCommon.AUTHORITIES_CLAIM_NAME, List.class); List<SimpleGrantedAuthority> authorities = authoritiesStrings.stream() 
.map(SimpleGrantedAuthority::new) .collect(Collectors.toList()); UsernamePasswordAuthenticationToken authenticationToken = new UsernamePasswordAuthenticationToken(claims.getSubject(), null, authorities); SecurityContextHolder.getContext().setAuthentication(authenticationToken); } private boolean isJWTTokenPresent(HttpServletRequest request, HttpServletResponse response) { String authenticationHeader = request.getHeader(JWTCommon.AUTH_HTTP_HEADER_NAME); return authenticationHeader != null && authenticationHeader.startsWith(JWTCommon.AUTH_HTTP_HEADER_CONTENT_PREFIX); } }
package de.bitbrain.braingdx.ai.pathfinding.heuristics;

import de.bitbrain.braingdx.tmx.TiledMapContext;
import de.bitbrain.braingdx.world.GameObject;

/**
 * A heuristic that uses the tile that is closest to the target
 * as the next best tile. In this case the sqrt is removed
 * and the distance squared is used instead.
 *
 * @author <NAME>
 */
public class ClosestSquaredHeuristic implements AStarHeuristic {

   @Override
   public float getCost(TiledMapContext context, GameObject target, int x, int y, int tx, int ty) {
      // Squared Euclidean distance between (x, y) and (tx, ty); no sqrt needed
      // since A* only compares costs.
      final float deltaX = tx - x;
      final float deltaY = ty - y;
      return deltaX * deltaX + deltaY * deltaY;
   }
}
#!/bin/bash
# Send a DingTalk notification using the 'dingtalk' CLI.
# All inputs come from the environment; unset variables fall back to defaults.
set -e

DINGTALK_ACCESS_TOKEN=${DINGTALK_ACCESS_TOKEN:-}
DINGTALK_SECRET=${DINGTALK_SECRET:-}
MSGTYPE=${MSGTYPE:-markdown}
TITLE=${TITLE:-}
TEXT=${TEXT:-}

echo "## Sending message ##################"

# Collect the CLI arguments in an array so empty values stay intact.
args=(
  -accessToken "${DINGTALK_ACCESS_TOKEN}"
  -secret "${DINGTALK_SECRET}"
  -msgtype "${MSGTYPE}"
  -title "${TITLE}"
  -text "${TEXT}"
)
dingtalk "${args[@]}"

echo "## Done. ##################"
#!/bin/bash #SBATCH --account=def-dkulic #SBATCH --mem=8000M # memory per node #SBATCH --time=23:00:00 # time (DD-HH:MM) #SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_BipedalWalkerHardcore-v2_ddpg_hardcopy_action_noise_seed4_run4_%N-%j.out # %N for node name, %j for jobID module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn source ~/tf_cpu/bin/activate python ./ddpg_discrete_action.py --env BipedalWalkerHardcore-v2 --random-seed 4 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/BipedalWalkerHardcore-v2/ddpg_hardcopy_action_noise_seed4_run4 --continuous-act-space-flag --double-ddpg-flag --target-hard-copy-flag
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.arrowUp2 = void 0; var arrowUp2 = { "viewBox": "0 0 16 16", "children": [{ "name": "path", "attribs": { "fill": "#000000", "d": "M13.707 6.293l-5-5c-0.39-0.391-1.024-0.391-1.414 0l-5 5c-0.391 0.391-0.391 1.024 0 1.414s1.024 0.391 1.414 0l3.293-3.293v9.586c0 0.552 0.448 1 1 1s1-0.448 1-1v-9.586l3.293 3.293c0.195 0.195 0.451 0.293 0.707 0.293s0.512-0.098 0.707-0.293c0.391-0.391 0.391-1.024 0-1.414z" } }] }; exports.arrowUp2 = arrowUp2;
"""VTK demo: render the word "Hello" as red 3D text on a white background."""
import vtk

# Build a text source ("Hello", red foreground, with a backing rectangle).
text_source = vtk.vtkTextSource()
text_source.SetText("Hello")
text_source.SetForegroundColor(1.0, 0.0, 0.0)
text_source.BackingOn()
text_source.Update()

# Map the generated polydata to an actor.
text_mapper = vtk.vtkPolyDataMapper()
text_mapper.SetInputConnection(text_source.GetOutputPort())
text_actor = vtk.vtkActor()
text_actor.SetMapper(text_mapper)

# Set up the rendering pipeline: renderer -> window -> interactor.
scene_renderer = vtk.vtkRenderer()
render_window = vtk.vtkRenderWindow()
render_window.AddRenderer(scene_renderer)
window_interactor = vtk.vtkRenderWindowInteractor()
window_interactor.SetRenderWindow(render_window)

# Place the actor in the scene on a white background.
scene_renderer.AddActor(text_actor)
scene_renderer.SetBackground(1, 1, 1)

# Render and hand control to the interactor.
render_window.Render()
window_interactor.Start()
def levenshtein_distance(word1, word2):
    """Return the Levenshtein (edit) distance between two strings.

    The distance is the minimum number of single-character insertions,
    deletions and substitutions needed to turn ``word1`` into ``word2``.
    Computed with the classic O(len(word1) * len(word2)) dynamic program.
    """
    rows, cols = len(word1), len(word2)
    # table[r][c] = distance between word1[:r] and word2[:c].
    table = [[0] * (cols + 1) for _ in range(rows + 1)]

    # Base cases: transforming to/from the empty prefix costs its length.
    for r in range(rows + 1):
        table[r][0] = r
    for c in range(cols + 1):
        table[0][c] = c

    for r in range(1, rows + 1):
        for c in range(1, cols + 1):
            if word1[r - 1] == word2[c - 1]:
                # Matching characters: no additional edit needed.
                table[r][c] = table[r - 1][c - 1]
            else:
                # Cheapest of deletion, insertion, substitution.
                table[r][c] = 1 + min(
                    table[r - 1][c],      # delete from word1
                    table[r][c - 1],      # insert into word1
                    table[r - 1][c - 1],  # substitute
                )

    return table[rows][cols]
#!/usr/bin/env bash gcloud config set project gke-c2 gcloud beta container --project gke-c2 clusters delete gke-c2 --zone=us-central1-a
// <gh_stars>0 (scaffolding residue from the source export)
// Barrel file: re-export the user factory and user services modules so
// consumers can import both from this directory's index.
export * from './user-factory.service';
export * from './user-services.service';
import React from 'react'; import SectionHeading from '../components/SectionHeading'; import Button from '../components/Button'; import Product from '../components/Product'; import products from '../config/products'; function RecommendedSection() { return ( <div id="recommended"> <SectionHeading>Polecane produkty</SectionHeading> <div id="product-container"> {products.filter(product => product.recommended === true).map((product) => ( <Product name={product.name} price={product.price} model={product.model} imgUrl={product.imgUrl} /> ))} </div> <div id="rec-button"> <Button href="/categories" bgColor="gray" textColor="white" size="big">Pokaż więcej</Button> </div> </div> ); } export default RecommendedSection;
import React from 'react'; const BookList = ({books}) => { return ( <div> <h4>Books</h4> {books.map(book => ( <div key={book.title}> <p>{book.title} by {book.author}</p> </div> ))} </div> ); }; export default BookList;