text
stringlengths
1
1.05M
// A library/utility for generating GIF files
// Uses omggif for GIF encoding
// and the NeuQuant quantizer (JS 0.3 version with many fixes)
//
// @author sole / http://soledadpenades.com
function Animated_GIF(options) {
    'use strict';

    var width = 160, height = 120, canvas = null, ctx = null, repeat = 0, delay = 250;
    var frames = [];
    var numRenderedFrames = 0;
    var onRenderCompleteCallback = function() {};
    var onRenderProgressCallback = function() {};
    var workers = [], availableWorkers = [], numWorkers, workerPath;
    var generatingGIF = false;

    options = options || {};
    numWorkers = options.numWorkers || 2;
    workerPath = options.workerPath || 'src/quantizer.js'; // XXX hardcoded path

    // Spawn the quantizer worker pool up front.
    for(var i = 0; i < numWorkers; i++) {
        var w = new Worker(workerPath);
        workers.push(w);
        availableWorkers.push(w);
    }

    // ---

    // Return a worker for processing a frame
    function getWorker() {
        if(availableWorkers.length === 0) {
            // Throw a real Error (not a bare string) so callers get a stack trace.
            throw new Error('No workers left!');
        }
        return availableWorkers.pop();
    }

    // Restore a worker to the pool
    function freeWorker(worker) {
        availableWorkers.push(worker);
    }

    // Faster/closurized bufferToString function
    // (caching the String.fromCharCode values)
    var bufferToString = (function() {
        var byteMap = [];
        for(var i = 0; i < 256; i++) {
            byteMap[i] = String.fromCharCode(i);
        }

        return (function(buffer) {
            var numberValues = buffer.length;
            var str = '';
            for(var i = 0; i < numberValues; i++) {
                str += byteMap[ buffer[i] ];
            }
            return str;
        });
    })();

    // Dispatch at most one frame per worker; the remainder are picked up by
    // processNextFrame() as workers are freed.
    function startRendering(completeCallback) {
        onRenderCompleteCallback = completeCallback;

        for(var i = 0; i < numWorkers && i < frames.length; i++) {
            processFrame(i);
        }
    }

    // Hand the raw RGBA data of frames[position] to a quantizer worker.
    function processFrame(position) {
        var frame;
        var worker;

        frame = frames[position];

        if(frame.beingProcessed || frame.done) {
            console.error('Frame already being processed or done!', frame.position);
            onFrameFinished();
            return;
        }

        frame.beingProcessed = true;
        worker = getWorker();

        worker.onmessage = function(ev) {
            var data = ev.data;

            // Delete original data, and free memory
            delete(frame.data);

            // TODO grrr... HACK for object -> Array
            frame.pixels = Array.prototype.slice.call(data.pixels);
            frame.palette = Array.prototype.slice.call(data.palette);
            frame.done = true;
            frame.beingProcessed = false;

            freeWorker(worker);
            onFrameFinished();
        };

        // TODO maybe look into transfer objects for further efficiency
        var frameData = frame.data;
        //worker.postMessage(frameData, [frameData]);
        worker.postMessage(frameData);
    }

    // Find the first frame that is neither done nor in flight and process it.
    function processNextFrame() {
        var position = -1;

        for(var i = 0; i < frames.length; i++) {
            var frame = frames[i];
            if(!frame.done && !frame.beingProcessed) {
                position = i;
                break;
            }
        }

        if(position >= 0) {
            processFrame(position);
        }
    }

    function onFrameFinished() { // ~~~ taskFinished
        // The GIF is not written until we're done with all the frames
        // because they might not be processed in the same order
        var allDone = frames.every(function(frame) {
            return !frame.beingProcessed && frame.done;
        });

        numRenderedFrames++;
        onRenderProgressCallback(numRenderedFrames * 0.75 / frames.length);

        if(allDone) {
            if(!generatingGIF) {
                generateGIF(frames, onRenderCompleteCallback);
            }
        } else {
            setTimeout(processNextFrame, 1);
        }
    }

    // Takes the already processed data in frames and feeds it to a new
    // GifWriter instance in order to get the binary GIF file
    function generateGIF(frames, callback) {
        // TODO: Weird: using a simple JS array instead of a typed array,
        // the files are WAY smaller o_o. Patches/explanations welcome!
        var buffer = []; // new Uint8Array(width * height * frames.length * 5);
        var gifWriter = new GifWriter(buffer, width, height, { loop: repeat });

        generatingGIF = true;

        frames.forEach(function(frame) {
            onRenderProgressCallback(0.75 + 0.25 * frame.position * 1.0 / frames.length);
            gifWriter.addFrame(0, 0, width, height, frame.pixels, {
                palette: frame.palette,
                delay: delay
            });
        });

        gifWriter.end();
        onRenderProgressCallback(1.0);

        frames = [];
        generatingGIF = false;

        callback(buffer);
    }

    // ---

    this.setSize = function(w, h) {
        width = w;
        height = h;
        canvas = document.createElement('canvas');
        canvas.width = w;
        canvas.height = h;
        ctx = canvas.getContext('2d');
    };

    // GIF frame delays are stored in hundredths of a second (GIF89a spec).
    // FIX: the previous conversion was `seconds * 0.1`, which shrank the
    // delay instead of converting seconds to centiseconds.
    this.setDelay = function(seconds) {
        delay = seconds * 100;
    };

    // From GIF: 0 = loop forever, null = not looping, n > 0 = loop n times and stop
    this.setRepeat = function(r) {
        repeat = r;
    };

    this.addFrame = function(element) {
        if(ctx === null) {
            this.setSize(width, height);
        }

        ctx.drawImage(element, 0, 0, width, height);
        var data = ctx.getImageData(0, 0, width, height);

        this.addFrameImageData(data.data);
    };

    this.addFrameImageData = function(imageData) {
        frames.push({
            data: new Uint8Array(imageData),
            done: false,
            beingProcessed: false,
            position: frames.length
        });
    };

    this.onRenderProgress = function(callback) {
        onRenderProgressCallback = callback;
    };

    this.isRendering = function() {
        return generatingGIF;
    };

    this.getBase64GIF = function(completeCallback) {
        var onRenderComplete = function(buffer) {
            var str = bufferToString(buffer);
            var gif = 'data:image/gif;base64,' + btoa(str);
            completeCallback(gif);
        };

        startRendering(onRenderComplete);
    };
}

// FIX: a bare `if(define)` throws a ReferenceError when no AMD loader is
// present; feature-detect instead.
if(typeof define === 'function' && define.amd) {
    define([], function() {
        return Animated_GIF;
    });
}
"""Codewars: Sum of Digits / Digital Root 6 kyu URL: https://www.codewars.com/kata/541c8630095125aba6000c00/ In this kata, you must create a digital root function. A digital root is the recursive sum of all the digits in a number. Given n, take the sum of the digits of n. If that value has more than one digit, continue reducing in this way until a single-digit number is produced. This is only applicable to the natural numbers. Here's how it works: digital_root(16) => 1 + 6 => 7 digital_root(942) => 9 + 4 + 2 => 15 ... => 1 + 5 => 6 digital_root(132189) => 1 + 3 + 2 + 1 + 8 + 9 => 24 ... => 2 + 4 => 6 digital_root(493193) => 4 + 9 + 3 + 1 + 9 + 3 => 29 ... => 2 + 9 => 11 ... => 1 + 1 => 2 """ def _sum_digits(n): # Compute sum of n's digits. result = 0 while n > 0: div, digit = n // 10, n % 10 result += digit n = div return result def digital_root(n): # Iterate when number of digits is bigger than 1. while len(str(n)) > 1: n = _sum_digits(n) return n def main(): assert digital_root(16) == 7 assert digital_root(456) == 6 assert digital_root(942) == 6 assert digital_root(132189) == 6 assert digital_root(493193) == 2 if __name__ == '__main__': main()
<!DOCTYPE html>
<html>
<head>
    <title>Navigation bar</title>
</head>
<body>
    <!-- Minimal two-link site navigation (Home / About) -->
    <nav>
        <ul>
            <li><a href="home.html">Home</a></li>
            <li><a href="about.html">About</a></li>
        </ul>
    </nav>
</body>
</html>
#!/bin/bash
#
# build-ssl-credential — generate an easy-rsa CA, a server certificate, a
# client certificate and DH parameters into the key directory given as $1.

# FIX: quote "$1" — unquoted, a missing argument makes `[ -z ]` misbehave.
if [ -z "$1" ]
then
    echo "Usage: build-ssl-credential /home/workspace/ssl_keys"
    exit 1
fi

# easy-rsa environment expected by pkitool.
export EASY_RSA="$(pwd)"
export KEY_CONFIG=$(/usr/share/easy-rsa/whichopensslcnf /usr/share/easy-rsa/)
export OPENSSL="openssl"
export PKCS11TOOL="pkcs11-tool"
export GREP="grep"
export KEY_DIR="$EASY_RSA/keys"
export PKCS11_MODULE_PATH="dummy"
export PKCS11_PIN="dummy"
export KEY_SIZE=1024
export CA_EXPIRE=3650
export KEY_EXPIRE=3650

# Certificate subject fields.
export KEY_COUNTRY="CN"
export KEY_PROVINCE="SH"
export KEY_CITY="ShangHai"
export KEY_ORG="Syscloud"
export KEY_EMAIL="admin@syscloud.cn"
export KEY_OU="cloud.syscloud.cn"
export KEY_NAME="VPN"

# Older layouts ship pkitool under 2.0/; copy it up if needed.
if [ ! -f /usr/share/easy-rsa/pkitool ]
then
    \cp /usr/share/easy-rsa/2.0/* /usr/share/easy-rsa/
    \cp /usr/share/easy-rsa/2.0/openssl-1.0.0.cnf /usr/share/easy-rsa/openssl.cnf
fi

# Rebuild the key directory from scratch, then issue CA, server cert,
# DH parameters and a client cert. (FIX: "$1" quoted here too.)
KEY_DIR="$1" && \
rm -rf "$KEY_DIR" && \
mkdir -p "$KEY_DIR" && \
chmod go-rwx "$KEY_DIR" && \
touch "$KEY_DIR/index.txt" && \
echo 01 >"$KEY_DIR/serial" && \
"/usr/share/easy-rsa/pkitool" --initca && \
"/usr/share/easy-rsa/pkitool" --server server && \
$OPENSSL dhparam -out ${KEY_DIR}/dh${KEY_SIZE}.pem ${KEY_SIZE} && \
"/usr/share/easy-rsa/pkitool" client
/// <summary>Contract for objects that an AraObjectInstance can wrap.</summary>
public interface IAraObject
{
    // Define the necessary methods and properties for interacting with an AraObject
}

/// <summary>
/// Lightweight holder identifying an Ara object either by an instance id
/// string or by a live <see cref="IAraObject"/> reference.
/// </summary>
public class AraObjectInstance
{
    private string _InstanceID;
    private IAraObject _araObject;

    /// <summary>Create a holder for an object known only by its id.</summary>
    public AraObjectInstance(string vInstanceID) => _InstanceID = vInstanceID;

    /// <summary>Create a holder wrapping a live object reference.</summary>
    public AraObjectInstance(IAraObject vAraObject) => _araObject = vAraObject;

    /// <summary>
    /// The wrapped reference; null when this holder was built from an id.
    /// </summary>
    public IAraObject GetAraObject() => _araObject;

    // Additional methods and properties for interacting with the AraObject can be added here
}
#include <opencv/highgui.h>
#include "WebCamVJ.h"
#include "../Tracker/TrackerMotion.h"

// Feed the current motion intensity at grid cell (x, y) into the effect
// registered at that cell.
#define INTESITY(x, y) { setIntensivity(x, y, getIntensity(x, y)); }
#define MAX_INTESITY(x, y) { setMaxIntensivity(x, y, getIntensity(x, y)); }

// Construct the tracker and pre-size the MAP_MAX x MAP_MAX effect grid
// with empty slots; the composition buffer is allocated lazily in show().
WebCamVJ::WebCamVJ(int webCamDevice): TrackerMotion(webCamDevice) {
    for (int i=0; i < MAP_MAX * MAP_MAX; i++) {
        effects.push_back(NULL);
    }
    frame = 0;
    bigImage = cvCreateImage(cvSize(1280, 960), 8, 3);
}

/*
 * Configurations.
 */
// Register the effects, open a fullscreen window and enter the main loop.
void WebCamVJ::run() {
    setFlip();
    add(0, 3, image, "../Data/img_1449.jpg");
    add(7, 3, image, "../Data/img_1196.jpg");
    add(4, 0, rectangules, "");
    cvNamedWindow("Motion tracker", CV_WINDOW_NORMAL);
    cvSetWindowProperty("Motion tracker", CV_WND_PROP_FULLSCREEN, CV_WINDOW_FULLSCREEN);
    loop("Motion tracker");
}

// Per-tick callback: ESC or 'q' stops the loop; otherwise refresh effect
// intensities from the tracker and redraw.
bool WebCamVJ::onKeyPress(char c) {
    switch (c) {
        case 27:
        case 'q':
            return false;
    }
    setMaxIntensivity(0, 3, 255);
    setMaxIntensivity(7, 3, 255);
    setMaxIntensivity(4, 0, 255);
    INTESITY(0, 3);
    INTESITY(7, 3);
    INTESITY(4, 0);
    show(getFrame());
    return true;
}

/*
 * Rendering
 */
// Draw control-area markers onto the background, compose all active effects
// into the frame buffer, then present fullscreen.
void WebCamVJ::show(IplImage *background) {
    /* About boxes */
    int edgeWidth = background->width / MAP_MAX;
    int edgeHeight = background->height / MAP_MAX;
    for (int x = 0; x < MAP_MAX; x++) {
        for (int y = 0; y < MAP_MAX; y++) {
            int i = MAP_X(x) + MAP_Y(y);
            if (effects[i]) {
                effects[i]->markControllAreas(background, x * edgeWidth, y * edgeHeight, edgeWidth, edgeHeight);
            }
        }
    }
    /* Effects */
    if (!frame) {
        // Lazily allocate the composition buffer to match the camera frame.
        frame = cvCloneImage(background);
    }
    cvCopy(background, frame);
    for (unsigned i = 0; i < effects.size(); i++) {
        if (effects[i]) {
            effects[i]->apply(frame);
        }
    }
    showFullScreen();
}

// Upscale the composed frame into the 1280x960 buffer and present it.
void WebCamVJ::showFullScreen() {
    cvZero(bigImage);
    cvResize(frame, bigImage, CV_INTER_LINEAR);
    // NOTE(review): the window name here is "Images" while run() creates
    // "Motion tracker" — confirm fullScreen() targets the intended window.
    fullScreen("Images", bigImage);
}

/*
 * State
 */
// Register a new effect at grid cell (mapX, mapY); refuses to overwrite.
void WebCamVJ::add(int mapX, int mapY, effectType type, const std::string file) {
    int key = getKey(mapX, mapY);
    if (!effects[key]) {
        effects[key] = new WebCamEffect(type, file);
    } else {
        cerr << "Key allready exists: " << mapX << " " << mapY << endl;
    }
}

// Set the current intensity of the effect at (mapX, mapY), if present.
void WebCamVJ::setIntensivity(int mapX, int mapY, unsigned value) {
    int key = getKey(mapX, mapY);
    if (effects[key]) {
        effects[key]->setIntensivity(value);
    } else {
        cerr << "Bad coordinates: " << mapX << " " << mapY << endl;
    }
}

// Set the maximum intensity of the effect at (mapX, mapY), if present.
void WebCamVJ::setMaxIntensivity(int mapX, int mapY, unsigned value) {
    int key = getKey(mapX, mapY);
    if (effects[key]) {
        effects[key]->setMaxIntensivity(value);
    } else {
        cerr << "Bad maxIntensity coordinates: " << mapX << " " << mapY << endl;
    }
}

// Clamp the coordinates into the grid and flatten them to a vector index.
int WebCamVJ::getKey(int mapX, int mapY) {
    RANGE(mapX, 0, MAP_MAX - 1);
    RANGE(mapY, 0, MAP_MAX - 1);
    return MAP_X(mapX) + MAP_Y(mapY);
}

/*
 * Destruction
 */
// Free every registered effect and both image buffers.
WebCamVJ::~WebCamVJ() {
    WebCamEffect* effect;
    for (unsigned i = 0; i < effects.size(); i++) {
        effect = effects[i];
        if (effect) {
            delete effect;
        }
    }
    effects.clear();
    if (frame) {
        cvReleaseImage(&frame);
    }
    if (bigImage) {
        cvReleaseImage(&bigImage);
    }
}
#!/bin/sh
# Redirect stderr into stdout so the supervisor log captures everything.
exec 2>&1
#
# qmail-send and friends
#
QMAIL="%QMAIL%"

# Prefer control/defaultdelivery; fall back to the legacy control/aliasempty.
if [ -e $QMAIL/control/defaultdelivery ]; then
    ALIASEMPTY=`cat $QMAIL/control/defaultdelivery 2> /dev/null`
else
    ALIASEMPTY=`cat $QMAIL/control/aliasempty 2> /dev/null`
fi
# Default delivery target when neither control file provided one.
ALIASEMPTY=${ALIASEMPTY:="./Maildir/"}

PATH="$QMAIL/bin:$PATH"

# limit to prevent memory hogs
ulimit -c 204800

# Replace this shell with qmail-start under the ./env environment directory.
exec envdir ./env qmail-start "$ALIASEMPTY"
# Build script for the cballs demo targeting the Commander X16 (cc65 suite).

# Compiler: translate the C source into ca65 assembly for the cx16 target.
cc65 -Or -v -t cx16 cballs.c
# Assembler: assemble both the generated and hand-written assembly files.
ca65 -v -t cx16 cballs.s
ca65 -v -t cx16 text.s
# Linker: produce the .prg plus label and map files for debugging.
ld65 -v -o cballs.prg \
-t cx16 \
-Ln cballs.lbl \
-m cballs.map \
cballs.o text.o \
c64.lib
# Emulator: launch the result in the X16 emulator (backgrounded).
x16emu -run -prg cballs.prg &
const btn = document.getElementById('button');

// Palette of background colours to cycle through at random.
const rainbow = ['red', 'orange', 'yellow', 'green', 'blue', 'rebeccapurple',
    'violet', 'pink', 'aqua', 'beige', 'crimson', 'cyan', 'gold',
    'greenyellow', 'khaki', 'lightgreen', 'paleturquoise'];

// Set a uniformly random palette colour as the page background.
// FIX: use rainbow.length instead of the hard-coded 17, so the range stays
// correct if colours are added or removed.
function change() {
    document.body.style.background = rainbow[Math.floor(rainbow.length * Math.random())];
}

btn.addEventListener('click', change);
"use strict";

// Auto-generated icon descriptor (Material Design "grain", two-tone variant):
// an SVG viewBox plus path children, consumed by an icon-kit renderer.
Object.defineProperty(exports, "__esModule", { value: true });
exports.ic_grain_twotone = void 0;
var ic_grain_twotone = { "viewBox": "0 0 24 24", "children": [{ "name": "path", "attribs": { "d": "M0 0h24v24H0V0z", "fill": "none" }, "children": [] }, { "name": "path", "attribs": { "d": "M18 8c1.1 0 2-.9 2-2s-.9-2-2-2-2 .9-2 2 .9 2 2 2zm0 8c1.1 0 2-.9 2-2s-.9-2-2-2-2 .9-2 2 .9 2 2 2zM6 8c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm8 0c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm-4 8c1.1 0 2-.9 2-2s-.9-2-2-2-2 .9-2 2 .9 2 2 2zm0-12c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm4 12c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm-8 4c1.1 0 2-.9 2-2s-.9-2-2-2-2 .9-2 2 .9 2 2 2z" }, "children": [] }] };
exports.ic_grain_twotone = ic_grain_twotone;
// Registers the `people` single-table-inheritance entity on the supplied
// model container, wiring the Adult/Child subtypes and the shared fields.
export default class {
  static initialize({people, adults, children}) {
    // Subtype map for STI resolution.
    const typeMap = () => ({
      Adult: adults,
      Child: children,
    });

    // Shared field definitions; must stay a regular function so `this`
    // resolves to the model when called as a method.
    function fieldMap() {
      return {
        id: this.attr(null),

        // The STI discriminator field.
        type: this.attr(null),

        name: this.attr(null),
      };
    }

    Object.assign(people, {
      entity: 'people',
      types: typeMap,
      fields: fieldMap,
    });
  }
}
#!/bin/sh
# Abort immediately if any setup step fails.
set -e

# Prep test environment
echo "#############################"
echo "# #"
echo "# PREPPING TEST ENVIRONMENT #"
echo "# #"
echo "#############################"

cd /app
pwd
# Install global/Ruby tooling needed by the test suite, then project deps.
yarn global add grunt-cli
gem install sass
gem update --system
gem install scss_lint
yarn install

echo "############################"
echo "# #"
echo "# BEGIN REQUESTED TEST #"
echo "# #"
echo "############################"

# Replace this shell with whatever command the caller supplied.
exec "$@"
package cn.cerc.jbean.form;

// Abstraction over the client device/session that is driving a form.
public interface IClient {
    public void setForm(IForm form);

    public IForm getForm();

    public boolean isPhone();

    // Returns the device id
    public String getId();

    // Returns the device model
    public String getDevice();

    // Returns the device language
    public String getLanguage();

    // Sets the device model
    public void setDevice(String device);
}
#!/bin/sh
# FIX: the shebang must be the first line of the file to take effect; it
# previously sat below the license header.
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Run an Apache Buildr build inside the published jruby-jdk8 container,
# as a user mirroring the invoking host user so produced files are owned
# correctly on the mounted workspace.

docker pull sathwik/apache-buildr:latest-jruby-jdk8

export JAVA_OPTS="-Xmx1024M -XX:MaxPermSize=512M"

# NOTE(review): POSIX sh has no arrays, so "$@" flattens into one string here
# and arguments containing spaces will not round-trip — confirm acceptable.
BUILDR_ARGS="$@"

CONTAINER_USERNAME="dummy"
CONTAINER_GROUPNAME="dummy"
HOMEDIR="/home/$CONTAINER_USERNAME"
GROUP_ID=$(id -g)
USER_ID=$(id -u)

# Recreate the host user inside the container before building.
CREATE_USER_COMMAND="groupadd -f -g $GROUP_ID $CONTAINER_GROUPNAME \
&& useradd -u $USER_ID -g $CONTAINER_GROUPNAME $CONTAINER_USERNAME \
&& mkdir --parent $HOMEDIR \
&& chown -R $CONTAINER_USERNAME:$CONTAINER_GROUPNAME $HOMEDIR"

BUNDLER_COMMAND="jruby -S bundler install --gemfile=/workspace/Gemfile"

BUILDR_COMMAND="su $CONTAINER_USERNAME -c 'buildr $BUILDR_ARGS'"

FINAL_COMMAND="$CREATE_USER_COMMAND && $BUNDLER_COMMAND && $BUILDR_COMMAND"

## For release set these arguments with proper values
## export JAVADOC=on
## export BUILDR_ENV=production
## (Append -SNAPSHOT for every next version)
## export NEXT_VERSION=1.3.8-SNAPSHOT
## export GNUPGHOME="$HOME/.gnupg"
## export GPG_USER=
## export GPG_PASS=
## mount volume for release
## -v $GNUPGHOME:/home/dummy/.gnupg

docker run --rm \
-e JAVADOC=$JAVADOC \
-e NEXT_VERSION=$NEXT_VERSION \
-e GPG_USER=$GPG_USER \
-e GPG_PASS=$GPG_PASS \
-e BUILDR_ENV=$BUILDR_ENV \
-e JAVA_OPTS \
-e GNUPGHOME=/home/dummy/.gnupg \
-v `pwd`:/workspace \
-v $HOME/.m2:/home/dummy/.m2 \
-v $HOME/.buildr:/home/dummy/.buildr \
-v /tmp:/tmp \
-v $GNUPGHOME:/home/dummy/.gnupg \
--entrypoint bash sathwik/apache-buildr:latest-jruby-jdk8 -c "$FINAL_COMMAND";
import logging
import smtplib
from email.message import EmailMessage


class WindowLogHandler(logging.Handler):
    """Logging handler that routes records from the 'Window' logger by severity.

    INFO     -> printed to stdout
    WARNING  -> e-mailed to the administrator
    ERROR+   -> operator alert
    Records from any other logger are ignored.
    """

    def emit(self, record):
        # Only react to records emitted by the 'Window' logger.
        if record.name != 'Window':
            return
        if record.levelno >= logging.ERROR:
            self.trigger_alert("Window log message (ERROR)", record.getMessage())
        elif record.levelno == logging.WARNING:
            self.send_email_notification("Window log message (WARNING)", record.getMessage())
        elif record.levelno == logging.INFO:
            print(f"Window log message (INFO): {record.getMessage()}")

    def send_email_notification(self, subject, message):
        """E-mail the given message to the administrator.

        Replace the addresses/server with real configuration before use.
        """
        sender_email = 'sender@example.com'
        receiver_email = 'admin@example.com'
        smtp_server = 'smtp.example.com'
        # Minimal RFC-822 payload: a Subject header plus the body.
        email_content = f"Subject: {subject}\n\n{message}"
        with smtplib.SMTP(smtp_server) as server:
            server.sendmail(sender_email, receiver_email, email_content)

    def trigger_alert(self, subject, message):
        # Replace with actual alert triggering mechanism
        print(f"ALERT: {subject} - {message}")


# Usage example
if __name__ == "__main__":
    window_handler = WindowLogHandler()

    # BUG FIX: the handler must be attached to the 'Window' logger whose
    # records it filters on. Previously it was attached to this module's
    # logger, which never receives the 'Window' records emitted below, so
    # the handler never fired.
    window_logger = logging.getLogger('Window')
    window_logger.addHandler(window_handler)
    window_logger.setLevel(logging.INFO)

    # Simulate log messages from Window instances
    window_logger.info("Window opened")
    window_logger.warning("Window obstruction detected")
    window_logger.error("Window motor malfunction")
#!/bin/bash
# FIX: shebang added — the script uses bash-only features (arrays,
# associative arrays, [[ ]]) and must not run under plain sh.
#
# Command Line / Terminal Package Installer
# Author: Asura
# Created: 2021-06-06 2110H, Asura
# Modified: 2021-06-06 .. 2021-06-26 (see repository history for details)
# Features:
# - Allows user to
#   > add packages of their choice into the list
#   > Select and install selected packages
# Background Information:
#   A cli/terminal package installer where user can add packages of their
#   choice and distribute to other users. Users can select packages of
#   their choice and install.

# --- Variables

# [Program]
PROGRAM_SCRIPTNAME="clipkger"
PROGRAM_NAME="Command-Terminal Interface (CTI) Package Installer"
PROGRAM_TYPE="Main"

# [Default] directory for per-action package logs
logging_filePath=~/.logs/clipkger

# [Global]
PKGMGR="pacman"        # backend package manager: "pacman" or "aptitude"
selected_Packages=""   # space-separated accumulator of chosen package names

# [Arrays]
pkgs=(
    # Edit this - place your packages here
    "xterm"
    # Key Options
    "select-others"   # prompt for packages outside the list above
    "select-end"      # end the selection and run the command
    "select-show"     # show all currently selected packages
)

# Map menu actions to the backend command line.
# NOTE: $selected_pkg_Name is unset at declaration time, so each entry
# expands with a trailing space and the package list is appended later.
case "$PKGMGR" in
    "pacman")
        declare -A pkg_controls=(
            [install]="sudo pacman -S $selected_pkg_Name"
            [remove]="sudo pacman -R $selected_pkg_Name"
            [uninstall]="sudo pacman -Rsu $selected_pkg_Name"
            [update]="sudo pacman -Sy"
            [upgrade]="sudo pacman -Su"
            [update-and-upgrade]="sudo pacman -Syu"
            [exit]="exit"
        )
        ;;
    "aptitude")
        declare -A pkg_controls=(
            # FIX: "sudo apt" alone and "sudo apt uninstall" are not valid
            # apt invocations; use the real subcommands.
            [install]="sudo apt install $selected_pkg_Name"
            [remove]="sudo apt remove $selected_pkg_Name"
            [uninstall]="sudo apt purge $selected_pkg_Name"
            [update]="sudo apt update"
            [upgrade]="sudo apt upgrade"
            # NOTE(review): one expanded string cannot chain
            # "apt update && apt upgrade"; run --update separately first.
            [update-and-upgrade]="sudo apt upgrade"
            [exit]="exit"
        )
        ;;
    *)
        ;;
esac

# [Derivative Variables]
number_of_Packages="${#pkgs[@]}"
number_of_Controls="${#pkg_controls[@]}"

# --- Functions

# Menu Functions

# Alternative `select`-based menu. Currently not wired into main flow
# (body() calls menu_package_Control instead); kept for reference.
menu_Select() {
    PS3="Please enter your option: "
    # Manage Package
    select pkg_opt in "${!pkg_controls[@]}"; do
        selected_Command="${pkg_controls[$pkg_opt]}"
        case "$selected_Command" in
            "exit")
                break
                ;;
            *)
                select opt in "${pkgs[@]}"; do
                    selected_pkg_Name="$opt"
                    # Handle the selected package
                    case "$selected_pkg_Name" in
                        "select-end" )
                            # End Select: run the accumulated command
                            if [[ ! "$selected_Packages" == "" ]]; then
                                selected_Command+="$selected_Packages"
                                echo "Command: $selected_Command"
                                $selected_Command
                            else
                                echo "No Packages selected"
                            fi
                            break
                            ;;
                        "select-show")
                            echo "Packages: $selected_Packages"
                            ;;
                        "select-others")
                            read -p "Package Name: " other_Pkgs
                            if [[ ! "$selected_Packages" == *"$other_Pkgs"* ]]; then
                                selected_Packages+="$other_Pkgs "
                            else
                                echo "Package is already selected."
                            fi
                            ;;
                        *)
                            # Append package if not already selected
                            if [[ ! "$selected_Packages" == *"$selected_pkg_Name"* ]]; then
                                selected_Packages+="$selected_pkg_Name "
                            else
                                echo "Package is already selected."
                            fi
                            ;;
                    esac
                done
                # Clear all packages
                selected_Packages=""
                selected_Command=""
                ;;
        esac
    done
}

# Interactive main menu: pick a command, then (if needed) pick packages.
menu_package_Control() {
    PS3="Please enter your option: "
    while true; do
        echo "[ Select Command ]"
        for cmd in "${!pkg_controls[@]}"; do
            echo "> $cmd"
        done
        read -p "$PS3" cmd
        echo ""
        if [[ ! "$cmd" == "" ]]; then
            selected_Command=${pkg_controls[$cmd]}
            case "$cmd" in
                "update" | "upgrade" | "update-and-upgrade")
                    # System-wide actions need no package list
                    $selected_Command
                    break
                    ;;
                *)
                    if [[ "$selected_Command" == "exit" ]]; then
                        break
                    elif [[ ! "$selected_Command" == "" ]]; then
                        while true; do
                            echo "[ Select Package(s) ]"
                            PS3="Please enter your option [Enter the option number]: "
                            for (( i=1; i <= $number_of_Packages; i++ )); do
                                echo "[$i] : ${pkgs[$((i-1))]}"
                            done
                            read -p "$PS3" opt
                            if [[ ! "$opt" == "" ]]; then
                                selected_pkg_Name="${pkgs[$((opt-1))]}"
                                if [[ "$selected_pkg_Name" == "select-end" ]]; then
                                    # End selection: run the accumulated command
                                    if [[ ! "$selected_Packages" == "" ]]; then
                                        selected_Command+="$selected_Packages"
                                        echo "Command: $selected_Command"
                                        $selected_Command
                                    else
                                        echo "No Packages selected"
                                    fi
                                    break
                                elif [[ "$selected_pkg_Name" == "select-show" ]]; then
                                    echo "Packages: $selected_Packages"
                                elif [[ "$selected_pkg_Name" == "select-others" ]]; then
                                    read -p "Package Name: " other_pkgs
                                    if [[ ! "$selected_Packages" == *"$other_pkgs"* ]]; then
                                        selected_Packages+="$other_pkgs "
                                    else
                                        echo "Package is already selected."
                                    fi
                                elif [[ ! "$selected_pkg_Name" == "" ]]; then
                                    # Append package if not already selected
                                    if [[ ! "$selected_Packages" == *"$selected_pkg_Name"* ]]; then
                                        selected_Packages+="$selected_pkg_Name "
                                    else
                                        echo "Package is already selected."
                                    fi
                                else
                                    echo "Invalid Value"
                                fi
                                echo ""
                            else
                                echo ""
                                echo "No Input"
                                echo ""
                            fi
                        done
                    else
                        echo "Invalid Option"
                    fi
                    ;;
            esac
            selected_Command=""
            selected_Packages=""
        else
            echo "No Input"
            echo ""
        fi
    done
}

prog_Help() {
    #
    # Help function
    # Edit Accordingly
    #
    echo "[Syntax]"
    echo " > $0 { -i | -r | -u | -Upd | -Upg | -Updg | -h } <package_name>"
    echo " > $0 { --install | --remove | --uninstall | --update | --upgrade | --update-and-upgrade | --help } <package_name>"
    echo "[Parameters]"
    echo " { --install | -i } : Install package"
    echo " { --remove | -r } : Remove package"
    echo " { --uninstall | -u } : Uninstall package"
    echo " { --update | -Upd } : Update system"
    echo " { --upgrade | -Upg } : Upgrade system"
    echo " { --update-and-upgrade | -Updg } : Update and Upgrade system"
    echo " { --help | -h } : Help"
}

# General Functions
init() {
    #
    # On Runtime initialization
    #
    # Create the logging directory if it does not exist yet.
    if [[ ! -d $logging_filePath ]]; then
        mkdir -p $logging_filePath
    fi
    echo "Program Name: $PROGRAM_NAME"
}

body() {
    #
    # Main function to run
    #
    argv=("$@")
    argc="${#argv[@]}"

    # --- Command Line Interface Feature Implementation
    option="${argv[0]}"
    package_Name="${argv[1]}"
    package_Controls=""
    if [[ ! "$option" == "" ]]; then
        case "$option" in
            "--install" | "-i" )
                echo "Install [$package_Name]"
                package_Controls="${pkg_controls["install"]}"
                if [[ ! "$package_Name" == "" ]]; then
                    # NOTE: --noconfirm/--needed are pacman-specific flags.
                    package_Controls+="--noconfirm --needed $package_Name"
                    echo "Command: $package_Controls"
                    $package_Controls | tee -a $logging_filePath/packages_installation.log
                    # FIX: $? after a pipeline is tee's status, not the
                    # installer's; use PIPESTATUS[0].
                    ret_code="${PIPESTATUS[0]}"
                    echo "Return Code: $ret_code"
                    echo "$(date +'%y/%m/%d %H_%M_%S')" : $package_Name >> $logging_filePath/packages_installed.log
                else
                    echo " Package name not provided."
                fi
                ;;
            "--remove" | "-r" )
                echo "Remove [$package_Name]"
                package_Controls="${pkg_controls["remove"]}"
                if [[ ! "$package_Name" == "" ]]; then
                    package_Controls+="$package_Name"
                    echo "Command: $package_Controls"
                    $package_Controls
                    ret_code="$?"
                    echo "Return Code: $ret_code"
                    echo "$(date +'%y/%m/%d %H_%M_%S')" : $package_Name >> $logging_filePath/packages_removed.log
                else
                    echo " Package name not provided."
                fi
                ;;
            "--uninstall" | "-u" )
                echo "Uninstall [$package_Name]"
                package_Controls="${pkg_controls["uninstall"]}"
                if [[ ! "$package_Name" == "" ]]; then
                    package_Controls+="$package_Name"
                    echo "Command: $package_Controls"
                    $package_Controls
                    ret_code="$?"
                    echo "Return Code: $ret_code"
                    echo "$(date +'%y/%m/%d %H_%M_%S')" : $package_Name >> $logging_filePath/packages_uninstalled.log
                else
                    echo " Package name not provided."
                fi
                ;;
            "--update" | "-Upd" )
                echo "Update System"
                package_Controls="${pkg_controls["update"]}"
                echo "Command: $package_Controls"
                $package_Controls
                # FIX: capture the exit status immediately after the command
                # (it was previously taken after an intervening echo).
                ret_code="$?"
                echo "Return Code: $ret_code"
                echo "$(date +'%y/%m/%d %H_%M_%S')" : $(pacman -Qu) >> $logging_filePath/packages_updated.log
                ;;
            "--upgrade" | "-Upg" )
                echo "Upgrade System"
                package_Controls="${pkg_controls["upgrade"]}"
                # Record the pending upgrades before applying them.
                echo "$(date +'%y/%m/%d %H_%M_%S')" : $(pacman -Qu) >> $logging_filePath/packages_upgraded.log
                echo "Command: $package_Controls"
                $package_Controls
                # FIX: capture the exit status immediately after the command.
                ret_code="$?"
                echo "Return Code: $ret_code"
                ;;
            "--update-and-upgrade" | "--update_and_upgrade" | "-Updg" )
                # FIX: help documents --update-and-upgrade; accept it in
                # addition to the original underscore spelling.
                echo "Update and Upgrade System"
                package_Controls="${pkg_controls["update-and-upgrade"]}"
                echo "Command: $package_Controls"
                $package_Controls
                ret_code="$?"
                echo "Return Code: $ret_code"
                echo "$(date +'%y/%m/%d %H_%M_%S')" : $(pacman -Qu) >> $logging_filePath/packages_updated_and_upgraded.log
                ;;
            "--help" | "-h" | "-H")
                # FIX: help documents -h; accept it in addition to -H.
                echo "Help"
                prog_Help
                ;;
            *)
                echo "Invalid Option"
                ;;
        esac
    else
        menu_package_Control
    fi
}

function END() {
    line=""
    read -p "Pause" line
}

function main() {
    body "$@"
}

if [[ "${BASH_SOURCE[@]}" == "${0}" ]]; then
    # START
    init
    main "$@"
    END
fi
#!/bin/sh
# Start the Python side-service in the background, then run the prototype
# binary in the foreground pointing at it.
python3 /code/service.py &
/prototype -d -cluster default -service service -tags=env:development -prototype-url http://prototype:10000
var express = require('express');
var router = express.Router();
const fetch = require('node-fetch');

// Base URL of the mobile API all routes in this module talk to.
const API_BASE = 'https://mobile-api.innovate.fresnostate.edu';

// SECURITY: this disables TLS certificate verification for ALL outbound HTTPS
// in this Node process, not just these calls. Kept for parity with the
// original behavior, but it should be replaced with a trusted CA bundle.
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";

// Returns a catch handler that logs the error and answers 500 so the client
// request does not hang forever (the original handlers only logged).
function handleError(res) {
    return function(err) {
        console.log(err);
        res.status(500).end();
    };
}

// Middleware: fetch channel data for every request under this router and
// stash it on res.locals for the view handlers below.
router.get('/*', function(req, res, next) {
    fetch(API_BASE + '/channel')
        .then((response) => response.json())
        .then((responseJSON) => {
            res.locals.icube = responseJSON.icube;
            next();
        })
        .catch((err) => {
            // BUG FIX: forward the error to Express instead of only logging,
            // which previously left the request hanging.
            console.log(err);
            next(err);
        });
});

//prototype form only
router.get('/', function(req, res) {
    res.render('subsForm.ejs', { icube: res.locals.icube });
});

router.get('/add_area', function(req, res) {
    res.render('addArea.ejs', { icube: res.locals.icube });
});

router.get('/add_subject', function(req, res) {
    res.render('addSubject.ejs', { icube: res.locals.icube });
});

router.get('/manage_areas', function(req, res) {
    res.render('manageAreas.ejs', { icube: res.locals.icube });
});

router.get('/manage_subjects', function(req, res) {
    res.render('manageSubjects.ejs', { icube: res.locals.icube });
});

// Create a new area under the given channel.
router.post('/add_area', function(req, res) {
    fetch(API_BASE + '/area/' + req.body.channelID, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            "name": req.body.name,
            "desc": req.body.desc
        })
    })
        .then(() => { res.redirect('/menu'); })
        .catch(handleError(res));
});

// Create a new subject under the given channel/area.
router.post('/add_subject', function(req, res) {
    fetch(API_BASE + '/subject/' + req.body.channelID + '/' + req.body.areaID, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            "name": req.body.name,
            "desc": req.body.desc,
            "opt": {
                "level": req.body.level,
                "distribution": req.body.dist
            }
        })
    })
        .then(() => { res.redirect('/menu'); })
        .catch(handleError(res));
});

// Update an existing area.
router.post('/update_area', function(req, res) {
    fetch(API_BASE + '/area/' + req.body.channelID + '/' + req.body.areaID, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            "name": req.body.name,
            "desc": req.body.desc
        })
    })
        .then(() => { res.redirect('/menu'); })
        .catch(handleError(res));
});

// Update an existing subject.
router.post('/update_subject', function(req, res) {
    fetch(API_BASE + '/subject/' + req.body.channelID + '/' + req.body.areaID + '/' + req.body.subjectID, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            "name": req.body.name,
            "desc": req.body.desc,
            "opt": {
                "level": req.body.level,
                "distribution": req.body.dist
            }
        })
    })
        .then(() => { res.redirect('/menu'); })
        .catch(handleError(res));
});

// Delete an area.
router.post('/delete_area', function(req, res) {
    fetch(API_BASE + '/area/' + req.body.channelID + '/' + req.body.areaID, {
        method: 'DELETE',
        headers: { 'Content-Type': 'application/json' }
    })
        .then(() => { res.redirect('/menu'); })
        .catch(handleError(res));
});

// Delete a subject.
router.post('/delete_subject', function(req, res) {
    fetch(API_BASE + '/subject/' + req.body.channelID + '/' + req.body.areaID + '/' + req.body.subjectID, {
        method: 'DELETE',
        headers: { 'Content-Type': 'application/json' }
    })
        .then(() => { res.redirect('/menu'); })
        .catch(handleError(res));
});

module.exports = router
<gh_stars>0 var searchData= [ ['list_2ehpp_0',['list.hpp',['../list_8hpp.html',1,'']]] ];
#!/usr/bin/env bats

load helpers

# Make sure no stale test containers survive from a previous run, then set up
# a fresh busybox bundle.
function setup() {
  teardown_running_container_inroot test_box1 "$HELLO_BUNDLE"
  teardown_running_container_inroot test_box2 "$HELLO_BUNDLE"
  teardown_running_container_inroot test_box3 "$HELLO_BUNDLE"
  teardown_busybox
  setup_busybox
}

function teardown() {
  teardown_running_container_inroot test_box1 "$HELLO_BUNDLE"
  teardown_running_container_inroot test_box2 "$HELLO_BUNDLE"
  teardown_running_container_inroot test_box3 "$HELLO_BUNDLE"
  teardown_busybox
}

@test "list" {
  # run a few busyboxes detached
  ROOT=$HELLO_BUNDLE runc run -d --console-socket "$CONSOLE_SOCKET" test_box1
  [ "$status" -eq 0 ]

  ROOT=$HELLO_BUNDLE runc run -d --console-socket "$CONSOLE_SOCKET" test_box2
  [ "$status" -eq 0 ]

  ROOT=$HELLO_BUNDLE runc run -d --console-socket "$CONSOLE_SOCKET" test_box3
  [ "$status" -eq 0 ]

  # Default (table) output: header line followed by one row per container.
  ROOT=$HELLO_BUNDLE runc list
  [ "$status" -eq 0 ]
  [[ ${lines[0]} =~ ID\ +PID\ +STATUS\ +BUNDLE\ +CREATED+ ]]
  [[ "${lines[1]}" == *"test_box1"*[0-9]*"running"*$BUSYBOX_BUNDLE*[0-9]* ]]
  [[ "${lines[2]}" == *"test_box2"*[0-9]*"running"*$BUSYBOX_BUNDLE*[0-9]* ]]
  [[ "${lines[3]}" == *"test_box3"*[0-9]*"running"*$BUSYBOX_BUNDLE*[0-9]* ]]

  # Quiet mode: IDs only, one per line.
  ROOT=$HELLO_BUNDLE runc list -q
  [ "$status" -eq 0 ]
  [[ "${lines[0]}" == "test_box1" ]]
  [[ "${lines[1]}" == "test_box2" ]]
  [[ "${lines[2]}" == "test_box3" ]]

  # Explicit table format must match the default output.
  ROOT=$HELLO_BUNDLE runc list --format table
  [ "$status" -eq 0 ]
  [[ ${lines[0]} =~ ID\ +PID\ +STATUS\ +BUNDLE\ +CREATED+ ]]
  [[ "${lines[1]}" == *"test_box1"*[0-9]*"running"*$BUSYBOX_BUNDLE*[0-9]* ]]
  [[ "${lines[2]}" == *"test_box2"*[0-9]*"running"*$BUSYBOX_BUNDLE*[0-9]* ]]
  [[ "${lines[3]}" == *"test_box3"*[0-9]*"running"*$BUSYBOX_BUNDLE*[0-9]* ]]

  # JSON format: a single line holding one array with all three containers;
  # each glob below checks one container object's fields in order.
  ROOT=$HELLO_BUNDLE runc list --format json
  [ "$status" -eq 0 ]
  [[ "${lines[0]}" == [\[][\{]"\"ociVersion\""[:]"\""*[0-9][\.]*[0-9][\.]*[0-9]*"\""[,]"\"id\""[:]"\"test_box1\""[,]"\"pid\""[:]*[0-9][,]"\"status\""[:]*"\"running\""[,]"\"bundle\""[:]*$BUSYBOX_BUNDLE*[,]"\"rootfs\""[:]"\""*"\""[,]"\"created\""[:]*[0-9]*[\}]* ]]
  [[ "${lines[0]}" == *[,][\{]"\"ociVersion\""[:]"\""*[0-9][\.]*[0-9][\.]*[0-9]*"\""[,]"\"id\""[:]"\"test_box2\""[,]"\"pid\""[:]*[0-9][,]"\"status\""[:]*"\"running\""[,]"\"bundle\""[:]*$BUSYBOX_BUNDLE*[,]"\"rootfs\""[:]"\""*"\""[,]"\"created\""[:]*[0-9]*[\}]* ]]
  [[ "${lines[0]}" == *[,][\{]"\"ociVersion\""[:]"\""*[0-9][\.]*[0-9][\.]*[0-9]*"\""[,]"\"id\""[:]"\"test_box3\""[,]"\"pid\""[:]*[0-9][,]"\"status\""[:]*"\"running\""[,]"\"bundle\""[:]*$BUSYBOX_BUNDLE*[,]"\"rootfs\""[:]"\""*"\""[,]"\"created\""[:]*[0-9]*[\}][\]] ]]
}
# views.py from django.shortcuts import render def team(request): team_sisi = 'hello mates!' return render(request, 'team.html', {'team_sisi': team_sisi})
<reponame>ch1huizong/learning weekly_sales = itertools.imap(sum, windows(daily_sales, 7))
#!/bin/bash
ver="5.00"
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"  # get cur dir of this script
progName=$(basename -- "$0")
cd $DIR
echo "INFO : $progName $ver written by Claude Pageau"
# makedailymovie.sh version 2.91 - written by Claude Pageau.
# To install/update avconv execute the following command in RPI terminal session
#
# sudo apt-get install libav-tools
#
# Mounting a network share to /home/pi/pi-timolo/mnt folder
# Note change the IP and sharename path below to suit your network
# You can mount the network share by adding
# the appropriate entry to the /etc/fstab
# example
#
# sudo nano /etc/fstab
#
# Add a similar line below to the fstab.
# This example mounts an external Hard Drive share
# on a RPI running openelec xbmc.
# Change the IP address, share name and paths appropriately (exclude the #)
#
# //192.168.1.154/sharename/Media/Images /home/pi/pi-timolo/mnt cifs username=root,password=openelec,uid=pi,gid=pi, 0 0
#
# Add a crontab entry to the root crontab per the below.
# make sure makedailymovie.sh is executable eg sudo chmod +x makedailymovie.sh
#
# sudo crontab -e
#
# Add similar crontab entry as line below (excluding the #).
# This would execute makedailymovie.sh at 10pm every day
#
# 01 20 * * * /home/pi/pi-timolo/makedailymovie.sh
#
# For more details see GitHub Wiki here
# https://github.com/pageauc/pi-timolo/wiki/Utilities

# ------------- Start Script ------------------
# User Settings for source and destination folders
# Note destination folder will be created if it does not exist
folder_source=$DIR/motion              # location of source jpg images for video
folder_destination=$DIR/daily_movies   # destination movies folder (will be created if it does not exist)
folder_working=$DIR/avconv_tmp         # temporary working folder for numbered soft links
error_log_file=$DIR/makedailymovie_error.log
delete_source_files=false
# Use with EXTREME CAUTION since source image files will be DELETED after encoding
# If something goes wrong you may end up with no source images and a bad encode.
# delete=true noAction=false (default) Note no spaces between variable and value

# Output video path with a unique daily name by date and time.
# Video can be specified as avi or mp4
moviename=dailymovie_$(date '+%Y%m%d-%H%M').mp4

# avconv encoding variables for output video
fps=10               # Output video frames per second
vid_size='1280x720'  # Output video size width x height
a_ratio=16:9         # Output video aspect ratio

clear
echo "
====================== SETTINGS ==========================================
Movie Name    : $moviename
Source        : $folder_source
Destination   : $folder_destination
Working       : $folder_working
Delete Source : $delete_source_files
==========================================================================
Working ..."

# BUG FIX: '[ ! -e $folder_source/*jpg ]' fails with "too many arguments"
# when the glob matches more than one file; compgen -G tests the glob safely.
if ! compgen -G "$folder_source/*jpg" > /dev/null ; then
    echo "ERROR : No Files Found $folder_source/*jpg"
    echo "        Please Investigate and Try Again"
    exit 1
fi

# Remove old working folder if it exists
if [ -d $folder_working ]; then
    echo "WARN  : Removing previous working folder " $folder_working
    sudo rm -R $folder_working
fi

# Create a new working folder
echo "INFO  : Creating Temporary Working Folder " $folder_working
mkdir $folder_working
cd $folder_working    # change to working folder

# Create numbered soft links pointing to image files in source folder
echo "INFO  : Creating soft links for " $folder_source " files in "$folder_working
a=0
ls $folder_source/*.jpg | (
    # the first line will be the most recent file so ignore it
    # since it might still be in progress
    read the_most_recent_file    # Skip this file in listing
    # do something with the rest of the files
    while read not_the_most_recent_file
    do
        new=$(printf "%05d.jpg" ${a})    # 05 pads to length of 5, max 99999 images
        ln -s ${not_the_most_recent_file} ${new}
        let a=a+1
    done
)
cd $DIR    # Return back to launch folder

echo "=========================================================================="
echo "INFO  : Making Daily Movie ... $moviename"
echo "=========================================================================="
sudo /usr/bin/avconv -y -f image2 -r $fps -i $folder_working/%5d.jpg -aspect $a_ratio -s $vid_size $DIR/$moviename
if [ $? -ne 0 ]; then
    echo "========================== ERROR ========================================="
    echo "ERROR : avconv Encoding Failed for " $DIR/$moviename " Please Investigate Problem"
    echo "        Review avconv output for error messages and correct problem"
    echo "ERROR : avconv Encoding Failed for " $DIR/$moviename >> $error_log_file
    exit 1
else
    if [ ! -d $folder_destination ]; then
        mkdir $folder_destination
        # BUG FIX: original read 'if [ "$?" -ne 0]' -- the missing space
        # before ']' is a runtime test-syntax error ("missing ]").
        if [ "$?" -ne 0 ]; then
            echo "============================ ERROR +======================================"
            echo "ERROR : Problem Creating Destination Folder " $folder_destination
            echo "        If destination is a remote folder or mount then check network, destination IP address, permissions, Etc"
            echo "ERROR : mkdir Failed - " $folder_destination " Could NOT be Created. Please investigate ..." >> $error_log_file
            exit 1
        fi
    fi
    echo "Copy Daily Movie to Final Destination " $folder_destination
    cp $DIR/$moviename $folder_destination/$moviename
    if [ $? -ne 0 ]; then
        echo "============================= ERROR +======================================"
        echo "ERROR : Problem copying " $DIR/$moviename " to " $folder_destination/$moviename
        echo "        If destination is a remote folder or mount then check network, destination IP address, permissions, Etc"
        echo "ERROR : Copy Failed - " $DIR/$moviename " to " $folder_destination/$moviename " Please investigate ..." >> $error_log_file
        exit 1
    else
        if [ -e $folder_destination/$moviename ]; then
            echo "=========================================================================="
            echo "INFO  : Success - Daily Movie Saved to " $folder_destination/$moviename
            sudo rm $DIR/$moviename
            echo "INFO  : Processing Completed Successfully ..."
            echo "INFO  : Deleting Working Folder " $folder_working
            sudo rm -R $folder_working
            if [ $? -ne 0 ]; then
                echo "ERROR : Could not Delete Working Folder " $folder_working " Please Investigate ..."
                echo "        Check for permissions or other possible problems"
                echo "============================ ERROR +======================================"
                exit 1
            else
                if [ "$delete_source_files" = true ] ; then
                    echo "WARN  - Deleting Source Files $folder_source/*jpg"
                    sudo rm $folder_source/*jpg
                fi
                echo "=========================== SUCCESS ======================================"
                echo "INFO  : Video Saved to $folder_destination/$moviename"
            fi
        else
            echo "============================ ERROR +======================================"
            echo "ERROR : Problem copying " $DIR/$moviename " to " $folder_destination/$moviename
            echo "        If destination is a remote folder or mount then check network, destination IP address, permissions, Etc"
            echo "ERROR : Copy Failed - " $DIR/$moviename " to " $folder_destination/$moviename " Please investigate ..." >> $error_log_file
            exit 1
        fi
    fi
fi
# ------------------ End Script ----------------------------
#!/usr/bin/env bats load 'test_helper' fixtures 'exist' setup () { touch ${TEST_FIXTURE_ROOT}/dir/file ln -s ${TEST_FIXTURE_ROOT}/dir/file ${TEST_FIXTURE_ROOT}/dir/link } teardown () { rm -f ${TEST_FIXTURE_ROOT}/dir/link ${TEST_FIXTURE_ROOT}/dir/file } # Correctness @test 'assert_link_exists() <file>: returns 0 if <file> Link exists' { local -r file="${TEST_FIXTURE_ROOT}/dir/link" run assert_link_exists "$file" [ "$status" -eq 0 ] [ "${#lines[@]}" -eq 0 ] } @test 'assert_link_exists() <file>: returns 1 and displays path if <file>symbolic link does not exist' { local -r file="${TEST_FIXTURE_ROOT}/dir/ " run assert_link_exists "$file" [ "$status" -eq 1 ] [ "${#lines[@]}" -eq 3 ] [ "${lines[0]}" == '-- symbolic link does not exist --' ] [ "${lines[1]}" == "path : $file" ] [ "${lines[2]}" == '--' ] } # Transforming path @test 'assert_link_exists() <file>: replace prefix of displayed path' { local -r BATSLIB_FILE_PATH_REM="#${TEST_FIXTURE_ROOT}" local -r BATSLIB_FILE_PATH_ADD='..' run assert_link_exists "${TEST_FIXTURE_ROOT}/nodir" [ "$status" -eq 1 ] [ "${#lines[@]}" -eq 3 ] [ "${lines[0]}" == '-- symbolic link does not exist --' ] [ "${lines[1]}" == "path : ../nodir" ] [ "${lines[2]}" == '--' ] } @test 'assert_link_exists() <file>: replace suffix of displayed path' { local -r BATSLIB_FILE_PATH_REM='%file.does_not_exists' local -r BATSLIB_FILE_PATH_ADD='..' run assert_link_exists "${TEST_FIXTURE_ROOT}/nodir" [ "$status" -eq 1 ] [ "${#lines[@]}" -eq 3 ] [ "${lines[0]}" == '-- symbolic link does not exist --' ] [ "${lines[1]}" == "path : ${TEST_FIXTURE_ROOT}/nodir" ] [ "${lines[2]}" == '--' ] } @test 'assert_link_exists() <file>: replace infix of displayed path' { local -r BATSLIB_FILE_PATH_REM='nodir' local -r BATSLIB_FILE_PATH_ADD='..' run assert_link_exists "${TEST_FIXTURE_ROOT}/nodir" [ "$status" -eq 1 ] [ "${#lines[@]}" -eq 3 ] [ "${lines[0]}" == '-- symbolic link does not exist --' ] [ "${lines[1]}" == "path : ${TEST_FIXTURE_ROOT}/.." ] [ "${lines[2]}" == '--' ] }
<reponame>beatrizmd/Aula-29-04 var tabela = document.querySelector("table") // var linhas = document.querySelectorAll("tr") // console.log(linhas[1]) var linhas = tabela.querySelectorAll("tr") for (var i = 1; i < linhas.length; i++) { linhas[i].addEventListener("dblclick",function(event) { event.target.parentNode.remove() }) calcularIMC(linhas[i]) } function calcularIMC(linha) { var tds = linha.querySelectorAll("td") var tdAltura = tds[1] var tdPeso = tds[2] var altura = tdAltura.textContent altura = parseFloat(altura) var peso = parseFloat(tdPeso.textContent) var IMC = peso / (altura * altura) var tdIMC = tds[3] tdIMC.textContent = IMC.toFixed(2) //toFixed(2) = duas casas depois da vírgula if(IMC >= 30) tdIMC.style.color = "red" } function adicionarPessoa() { var campoNome = document.getElementsByName("Nome") [0] var campoAltura = document.getElementsByName("Altura") [0] var campoPeso = document.getElementsByName ("Peso") [0] var pessoa = { nome: campoNome.value, altura: campoAltura.value, peso: campoPeso.value } var novaLinha = document.createElement("tr") var celulaNome = document.createElement("td") var celulaAltura = document.createElement("td") var celulaPeso = document.createElement("td") var celulaIMC = document.createElement("td") novaLinha.appendChild(celulaNome) novaLinha.appendChild(celulaAltura) novaLinha.appendChild(celulaPeso) novaLinha.appendChild(celulaIMC) tabela.appendChild(novaLinha) celulaNome.textContent = pessoa.nome celulaAltura.textContent = pessoa.altura celulaPeso.textContent = pessoa.peso campoNome.value = "" campoAltura.value = "" campoPeso.value = "" calcularIMC(novaLinha) }
package database import ( "context" "log" "os" "github.com/go-pg/pg/v10" ) // DB postgres db client var DB *pg.DB // Connect creates a connection to postgres func Connect() error { opt, err := pg.ParseURL(os.Getenv("DB_URI_HEROKU")) if err != nil { log.Printf("Error at Connect db.go : %v", err) return err } DB = pg.Connect(opt) // Check if database is up and running if err := DB.Ping(context.Background()); err != nil { log.Printf("Error at Connect db.go : %v", err) return err } log.Println("Connected to db") return nil }
def caesar_cipher_encrypt(message, shift): encoded_message = '' for c in message: if c.isalpha(): num = ord(c) num += shift if c.isupper(): if num > ord('Z'): num -= 26 elif num < ord('A'): num += 26 elif c.islower(): if num > ord('z'): num -= 26 elif num < ord('a'): num += 26 encoded_message += chr(num) else: encoded_message += c return encoded_message message = "hello world" encoded_message = caesar_cipher_encrypt(message, 10) print(encoded_message)
import { Graph } from '../../../../../Class/Graph' import { Config } from '../../../../../Class/Unit/Config' import { PO } from '../../../../../interface/PO' import { Primitive } from '../../../../../Primitive' import { GraphSpec } from '../../../../../types' export interface I { spec: GraphSpec } export interface O { pod: PO } export default class Pod extends Primitive<I, O> { _ = ['G'] constructor(config?: Config) { super( { i: ['spec'], o: ['graph'], }, config, { output: { graph: { ref: true, }, }, } ) } onDataInputData(name: string, data: any) { if (name === 'spec') { const spec = data const graph = new Graph(spec, { paused: false }) this._output.graph.push(graph) graph.play() } } onDataInputDrop(name: string) { if (name === 'spec') { this._output.graph.pull() } } }
# <reponame>jamacanbacn/splits-io  -- repository marker preserved as a
# comment; the bare tag was a Ruby syntax error.
require 'rails_helper'

# Request specs for race entries: show/create/update/destroy, covering
# authorization, lookup failures, join tokens, and ready/forfeit transitions.
RSpec.describe Api::V4::Races::EntriesController do
  describe '#show' do
    let(:user) { FactoryBot.create(:user) }
    let(:race) { FactoryBot.create(:race) }
    let(:entry) { FactoryBot.create(:entry, race: race, runner: user, creator: user) }

    context 'with no authorization header' do
      subject(:response) { get :show, params: {race_id: race.id, id: entry.id} }

      it 'returns a 401' do
        expect(response).to have_http_status(:unauthorized)
      end
    end

    context 'with a valid authorization header' do
      let(:user) { FactoryBot.create(:user) }
      let(:token) { FactoryBot.create(:access_token, resource_owner_id: user.id) }

      before { request.headers['Authorization'] = "Bearer #{token.token}" }

      context 'with no race found' do
        subject(:response) { get :show, params: {race_id: '!@#$%', id: entry.id} }

        it 'returns a 404' do
          expect(response).to have_http_status(:not_found)
        end
      end

      context 'with an entry present' do
        subject(:response) { get :show, params: {race_id: entry.race.id, id: entry.id} }

        it 'returns a 200' do
          expect(response).to have_http_status(:ok)
        end

        it 'renders an entry schema' do
          expect(response.body).to match_json_schema(:entry)
        end
      end
    end
  end

  describe '#create' do
    let(:race) { FactoryBot.create(:race) }

    context 'with no authorization header' do
      subject(:response) { put :create, params: {race_id: race.id} }

      it 'returns a 401' do
        expect(response).to have_http_status(:unauthorized)
      end
    end

    context 'with a valid authorization header' do
      let(:user) { FactoryBot.create(:user) }
      let(:token) { FactoryBot.create(:access_token, resource_owner_id: user.id) }

      subject(:response) { put :create, params: {race_id: race.id} }

      before { request.headers['Authorization'] = "Bearer #{token.token}" }

      context 'with no race found' do
        subject(:response) { put :create, params: {race_id: '!@#$%'} }

        it 'returns a 404' do
          expect(response).to have_http_status(:not_found)
        end
      end

      context 'as a ghost' do
        subject(:response) { put :create, params: {race_id: '!@#$%', entry: {run_id: FactoryBot.create(:run).id}} }

        it 'returns a 404' do
          expect(response).to have_http_status(:not_found)
        end
      end

      context 'with the user in another race' do
        let(:secondary_race) { FactoryBot.create(:race) }
        let(:entry) { FactoryBot.create(:entry, runner: user, creator: user, race: secondary_race) }

        it 'returns a 400' do
          entry # Touch entry so that it exists
          expect(response).to have_http_status(:bad_request)
        end
      end

      context 'with the user available to join a race' do
        context 'with a secret race' do
          let(:race) { FactoryBot.create(:race, visibility: :secret) }

          context 'with no join token' do
            it 'returns a 403' do
              expect(response).to have_http_status(:forbidden)
            end
          end

          context 'with an invalid join token' do
            subject(:response) { put :create, params: {race_id: race.id, join_token: '!@#$%'} }

            it 'returns a 403' do
              expect(response).to have_http_status(:forbidden)
            end
          end

          context 'with a valid join token' do
            subject(:response) { put :create, params: {race_id: race.id, join_token: race.join_token} }

            it 'returns a 201' do
              expect(response).to have_http_status(:created)
            end

            it 'renders an entry schema' do
              expect(response.body).to match_json_schema(:entry)
            end
          end
        end

        context 'with a public race' do
          it 'returns a 201' do
            expect(response).to have_http_status(:created)
          end

          it 'renders an entry schema' do
            expect(response.body).to match_json_schema(:entry)
          end
        end
      end
    end
  end

  describe '#update' do
    let(:race) { FactoryBot.create(:race) }
    let(:user) { FactoryBot.create(:user) }
    let(:entry) { FactoryBot.create(:entry, race: race, runner: user, creator: user) }

    context 'with no authorization header' do
      subject(:response) { patch :update, params: {race_id: race.id, id: entry.id, entry: {readied_at: Time.now.utc}} }

      # BUG FIX: the description used to say 403 while the assertion checks
      # 401 (:unauthorized), matching every other unauthenticated context.
      it 'returns a 401' do
        expect(response).to have_http_status(:unauthorized)
      end
    end

    context 'with a valid authorization header' do
      let(:token) { FactoryBot.create(:access_token, resource_owner_id: user.id) }

      subject(:response) { patch :update, params: {race_id: race.id, id: entry.id} }

      before { request.headers['Authorization'] = "Bearer #{token.token}" }

      context 'with no race found' do
        subject(:response) { patch :update, params: {race_id: '!@#$', id: entry.id} }

        it 'returns a 404' do
          expect(response).to have_http_status(:not_found)
        end
      end

      context 'with no entry found' do
        subject(:response) { patch :update, params: {race_id: race.id, id: 'beepy beeperson'} }

        it 'returns a 404' do
          expect(response).to have_http_status(:not_found)
        end
      end

      context 'with an entry present' do
        before { entry }

        context 'with no parameters' do
          subject(:response) { patch :update, params: {race_id: race.id, id: entry.id} }

          it 'returns a 200' do
            expect(response).to have_http_status(:ok)
          end
        end

        context 'with 1 parameter to update' do
          let(:time) { Time.now.utc }

          subject(:response) do
            patch :update, params: {race_id: race.id, id: entry.id, entry: {readied_at: time.iso8601(3)}, format: :json}
          end

          it 'returns a 200' do
            expect(response).to have_http_status(:ok)
          end

          it 'renders an entry schema' do
            expect(response.body).to match_json_schema(:entry)
          end

          it 'matches the given time' do
            expect(JSON.parse(response.body)['entry']['readied_at']).to eq(time.iso8601(3))
          end
        end

        context 'with a readied_at of "now"' do
          let(:time) { Time.now.utc }

          before { allow(Time).to receive(:now).and_return(time) }

          subject(:response) do
            patch :update, params: {race_id: race.id, id: entry.id, entry: {readied_at: 'now'}, format: :json}
          end

          it 'returns a 200' do
            expect(response).to have_http_status(:ok)
          end

          it 'renders an entry schema' do
            expect(response.body).to match_json_schema(:entry)
          end

          it 'matches the given time' do
            expect(JSON.parse(response.body)['entry']['readied_at']).to eq(time.iso8601(3))
          end
        end

        context 'who unreadies' do
          context 'before the race starts' do
            subject(:response) do
              patch :update, params: {
                race_id: race.id, id: entry.id, entry: {readied_at: nil}, format: :json
              }
            end

            before { entry.update(readied_at: Time.now.utc) }

            it 'returns a 200' do
              expect(response).to have_http_status(:ok)
            end

            it 'renders an entry schema' do
              expect(response.body).to match_json_schema(:entry)
            end

            it 'has a null readied_at' do
              expect(JSON.parse(response.body)['entry']['readied_at']).to eq(nil)
            end
          end

          context 'after the race starts' do
            before do
              entry.update(readied_at: Time.now.utc - 10.minutes)
              race.update(started_at: Time.now.utc)
            end

            subject(:response) do
              patch :update, params: {
                race_id: race.id, id: entry.id, entry: {readied_at: nil}, format: :json
              }
            end

            it 'returns a 400' do
              expect(response).to have_http_status(:bad_request)
            end
          end
        end

        context 'setting both forfeited and finished' do
          let(:time) { Time.now.utc }

          subject(:response) do
            patch :update, params: {
              race_id: race.id, id: entry.id,
              entry: {forfeited_at: time.iso8601(3), finished_at: time.iso8601(3)}, format: :json
            }
          end

          it 'returns a 400' do
            expect(response).to have_http_status(:bad_request)
          end
        end

        context 'setting run_id' do
          let(:run) { FactoryBot.create(:run, :parsed, user: user) }

          subject(:response) do
            patch :update, params: {
              race_id: race.id, id: entry.id, entry: {run_id: run.id36}, format: :json
            }
          end

          it 'returns a 200' do
            expect(response).to have_http_status(:ok)
          end

          it 'renders an entry schema' do
            expect(response.body).to match_json_schema(:entry)
          end

          it 'sets the correct run' do
            expect(JSON.parse(response.body)['entry']['run']['id']).to eq(run.id36)
          end
        end
      end
    end
  end

  describe '#destroy' do
    let(:race) { FactoryBot.create(:race) }
    let(:user) { FactoryBot.create(:user) }
    let(:entry) { FactoryBot.create(:entry, race: race, runner: user, creator: user) }

    context 'with no authorization header' do
      subject(:response) { delete :destroy, params: {race_id: race.id, id: entry.id} }

      it 'returns a 401' do
        expect(response).to have_http_status(:unauthorized)
      end
    end

    context 'with a valid authorization header' do
      let(:token) { FactoryBot.create(:access_token, resource_owner_id: user.id) }

      subject(:response) { delete :destroy, params: {race_id: race.id, id: entry.id} }

      before { request.headers['Authorization'] = "Bearer #{token.token}" }

      context 'with no race found' do
        subject(:response) { delete :destroy, params: {race_id: 'beep', id: entry.id} }

        it 'returns a 404' do
          expect(response).to have_http_status(:not_found)
        end
      end

      context 'with no entry' do
        before { entry.destroy }

        it 'returns a 404' do
          expect(response).to have_http_status(:not_found)
        end
      end

      context 'with an entry present' do
        before { entry }

        context 'before the race starts' do
          it 'returns a 200' do
            expect(response).to have_http_status(:ok)
          end
        end

        context 'after the race starts' do
          before { race.update(started_at: Time.now.utc) }

          # Consistency: symbolic status like every other expectation
          # (was the bare integer 409).
          it 'returns a 409' do
            expect(response).to have_http_status(:conflict)
          end
        end
      end
    end
  end
end
import React from 'react'; import { Modal, Card, Descriptions, message } from 'antd'; import { ShopTableListItem } from '../../data'; interface ClearPointModalProps { visible: boolean; onCancel: () => void; onOk: (id: string, val: number) => void; confirmLoading: boolean; shopData: ShopTableListItem | null; } const ClearPointModal: React.FC<ClearPointModalProps> = (props) => { const { visible, onCancel, onOk, confirmLoading, shopData } = props; if (!shopData) return null; return ( <Modal destroyOnClose title="清空积分提示" onCancel={onCancel} visible={visible} confirmLoading={confirmLoading} onOk={() => { if (shopData.integral === 0) { message.info('此商铺积分已清空'); return; } onOk(shopData._id, -shopData.integral); }} okText="确认清空" cancelButtonProps={{ style: { display: 'none' } }} > <Card bordered={false}> <Descriptions title="是否清空该商铺积分" column={{ sm: 2, xs: 1 }}> <Descriptions.Item label="商铺名称">{shopData.name}</Descriptions.Item> {/* <Descriptions.Item label="商铺老板电话">18888888888</Descriptions.Item> */} <Descriptions.Item label="当前积分">{shopData.integral}</Descriptions.Item> </Descriptions> </Card> </Modal> ); }; export default ClearPointModal;
import { parseText } from 'markup'
import { cleanText, specialCharacters } from 'utils/text'
import { parseByline } from './byline.js'

// NOTE(review): `R` (Ramda) is used throughout this module but never
// imported here -- presumably provided as a global (e.g. via a bundler
// ProvidePlugin); verify, otherwise this file fails at runtime.

const childTypes = [
  // child types in story redux state
  'images',
  'videos',
  'pullquotes',
  'asides',
  'inline_html_blocks',
]

// Normalize string nodes: clean whitespace, then map special characters.
// Non-strings pass through untouched.
const clean = R.when(
  R.is(String),
  R.pipe(
    cleanText,
    specialCharacters,
  ),
)

// :: {story} -> [{storychild}]
// Flatten all child collections into one list, tagging each child with its
// singular type name (e.g. items from `images` get type 'image').
export const getChildren = R.pipe(
  R.pick(childTypes),
  R.mapObjIndexed((val, key, obj) =>
    R.map(R.assoc('type', R.replace(/s$/, '', key)), val),
  ),
  R.values,
  R.reduce(R.concat, []),
)

// :: {story} -> ['placement']
// Unique placement names used by any child of the story.
export const getPlaces = R.pipe(
  getChildren,
  R.pluck('placement'),
  R.uniq,
)

// :: linkNode -> {story} -> {inline_html_link}
// Look up the story link record matching the node's name.
export const getLink = ({ name }) =>
  R.pipe(
    R.propOr([], 'links'),
    R.find(R.propEq('name', name)),
  )

// :: {place} -> {story} -> [{storychild}]
// All story children assigned to the named placement.
export const getPlaceChildren = ({ name }) =>
  R.pipe(
    getChildren,
    R.filter(R.propEq('placement', name)),
  )

// Resolve a place node: collect its children in order and, for any child
// carrying markup, parse that markup into a child node tree via `walk`.
const placeChildren = (walk, node, story) =>
  R.pipe(
    getPlaceChildren(node),
    R.sortBy(R.prop('ordering')),
    R.map(
      R.when(R.prop('bodytext_markup'), child => ({
        ...child,
        children: R.pipe(
          R.prop('bodytext_markup'),
          // Normalize legacy tag spellings before parsing.
          R.replace(/@fakta:/gi, '@faktatit:'),
          R.replace(/@sitat:/gi, ''),
          parseText,
          walk,
        )(child),
      })),
    ),
  )(story)

// :: {story} -> {...story, nodeTree}
// Parse the story's body markup into a node tree, resolving placements,
// links and block tags; some block tags are hoisted into story metadata
// (title, kicker, lede, theme_word, bylines) and removed from the tree.
export const buildNodeTree = story => {
  let { title, kicker, lede, theme_word, bylines = [] } = story
  const new_bylines = []

  const walk = R.compose(
    R.map(clean),
    R.reject(R.isNil),
    R.map(parseNode => {
      if (R.is(String, parseNode)) return parseNode
      let { type, children, match, ...props } = parseNode
      // Depth-first: resolve children before the node itself.
      if (children) props.children = walk(children)
      switch (type) {
        case 'place':
          props.children = placeChildren(walk, parseNode, story)
          break
        case 'link':
          props.link = getLink(props)(story)
          break
        case 'blockTag':
          switch (props.tag) {
            case 'facts':
              // Fact boxes become aside placements.
              props.type = 'place'
              props.children = [{ type: 'aside', children }]
              break
            case 'sitat':
              // Quotes become pullquote placements.
              props.type = 'place'
              props.children = [{ type: 'pullquote', children }]
              break
            case 'bl':
              // Bylines are accumulated separately and dropped from the tree.
              new_bylines.push(parseByline(children[0]))
              return null
            case 'tit':
              // First title tag wins only when the story has no title yet.
              if (!title) {
                title = match[2]
                return null
              }
              break
            case 'tema':
              theme_word = match[2]
              return null
            case 'ing':
              lede += match[2]
              return null
            case 'stikktit':
              kicker = match[2]
              return null
          }
          break
      }
      return { type, ...props }
    }),
  )

  const parseTree = R.pipe(
    // Normalize the shorthand quote tag before parsing.
    R.replace(/^@sit:/gm, '@sitat:'),
    parseText,
  )(story.bodytext_markup)

  const nodeTree = walk(parseTree)

  return {
    ...story,
    title,
    kicker,
    lede,
    theme_word,
    bylines: R.concat(bylines, new_bylines),
    parseTree,
    nodeTree,
  }
}
# Build a small synthetic dataset: 100 samples with 5 standard-normal
# features each, plus one binary class label per sample.
import numpy as np

n_samples, n_features = 100, 5
data = np.random.randn(n_samples, n_features)
labels = np.random.choice([0, 1], n_samples)
################################################################################
# Extension library for ../build-rpm-specs.sh
#
# Preconditions:
# * Declared before calling this code: confFileSettings, _globalSettingsRules
#
# Copyright 2001, 2018 William W. Kimball, Jr. MBA MSIS
################################################################################
# Extension libraries must not be directly executed; BASH_SOURCE[1] is only
# set when this file is being sourced from another script.
if [ -z "${BASH_SOURCE[1]}" ]; then
    echo "ERROR: You may not call $0 directly." >&2
    exit 1
fi

# Import helper functions
if ! source "${_myFuncDir}"/parse-config-file.sh; then
    errorOut 3 "Unable to import the config file parser."
fi

# The configuration source may be a file or directory. When it is a directory,
# attempt to source every file within it in alphabetical order.
configSource="${_globalSettings[GLOBAL_CONFIG_SOURCE]}"
logDebug "Loading global configuration from source, ${configSource}"
hasConfigError=false
if [ -d "$configSource" ]; then
    # -print0 with `read -d ''` is safe for file names containing whitespace.
    while IFS= read -r -d '' configFile; do
        if ! parseConfigFile "$configFile" confFileSettings _globalSettingsRules; then
            logError "Unable to read from configuration file, ${configFile}."
            hasConfigError=true
        fi
    done < <(find "$configSource" -maxdepth 1 -type f -iname '*.conf' -print0)
elif [ -e "$configSource" ]; then
    if ! parseConfigFile "$configSource" confFileSettings _globalSettingsRules; then
        logError "Unable to read from configuration file, ${configSource}."
        hasConfigError=true
    fi
elif ${_globalSettings[USER_SET_GLOBAL_CONFIG_SOURCE]}; then
    # User-specified settings file does not exist; a missing *default* source
    # is silently ignored, only an explicit user choice warrants a warning.
    logWarning "No settings file found at ${configSource}."
fi

# Abort on error
if $hasConfigError; then
    exit 3
fi

# Cleanup
unset configSource configFile hasConfigError
# platform = Red Hat Virtualization 4,multi_platform_ol,multi_platform_rhel,multi_platform_sle

# Remove legacy rhosts-based trust files, which permit unauthenticated
# host-based logins. -xdev keeps the search on the local filesystem.
# Using '-exec ... {} +' invokes rm once per batch of matches instead of
# once per file ('\;'), with identical effect.
find /root -xdev -type f -name ".rhosts" -exec rm -f {} +
find /home -maxdepth 2 -xdev -type f -name ".rhosts" -exec rm -f {} +
rm -f /etc/hosts.equiv
#!/bin/sh # Copyright (c) 2015, Plume Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the Plume Design Inc. nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL Plume Design Inc. BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# FUT environment loading
# shellcheck disable=SC1091
source /tmp/fut-base/shell/config/default_shell.sh
[ -e "/tmp/fut-base/fut_set_env.sh" ] && source /tmp/fut-base/fut_set_env.sh
source "${FUT_TOPDIR}/shell/lib/cm2_lib.sh"
[ -e "${PLATFORM_OVERRIDE_FILE}" ] && source "${PLATFORM_OVERRIDE_FILE}" || raise "${PLATFORM_OVERRIDE_FILE}" -ofm
[ -e "${MODEL_OVERRIDE_FILE}" ] && source "${MODEL_OVERRIDE_FILE}" || raise "${MODEL_OVERRIDE_FILE}" -ofm

tc_name="cm2/$(basename "$0")"
cm_setup_file="cm2/cm2_setup.sh"
sleep_time_after_if_down=5
# Expected leading payload bytes, in order, while the link recovers.
if_down_up_process_bits="05 35"
if_default="eth0"
usage()
{
cat << usage_string
${tc_name} [-h] arguments
Description:
    - Script observes AW_Bluetooth_Config table field 'payload' during drop/up of link interface.
      If AW_Bluetooth_Config payload field fails to change in given sequence (${if_down_up_process_bits}), test fails
Arguments:
    -h : show this help message
    \$1 (if_name) : <CONNECTION-INTERFACE> : (string)(optional) : (default:${if_default})
Testcase procedure:
    - On DEVICE: Run: ${cm_setup_file} (see ${cm_setup_file} -h)
                 Run: ${tc_name} <WAN-IF-NAME>
Script usage example:
   ./${tc_name} ${if_default}
usage_string
}
if [ -n "${1}" ]; then
    case "${1}" in
    help | \
    --help | \
    -h)
        usage && exit 1
        ;;
    *)
        ;;
    esac
fi

# Testcase preconditions: BLE manager present and device acting as extender.
check_kconfig_option "CONFIG_MANAGER_BLEM" "y" ||
    raise "CONFIG_MANAGER_BLEM != y - BLE not present on device" -l "${tc_name}" -s
check_kconfig_option "TARGET_CAP_EXTENDER" "y" ||
    raise "TARGET_CAP_EXTENDER != y - Testcase applicable only for EXTENDER-s" -l "${tc_name}" -s

NARGS=1
[ $# -lt ${NARGS} ] && usage && raise "Requires at least '${NARGS}' input argument(s)" -l "${tc_name}" -arg
if_name=${1:-${if_default}}

# On any exit, dump diagnostics and restore the interface/management access.
trap '
fut_info_dump_line
print_tables AW_Bluetooth_Config
fut_info_dump_line
ifconfig $if_name up || true
check_restore_management_access || true
run_setup_if_crashed cm || true
' EXIT SIGINT SIGTERM

log_title "$tc_name: CM2 test - Observe BLE Status - Interface '${if_name}' down/up"

print_tables Manager
is_connected=$(${OVSH} s Manager is_connected -r)
# Connection state is informational only; the test proceeds either way.
if [ $is_connected = 'true' ]; then
    log "$tc_name: Manager::is_connected indicates connection to Cloud is established"
else
    log "$tc_name: Manager::is_connected indicates connection to Cloud is not established"
fi

log "$tc_name: Simulating CM full reconnection by dropping interface"
log "$tc_name: Dropping interface $if_name"
ifconfig "$if_name" down &&
    log "$tc_name: Interface $if_name is down - Success" ||
    raise "FAIL: Could not bring down interface $if_name" -l "$tc_name" -ds

log "$tc_name: Sleeping for 5 seconds"
sleep "${sleep_time_after_if_down}"

# While the link is down, the first BLE payload byte must report 01.
down_bits=01
wait_ovsdb_entry AW_Bluetooth_Config -is payload "$down_bits:00:00:00:00:00" &&
    log "$tc_name: wait_ovsdb_entry - AW_Bluetooth_Config::payload changed to $down_bits:00:00:00:00:00" ||
    raise "FAIL: AW_Bluetooth_Config::payload failed to change to $down_bits:00:00:00:00:00" -l "$tc_name" -tc

log "$tc_name: Bringing back interface $if_name"
ifconfig "$if_name" up &&
    log "$tc_name: Interface $if_name is up - Success" ||
    raise "FAIL: Could not bring up interface $if_name" -l "$tc_name" -ds

# The payload must then advance through each recovery value in sequence.
for bits in $if_down_up_process_bits; do
    log "$tc_name: Checking AW_Bluetooth_Config::payload for $bits:00:00:00:00:00"
    wait_ovsdb_entry AW_Bluetooth_Config -is payload "$bits:00:00:00:00:00" &&
        log "$tc_name: wait_ovsdb_entry - AW_Bluetooth_Config::payload changed to $bits:00:00:00:00:00 - Success" ||
        raise "FAIL: AW_Bluetooth_Config::payload failed to change to $bits:00:00:00:00:00" -l "$tc_name" -tc
done

pass
package git import ( "bytes" "fmt" "io" "os/exec" "strconv" "strings" "time" ) const Z40 = "0000000000000000000000000000000000000000" func RevList(dir, old, new string) ([]string, error) { cmd := exec.Command("git", "rev-list", fmt.Sprintf("%s..%s", old, new)) cmd.Dir = dir cmd.Stdout = &bytes.Buffer{} if err := cmd.Run(); err != nil { return nil, err } var revisions []string for { if revision, err := cmd.Stdout.(*bytes.Buffer).ReadString('\n'); err == nil { revisions = append(revisions, strings.TrimSpace(revision)) } else { if err == io.EOF { break } return nil, err } } return revisions, nil } func GetCommit(dir, revision string) (Commit, error) { cmd := exec.Command("git", "cat-file", "commit", revision) cmd.Dir = dir cmd.Stdout = &bytes.Buffer{} cmd.Stderr = &bytes.Buffer{} if err := cmd.Run(); err != nil { return Commit{}, fmt.Errorf(cmd.Stderr.(*bytes.Buffer).String()) } commit := Commit{ ID: revision, } for { line, err := cmd.Stdout.(*bytes.Buffer).ReadString(byte('\n')) if err != nil { break } fields := strings.Fields(line) switch true { case len(fields) == 0: commit.Message = cmd.Stdout.(*bytes.Buffer).String() break default: switch fields[0] { case "author", "tagger": commit.Author = committer(fields[1:]) if timestamp, err := strconv.ParseInt(fields[len(fields)-2], 10, 64); err == nil { commit.CommittedAt = time.Unix(timestamp, 0) } case "committer": commit.Committer = committer(fields[1:]) } } } return commit, nil } func GetLastCommit(dir string) (Commit, error) { cmd := exec.Command("git", "rev-parse", "HEAD") cmd.Dir = dir cmd.Stdout = &bytes.Buffer{} cmd.Stderr = &bytes.Buffer{} if err := cmd.Run(); err != nil { return Commit{}, fmt.Errorf(cmd.Stderr.(*bytes.Buffer).String()) } revision := strings.TrimSpace(cmd.Stdout.(*bytes.Buffer).String()) return GetCommit(dir, revision) } func GetCurrentRef(dir string) (string, error) { cmd := exec.Command("git", "symbolic-ref", "HEAD") cmd.Dir = dir cmd.Stdout = &bytes.Buffer{} if err := cmd.Run(); err != nil { 
return "", err } return strings.TrimSpace(cmd.Stdout.(*bytes.Buffer).String()), nil } func committer(in []string) Committer { return Committer{ Name: strings.Join(in[:len(in)-3], " "), Email: strings.TrimFunc(in[len(in)-3], func(r rune) bool { return r == '<' || r == '>' }), } }
<filename>ProjetOeuvreSpingBoot/src/main/java/com/epul/oeuvre/persistence/repositories/RepositoryEntityReservation.java package com.epul.oeuvre.persistence.repositories; import com.epul.oeuvre.domains.*; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; import org.springframework.stereotype.Repository; import org.springframework.transaction.annotation.Transactional; import java.sql.Date; import java.util.List; @Repository public interface RepositoryEntityReservation extends JpaRepository<EntityReservation, EntityReservationPK> { //obligatoire, je n'ai pas réussi avec save, avec une custom query, aucun soucis @Modifying @Query(value = "insert into Reservation values (:idOeuvre, :idAdherent, :date, :statut, :idProprietaire)", nativeQuery = true) @Transactional void addReservation(@Param("idOeuvre") Integer idOeuvre, @Param("idAdherent") Integer idAdherent, @Param("date") Date date, @Param("statut") String statut, @Param("idProprietaire") Integer idProprietaire); @Modifying @Query(value = "update reservation r set r.statut = :statut where r.id_adherent = :idAdherent and r.id_oeuvrevente = :idOeuvre ", nativeQuery = true) @Transactional void updateStatutReservation(@Param("idOeuvre") Integer idOeuvre, @Param("idAdherent") Integer idAdherent ,@Param("statut") String statut); void deleteByAdherentByIdAdherentAndOeuvreventeByIdOeuvrevente(EntityAdherent adherent, EntityOeuvrevente oeuvrevente); List<EntityReservation> findAllByAdherentByIdAdherent(EntityAdherent adherent); List<EntityReservation> findAllByProprietaireByIdProprietaire(EntityProprietaire proprietaire); EntityReservation findByAdherentByIdAdherentAndOeuvreventeByIdOeuvrevente(EntityAdherent adherent, EntityOeuvrevente oeuvrevente); }
# Marker lines make this stage easy to locate in CI logs.
echo "@@@ LNT VirtualEnv @@@"

# Create a Python 3 virtual environment in ./venv.
/usr/local/bin/virtualenv --python=python3 venv

# The activate script may reference unset variables, so relax `nounset`
# around sourcing it, then restore strict mode.
set +u
. venv/bin/activate
set -u

echo "@@@@@@@"
package fr.syncrase.ecosyst.service.criteria;

import java.io.Serializable;
import java.util.Objects;
import tech.jhipster.service.Criteria;
import tech.jhipster.service.filter.BooleanFilter;
import tech.jhipster.service.filter.DoubleFilter;
import tech.jhipster.service.filter.Filter;
import tech.jhipster.service.filter.FloatFilter;
import tech.jhipster.service.filter.IntegerFilter;
import tech.jhipster.service.filter.LongFilter;
import tech.jhipster.service.filter.StringFilter;

/**
 * Criteria class for the {@link fr.syncrase.ecosyst.domain.Feuillage} entity. This class is used
 * in {@link fr.syncrase.ecosyst.web.rest.FeuillageResource} to receive all the possible filtering options from
 * the Http GET request parameters.
 * For example the following could be a valid request:
 * {@code /feuillages?id.greaterThan=5&attr1.contains=something&attr2.specified=false}
 * As Spring is unable to properly convert the types, unless specific {@link Filter} class are used, we need to use
 * fix type specific filters.
 */
public class FeuillageCriteria implements Serializable, Criteria {

    private static final long serialVersionUID = 1L;

    // Filter on the entity id.
    private LongFilter id;

    // Filter on the 'type' attribute.
    private StringFilter type;

    // When true, duplicate rows are removed from the query result.
    private Boolean distinct;

    public FeuillageCriteria() {}

    /** Copy constructor: copies each filter so the new criteria is independent. */
    public FeuillageCriteria(FeuillageCriteria other) {
        this.id = other.id == null ? null : other.id.copy();
        this.type = other.type == null ? null : other.type.copy();
        this.distinct = other.distinct;
    }

    @Override
    public FeuillageCriteria copy() {
        return new FeuillageCriteria(this);
    }

    public LongFilter getId() {
        return id;
    }

    /** Lazily creates the id filter so callers can chain conditions onto it. */
    public LongFilter id() {
        if (id == null) {
            id = new LongFilter();
        }
        return id;
    }

    public void setId(LongFilter id) {
        this.id = id;
    }

    public StringFilter getType() {
        return type;
    }

    /** Lazily creates the type filter so callers can chain conditions onto it. */
    public StringFilter type() {
        if (type == null) {
            type = new StringFilter();
        }
        return type;
    }

    public void setType(StringFilter type) {
        this.type = type;
    }

    public Boolean getDistinct() {
        return distinct;
    }

    public void setDistinct(Boolean distinct) {
        this.distinct = distinct;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final FeuillageCriteria that = (FeuillageCriteria) o;
        return Objects.equals(id, that.id) && Objects.equals(type, that.type) && Objects.equals(distinct, that.distinct);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, type, distinct);
    }

    // prettier-ignore
    @Override
    public String toString() {
        return "FeuillageCriteria{" +
            (id != null ? "id=" + id + ", " : "") +
            (type != null ? "type=" + type + ", " : "") +
            (distinct != null ? "distinct=" + distinct + ", " : "") +
        "}";
    }
}
#!/bin/bash

# Runs the System.Data.SQLite Eagle test suite under Mono.

# Directory containing this script.
scriptdir=`dirname "$BASH_SOURCE"`

# Default to the 2013 build year unless the caller exported an override.
if [[ -z "$SQLITE_NET_YEAR" ]]; then
  SQLITE_NET_YEAR=2013
fi

pushd "$scriptdir/.."

# Pre-initialize the Eagle shell with the repo root and the Debug build
# directory for the selected year, then execute the full test file,
# forwarding any extra command-line arguments.
mono Externals/Eagle/bin/netFramework40/EagleShell.exe -preInitialize "set root_path {$scriptdir/..}; set test_configuration Debug; set test_year {$SQLITE_NET_YEAR}; set build_directory {bin/$SQLITE_NET_YEAR/Debug$SQLITE_NET_CONFIGURATION_SUFFIX/bin}" -file Tests/all.eagle "$@"

popd
// Print the length of each fruit name.
List<String> list = Arrays.asList("Apple", "Mango", "Banana");
list.forEach(str -> System.out.println("Length: " + str.length()));
package io.opensphere.mantle.data.impl; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import io.opensphere.mantle.data.ColumnTypeDetector; import io.opensphere.mantle.data.MetaDataInfo; import io.opensphere.mantle.data.SpecialColumnDetector; import io.opensphere.mantle.data.SpecialKey; /** * Detects column types known to mantle. These detections should be fairly * generic since they could apply to any layer. */ public class ColumnTypeDetectorImpl implements ColumnTypeDetector { /** The individual detectors. */ private final List<SpecialColumnDetector> myDetectors = new CopyOnWriteArrayList<>(); /** * Constructs a new column type detector. */ public ColumnTypeDetectorImpl() { // intentionally blank } /** * Copy constructor. * * @param source the source object from which to copy data */ protected ColumnTypeDetectorImpl(ColumnTypeDetectorImpl source) { source.myDetectors.forEach(myDetectors::add); } /** * Adds the supplied detector instance to the set of available detectors. * * @param detector the detector to add. */ @Override public final void addSpecialColumnDetector(SpecialColumnDetector detector) { myDetectors.add(detector); } /** * Detects column types for the meta data. * * @param metaData the meta data */ @Override public void detectColumnTypes(MetaDataInfo metaData) { boolean wasDetected = false; for (String columnName : metaData.getKeyNames()) { wasDetected |= examineColumn(metaData, columnName); } if (wasDetected && metaData instanceof DefaultMetaDataInfo) { ((DefaultMetaDataInfo)metaData).copyKeysToOriginalKeys(); } metaData.setSpecialKeyExaminationRequired(false); } /** * Tests to determine if the supplied column is a special key, and if so, * marks it as such in the supplied metadata object. * * @param metaData the object in which to mark the key's status. * @param columnName the column to examine. * @return true if the column is a special key, false otherwise. 
*/ @Override public boolean examineColumn(MetaDataInfo metaData, String columnName) { boolean wasDetected = false; for (SpecialColumnDetector detector : myDetectors) { // it's important not to terminate early, as we want all detectors // to have a shot at the column. wasDetected |= detector.markSpecialColumn(metaData, columnName); } return wasDetected; } @Override public SpecialKey detectColumn(String columnName) { SpecialKey specialKey = null; for (SpecialColumnDetector detector : myDetectors) { SpecialKey key = detector.detectColumn(columnName); if (key != null) { specialKey = key; break; } } return specialKey; } /** * {@inheritDoc} * * @see io.opensphere.mantle.data.ColumnTypeDetector#createCopy() */ @Override public ColumnTypeDetector createCopy() { return new ColumnTypeDetectorImpl(this); } }
<filename>reporter/logger_test.go package reporter import ( "bytes" "log" "testing" "context" "github.com/pkg/errors" "github.com/remind101/pkg/logger" ) func TestLogReporter(t *testing.T) { tests := []struct { err error out string }{ {errBoom, "request_id=1234 status=error error=\"boom\" line=0 file=unknown\n"}, {errors.WithStack(errBoom), "request_id=1234 status=error error=\"boom\" line=20 file=logger_test.go\n"}, } for i, tt := range tests { b := new(bytes.Buffer) l := logger.New(log.New(b, "request_id=1234 ", 0), logger.INFO) h := &LogReporter{} ctx := logger.WithLogger(context.Background(), l) if err := h.ReportWithLevel(ctx, "error", tt.err); err != nil { t.Fatal(err) } if got, want := b.String(), tt.out; got != want { t.Fatalf("#%d: Output => %s; want %s", i, got, want) } } }
const mongoose = require("mongoose"); const Event = require("../models/event"); const { check, validationResult } = require("express-validator"); const mongodb = require("mongodb"); const ObjectId = mongodb.ObjectID; module.exports = { showEditEvent: showEditEvent, showAllEvents: showAllEvents, // postEvent: postEvent, // getEvent: getEvent, // updateEvent: updateEvent, // deleteEvent: deleteEvent, // deleteAllEvents: deleteAllEvents, }; function showEditEvent(request, response) { Event.findOne({ id: request.params.id }, (error, event) => { response.render("pages/edit", { event: event, errors: error, }); }); } function showAllEvents(request, response) { console.log("here"); Event.find({}, (error, events) => { if (error) { console.log(error); } }) .then((data) => { response.render("pages/allEvents", { events: data, }); }) .catch(next); }
#! /usr/bin/env bash
set -e

# Resolve the Apollo workspace root and load shared helpers (logging,
# warning/error/info, color variables, APOLLO_* environment).
TOP_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd -P)"
source "${TOP_DIR}/scripts/apollo.bashrc"
source "${TOP_DIR}/scripts/apollo_base.sh"

BAZEL_OUT="${TOP_DIR}/bazel-out" # $(bazel info output_path)
COVERAGE_HTML="${TOP_DIR}/.cache/coverage"
COVERAGE_DAT="${BAZEL_OUT}/_coverage/_coverage_report.dat"
# Note(storypku): branch coverage seems not work when running bazel coverage
# GENHTML_OPTIONS="--rc genhtml_branch_coverage=1 --highlight --legend"

# Targets below are excluded from coverage runs; each "except" clause is
# spliced into a bazel query expression.
##============= Perception ===================##
PERCEPTION_EXCEPTIONS="\
except //modules/perception/lidar/lib/detection/lidar_point_pillars:point_pillars_test \
"
##============= Localization ===================##
LOCALIZATION_EXCEPTIONS="\
except //modules/localization/ndt/ndt_locator:ndt_lidar_locator_test \
except //modules/localization/msf/local_pyramid_map/pyramid_map:pyramid_map_test \
except //modules/localization/msf/local_pyramid_map/pyramid_map:pyramid_map_pool_test \
except //modules/localization/ndt/ndt_locator:ndt_solver_test \
except //modules/localization/msf:msf_localization_test \
"
##======================= Failed Test Cases are Listed Above ================##

ARCH="$(uname -m)"
: ${USE_ESD_CAN:=false}

CMDLINE_OPTIONS=
SHORTHAND_TARGETS=
# NOTE(review): DISABLED_TARGETS appears unused below -- confirm before removing.
DISABLED_TARGETS=

# All known exclusions: the static exception lists plus ESD-CAN and
# architecture-specific targets.
function _disabled_test_targets_all() {
  local disabled="${LOCALIZATION_EXCEPTIONS}"
  disabled="${disabled} ${PERCEPTION_EXCEPTIONS}"

  if ! ${USE_ESD_CAN}; then
    warning "ESD CAN library supplied by ESD Electronics doesn't exist."
    warning "If you need ESD CAN, please refer to:"
    warning "  third_party/can_card_library/esd_can/README.md"
    disabled="${disabled} except //modules/drivers/canbus/can_client/esd/..."
  fi

  # TODO(all): arch exceptions should be done in BUILD file level.
  if [[ "${ARCH}" != "x86_64" ]]; then
    disabled="${disabled} except //modules/localization/msf/..."
  fi

  echo "${disabled}"
}

# bazel run //modules/planning/tools:inference_demo crash
# Exclusions restricted to the components named on the command line; with no
# arguments, fall back to the full exclusion list.
function determine_disabled_targets() {
  if [[ "$#" -eq 0 ]]; then
    _disabled_test_targets_all
    return
  fi

  local disabled=
  for compo in $@; do
    if [[ "${compo}" == "drivers" ]]; then
      if ! ${USE_ESD_CAN}; then
        warning "ESD CAN library supplied by ESD Electronics doesn't exist."
        warning "If you need ESD CAN, please refer to:"
        warning "  third_party/can_card_library/esd_can/README.md"
        disabled="${disabled} except //modules/drivers/canbus/can_client/esd/..."
      fi
    elif [[ "${compo}" == "localization" ]]; then
      if [[ "${ARCH}" != "x86_64" ]]; then
        disabled="${disabled} except //modules/localization/msf/..."
      fi
      disabled="${disabled} ${LOCALIZATION_EXCEPTIONS}"
    fi
  done

  echo "${disabled}"
}

# Build the bazel query "union" expression for the requested components;
# no arguments means everything under //modules and //cyber.
function determine_test_targets() {
  local targets_all
  if [[ "$#" -eq 0 ]]; then
    targets_all="//modules/... union //cyber/..."
    echo "${targets_all}"
    return
  fi

  for compo in $@; do
    local test_targets
    if [[ "${compo}" == "cyber" ]]; then
      test_targets="//cyber/..."
    elif [[ -d "${APOLLO_ROOT_DIR}/modules/${compo}" ]]; then
      test_targets="//modules/${compo}/..."
    else
      error "Oops, no such component '${compo}' under <APOLLO_ROOT_DIR>/modules/ . Exiting ..."
      exit 1
    fi
    if [ -z "${targets_all}" ]; then
      targets_all="${test_targets}"
    else
      targets_all="${targets_all} union ${test_targets}"
    fi
  done
  echo "${targets_all}" | sed -e 's/^[[:space:]]*//'
}

# Split argv into bazel options (--config/-c and their values) and the
# remaining shorthand component names.
function _parse_cmdline_arguments() {
  local known_options=""
  local remained_args=""

  for ((pos = 1; pos <= $#; pos++)); do #do echo "$#" "$i" "${!i}"; done
    local opt="${!pos}"
    local optarg

    case "${opt}" in
      --config=*)
        optarg="${opt#*=}"
        known_options="${known_options} ${opt}"
        ;;
      --config)
        ((++pos))
        optarg="${!pos}"
        known_options="${known_options} ${opt} ${optarg}"
        ;;
      -c)
        ((++pos))
        optarg="${!pos}"
        known_options="${known_options} ${opt} ${optarg}"
        ;;
      *)
        remained_args="${remained_args} ${opt}"
        ;;
    esac
  done
  # Strip leading whitespaces
  known_options="$(echo "${known_options}" | sed -e 's/^[[:space:]]*//')"
  remained_args="$(echo "${remained_args}" | sed -e 's/^[[:space:]]*//')"

  CMDLINE_OPTIONS="${known_options}"
  SHORTHAND_TARGETS="${remained_args}"
}

# Run `bazel coverage` with half the available CPUs.
function _run_bazel_coverage_impl() {
  local count="$(($(nproc) / 2))"
  bazel coverage --jobs=${count} $@
}

# Resolve options/targets/exclusions, then run the coverage build.
function bazel_coverage() {
  if ! "${APOLLO_IN_DOCKER}"; then
    error "Coverage test must be run from within the docker container"
    exit 1
  fi

  _parse_cmdline_arguments "$@"
  CMDLINE_OPTIONS="${CMDLINE_OPTIONS} --define USE_ESD_CAN=${USE_ESD_CAN}"

  local test_targets
  test_targets="$(determine_test_targets ${SHORTHAND_TARGETS})"

  local disabled_targets
  disabled_targets="$(determine_disabled_targets ${SHORTHAND_TARGETS})"

  info "Coverage Overview: "
  info "${TAB}Coverage Options: ${GREEN}${CMDLINE_OPTIONS}${NO_COLOR}"
  info "${TAB}Coverage Targets: ${GREEN}${test_targets}${NO_COLOR}"
  info "${TAB}Disabled:         ${YELLOW}${disabled_targets}${NO_COLOR}"

  _run_bazel_coverage_impl "${CMDLINE_OPTIONS}" "$(bazel query ${test_targets} ${disabled_targets})"
}

function main() {
  if [ "${USE_GPU}" -eq 1 ]; then
    info "Your GPU is enabled to run coverage test on ${ARCH} platform."
  else
    info "Running coverage test under CPU mode on ${ARCH} platform."
  fi

  bazel_coverage $@
  # Render the merged coverage data into a browsable HTML report.
  genhtml "${COVERAGE_DAT}" --output-directory "${COVERAGE_HTML}"

  success "Done bazel coverage ${SHORTHAND_TARGETS}. "
  info "Coverage report was generated under ${COVERAGE_HTML}"
}

main "$@"
import tensorflow as tf
import pandas as pd
# These two were used below but never imported, so the script crashed with
# NameError before reaching the model.
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.model_selection import train_test_split

# Load the dataset into a DataFrame
df = pd.read_csv('tweet_sentiment.csv')

# Extract the data in feature and target vectors
X = df['tweet']
y = df['sentiment']

# Define a CountVectorizer for converting the text into feature vectors
cv = CountVectorizer()

# Extract bag-of-words feature vectors from the dataset
X = cv.fit_transform(X).toarray()

# Split the dataset into train and test sets (20% held out for evaluation)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

# Define a neural network model for binary sentiment classification.
# NOTE: assumes `sentiment` holds 0/1 labels -- confirm against the CSV.
model = tf.keras.models.Sequential([
    tf.keras.layers.Dense(128, activation='relu', input_shape=(X_train.shape[1],)),
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(64, activation='relu'),
    tf.keras.layers.Dropout(0.2),
    # A single sigmoid unit matches binary labels; the original's two sigmoid
    # units did not line up with a scalar 0/1 target.
    tf.keras.layers.Dense(1, activation='sigmoid'),
])

# Compile the model. The output layer already applies a sigmoid, so the loss
# must receive probabilities: from_logits=False. The original passed
# from_logits=True, which would apply a second sigmoid inside the loss.
model.compile(optimizer='adam',
              loss=tf.keras.losses.BinaryCrossentropy(from_logits=False),
              metrics=['accuracy'])

# Train the model
model.fit(X_train, y_train, epochs=20)

# Make predictions on the test set
y_pred = model.predict(X_test)
#!/usr/bin/env bash

# Run from the repository root regardless of the invocation directory.
cd "$(dirname "${BASH_SOURCE[0]}")"/..

set -euxo pipefail

# https://github.com/asdf-vm/asdf-nodejs#install
asdf plugin-add nodejs
# Node.js releases are signature-checked; import the release team's keys.
~/.asdf/plugins/nodejs/bin/import-release-team-keyring

# Plugins outside the default asdf registry need an explicit repo URL.
asdf plugin-add just https://github.com/ggilmore/asdf-just.git
asdf plugin-add ds-to-dhall https://github.com/sourcegraph/asdf-ds-to-dhall.git

# The remaining tools resolve from the default plugin registry.
OTHER_PACKAGES=(
  "dhall"
  "shellcheck"
  "shfmt"
  "fd"
  "yarn"
)

for package in "${OTHER_PACKAGES[@]}"; do
  asdf plugin-add "${package}"
done
#!/bin/bash
#
# Yet Another UserAgent Analyzer
# Copyright (C) 2013-2019 Niels Basjes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#https://wiki.archlinux.org/index.php/Color_Bash_Prompt
# ANSI color escape sequences, exported for use by sourcing scripts.
# Each value holds a literal "\e[...m" sequence; emit with `echo -e` or
# printf's %b so the escape is interpreted.

# Reset
export Color_Off='\e[0m'       # Text Reset

# Regular Colors
export Black='\e[0;30m'        # Black
export Red='\e[0;31m'          # Red
export Green='\e[0;32m'        # Green
export Yellow='\e[0;33m'       # Yellow
export Blue='\e[0;34m'         # Blue
export Purple='\e[0;35m'       # Purple
export Cyan='\e[0;36m'         # Cyan
export White='\e[0;37m'        # White

# Bold
export BBlack='\e[1;30m'       # Black
export BRed='\e[1;31m'         # Red
export BGreen='\e[1;32m'       # Green
export BYellow='\e[1;33m'      # Yellow
export BBlue='\e[1;34m'        # Blue
export BPurple='\e[1;35m'      # Purple
export BCyan='\e[1;36m'        # Cyan
export BWhite='\e[1;37m'       # White

# Underline
export UBlack='\e[4;30m'       # Black
export URed='\e[4;31m'         # Red
export UGreen='\e[4;32m'       # Green
export UYellow='\e[4;33m'      # Yellow
export UBlue='\e[4;34m'        # Blue
export UPurple='\e[4;35m'      # Purple
export UCyan='\e[4;36m'        # Cyan
export UWhite='\e[4;37m'       # White

# Background
export On_Black='\e[40m'       # Black
export On_Red='\e[41m'         # Red
export On_Green='\e[42m'       # Green
export On_Yellow='\e[43m'      # Yellow
export On_Blue='\e[44m'        # Blue
export On_Purple='\e[45m'      # Purple
export On_Cyan='\e[46m'        # Cyan
export On_White='\e[47m'       # White

# High Intensity
export IBlack='\e[0;90m'       # Black
export IRed='\e[0;91m'         # Red
export IGreen='\e[0;92m'       # Green
export IYellow='\e[0;93m'      # Yellow
export IBlue='\e[0;94m'        # Blue
export IPurple='\e[0;95m'      # Purple
export ICyan='\e[0;96m'        # Cyan
export IWhite='\e[0;97m'       # White

# Bold High Intensity
export BIBlack='\e[1;90m'      # Black
export BIRed='\e[1;91m'        # Red
export BIGreen='\e[1;92m'      # Green
export BIYellow='\e[1;93m'     # Yellow
export BIBlue='\e[1;94m'       # Blue
export BIPurple='\e[1;95m'     # Purple
export BICyan='\e[1;96m'       # Cyan
export BIWhite='\e[1;97m'      # White

# High Intensity backgrounds
export On_IBlack='\e[0;100m'   # Black
export On_IRed='\e[0;101m'     # Red
export On_IGreen='\e[0;102m'   # Green
export On_IYellow='\e[0;103m'  # Yellow
export On_IBlue='\e[0;104m'    # Blue
export On_IPurple='\e[0;105m'  # Purple
export On_ICyan='\e[0;106m'    # Cyan
export On_IWhite='\e[0;107m'   # White
#!/bin/bash
# Switch the X/nvidia configuration to the selected mode, keeping a backup
# of the files that get replaced.

mode="on-the-go"
xorg_conf="/usr/share/X11/xorg.conf.d/10-nvidia-drm-outputclass.conf"

# Keep copies of the current configuration before overwriting it.
echo "Creating backup"
sudo cp "$xorg_conf" ./backup/
sudo cp ~/.xinitrc ./backup/

# Install the configuration shipped for the selected mode.
echo "Enabling $mode"
sudo cp "./$mode/10-nvidia-drm-outputclass.conf" "$xorg_conf"
sudo cp "./$mode/.xinitrc" ~/.xinitrc
import { CreateElement } from 'vue';
import { SetupContext } from '@vue/composition-api';
import {
  TableRowData, PrimaryTableCellParams, TdPrimaryTableProps, PrimaryTableCol,
} from '../type';
import EditableCell from '../editable-cell';

/**
 * Composition hook producing a renderer that wraps a column's original
 * `cell` definition in the EditableCell component, forwarding the table's
 * scoped slots.
 */
export default function useEditableCell(props: TdPrimaryTableProps, context: SetupContext) {
  const renderEditableCell = (
    h: CreateElement,
    cellParams: PrimaryTableCellParams<TableRowData>,
    oldCell: PrimaryTableCol['cell'],
  ) => {
    return <EditableCell props={{ ...cellParams }} oldCell={oldCell} scopedSlots={context.slots} />;
  };

  return { renderEditableCell };
}
# Packaging metadata for the cosmojax distribution.
# (A stray "<reponame>" extraction artifact preceding this file made it a
# SyntaxError; it has been removed.)
from setuptools import setup, find_packages

PACKAGENAME = "cosmojax"
VERSION = "0.0.dev"

setup(
    name=PACKAGENAME,
    version=VERSION,
    author="<NAME>",
    author_email="<EMAIL>",
    description="Cosmology in JAX",
    long_description="Cosmology in JAX",
    install_requires=["numpy", "jax"],
    packages=find_packages(),
    url="https://github.com/aphearin/cosmojax",
)
/* Base typography: headings in Roboto, body copy in Open Sans,
   both at 14px with a generic sans-serif fallback. */
h1 {
  font-size: 14px;
  font-family: 'Roboto', sans-serif;
}

p {
  font-size: 14px;
  font-family: 'Open Sans', sans-serif;
}
<reponame>xandjiji/Wordpress-Gatsby-prototype import React from 'react'; import PostContent from './PostContent.styled'; import MaterialContainer from '../../common/MaterialContainer'; export default ({ title, content }) => { return ( <PostContent> <MaterialContainer container={true} labelTag={title}> <section> <div className="content-wrapper" dangerouslySetInnerHTML={{ __html: content }}></div> </section> </MaterialContainer> </PostContent> ) }
import React, { useState } from 'react' import { makeStyles } from '@material-ui/core/styles' import PropTypes from 'prop-types' import TextField from '@material-ui/core/TextField' import Grid from '@material-ui/core/Grid' import Button from '@material-ui/core/Button' import Typography from '@material-ui/core/Typography' import Modal from '@material-ui/core/Modal' import Backdrop from '@material-ui/core/Backdrop' import IconButton from '@material-ui/core/IconButton' import ReCAPTCHA from 'react-google-recaptcha' import { eosApi } from '../api/eos-api' import config from '../config' import Styles from './styles' const useStyles = makeStyles(Styles) const DEFAULT_MESSAGE = 'This field is required' const INITIAL_VALUES = { accountName: { value: '', error: '', isRequired: true, isValid: false }, ownerPK: { value: '', error: '', isRequired: true, isValid: false }, activePK: { value: '', error: '', isRequired: true, isValid: false } } const AccountInfo = ({ onHandleSubmit, customBtnStyle }) => { const classes = useStyles() const [values, setValues] = useState(INITIAL_VALUES) const [open, setOpen] = useState(false) const handleOpen = () => { setOpen(!open) } const handleOnSubmit = async () => { try { if ( values.accountName.isValid && values.activePK.isValid && values.ownerPK.isValid ) { await eosApi.transact( { actions: [ { account: 'eosio', name: 'newaccount', authorization: [ { actor: 'useraaaaaaaa', permission: 'active' } ], data: { creator: 'useraaaaaaaa', name: values.accountName, owner: { threshold: 1, keys: [ { key: values.ownerPK, weight: 1 } ], accounts: [], waits: [] }, active: { threshold: 1, keys: [ { key: values.activePK, weight: 1 } ], accounts: [], waits: [] } } }, { account: 'eosio', name: 'buyrambytes', authorization: [ { actor: 'useraaaaaaaa', permission: 'active' } ], data: { payer: 'useraaaaaaaa', receiver: values.accountName, bytes: 8192 } }, { account: 'eosio', name: 'delegatebw', authorization: [ { actor: 'useraaaaaaaa', permission: 'active' } ], 
data: { from: 'useraaaaaaaa', receiver: values.accountName, stake_net_quantity: '1.0000 SYS', stake_cpu_quantity: '1.0000 SYS', transfer: false } } ] }, { blocksBehind: 3, expireSeconds: 30 } ) onHandleSubmit({ accountName: values.accountName.value, ownerPK: values.activePK.value, activePK: values.ownerPK.value }) setValues(INITIAL_VALUES) return } setValues({ accountName: { ...values.accountName, error: !values.accountName.value.length ? DEFAULT_MESSAGE : '' }, ownerPK: { ...values.ownerPK, error: !values.activePK.value.length ? DEFAULT_MESSAGE : '' }, activePK: { ...values.activePK, error: !values.ownerPK.value.length ? DEFAULT_MESSAGE : '' } }) } catch (error) { console.log('Create account', error) } } const handleChange = (event) => { event.preventDefault() const { name, value } = event.target const regexValidation = { accountName: /^[a-zA-Z1-5]{12}/, eosKey: /^\bEOS[a-zA-Z0-9]+$/ } let error = '' let isValid = false switch (name) { case 'accountName': if (!regexValidation.accountName.test(value)) { error = 'a-z,1-5 are allowed only. Length 12' } else { error = '' isValid = true } break case 'ownerPK': if (!regexValidation.accountName.test(value)) { error = 'Owner Public Key format is not valid!' } else { error = '' isValid = true } break case 'activePK': if (!regexValidation.accountName.test(value)) { error = 'Public Public Key format is not valid!' 
} else { error = '' isValid = true } break default: break } setValues({ ...values, [name]: { isRequired: true, value, error, isValid } }) } return ( <div> <Button size="large" color="secondary" onClick={handleOpen} className={customBtnStyle} > Create Account </Button> <Modal aria-labelledby="transition-modal-title" aria-describedby="transition-modal-description" className={classes.modal} open={open} onClose={handleOpen} closeAfterTransition BackdropComponent={Backdrop} BackdropProps={{ timeout: 500 }} > <div className={classes.paper}> <form noValidate autoComplete="off"> <Grid container direction="column" justify="space-between" className={classes.root} > <div className={classes.deleteBtn}> <Typography variant="h6" gutterBottom color="primary"> Create Account </Typography> <IconButton classes={{ root: classes.iconBtnPadding }} aria-label="delete" onClick={() => setOpen(false)} > X </IconButton> </div> <div className={classes.inputBox}> <Grid item> <TextField variant="filled" fullWidth error={Boolean(values.accountName.error)} helperText={ values.accountName.error ? values.accountName.error : '' } label="Account Name" placeholder="eoscrtest123" required autoComplete="off" name="accountName" onChange={handleChange} /> </Grid> <Grid item> <TextField variant="filled" fullWidth error={Boolean(values.ownerPK.error)} helperText={ values.ownerPK.error ? values.ownerPK.error : '' } label="Owner Public Key" placeholder="<KEY>" required autoComplete="off" name="ownerPK" onChange={handleChange} /> </Grid> <Grid item> <TextField variant="filled" fullWidth error={Boolean(values.activePK.error)} helperText={ values.activePK.error ? 
values.activePK.error : '' } label="Active Public Key" placeholder="<KEY>" required autoComplete="off" name="activePK" onChange={handleChange} /> </Grid> <div className={classes.captcha}> <ReCAPTCHA sitekey={config.sitekey} /> </div> </div> <div className={classes.btn}> <Button size="large" variant="contained" color="secondary" onClick={handleOnSubmit} > Create account </Button> </div> </Grid> </form> </div> </Modal> </div> ) } AccountInfo.propTypes = { onHandleSubmit: PropTypes.func, customBtnStyle: PropTypes.oneOfType([PropTypes.string, PropTypes.object]) } AccountInfo.defaultProps = { onHandleSubmit: () => console.log('click Submit button'), customBtnStyle: {} } export default AccountInfo
<filename>src/shared/modules/ContextFilter/ContextFilterHandler.ts import { cloneDeep } from "lodash"; import ModuleTable from "../ModuleTable"; import ContextFilterVO from "./vos/ContextFilterVO"; import ContextQueryFieldVO from "./vos/ContextQueryFieldVO"; import ContextQueryVO, { query } from "./vos/ContextQueryVO"; export default class ContextFilterHandler { public static getInstance(): ContextFilterHandler { if (!ContextFilterHandler.instance) { ContextFilterHandler.instance = new ContextFilterHandler(); } return ContextFilterHandler.instance; } private static instance: ContextFilterHandler = null; private constructor() { } public get_active_field_filters(filters: ContextFilterVO[]): { [api_type_id: string]: { [field_id: string]: ContextFilterVO } } { let res: { [api_type_id: string]: { [field_id: string]: ContextFilterVO } } = {}; for (let i in filters) { let filter = filters[i]; if (!res[filter.vo_type]) { res[filter.vo_type] = {}; } res[filter.vo_type][filter.field_id] = filter; } return res; } public get_filters_from_active_field_filters(active_field_filters: { [api_type_id: string]: { [field_id: string]: ContextFilterVO } }): ContextFilterVO[] { let res: ContextFilterVO[] = []; for (let i in active_field_filters) { let filters = active_field_filters[i]; for (let j in filters) { let filter = filters[j]; if (!filter) { continue; } res.push(filter); } } return res; } /** * @param context_filter_tree_root * @param type * @returns the context_filter that has the asked type from the tree_root */ public find_context_filter_by_type(context_filter_tree_root: ContextFilterVO, type: number): ContextFilterVO { if (context_filter_tree_root && (context_filter_tree_root.filter_type != type) && context_filter_tree_root.left_hook && context_filter_tree_root.right_hook) { return this.find_context_filter_by_type(context_filter_tree_root.left_hook, type) || this.find_context_filter_by_type(context_filter_tree_root.right_hook, type); } if (context_filter_tree_root.filter_type 
!= type) { return null; } return context_filter_tree_root; } /** * Remove the context_filter_to_delete from context_filter_tree_root and returns the new root * Need to ask the deletion with the real contextfilter object and not a description or clone of it. * Tests are done on the objects adresses, not deeply on the contents. * @param context_filter_tree_root * @param context_filter_to_delete * @returns */ public remove_context_filter_from_tree(context_filter_tree_root: ContextFilterVO, context_filter_to_delete: ContextFilterVO): ContextFilterVO { if ((!context_filter_tree_root) || (!context_filter_to_delete)) { return null; } // beware this is no deep check if (context_filter_tree_root == context_filter_to_delete) { return null; } if (!context_filter_tree_root.left_hook) { /** * On est sur une feuille et c'est pas celle qu'on cherche, on la renvoie */ return context_filter_tree_root; } if (context_filter_tree_root.left_hook == context_filter_to_delete) { return context_filter_tree_root.right_hook; } if (context_filter_tree_root.right_hook == context_filter_to_delete) { return context_filter_tree_root.left_hook; } /** * On tente la suppression à gauche. 
si on récupère un null, on doit renvoyer le hook_right en guise de nouveau root à ce niveau */ let left_hook_replacement = this.remove_context_filter_from_tree(context_filter_tree_root.left_hook, context_filter_to_delete); if (!left_hook_replacement) { return context_filter_tree_root.right_hook; } if (left_hook_replacement != context_filter_tree_root.left_hook) { context_filter_tree_root.left_hook = left_hook_replacement; return context_filter_tree_root; } let right_hook_replacement = this.remove_context_filter_from_tree(context_filter_tree_root.right_hook, context_filter_to_delete); if ((!right_hook_replacement) && (context_filter_tree_root.right_hook)) { return context_filter_tree_root.left_hook; } if (right_hook_replacement != context_filter_tree_root.right_hook) { context_filter_tree_root.right_hook = right_hook_replacement; } return context_filter_tree_root; } /** * Objectif retrouver les filtres simples (pas de or / xor ou subquery par exemple) d'un vo_type spécifique */ public get_simple_filters_by_vo_type(filters: ContextFilterVO[], vo_type: string): ContextFilterVO[] { let res: ContextFilterVO[] = []; for (let i in filters) { let filter = filters[i]; if (filter.vo_type != vo_type) { continue; } switch (filter.filter_type) { case ContextFilterVO.TYPE_FILTER_AND: case ContextFilterVO.TYPE_FILTER_NOT: case ContextFilterVO.TYPE_FILTER_OR: case ContextFilterVO.TYPE_FILTER_XOR: case ContextFilterVO.TYPE_IN: case ContextFilterVO.TYPE_NOT_IN: case ContextFilterVO.TYPE_NOT_EXISTS: continue; } res.push(filter); } return res; } /** * Objectif retrouver un filtre simple (pas de or / xor ou subquery par exemple) pour identifier par exemple * un filtre sur un champ de segmentation * on checke qu'on a qu'un seul résultat (sinon on est sur un filtre complexe) */ public get_simple_filter_by_vo_type_and_field_id(filters: ContextFilterVO[], vo_type: string, field_id: string): ContextFilterVO { let res = null; for (let i in filters) { let filter = filters[i]; if 
(filter.field_id != field_id) { continue; } if (filter.vo_type != vo_type) { continue; } switch (filter.filter_type) { case ContextFilterVO.TYPE_FILTER_AND: case ContextFilterVO.TYPE_FILTER_NOT: case ContextFilterVO.TYPE_FILTER_OR: case ContextFilterVO.TYPE_FILTER_XOR: case ContextFilterVO.TYPE_IN: case ContextFilterVO.TYPE_NOT_IN: case ContextFilterVO.TYPE_NOT_EXISTS: continue; } if (res) { return null; } res = filter; } return res; } /** * Add context_filter to the root, using the and/or/xor .... type of operator if necessary * Returns the new root * @param context_filter_tree_root * @param context_filter_to_delete * @param operator_type * @returns */ public add_context_filter_to_tree(context_filter_tree_root: ContextFilterVO, context_filter_to_add: ContextFilterVO, operator_type: number = ContextFilterVO.TYPE_FILTER_AND): ContextFilterVO { if (!context_filter_tree_root) { return context_filter_to_add; } if (!context_filter_to_add) { return context_filter_tree_root; } // Le root est déjà rempli, on renvoie un nouvel operateur let new_root = new ContextFilterVO(); new_root.vo_type = context_filter_to_add.vo_type; new_root.field_id = context_filter_to_add.field_id; new_root.filter_type = operator_type; new_root.left_hook = context_filter_tree_root; new_root.right_hook = context_filter_to_add; return new_root; } /** * Clone and remove custom_filters */ public clean_context_filters_for_request(get_active_field_filters: { [api_type_id: string]: { [field_id: string]: ContextFilterVO } }): { [api_type_id: string]: { [field_id: string]: ContextFilterVO } } { let res: { [api_type_id: string]: { [field_id: string]: ContextFilterVO } } = cloneDeep(get_active_field_filters); if (res) { delete res[ContextFilterVO.CUSTOM_FILTERS_TYPE]; } return res; } /** * Renvoie une context query qui renvoie systématiquement 0 éléments, pour bloquer l'accès à un vo par exemple dans un context access hook */ public get_empty_res_context_hook_query(api_type_id: string) { // on veut rien 
renvoyer, donc on fait une query qui retourne rien let filter_none: ContextFilterVO = new ContextFilterVO(); filter_none.filter_type = ContextFilterVO.TYPE_NULL_ALL; filter_none.field_id = 'id'; filter_none.vo_type = api_type_id; return query(api_type_id).field('id').add_filters([filter_none]).ignore_access_hooks(); } }
// Inline SVG descriptor for the "movement" icon, consumed by the project's
// icon renderer: a 32×32 glyph displayed at 20×20.
export default {
  elem: 'svg',
  attrs: {
    xmlns: 'http://www.w3.org/2000/svg',
    viewBox: '0 0 32 32',
    width: 20,
    height: 20,
  },
  content: [
    {
      elem: 'path',
      attrs: {
        // Single path tracing both arrows of the glyph.
        d: 'M24 20l-1.41 1.41L26.17 25H10a4 4 0 0 1 0-8h12a6 6 0 0 0 0-12H5.83l3.58-3.59L8 0 2 6l6 6 1.41-1.41L5.83 7H22a4 4 0 0 1 0 8H10a6 6 0 0 0 0 12h16.17l-3.58 3.59L24 32l6-6z',
      },
    },
  ],
  name: 'movement',
  size: 20,
};
def containsSubstring(str, sub):
    """Return True if ``sub`` occurs as a contiguous substring of ``str``.

    An empty ``sub`` is considered present in any string (including the
    empty string), matching both the original scan's behavior and the
    semantics of Python's ``in`` operator.
    """
    # Delegate to the built-in substring search: same contract as the
    # original hand-rolled O(n*m) scan, but implemented in optimized C.
    return sub in str
#include QMK_KEYBOARD_H #ifdef AUDIO_ENABLE #include "muse.h" #endif #include "enums.h" #include "eeprom.h" #include "keymap_canadian_multilingual.h" #include "layer_with_mod_tap.h" #define KC_MAC_UNDO LGUI(KC_Z) #define KC_MAC_CUT LGUI(KC_X) #define KC_MAC_COPY LGUI(KC_C) #define KC_MAC_PASTE LGUI(KC_V) #define KC_PC_UNDO LCTL(KC_Z) #define KC_PC_CUT LCTL(KC_X) #define KC_PC_COPY LCTL(KC_C) #define KC_PC_PASTE LCTL(KC_V) #define ES_LESS_MAC KC_GRAVE #define ES_GRTR_MAC LSFT(KC_GRAVE) #define ES_BSLS_MAC ALGR(KC_6) #define NO_PIPE_ALT KC_GRAVE #define NO_BSLS_ALT KC_EQUAL #define LSA_T(kc) MT(MOD_LSFT | MOD_LALT, kc) #define BP_NDSH_MAC ALGR(KC_8) #define LOWER MO(_LOWER) #define RAISE MO(_RAISE) const uint16_t PROGMEM keymaps[][MATRIX_ROWS][MATRIX_COLS] = { [_BASE] = LAYOUT_planck_grid(KC_TAB,KC_Q,KC_W,KC_E,KC_R,KC_T,KC_Y,KC_U,KC_I,KC_O,KC_P,KC_BSPACE,LCA_T(KC_ESCAPE),KC_A,KC_S,KC_D,KC_F,KC_G,KC_H,KC_J,KC_K,KC_L,KC_SCOLON,KC_ENTER,MO(7),KC_Z,KC_X,KC_C,KC_V,KC_B,KC_N,KC_M,KC_COMMA,KC_DOT,CSA_SLASH,CSA_BSLS,KC_LCTRL,KC_LALT,KC_TRANSPARENT,KC_LGUI,LOWER,LT(4,KC_SPACE),KC_NO,RAISE,MO(6),KC_TRANSPARENT,WEBUSB_PAIR,CSA_APOS), [_LOWER] = LAYOUT_planck_grid(KC_GRAVE,KC_1,KC_2,KC_3,KC_4,KC_5,KC_6,KC_7,KC_8,KC_9,KC_0,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_UNDS,KC_PLUS,CSA_LCBR,CSA_RCBR,CSA_PIPE,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_NO,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT), [_RAISE] = 
LAYOUT_planck_grid(CSA_DTLD,KC_EXLM,KC_AT,KC_HASH,KC_DLR,KC_PERC,KC_CIRC,KC_AMPR,KC_ASTR,KC_LPRN,KC_RPRN,KC_DELETE,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_MINUS,KC_EQUAL,CSA_LBRC,CSA_RBRC,CSA_BSLS,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,LSFT(KC_INSERT),KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_NO,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT), [_ADJUST] = LAYOUT_planck_grid(KC_TRANSPARENT,KC_F1,KC_F2,KC_F3,KC_F4,KC_F5,KC_F6,KC_F7,KC_F8,KC_F9,KC_F10,KC_F11,KC_DELETE,KC_TRANSPARENT,AU_ON,AU_OFF,AU_TOG,KC_TRANSPARENT,KC_TRANSPARENT,RGB_TOG,RGB_VAI,RGB_VAD,KC_TRANSPARENT,KC_F12,KC_TRANSPARENT,KC_TRANSPARENT,MU_ON,MU_OFF,MU_TOG,KC_TRANSPARENT,KC_TRANSPARENT,RGB_MOD,RGB_HUI,RGB_HUD,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_NO,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,RESET), [_LAYER4] = LAYOUT_planck_grid(KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_PSCREEN,KC_SCROLLLOCK,KC_SCROLLLOCK,KC_PAUSE,KC_TRANSPARENT,KC_DELETE,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_LEFT,KC_DOWN,KC_UP,KC_RIGHT,KC_TRANSPARENT,KC_TRANSPARENT,KC_LSHIFT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,MO(5),KC_TRANSPARENT,KC_NO,MO(5),KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT), [_LAYER5] = 
LAYOUT_planck_grid(KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_HOME,KC_PGDOWN,KC_PGUP,KC_END,KC_TRANSPARENT,KC_TRANSPARENT,KC_LSHIFT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_NO,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT), [_LAYER6] = LAYOUT_planck_grid(KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,CSA_DCRC,CSA_CCED,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,CSA_DGRV,KC_TRANSPARENT,KC_LSHIFT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,CSA_ECUT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_NO,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT), [_LAYER7] = 
LAYOUT_planck_grid(LSFT(KC_TAB),LSFT(KC_Q),LSFT(KC_W),LSFT(KC_E),LSFT(KC_R),LSFT(KC_T),LSFT(KC_Y),LSFT(KC_U),LSFT(KC_I),LSFT(KC_O),LSFT(KC_P),LSFT(KC_BSPACE),LSFT(KC_ESCAPE),LSFT(KC_A),LSFT(KC_S),LSFT(KC_D),LSFT(KC_F),LSFT(KC_G),LSFT(KC_H),LSFT(KC_J),LSFT(KC_K),LSFT(KC_L),LSFT(KC_SCOLON),LSFT(KC_ENTER),KC_TRANSPARENT,LSFT(KC_Z),LSFT(KC_X),LSFT(KC_C),LSFT(KC_V),LSFT(KC_B),LSFT(KC_N),LSFT(KC_M),CSA_LESS,CSA_GRTR,LSFT(CSA_QEST),LSFT(KC_BSLASH),LSFT(KC_LCTRL),KC_TRANSPARENT,LSFT(KC_LALT),LSFT(KC_LGUI),KC_TRANSPARENT,KC_TRANSPARENT,KC_NO,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,KC_TRANSPARENT,LSFT(CSA_DQOT)), }; extern bool g_suspend_state; extern rgb_config_t rgb_matrix_config; void keyboard_post_init_user(void) { rgb_matrix_enable(); } const uint8_t PROGMEM ledmap[][DRIVER_LED_TOTAL][3] = { [0] = { {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248} }, [1] = { {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, 
{48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255}, {48,255,255} }, [2] = { {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255}, {126,249,255} }, [4] = { {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255}, {255,255,255} }, [6] = { {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, 
{165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255}, {0,0,255}, {0,0,255}, {165,255,255}, {0,0,255}, {165,255,255} }, [7] = { {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248}, {199,228,248} }, }; void set_layer_color(int layer) { for (int i = 0; i < DRIVER_LED_TOTAL; i++) { HSV hsv = { .h = pgm_read_byte(&ledmap[layer][i][0]), .s = pgm_read_byte(&ledmap[layer][i][1]), .v = pgm_read_byte(&ledmap[layer][i][2]), }; if (!hsv.h && !hsv.s && !hsv.v) { rgb_matrix_set_color( i, 0, 0, 0 ); } else { RGB rgb = hsv_to_rgb( hsv ); float f = (float)rgb_matrix_config.hsv.v / UINT8_MAX; rgb_matrix_set_color( i, f * rgb.r, f * rgb.g, f * rgb.b ); } } } void rgb_matrix_indicators_user(void) { if (g_suspend_state || keyboard_config.disable_layer_led) { return; } switch (biton32(layer_state)) { case 0: set_layer_color(0); break; case 1: set_layer_color(1); break; case 2: set_layer_color(2); break; case 4: set_layer_color(4); break; case 6: set_layer_color(6); break; case 7: set_layer_color(7); break; default: if (rgb_matrix_get_flags() == LED_FLAG_NONE) rgb_matrix_set_color_all(0, 0, 0); break; } } bool process_record_user(uint16_t keycode, keyrecord_t *record) { // Hook user define functionality here. 
------------------------------------- layer_with_mod_tap_on_key_press(keycode); // -------------------------------------------------------------------------- switch (keycode) { case CSA_LSPO: perform_space_cadet(record, keycode, KC_LSFT, KC_LSFT, KC_9); return false; case CSA_RSPC: perform_space_cadet(record, keycode, KC_LSFT, KC_LSFT, KC_0); return false; case RGB_SLD: if (record->event.pressed) { rgblight_mode(1); } return false; } return true; } #ifdef AUDIO_ENABLE bool muse_mode = false; uint8_t last_muse_note = 0; uint16_t muse_counter = 0; uint8_t muse_offset = 70; uint16_t muse_tempo = 50; void encoder_update(bool clockwise) { if (muse_mode) { if (IS_LAYER_ON(_RAISE)) { if (clockwise) { muse_offset++; } else { muse_offset--; } } else { if (clockwise) { muse_tempo+=1; } else { muse_tempo-=1; } } } else { if (clockwise) { #ifdef MOUSEKEY_ENABLE register_code(KC_MS_WH_DOWN); unregister_code(KC_MS_WH_DOWN); #else register_code(KC_PGDN); unregister_code(KC_PGDN); #endif } else { #ifdef MOUSEKEY_ENABLE register_code(KC_MS_WH_UP); unregister_code(KC_MS_WH_UP); #else register_code(KC_PGUP); unregister_code(KC_PGUP); #endif } } } void matrix_scan_user(void) { #ifdef AUDIO_ENABLE if (muse_mode) { if (muse_counter == 0) { uint8_t muse_note = muse_offset + SCALE[muse_clock_pulse()]; if (muse_note != last_muse_note) { stop_note(compute_freq_for_midi_note(last_muse_note)); play_note(compute_freq_for_midi_note(muse_note), 0xF); last_muse_note = muse_note; } } muse_counter = (muse_counter + 1) % muse_tempo; } #endif } bool music_mask_user(uint16_t keycode) { switch (keycode) { case RAISE: case LOWER: return false; default: return true; } } #endif uint32_t layer_state_set_user(uint32_t state) { return update_tri_layer_state(state, _LOWER, _RAISE, _ADJUST); }
<reponame>khoadaxne15/LastProject<filename>src/redux/reducers/filterReducer.ts
import { AnyAction } from "typescript-fsa";
import {
  applyFilter,
  clearRecentSearch,
  removeSelectedRecent,
  resetFilter,
  setBrandFilter,
  setMaxPriceFilter,
  setMinPriceFilter,
  setTextFilter,
} from "../actions";

// Filter criteria for the product list, plus the user's recent search terms.
export interface Filter {
  recent?: string[]; // most-recent-first search history
  keyword?: string; // current free-text search term
  brand: string;
  minPrice: number; // 0 presumably means "no lower bound" — confirm against UI
  maxPrice: number; // 0 presumably means "no upper bound" — confirm against UI
  sortBy: string;
}

export type FilterState = Filter;

const initState: FilterState = {
  recent: [],
  keyword: "",
  brand: "",
  minPrice: 0,
  maxPrice: 0,
  sortBy: "",
};

// typescript-fsa reducer: each action creator's `.match` acts as a type guard,
// so `action.payload` is correctly typed inside each branch.
export function filterReducer(state: FilterState | undefined, action: AnyAction): FilterState {
  if (state === undefined) {
    return initState;
  }
  if (setTextFilter.match(action)) {
    return {
      ...state,
      // Prepend the term to the history unless it's empty or already present
      // (deduplicated by exact string match).
      recent:
        !!state.recent?.find((n) => n === action.payload) || action.payload === ""
          ? state.recent
          : [action.payload, ...(state.recent ?? [])],
      keyword: action.payload,
    };
  }
  if (setBrandFilter.match(action)) {
    return {
      ...state,
      brand: action.payload,
    };
  }
  if (setMaxPriceFilter.match(action)) {
    return {
      ...state,
      maxPrice: action.payload,
    };
  }
  if (setMinPriceFilter.match(action)) {
    return {
      ...state,
      minPrice: action.payload,
    };
  }
  if (resetFilter.match(action)) {
    // Note: also clears the recent-search history, since initState has recent: [].
    return initState;
  }
  if (clearRecentSearch.match(action)) {
    return {
      ...state,
      recent: [],
    };
  }
  if (removeSelectedRecent.match(action)) {
    // Remove a single term from the history (payload is the term itself).
    return {
      ...state,
      recent: state.recent?.filter((item) => item !== action.payload),
    };
  }
  if (applyFilter.match(action)) {
    // Bulk-merge a partial filter object over the current state.
    return {
      ...state,
      ...action.payload,
    };
  }
  return state;
}
#!/bin/bash
# Install Docker via the official convenience script and let the current
# user run docker without sudo (group change takes effect at next login).
#
# FIX: abort on any failed step instead of running the installer on a
# partial download, and quote $USER.
set -euo pipefail

# -f fails on HTTP errors so we never execute an HTML error page.
curl -fsSL get.docker.com -o get-docker.sh
sudo sh get-docker.sh
sudo usermod -aG docker "$USER"
import os from 'os';
import buildAssets from './lib/builders/build-assets.js';
import Console from './lib/utils/console.js';
import findProjectRoot from './lib/utils/find-project-root.js';
import appImportTransformation from './lib/transpilers/app-import-transformation.js';
import parseCLIArguments from './lib/utils/parse-cli-arguments.js';
import resolvePortNumberFor from './lib/utils/resolve-port-number-for.js';
import WorkerPool from './lib/worker-pool/index.js';

global.mainContext = global.mainContext || global;

/**
 * Build manifest: collects vendor/application/test imports queued by the
 * project's build file, then `build(environment)` transpiles them and
 * produces the final assets.
 */
export default {
  indexHTMLInjections: {},
  vendorPrepends: [],
  vendorAppends: [],
  applicationPrepends: [],
  applicationAppends: [],
  testPrepends: [],
  testAppends: [],

  // Queue a plain library import for a build target ('vendor' | 'application' | 'test').
  import(path, options = {}) {
    const appendMetadata = options.prepend ? 'Prepends' : 'Appends';
    const type = options.type || 'application';

    this[`${type}${appendMetadata}`].push({ path: path, type: 'library', options: options });
  },

  // Queue an addon import; `path` may be omitted (second arg is then options).
  importAddon(name, path, options = {}) {
    const OPTIONS = typeof path === 'object' ? path : options;
    const PATH = typeof path === 'string' ? path : name;
    const appendMetadata = OPTIONS.prepend ? 'Prepends' : 'Appends';
    const type = options.type || 'application';

    this[`${type}${appendMetadata}`].push({ name: name, path: PATH, type: 'addon', options: OPTIONS });
  },

  // Queue an npm module to be wrapped as an AMD module.
  importAsAMDModule(npmModuleName, path, options = {}) {
    const OPTIONS = typeof path === 'object' ? path : options;
    const PATH = typeof path === 'string' ? path : npmModuleName;
    const appendMetadata = OPTIONS.prepend ? 'Prepends' : 'Appends';
    const type = options.type || 'application';

    this[`${type}${appendMetadata}`].push({ name: npmModuleName, path: PATH, type: 'amdModule', options: OPTIONS });
  },

  // Register raw content to inject into index.html under `keyName`.
  injectInlineContent(keyName, value) {
    this.indexHTMLInjections[keyName] = value;
  },

  /**
   * Transpile every queued import and build the project's assets.
   *
   * FIX: this was `return new Promise(async (resolve) => ...)` with no reject
   * path — any error thrown before the inner `.catch` (e.g. findProjectRoot
   * failing) left the returned promise pending forever and surfaced as an
   * unhandled rejection. Rewritten as a plain async method whose failures all
   * route through reportErrorAndExit, preserving the original error behavior.
   *
   * @param environment name passed to config/environment.js
   * @returns {Promise} resolves with the buildAssets result
   */
  async build(environment) {
    try {
      global.MBER_THREAD_POOL = WorkerPool.start(os.cpus().length);

      const projectRoot = await findProjectRoot();
      // ENV with RegExp values serialized so it can cross worker boundaries.
      const ENV = serializeRegExp(
        (await import(`${projectRoot}/config/environment.js`)).default(environment)
      );
      const applicationName = ENV.modulePrefix || 'frontend';

      // Kick off transpilation for every non-empty import bucket.
      const buildMeta = [
        'vendorPrepends',
        'vendorAppends',
        'applicationPrepends',
        'applicationAppends',
        'testPrepends',
        'testAppends'
      ].reduce((result, key) => {
        if (this[key].length > 0) {
          return Object.assign(result, {
            [key]: transpileAddonToES5(projectRoot, this[key], applicationName)
          });
        }
        return result;
      }, {});

      // Order matches Object.keys(buildMeta); used to rebuild the cache below.
      const finishedBuild = await Promise.all(
        Object.keys(buildMeta).map((metaKey) => buildMeta[metaKey])
      );

      const cliArguments = Object.assign(
        {},
        {
          fastboot: true,
          port: 1234,
          socketPort:
            global.MBER_DISABLE_SOCKETS || ENV.environment === 'production' ? null : 65511,
          talk: true,
          testing: ENV.environment !== 'production'
        },
        parseCLIArguments()
      );
      const { socketPort, port } = cliArguments;
      const targetPort = await resolvePortNumberFor('Web server', port);
      const targetSocketPort = socketPort
        ? await resolvePortNumberFor('Websocket server', socketPort)
        : null;

      return await buildAssets({
        applicationName: applicationName,
        ENV: ENV,
        cliArguments: Object.assign({}, cliArguments, {
          port: targetPort,
          socketPort: targetSocketPort
        }),
        projectRoot: projectRoot,
        buildCache: finishedBuild.reduce((result, code, index) => {
          return Object.assign(result, { [`${Object.keys(buildMeta)[index]}`]: code });
        }, {}),
        indexHTMLInjections: this.indexHTMLInjections
      });
    } catch (error) {
      reportErrorAndExit(error);
    }
  }
};

// Transpile a bucket of queued imports to one concatenated ES5 string,
// dispatching npm/addon work to the worker pool.
function transpileAddonToES5(projectRoot, arrayOfImportableObjects, applicationName) {
  return new Promise((resolve) => {
    Promise.all(
      arrayOfImportableObjects.map((importObject) => {
        if (importObject.type === 'amdModule') {
          return global.MBER_THREAD_POOL.submit({ action: 'NPM_IMPORT', importObject });
        } else if (importObject.type === 'addon') {
          return global.MBER_THREAD_POOL.submit({
            action: 'IMPORT_ADDON_TO_AMD',
            importObject,
            applicationName,
            projectRoot
          });
        }

        return appImportTransformation(importObject, projectRoot);
      })
    )
      .then((contents) => resolve(contents.join('\n')))
      // NOTE(review): on failure this logs but never settles the promise,
      // so the caller hangs — preserved as-is; confirm intended behavior.
      .catch((error) => console.log('transpileAddonToES5 error', error));
  });
}

// Log the error and terminate the process shortly after (lets logs flush).
function reportErrorAndExit(error) {
  console.log(error);
  Console.log('Error occured, exiting!');
  setTimeout(() => process.exit(1), 100);
}

// JSON round-trip the ENV, serializing RegExp values to their source strings.
function serializeRegExp(object) {
  RegExp.prototype.toJSON = function() {
    return this.source;
  };

  return JSON.parse(JSON.stringify(object));
}
#!/bin/sh ICON="📅 " printf "$ICON%s" "$(date '+%b %d, %R')"
One algorithm for aggregating multiple conversation topics in a chatbot works in two stages. First, natural language processing techniques (for example, topic modeling or keyword extraction) identify the topic of each message the user sends. Second, a hierarchical clustering algorithm groups similar topics together. Grouping related topics lets the chatbot track several subjects within a single conversation and tailor each response to the relevant topic cluster.
def quick_sort(arr):
    """Return a sorted copy of arr using recursive three-way quicksort.

    The pivot is the middle element; values equal to the pivot form their
    own partition, so duplicates are handled without extra recursion.
    """
    if len(arr) <= 1:
        return arr
    pivot = arr[len(arr) // 2]
    smaller, equal, larger = [], [], []
    for value in arr:
        if value < pivot:
            smaller.append(value)
        elif value > pivot:
            larger.append(value)
        else:
            equal.append(value)
    return quick_sort(smaller) + equal + quick_sort(larger)


unsorted_list = [100, 200, 15, 25, 3, 2, 9]
print(quick_sort(unsorted_list))
// Copyright 2007-2013 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.apache.tapestry5.services;

import org.apache.tapestry5.MarkupWriter;
import org.apache.tapestry5.json.JSONObject;

/**
 * A filter (the main interface being {@link PartialMarkupRenderer}) applied when performing a partial page render as
 * part of an Ajax-oriented request. This is similar to {@link org.apache.tapestry5.services.MarkupRendererFilter} and
 * filters are often in place so as to contribute {@link org.apache.tapestry5.annotations.Environmental} services to the
 * pages and components that render.
 *
 * @see org.apache.tapestry5.modules.TapestryModule#contributePartialMarkupRenderer
 */
public interface PartialMarkupRendererFilter
{
    /**
     * Implementations should perform work before or after passing the writer to the renderer.
     *
     * @param writer
     *         to which markup should be written
     * @param reply
     *         JSONObject which will contain the partial response
     * @param renderer
     *         delegate to which the writer should be passed
     */
    void renderMarkup(MarkupWriter writer, JSONObject reply, PartialMarkupRenderer renderer);
}
<filename>src/commands/util/wiki.js<gh_stars>0 const wiki = require("wikijs").default(); const BaseEmbed = require("../../modules/BaseEmbed"); module.exports = { name: "wiki", aliases: ["wikipediasearch", "wikipedia"], category: "util", description: "Search something up on Wikipedia", requiredArgs: ["query"], async execute(bot, message, args) { const lang = await bot.getGuildLang(message.guild.id); if (!args[0]) { return message.channel.send(lang.GLOBAL.PROVIDE_ARGS); } const search = await wiki.search(args.join(" ")); if (!search.results[0]) { return message.channel.send(lang.UTIL.NO_W_FOUND); } const result = await wiki.page(search.results[0]); const description = await result.summary(); const title = result.raw.title; const url = result.raw.fullurl; const embed = BaseEmbed(message) .setTitle(`${title} (read more)`) .setURL(url) .setDescription(`${description.slice(0, 2045)}${description.length > 2048 ? "..." : ""}`); message.channel.send("", embed); }, };
/*
 * Merlin
 *
 * API Guide for accessing Merlin's model management, deployment, and serving functionalities
 *
 * API version: 0.14.0
 * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git)
 */

// NOTE: generated file — regenerate via Swagger Codegen rather than hand-editing.
package client

// EndpointStatus is the lifecycle state string reported for a model endpoint.
type EndpointStatus string

// List of EndpointStatus
const (
	PENDING    EndpointStatus = "pending"
	RUNNING    EndpointStatus = "running"
	SERVING    EndpointStatus = "serving"
	FAILED     EndpointStatus = "failed"
	TERMINATED EndpointStatus = "terminated"
)
#!/bin/bash

# Path placeholders are intentionally blank — fill them in before running.
home_dir=""
data_dir=$home_dir""
results_path=$home_dir""
pre_trained_model_name=$results_path""
cache_dir=""

echo "FEW-SHOT LEARNING ON ALL LANGUAGES JOINTLY"

# Repeat the experiment over several random seeds; for each seed, fine-tune
# against each target language's dev set (training is always on English,
# evaluation covers the full language list).
for SEED in 42 44 40 163
do
  for LANG in ar bn fi id ru sw te
  do
    python main_trans_ada_no_acc.py --train-langs en \
      --dev-langs $LANG \
      --test-langs ar bn en fi id ko ru sw te \
      --do-lower-case \
      --adam-lr 3e-5 \
      --max-seq-length 384 \
      --doc-stride 128 \
      --save-steps 50 \
      --gradient-accumulation-steps 4 \
      --k-spt 6 --q-qry 6 \
      --data-dir $data_dir \
      --out-dir $results_path \
      --batch-sz 2500 \
      --warmup-steps 500 \
      --pre-train-steps 5000 \
      --local_rank -1 \
      --use-pretrained-model \
      --pre-trained-model-name $pre_trained_model_name \
      --cache_dir $cache_dir --seed $SEED
  done
done
# Project identity variables.
# NOTE(review): the naming pattern (PROJECT_NAME / *_IDENTIFIER / DIALECT /
# EXTENSIONS) suggests a mulle-sde style environment file — confirm which
# tooling consumes these before relying on the descriptions below.
export PROJECT_NAME="MulleObjCOSFoundation"

# Identifier variants of the project name (mixed case, snake_case, UPPER_CASE).
export PROJECT_IDENTIFIER="MulleObjCOSFoundation"
export PROJECT_DOWNCASE_IDENTIFIER="mulle_objc_os_foundation"
export PROJECT_UPCASE_IDENTIFIER="MULLE_OBJC_OS_FOUNDATION"

# Project classification: C-based, Objective-C dialect, no specific type.
export PROJECT_TYPE="none"
export PROJECT_LANGUAGE="c"
export PROJECT_DIALECT="objc"
# Source file extensions handled by the project (colon-separated).
export PROJECT_EXTENSIONS="m:aam"
package main;

/**
 * Driver program exercising the Circle2D class: prints the area, perimeter,
 * point containment, circle containment and circle overlap results for a
 * sample circle centered at (2, 2) with radius 5.5.
 *
 * @author <NAME>
 */
public class TestCircle2D {

    public static void main(String[] args) {
        Circle2D c1 = new Circle2D(2, 2, 5.5);
        System.out.println("The area of this circle is " + c1.getArea() + ".");
        System.out.println("The perimeter of this circle is " + c1.getPerimeter() + ".");
        // Point-containment check.
        System.out.println("Is the point (3, 3) inside this circle? " + c1.contains(3, 3));
        // Circle-containment check (whether the argument lies fully inside c1).
        System.out.println("Is the circle centered at (4, 5) with radius 10.5 inside this circle? "
                + c1.contains(new Circle2D(4, 5, 10.5)));
        // Overlap check.
        System.out.println("Is the circle centered at (3, 5) with radius 2.3 overlapping this circle? "
                + c1.overlaps(new Circle2D(3, 5, 2.3)));
    }
}
#include <iostream>

// A node of a binary search tree holding an int payload and raw pointers to
// its subtrees (owned and released by BinarySearchTree).
class Node {
public:
    int data;
    Node* left;
    Node* right;

    // Consistency fix: 'right' was value-initialized via 'right()' while
    // 'left' used 'left(nullptr)'; both now initialize explicitly.
    Node(int data): data(data), left(nullptr), right(nullptr) { }
};

class BinarySearchTree {
public:
    Node* root;

    BinarySearchTree(): root(nullptr) { }

    // Bug fix: the original tree never released its nodes (memory leak).
    // NOTE(review): the class remains copyable; copying a tree and destroying
    // both copies would double-free — confirm no caller copies instances
    // before deleting the implicit copy operations.
    ~BinarySearchTree() { destroy(root); }

    // Insert 'data' into the tree; duplicates (data <= node->data) descend left.
    void insert(int data) {
        if (root == nullptr)
            root = new Node(data);
        else
            insert(data, root);
    }

    // Recursive helper: place 'data' in the subtree rooted at 'node'.
    void insert(int data, Node* node) {
        if (data <= node->data) {
            if (node->left == nullptr)
                node->left = new Node(data);
            else
                insert(data, node->left);
        } else {
            if (node->right == nullptr)
                node->right = new Node(data);
            else
                insert(data, node->right);
        }
    }

private:
    // Post-order deletion of the subtree rooted at 'node'.
    static void destroy(Node* node) {
        if (node == nullptr) return;
        destroy(node->left);
        destroy(node->right);
        delete node;
    }
};
import time
import numpy as np
from scipy import stats
from pcit.IndependenceTest import PCIT

np.random.seed(1)

# with open('C:/Users/Sam/Dropbox/UniversityStuff/UCL/Project/Data/Wine.csv', 'rt') as f:
#     Wine = np.loadtxt(f, delimiter=";")
# Download data set from https://archive.ics.uci.edu/ml/datasets/wine and store it as 'Wine'
# NOTE(review): the load above is commented out, so 'Wine' is undefined and the
# script raises NameError unless run in a session where Wine already exists.

n = Wine.shape[0]

# Extract data
X1 = Wine[:,1:2]
X2 = Wine[:,2:3]
noise = Wine[:,5:6]

# Sample sizes and number of resamples for test
n_range = [100,200,500,1000,2000,5000]
B = 500

power = []
time_sample_size = []

for sample_size in n_range:
    # Reset counters
    mistakes = 0
    tic = time.time()

    for i in range(B):
        # Sample with replacement from base arrays
        X1_round = X1[stats.randint.rvs(low = 0, high = n, size = sample_size)]
        X2_round = X2[stats.randint.rvs(low = 0, high = n, size = sample_size)]

        # Generate noise array: a random +/-1 sign column times the square root
        # of a resampled noise column.
        noise_round = np.multiply(np.reshape((stats.uniform.rvs(size = sample_size) > 0.5) * 2 - 1,(-1,1)),
            np.sqrt(noise[stats.randint.rvs(low = 0, high = n, size = sample_size)]))

        # Calculate conditioning set, which makes X1_round and X2_round dependent
        Z = np.log(X1_round)*np.exp(X2_round) + noise_round

        # Independence test.
        # 'temp' is deliberately assigned twice to discard the 1st and 3rd
        # return values; only the independence verdict is kept.
        temp, indep, temp = PCIT(X1_round, X2_round, z = Z)

        # If test made a mistake by attesting independence, update counter
        mistakes += indep[0]

        print('Sample size: ', sample_size, 'Resample round: ', i)

    power.append(1 - mistakes / B)
    # NOTE(review): the literal 500 duplicates B; should probably be B so the
    # per-round timing stays correct if B changes — confirm.
    time_sample_size.append((time.time() - tic) / 500)

# Calculate standard error (power follows a binomial)
# NOTE(review): the SE of a binomial proportion is sqrt(p*(1-p)/B); the inner
# np.sqrt(B) here looks like a bug (it divides by sqrt(B), not B) — confirm.
SE = []
for i in range(len(n_range)):
    SE.append(np.sqrt(power[i] * (1 - power[i]) / np.sqrt(B)))
/**
 * Derives unique component names from a list of file paths.
 *
 * For each path, the basename (text after the final '/') is taken and the
 * first occurrence of the substring '.component' is removed. Empty results
 * are dropped; duplicates are removed while preserving first-seen order.
 */
function extractComponentNames(filePaths: string[]): string[] {
  const uniqueNames = new Set<string>();
  for (const filePath of filePaths) {
    const baseName = filePath.split('/').pop();
    const name = baseName?.replace('.component', '');
    if (name) {
      uniqueNames.add(name);
    }
  }
  // Set preserves insertion order, matching the original's dedupe semantics.
  return [...uniqueNames];
}
#!/bin/bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Script to create SQL API docs. This requires `mkdocs` and to build
# Spark first. After running this script the html docs can be found in
# $SPARK_HOME/sql/site

set -o pipefail
set -e

# NOTE(review): FWDIR is computed but never used below — possibly kept for
# parity with sibling scripts; confirm before removing.
FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
SPARK_HOME="$(cd "`dirname "${BASH_SOURCE[0]}"`"/..; pwd)"

# python is required to run the generator; skip (not fail) when absent.
if ! hash python 2>/dev/null; then
  echo "Missing python in your path, skipping SQL documentation generation."
  exit 0
fi

if ! hash mkdocs 2>/dev/null; then
  echo "Missing mkdocs in your path, trying to install mkdocs for SQL documentation generation."
  pip install mkdocs
fi

# Now create the markdown file
rm -fr docs
mkdir docs
echo "Generating markdown files for SQL documentation."
"$SPARK_HOME/bin/spark-submit" gen-sql-markdown.py

# Now create the HTML files
echo "Generating HTML files for SQL documentation."
mkdocs build --clean
rm -fr docs
/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MINDSPORE_CCSRC_RUNTIME_FRAMEWORK_ACTOR_COPY_ACTOR_H_
#define MINDSPORE_CCSRC_RUNTIME_FRAMEWORK_ACTOR_COPY_ACTOR_H_

#include <vector>
#include <string>
#include <memory>
#include <utility>
#include <unordered_map>
#include "runtime/framework/actor/actor_common.h"
#include "runtime/framework/actor/memory_aware_actor.h"
#include "runtime/hardware/device_context.h"
#include "runtime/framework/device_tensor_store.h"

namespace mindspore {
namespace runtime {
using mindspore::device::DeviceContext;

// The copy actor is used to receive the device tensors and control info to copy data between input device tensor and
// output device tensor. The processing flow is RunOpData/RunOpControl -> CheckRunningCondition -> SendMemoryAllocReq
// -> OnMemoryAllocFinish -> Copy -> SendMemoryFreeReq -> SendOutput.
class CopyActor : public MemoryAwareActor {
 public:
  CopyActor(const std::string &name, const AID &memory_manager_aid)
      : MemoryAwareActor(name, KernelTransformType::kCopyActor, nullptr, memory_manager_aid), output_(nullptr) {}
  ~CopyActor() override = default;

  // One-time setup after construction (actor framework hook).
  void Init() override;

  // The memory related operation interface.
  void SendMemoryAllocReq(OpContext<DeviceTensor> *const context) override;
  void SendMemoryFreeReq(OpContext<DeviceTensor> *const context) override;
  // The copy processing after memory alloc finished.
  void OnMemoryAllocFinish(OpContext<DeviceTensor> *const context) override;

  // Accessor for the device tensor this actor creates (see output_ below).
  const DeviceTensorPtr &output() const { return output_; }

 protected:
  void Run(OpContext<DeviceTensor> *const context) override;
  void UpdateOutputData(OpData<DeviceTensor> *const output_data, const DataArrow *data_arrow,
                        OpContext<DeviceTensor> *const context) override;

 private:
  friend class GraphScheduler;

  // Fetch the device tensor for copy.
  void FetchDeviceTensor(OpContext<DeviceTensor> *const context);

  // The input device tensor is saved from the input data or fetched by device_tensor_store_keys_.
  std::vector<DeviceTensor *> input_device_tensor_;
  // The output device tensor is saved from the output or fetched by device_tensor_store_keys_.
  std::vector<DeviceTensor *> output_device_tensor_;

  // The output is created in the copy actor build, so can't be the raw pointer.
  DeviceTensorPtr output_;
};

using CopyActorPtr = std::shared_ptr<CopyActor>;
}  // namespace runtime
}  // namespace mindspore

#endif  // MINDSPORE_CCSRC_RUNTIME_FRAMEWORK_ACTOR_COPY_ACTOR_H_
# Rebuilds the estate_id index on users as a plain (non-unique) index.
# NOTE(review): remove_index followed by add_index with identical arguments
# only changes anything if the existing index differed — the class name
# suggests the old index was unique; confirm against the prior schema.
# Also confirm that `remove_index` inside `change` is reversible on this
# Rails version (4.2); otherwise an explicit up/down pair is needed.
class UsersEstateIdNonUniqueIndex < ActiveRecord::Migration[4.2]
  def change
    remove_index :users, :estate_id
    add_index :users, :estate_id
  end
end
package com.home.demo.bean;

import lombok.Data;

/**
 * Simple message bean; Lombok's {@code @Data} generates getters, setters,
 * {@code equals}/{@code hashCode} and {@code toString}.
 * <p>
 * NOTE(review): "etraInfo" looks like a typo for "extraInfo", but renaming it
 * would change the Lombok-generated accessor names ({@code getEtraInfo}) that
 * callers may already depend on — confirm usages before fixing.
 */
@Data
public class Msg {
    private String title;
    private String content;
    private String etraInfo;

    public Msg(String title, String content, String etraInfo) {
        super();
        this.title = title;
        this.content = content;
        this.etraInfo = etraInfo;
    }
}
#!/bin/bash
# MySQL Exporter Tool
# > vadb shortcut
#
# Package: net.ikigai.ops.mysql.tools
# Author: Narcis M PAP, Aug 2018

# Thin wrapper: delegates straight to the shared selection menu script.
/bin/bash /tools/selection.sh
#!/usr/bin/env bash

# ----------------------------------------------------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2018-2019 Ralph-Gordon Paul. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ----------------------------------------------------------------------------------------------------------------------

set -e

# Positional parameters: target architecture and conan build type (e.g. Release).
declare ARCH=$1
declare BUILD_TYPE=$2

# On Linux CI runners the profile provides the conan environment.
if [ "${GITHUB_OS_NAME}" == "linux" ]; then
    source ~/.profile
fi

#=======================================================================================================================
# create package for architecture and build type
# NOTE(review): CONAN_PACKAGE_NAME, LIBRARY_VERSION, CONAN_USER and
# CONAN_CHANNEL are expected from the CI environment — confirm they are set.
conan create . ${CONAN_PACKAGE_NAME}/${LIBRARY_VERSION}@${CONAN_USER}/${CONAN_CHANNEL} -s os=Linux -s arch=$ARCH \
    -s build_type=$BUILD_TYPE -o shared=False

#=======================================================================================================================
# create zip file from package contents
declare BUILD_TYPE_LOWER="$(echo ${BUILD_TYPE} | tr '[:upper:]' '[:lower:]')"
declare ZIP_FILENAME="godot-cpp-${LIBRARY_VERSION}-linux-${ARCH}-${BUILD_TYPE_LOWER}.zip"

# Tolerate pre-existing directories.
mkdir deps || true
mkdir output || true

# Install the freshly created package into deps/ (deployment recipe in .github/conan).
conan install .github/conan ${CONAN_PACKAGE_NAME}/${LIBRARY_VERSION}@${CONAN_USER}/${CONAN_CHANNEL} -s os=Linux \
    -s arch=$ARCH -s build_type=$BUILD_TYPE

cd deps
zip -r "../output/${ZIP_FILENAME}" *
#!/bin/bash
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M  # memory per node
#SBATCH --time=24:00:00  # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_MountainCarContinuous-v0_doule_ddpg_softcopy_epsilon_greedy_seed4_run3_%N-%j.out  # %N for node name, %j for jobID

# Load the cluster toolchain and activate the TensorFlow (CPU) virtualenv.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate

# Double-DDPG training run: MountainCarContinuous-v0, seed 4, epsilon-greedy
# exploration, continuous action space. ("doule" in the paths is a typo kept
# for consistency with the existing result directories.)
python ./ddpg_discrete_action.py --env MountainCarContinuous-v0 --random-seed 4 --exploration-strategy epsilon_greedy --summary-dir ../Double_DDPG_Results_no_monitor/continuous/MountainCarContinuous-v0/doule_ddpg_softcopy_epsilon_greedy_seed4_run3 --continuous-act-space-flag
#!/bin/sh

# (c) Copyright 2009 - 2010 Xilinx, Inc. All rights reserved.
#
# This file contains confidential and proprietary information
# of Xilinx, Inc. and is protected under U.S. and
# international copyright and other intellectual property
# laws.
#
# DISCLAIMER
# This disclaimer is not a license and does not grant any
# rights to the materials distributed herewith. Except as
# otherwise provided in a valid license issued to you by
# Xilinx, and to the maximum extent permitted by applicable
# law: (1) THESE MATERIALS ARE MADE AVAILABLE "AS IS" AND
# WITH ALL FAULTS, AND XILINX HEREBY DISCLAIMS ALL WARRANTIES
# AND CONDITIONS, EXPRESS, IMPLIED, OR STATUTORY, INCLUDING
# BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, NON-
# INFRINGEMENT, OR FITNESS FOR ANY PARTICULAR PURPOSE; and
# (2) Xilinx shall not be liable (whether in contract or tort,
# including negligence, or under any other theory of
# liability) for any loss or damage of any kind or nature
# related to, arising under or in connection with these
# materials, including for any direct, or any indirect,
# special, incidental, or consequential loss or damage
# (including loss of data, profits, goodwill, or any type of
# loss or damage suffered as a result of any action brought
# by a third party) even if such damage or loss was
# reasonably foreseeable or Xilinx had been advised of the
# possibility of the same.
#
# CRITICAL APPLICATIONS
# Xilinx products are not designed or intended to be fail-
# safe, or for use in any application requiring fail-safe
# performance, such as life-support or safety devices or
# systems, Class III medical devices, nuclear facilities,
# applications related to the deployment of airbags, or any
# other applications that could lead to death, personal
# injury, or severe property or environmental damage
# (individually and collectively, "Critical
# Applications"). Customer assumes the sole risk and
# liability of any use of Xilinx products in Critical
# Applications, subject only to applicable laws and
# regulations governing limitations on product liability.
#
# THIS COPYRIGHT NOTICE AND DISCLAIMER MUST BE RETAINED AS
# PART OF THIS FILE AT ALL TIMES.

#-----------------------------------------------------------------------------
# Script to synthesize and implement the Coregen FIFO Generator
#-----------------------------------------------------------------------------

# Start from a clean results directory.
rm -rf results
mkdir results
cd results

# Bring in the pre-generated netlist required by the implementation flow.
cp ../../../game_over.ngc .

# Run the PlanAhead implementation flow in batch mode using the ISE Tcl script.
planAhead -mode batch -source ../planAhead_ise.tcl
#!/bin/bash

# Load the shared QA environment (expected to define BLDWRAP_TOP, etc.).
source $HOME/qa.sh

cd $BLDWRAP_TOP/gpMgmt

# Run the behave BDD suite via the Makefile; the first CLI argument selects
# which features/tags to run, and TAR=tar is a make variable override.
# stderr is folded into stdout for log capture.
make -f Makefile.behave behave $1 TAR=tar 2>&1
package com.bitsys.common.http.auth;

import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.client.CredentialsProvider;

/**
 * Implementations of this interface have the ability to specifically clear
 * entries from their credential's store.
 */
public interface ClearableCredentialsProvider extends CredentialsProvider
{
    /**
     * Clears credentials that exactly match the given authentication scope.
     * Non-matching entries are left untouched.
     *
     * @param authScope the authentication scope.
     */
    void clearCredentials(AuthScope authScope);

    /**
     * Sets the {@link Credentials credentials} for the given authentication
     * scope. Any previous credentials for the given scope will be overwritten.
     * If the credentials are <code>null</code>, the credentials will be removed
     * from this provider.
     *
     * @see org.apache.http.client.CredentialsProvider#setCredentials(org.apache.http.auth.AuthScope,
     *      org.apache.http.auth.Credentials)
     */
    @Override
    void setCredentials(AuthScope authScope, Credentials credentials);
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package ed.biodare2.backend.features.tsdata.dataimport;

import ed.biodare2.backend.web.rest.HandlingException;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Turns a tabular data file into a compact, display-friendly preview: reads
 * the first rows, shortens long cell values (splitting over-long tokens and
 * compacting numbers) and pads every row to the width of the widest row.
 * Subclasses supply the concrete file parsing via {@link #readTable}.
 *
 * @author tzielins
 */
public abstract class TableSimplifier {

    // Max printed length of a plain number before switching to scientific form.
    static final int DEF_NR_LENGTH = 8;
    // Default max length of an unbroken token inside a cell.
    static final int DEF_TOKEN_LENGTH = 10;
    // Default max length of a whole cell value.
    static final int DEF_MAX_LENGTH = 30;
    // Non-breaking hyphen (U+2011); used so negative exponents don't line-wrap.
    static final String NON_BREAKIN_MINUS = Character.toString((char)8209);

    final Logger log = LoggerFactory.getLogger(this.getClass());

    /** Simplifies with the default token/cell length limits. */
    public List<List<String>> simplify(Path file,int rows) throws IOException, HandlingException {

        return this.simplify(file, rows, DEF_TOKEN_LENGTH,DEF_MAX_LENGTH);
    }

    /** Reads up to {@code rows} rows, shortens each cell, pads rows to equal width. */
    public List<List<String>> simplify(Path file,int rows,int tokenLength,int maxLength) throws IOException, HandlingException {

        List<List<String>> table = readTable(file, rows);
        table = simplifyTable(table,tokenLength,maxLength);
        table = padTable(table);
        return table;
    }

    /** Parses the file into rows of raw cell strings; format specific. */
    protected abstract List<List<String>> readTable(Path file,int rows) throws IOException, HandlingException;

    protected List<List<String>> simplifyTable(List<List<String>> table,int tokenLength,int maxLength) {
        return table.stream().map( row -> simplifyRow(row,tokenLength,maxLength)).collect(Collectors.toList());
    }

    // Cells within a row are independent, hence the parallel stream.
    protected List<String> simplifyRow(List<String> org,int tokenLength,int maxLength) {
        return org.parallelStream().map( s -> simplify(s,tokenLength,maxLength)).collect(Collectors.toList());
    }

    /** Simplifies one cell: numeric values are compacted, other text is shortened. */
    protected String simplify(String str,int tokenLength,int maxLength) {
        if (str == null) return "";
        str = str.trim();
        try {
            double v = Double.parseDouble(str);
            return simplifyNumber(v);
        } catch (NumberFormatException e) {
            return simplifyStr(str,tokenLength,maxLength);
        }
    }

    /** Breaks over-long tokens, then truncates to maxLength with a ".." marker. */
    protected String simplifyStr(String str,int tokenLength,int maxLength) {
        str = tokenize(str,tokenLength,maxLength);
        if (str.length() > maxLength) {
            str = str.substring(0,maxLength-2)+"..";
        }
        return str;
    }

    /**
     * Splits whitespace-free runs longer than tokenLength by inserting a space
     * so the UI can wrap them. Input longer than maxLength+1 is pre-trimmed
     * since the excess would be truncated by the caller anyway.
     */
    protected String tokenize(String str,int tokenLength,int maxLength) {
        if (str.length() <= tokenLength) return str;
        if (str.length() > maxLength+1) str = str.substring(0,maxLength+1);

        return Stream.of(str.split("\\s"))
                .flatMap( token -> {
                    if (token.length() <= tokenLength) return Stream.of(token);
                    return Stream.of(token.substring(0,tokenLength),token.substring(tokenLength));
                })
                .collect(Collectors.joining(" "));
    }

    /**
     * Formats a number compactly: integers verbatim when short enough, then a
     * rounded decimal, finally scientific notation as the fallback.
     */
    protected String simplifyNumber(double val) {
        if (isInteger(val)) {
            String str = Long.toString(Math.round(val));
            if (str.length() <= DEF_NR_LENGTH) return str;
        }

        val = simplifingRounding(val);
        String str = Double.toString(val);
        if (str.length() <= DEF_NR_LENGTH) return str;
        return toScfString(val);
    }

    protected final boolean isInteger(double val) {
        return Math.rint(val) == val;
    }

    // Rounds to 2 decimals for magnitudes in [1, 10000), whole numbers above;
    // values below 1 are left untouched (they go to scientific form if long).
    protected double simplifingRounding(double val) {
        double abs = Math.abs(val);
        if (abs < 1) return val;
        if (abs < 10000) {
            return Math.rint(val*100)/100.0;
        };
        return Math.rint(val);
    }

    // Scientific notation with a 2-decimal mantissa; a negative exponent is
    // rendered with the non-breaking minus so it doesn't line-wrap.
    protected String toScfString(double val) {
        if (val == 0) return "0";

        double abs = Math.abs(val);
        int exp = (int)Math.floor(Math.log10(abs));
        double pref = Math.rint(val*100/Math.pow(10, exp))/100;
        String str = Double.toString(pref)+"E"+(exp < 0 ? NON_BREAKIN_MINUS: "")+Math.abs(exp);
        /* if (exp < 0) {
            str = str.replace("-", NON_BREAKIN_MINUS);
            System.out.println(str);
        }*/
        return str;
    }

    /** Pads every row with empty strings to the width of the widest row. */
    protected List<List<String>> padTable(List<List<String>> table) {

        int width = table.stream().mapToInt( l -> l.size()).max().getAsInt();

        List<List<String>> res = new ArrayList<>(table.size());

        table.forEach( org -> {
            List<String> row = new ArrayList<>(org);
            while(row.size() < width) row.add("");
            res.add(row);
        });
        return res;
    }
}
#!/bin/bash
# Copyright (c) 2021 SUSE LLC
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 3 of the GNU General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, contact SUSE LLC.
#
# To contact SUSE about this file by physical or electronic mail,
# you may find current contact information at www.suse.com

MYNAME=$0
CURRENTDIR=$(dirname $(readlink -e $MYNAME))

# get the source code directory
SRCDIR=$(dirname $CURRENTDIR)

# check to see if we are running inside the python virtual environment
if [[ "$VIRTUAL_ENV" == "" ]] ; then
    echo
    echo "ERROR: python virtualenv not detected. Please run $CURRENTDIR/create_venv.sh to create and activate the python venv first."
    echo
    exit 1
fi

# Local development database settings.
# NOTE(review): credentials are checked in — presumably dev-only defaults;
# confirm they are never used outside local development.
export POSTGRES_USER=snotty
export POSTGRES_PASSWORD=MasterSlobs
export POSTGRES_DB=postgres
export POSTGRES_HOST=127.0.0.1

# NOTE(gyee): see https://www.postgresql.org/docs/11/libpq-connect.html#LIBPQ-CONNECT-SSLMODE
#export POSTGRES_SSL_MODE=require
#export POSTGRES_SSL_ROOT_CERTIFICATE=/var/task/rds-combined-ca-bundle.pem

export FLASK_ENV=development

# Launch the Flask development server, listening on all interfaces.
env FLASK_APP=$SRCDIR/pint_server/app.py flask run -h 0.0.0.0
#!/usr/bin/env bash

# This file is part of Plista Chimney.
#
# (c) plista GmbH
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.

# ==================
# About this script:
# ==================
# This script is created to be used in the Debian-like releasing project:
# - debian/changelog file has a new version entry (Plista Chimney can do this job);
# - this change in debian/changelog is commited and pushed to origin/master;
# - an automation server (e.g. Jenkins) triggers a job, that checks the debian/changelog file and creates;
#   a new version tag if there are new entries and pushes it;
#

# Print an error and abort.
# Bug fixes vs. the original:
#  - exit with a non-zero status: the bare `exit` returned printf's status (0),
#    so callers such as CI treated a failed run as success;
#  - keep the message out of the printf FORMAT string (ShellCheck SC2059).
function die() {
    printf '\nError: %s\n\n' "$1"
    exit 1
}

# Echo the command about to be run (without executing it).
function info_exe() {
    echo "\$ $*"
}

# Echo then execute a command.
# Bug fix: quote "$@" — the original's unquoted $@ re-split arguments on
# whitespace, breaking e.g. the multi-word commit message below.
function exe() {
    info_exe "$@"
    "$@"
}

for INPUT_PARAM in "$@"
do
    case $INPUT_PARAM in
        --package=*)
            PACKAGE="${INPUT_PARAM#*=}"
            shift
            ;;
        --version=*)
            VERSION="${INPUT_PARAM#*=}"
            shift
            ;;
        --changelog=*)
            CHANGELOG_FILE="${INPUT_PARAM#*=}"
            shift
            ;;
    esac
done

# All three parameters are mandatory. (die already exits; the original's
# extra `exit` calls after die were unreachable and have been removed.)
if [[ ! $PACKAGE ]]; then
    die "\"--package\" parameter missed"
fi

if [[ ! $VERSION ]]; then
    die "\"--version\" parameter missed"
fi

if [[ ! $CHANGELOG_FILE ]]; then
    die "\"--changelog\" parameter missed"
fi

# Commit the changelog on 'next', then fast-forward 'master' to it and
# return to 'next'.
exe git checkout next
exe git pull
exe git commit -m "${PACKAGE} (${VERSION})" -- "${CHANGELOG_FILE}"
exe git push
exe git checkout master
exe git pull
exe git merge next
exe git push
exe git checkout next
import {LiquidityRemoveReq} from '../cells/liquidityRemoveReq'
import {Sudt} from '../cells/sudt'
import {Ckb} from '../cells/ckb'
import {Transformation} from './interfaces/transformation'

/*
info_in_cell                            info_out_cell
pool_in_cell                            pool_out_cell
                          ------->
matcher_in_cell(ckb)                    matcher_out_cell(ckb)
[removed_liquidity_cell]                [sudt_cell + ckb_cell]      <--- this is Transformation
[add_liquidity_cell]                    [liquidity_cell + (sudt_cell or ckb_cell)]
 */

// Transformation for a liquidity-removal request: after the matcher fills in
// sudtAmount/capacityAmount, process() materializes the user's refund as one
// SUDT cell plus one CKB change cell, both using the user's original lock.
export class LiquidityRemoveTransformation implements Transformation {
  // after process, this is the data for sudt_cell + ckb_cell
  // total sudt to return
  sudtAmount: bigint
  // total ckb to return, sudt cell capacity + ckb cell capacity
  capacityAmount: bigint

  request: LiquidityRemoveReq
  // guards process() so the output cells are only built once
  processed: boolean
  // set when this request should be skipped by the matcher for this batch
  skip: boolean

  output_sudt?: Sudt
  output_ckb?: Ckb

  constructor(request: LiquidityRemoveReq) {
    this.request = request
    this.sudtAmount = 0n
    this.capacityAmount = 0n
    this.processed = false
    this.skip = false
  }

  // Smallest total capacity the two refund cells can occupy under the user's lock.
  public minCapacity(): bigint {
    return Sudt.calcMinCapacity(this.request.originalUserLock) + Ckb.calcMinCapacity(this.request.originalUserLock)
  }

  // Builds the output cells exactly once; the CKB cell receives whatever
  // capacity remains after the SUDT cell takes its minimum.
  process(): void {
    if (!this.processed) {
      this.output_sudt = Sudt.from(this.sudtAmount, this.request.originalUserLock)
      this.output_ckb = Ckb.from(
        this.capacityAmount - Sudt.calcMinCapacity(this.request.originalUserLock),
        this.request.originalUserLock,
      )
    }
    this.processed = true
  }

  toCellInput(): CKBComponents.CellInput {
    return this.request.toCellInput()
  }

  // Non-null assertions are safe here: process() populates both outputs first.
  toCellOutput(): Array<CKBComponents.CellOutput> {
    this.process()
    return [this.output_sudt!.toCellOutput(), this.output_ckb!.toCellOutput()]
  }

  toCellOutputData(): Array<string> {
    this.process()
    return [this.output_sudt!.toCellOutputData(), this.output_ckb!.toCellOutputData()]
  }
}
#!/bin/bash
# Hardening script: installs legal warning banners shown before and after login.
#  - /etc/issue, /etc/issue.net : pre-login banners (console / network logins)
#  - /etc/motd                  : post-login monitoring-consent notice
#  - sshd "Banner" directive    : makes SSH display /etc/issue.net before auth
# NOTE(review): every command appends, so running the script twice duplicates
# the banners and the sshd_config Banner line — confirm idempotency is not required.

# Red (ANSI 31) access warning appended to both issue files; tee's stdout copy is discarded.
echo -e '\u001b[31m \nWARNING! Access to this device is restricted to those individuals with specific Permissions. If you are not an authorized user, disconnect now. Any attempts to gain unauthorized access will be prosecuted to the fullest extent of the law.\n\u001b[0m' | tee -a /etc/issue /etc/issue.net >> /dev/null

# Monitoring/consent notice appended to the post-login MOTD.
echo -e "\u001b[31m \nThis computer system is for authorized users only. Individuals using this system without authority or in excess of their authority are subject to having all their activities on this system monitored and recorded or examined by any authorized person, including law enforcement, as system personnel deem appropriate. In the course of monitoring individuals improperly using the system or in the course of system maintenance, the activities of authorized users may also be monitored and recorded. Any material so recorded may be disclosed as appropriate. Anyone using this system consents to these terms.\n\u001b[0m" >> /etc/motd

# Tell sshd to display the network banner before authentication.
echo "Banner /etc/issue.net" >> /etc/ssh/sshd_config
const http = require('http');

const hostname = '127.0.0.1';
const port = 3000;

// Minimal routing HTTP server: serves an HTML home page, an HTML about page,
// and a plain-text 404 for any other path.
const server = http.createServer((req, res) => {
  // Lower-case the URL so routing is case-insensitive.
  let path = req.url.toLowerCase();

  switch(path) {
    // Home page
    case '/':
      res.statusCode = 200;
      res.setHeader('Content-Type', 'text/html');
      res.end(`
        <html>
        <head>
          <title>Home Page</title>
        </head>
        <body>
          <h1>Welcome to My Website</h1>
        </body>
        </html>
      `);
      break;

    // About page
    case '/about':
      res.statusCode = 200;
      res.setHeader('Content-Type', 'text/html');
      res.end(`
        <html>
        <head>
          <title>About Page</title>
        </head>
        <body>
          <h1>About Me</h1>
        </body>
        </html>
      `);
      break;

    // 404 Not Found
    default:
      res.statusCode = 404;
      res.setHeader('Content-Type', 'text/plain');
      res.end('404 Not Found');
      break;
  }
});

server.listen(port, hostname, () => {
  console.log(`Server running at http://${hostname}:${port}/`);
});
# Build and publish the common-consumption library using Node 8.
# NOTE(review): `nvm use` requires nvm to be sourced into this shell —
# confirm the script is run from an nvm-enabled environment.

# Bug fix: abort on the first failing command. Previously `npm publish`
# ran even when `npm i` or the build failed, risking publishing a stale
# or broken dist.
set -e

nvm use 8
npm i
npm run build-lib
npm publish ./dist/common-consumption
"use strict";

// Compiled icon definition: a small right-pointing chevron expressed as a
// single SVG <path>, in the { viewBox, children } shape consumed by the
// icon-rendering component. (Looks auto-generated from a TS/SVG source —
// prefer regenerating over hand-editing the path data.)
Object.defineProperty(exports, "__esModule", { value: true });
exports.smallRight = void 0;
var smallRight = {
  "viewBox": "0 0 20 20",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M11,10L7.859,6.58c-0.268-0.27-0.268-0.707,0-0.978c0.268-0.27,0.701-0.27,0.969,0l3.83,3.908\r\n\tc0.268,0.271,0.268,0.709,0,0.979l-3.83,3.908c-0.267,0.272-0.701,0.27-0.969,0c-0.268-0.269-0.268-0.707,0-0.978L11,10z"
    }
  }]
};
exports.smallRight = smallRight;
"""Tiny Tkinter demo: an entry field, a label, and an OK button.

Clicking OK copies the entered text onto the label and echoes it in a
message box.
"""
import tkinter as tk
from tkinter import messagebox

# Create the main application window
root = tk.Tk()
root.title("Text Display App")

# Create a label (doubles as the output area once OK is clicked)
lblText = tk.Label(root, text="Enter text:")
lblText.pack()

# Create a text entry field
txtText = tk.Entry(root)
txtText.pack()

# Function to update the label and display a message box
def btnOK_Clicked():
    text = txtText.get()  # Get the text from the entry field
    lblText.configure(text=text)  # Update the label with the entered text
    messagebox.showinfo("Tkinter - example", text)  # Display the entered text in a message box

# Create a button wired to the handler above
btnOK = tk.Button(root, text="OK", command=btnOK_Clicked)
btnOK.pack()

# Run the application (blocks until the window is closed)
root.mainloop()
'use strict';

const eejs = require('ep_etherpad-lite/node/eejs/');
const Changeset = require('ep_etherpad-lite/static/js/Changeset');

// Etherpad hook: append the headings dropdown/buttons template to the left
// side of the editor toolbar.
exports.eejsBlock_editbarMenuLeft = (hookName, args, cb) => {
  args.content += eejs.require('ep_headings2/templates/editbarButtons.ejs');
  return cb();
};

// Include CSS for HTML export
exports.stylesForExport = () => (
  // These should be consistent with client CSS.
  'h1{font-size: 2.5em;}\n' +
  'h2{font-size: 1.8em;}\n' +
  'h3{font-size: 1.5em;}\n' +
  'h4{font-size: 1.2em;}\n' +
  'code{font-family: RobotoMono;}\n');

// Extract the 'heading' attribute (if any) from a line's attribute string.
// Only the first op is inspected — assumes the heading attribute, when
// present, is carried by the line's leading op.
const _analyzeLine = (alineAttrs, apool) => {
  let header = null;
  if (alineAttrs) {
    const opIter = Changeset.opIterator(alineAttrs);
    if (opIter.hasNext()) {
      const op = opIter.next();
      header = Changeset.opAttributeValue(op, 'heading', apool);
    }
  }
  return header;
};

// line, apool,attribLine,text
// Export hook: wrap a heading line's exported HTML in the appropriate
// <h1>..<h4> tag, replacing the default <p> wrapper when one exists.
// Returns undefined for non-heading lines so Etherpad keeps its default output.
exports.getLineHTMLForExport = async (hookName, context) => {
  const header = _analyzeLine(context.attribLine, context.apool);
  if (header) {
    // Strip the leading line-marker '*' that attribute-bearing lines carry.
    if (context.text.indexOf('*') === 0) {
      context.lineContent = context.lineContent.replace('*', '');
    }
    const paragraph = context.lineContent.match(/<p([^>]+)?>/);
    if (paragraph) {
      context.lineContent = context.lineContent.replace('<p', `<${header} `);
      context.lineContent = context.lineContent.replace('</p>', `</${header}>`);
    } else {
      context.lineContent = `<${header}>${context.lineContent}</${header}>`;
    }
    return context.lineContent;
  }
};
/**
 * Ambient declaration for a keyed in-memory store with per-entry expiry and
 * a cold-storage path rooted at {@link heapRootPath}.
 *
 * NOTE(review): semantics below are inferred from member names only — confirm
 * against the implementing module.
 */
declare class HeapHandler {
    /** Lifetime applied to heap entries (units unknown — likely ms; verify). */
    private expiration;
    /** Filesystem root used for persisted/cold items. */
    private heapRootPath;
    /** The live id → instance map. */
    private heap;
    /** Per-id timers that presumably evict entries on expiry — verify. */
    private destructionTimeouts;
    /** Classes registered via addToScope, used when rehydrating items. */
    private scope;
    constructor(expiration: number, heapRootPath?: string);
    /** True if `id` is currently held in the live heap. */
    hasInstance(id: string): boolean;
    /** Fetch the instance stored under `id`. */
    getFromHeap(id: string): any;
    /** Store `content` under `id`; boolean presumably reports success. */
    insertIntoHeap(id: string, content: any): boolean;
    /** Touch `id`, presumably resetting its destruction timeout. */
    markActivity(id: string): boolean;
    /** Ids available in cold storage, or a boolean on failure — verify. */
    listColdStorage(): Array<string> | boolean;
    /** Register a class constructor for use during rehydration. */
    addToScope(theClass: any): boolean;
}
/**
 * Contract for objects that can live in the heap: they serialize themselves
 * via saveState/loadState and may veto-free resources in beforeDelete.
 */
export interface HeapItem {
    saveState(): object;
    loadState(data: object): boolean;
    beforeDelete?(): boolean;
}
export default HeapHandler;
// Placeholder ava spec — keeps the test runner green until real tests exist.
const test = require('ava');

test("Dummy test", (t) => {
  t.pass();
});
package com.symulakr.dinstar.smsserver.handlers;

import com.symulakr.dinstar.smsserver.message.Message;
import com.symulakr.dinstar.smsserver.message.body.ResponseBody;
import com.symulakr.dinstar.smsserver.message.body.SimpleResponseBody;

/**
 * Base class for handlers whose reply carries only a success/failure flag.
 * Subclasses decide the outcome by implementing {@link #isOk(Message)};
 * this class wraps that boolean in a {@link SimpleResponseBody}.
 */
public abstract class SimpleResponseHandler extends Handler
{
   /**
    * Builds the response body for {@code message} from the subclass's verdict.
    *
    * @param message the incoming message being answered
    * @return a {@link SimpleResponseBody} holding {@code isOk(message)}
    */
   @Override
   protected ResponseBody createBody(Message message)
   {
      return new SimpleResponseBody(isOk(message));
   }

   /**
    * @param message the incoming message
    * @return whether the message should be acknowledged as OK
    */
   protected abstract boolean isOk(Message message);
}
import { filter, map, take } from 'rxjs/operators';
import * as nanoid from 'nanoid/non-secure';
import { Subject } from 'rxjs';

// All messages coming back from the UI are pushed through this single stream.
const $subject = new Subject();

/** Feed an incoming UI message into the response stream. */
export const subscribeOnMessages = message => {
  $subject.next(message);
};

type TMessagePromise = (value: any) => Promise<any>;

/**
 * Send `value` to the plugin UI and resolve with the matching response.
 *
 * Each request is tagged with a fresh nanoid; the response stream is filtered
 * on that id so concurrent calls do not cross-talk.
 *
 * Fix: the stream now completes after the first matching response (take(1)).
 * Previously every call left a live subscription on $subject forever, so the
 * subscriber list grew without bound over the plugin's lifetime.
 */
export const messagePromise: TMessagePromise = value => {
  const id = nanoid();
  const $responseMessage = $subject.pipe(
    filter((message: any) => message.id === id),
    map((message: any) => message.value),
    take(1)
  );
  figma.ui.postMessage({ value, id });
  return new Promise(resolve => {
    $responseMessage.subscribe(resolve);
  });
};
/*
 * $Id: rpsl_commands.c,v 1.10 2002/10/17 20:02:31 ljb Exp $
 * originally Id: rpsl_commands.c,v 1.22 1998/07/31 15:42:39 gerald Exp
 */
/*
 * RPSL set expansion for irrd: implements the '!i' (IPv4) and '!i6' (IPv6)
 * whois commands, recursively expanding as-set/route-set objects into their
 * leaf members while applying RPSL prefix range operators (^+, ^-, ^n, ^n-m).
 */

#include <sys/types.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <stdio.h>
#include <string.h>
#include <time.h>
#include <signal.h>
#include <ctype.h>
#include <fcntl.h>
#include <glib.h>

#include "mrt.h"
#include "trace.h"
#include "config_file.h"
#include "irrd.h"

/* expansion modes: plain member listing vs. recursive route-set/other-set
 * expansion (the choice affects how non-set members are filtered) */
enum EXPAND_TYPE { NO_EXPAND, ROUTE_SET_EXPAND, OTHER_EXPAND };

/* entry in the "already examined" hash; key is an owned strdup'ed set name */
typedef struct _member_examined_hash_t {
  char *key;
} member_examined_hash_t;

/* local routines */
int str_p_cmp(gconstpointer, gconstpointer);
void update_members_list (irr_database_t *database, char *range_op, u_short,
                          enum EXPAND_TYPE expand_flag,
                          GHashTable *hash_member_examined,
                          LINKED_LIST *ll_setlist, LINKED_LIST *ll_set_names,
                          GQueue *stack, irr_connection_t *irr);
void mbrs_by_ref_set (irr_database_t *database, char *range_op, u_short,
                      enum EXPAND_TYPE expand_flag, LINKED_LIST *ll_setlist,
                      char *set_name, LINKED_LIST *ll_mbr_by_ref,
                      irr_connection_t *irr);
char *rpsl_macro_expand_add (char *range, char *name, irr_connection_t *irr,
                             char *dbname);
void SL_Add (LINKED_LIST *ll_setlist, char *member, char *range_op, u_short,
             enum EXPAND_TYPE expand_flag, irr_connection_t *irr);
int chk_set_name (char *);

/* GDestroyNotify for hash_member_examined: frees the entry and its key */
void HashMemberExaminedDestroy(member_examined_hash_t *h) {
  if (h == NULL)
    return;
  if (h->key)
    irrd_free(h->key);
  irrd_free(h);
}

/* as-set/route-set expansion !ias-bar
 *
 * Breadth-ish expansion using an explicit work stack of "range,setname,db,..."
 * strings (built by rpsl_macro_expand_add).  Already-expanded set names are
 * tracked in hash_member_examined to break membership cycles.  Results are
 * collected in ll_setlist, then sorted and de-duplicated for output.
 * Honors IRR.expansion_timeout as a wall-clock cutoff. */
void irr_set_expand (irr_connection_t *irr, char *name) {
  irr_database_t *database;
  time_t start_time;
  GArray *array;
  GQueue *stack;
  GHashTable *hash_member_examined;
  member_examined_hash_t *member_examined_ptr;
  LINKED_LIST *ll_setlist;
  char *set_name, *last_set_name, *mstr, *db;
  char *range_op, abuf[BUFSIZE];
  char *lasts = NULL;
  int i, first, dup, expand_flag = NO_EXPAND;
  hash_spec_t *hash_spec;

  /* a ',' means the caller supplied "range_op,name"; strip the range token
   * and decide the expansion mode from the (possibly hierarchical) name */
  if (strchr(name, ',') != NULL) {
    strtok_r(name, ",", &lasts);
    /* check if we are expanding a route-set */
    if ( (set_name = strchr(name, ':')) != NULL)
      set_name++; /* use last component of a hierarchical name */
    else
      set_name = name;
    if (!strncasecmp (set_name, "rs-", 3))
      expand_flag = ROUTE_SET_EXPAND;
    else
      expand_flag = OTHER_EXPAND;
  }

  start_time = time(NULL);
  convert_toupper (name);

  stack = g_queue_new();
  hash_member_examined = g_hash_table_new_full(g_str_hash, g_str_equal, NULL,
                          (GDestroyNotify)HashMemberExaminedDestroy);
  ll_setlist = LL_Create (LL_DestroyFunction, free, NULL);

  /* seed the work stack with the root set (searched across all databases) */
  mstr = rpsl_macro_expand_add (" ", name, irr, NULL);
  g_queue_push_head(stack, mstr);
  member_examined_ptr = irrd_malloc(sizeof(member_examined_hash_t));
  member_examined_ptr->key = strdup(name);
  g_hash_table_insert(hash_member_examined, member_examined_ptr->key,
                      member_examined_ptr);

  while (!g_queue_is_empty(stack)) {
    if ( IRR.expansion_timeout > 0 ) {
      if ( (time(NULL) - start_time) > IRR.expansion_timeout ) {
        trace (ERROR, default_trace, "irr_set_expand(): Set expansion timeout\n");
        sprintf(abuf, "Expansion maximum CPU time exceeded: %d seconds",
                IRR.expansion_timeout);
        irr_send_error(irr, abuf);
        goto getout;
      }
    }
    mstr = (char *) g_queue_pop_head(stack);
    /* might want to check the examined list to see if this set
       name has been examined already */
    first = 1;
    lasts = NULL;
    /* work-item format: "range_op,set_name,db1,db2,..." (" " = no range) */
    range_op = strtok_r (mstr, ",", &lasts);
    if (!strcmp (range_op, " "))
      range_op = NULL;
    set_name = strtok_r (NULL, ",", &lasts);
    irr_lock_all(irr); /* lock db's while searching */
    while ((db = strtok_r (NULL, ",", &lasts)) != NULL) {
      if ((database = find_database (db)) == NULL) {
        trace (ERROR, default_trace, "irr_set_expand(): Database not found %s\n", db);
        sprintf(abuf, "Database not found: %s", db);
        irr_send_error(irr, abuf);
        /* NOTE(review): exits with irr_lock_all still held — confirm
           whether getout is expected to release DB locks */
        goto getout;
      }
      make_setobj_key (abuf, set_name);
      if ((hash_spec = fetch_hash_spec (database, abuf, UNPACK)) != NULL) {
        first = 0;
        update_members_list (database, range_op, AF_INET, expand_flag,
                             hash_member_examined, ll_setlist,
                             hash_spec->ll_1, stack, irr);
        mbrs_by_ref_set (database, range_op, AF_INET, expand_flag,
                         ll_setlist, set_name, hash_spec->ll_2, irr);
        Delete_hash_spec (hash_spec);
      }
      if (first == 0)
        break; /* first database with the object wins */
    }
    irr_unlock_all(irr);
    free (mstr);
  }

  /* sort the collected members, then emit them space-separated while
   * skipping consecutive duplicates */
  first = 1;
  dup = 0;
  i = 0;
  last_set_name = "";
  array = g_array_sized_new(FALSE, TRUE, sizeof(char*), ll_setlist->count);
  LL_ContIterate (ll_setlist, set_name) {
    g_array_append_val(array, set_name);
  }
  g_array_sort(array, (GCompareFunc)str_p_cmp);
  i = 0;
  while (i < ll_setlist->count) {
    set_name = g_array_index(array, char*, i++);
    if (!first) {
      /* since list is sorted, any duplicates should be consecutive */
      if (strcmp (last_set_name, set_name) == 0)
        dup = 1;
      else /* add a space before each item */
        irr_add_answer (irr, " ");
    }
    if (!dup) { /* only print this if not a duplicate */
      irr_add_answer (irr, "%s", set_name);
      last_set_name = set_name;
    } else
      dup = 0;
    first = 0;
  }
  g_array_free(array, TRUE);
  irr_send_answer (irr);
getout:
  LL_Destroy (ll_setlist);
  g_hash_table_destroy(hash_member_examined);
  g_queue_free(stack);
}

/*
 * Wrapper around strcmp for g_array_sort.
 * Takes two gconstpointers to strings (char pointers) and compares them
 */
int str_p_cmp(gconstpointer g_aa, gconstpointer g_bb) {
  char* aa = *((char**)g_aa);
  char* bb = *((char**)g_bb);
  return strcmp(aa, bb);
}

/* as-set/route-set expansion !i6as-bar
 *
 * IPv6 twin of irr_set_expand(): identical algorithm but passes AF_INET6
 * so SL_Add keeps only IPv6 members and uses 128 as the maximum length. */
void irr_set_expand6(irr_connection_t *irr, char *name) {
  irr_database_t *database;
  time_t start_time;
  GArray *array;
  GQueue *stack;
  GHashTable *hash_member_examined;
  member_examined_hash_t *member_examined_ptr;
  LINKED_LIST *ll_setlist;
  char *set_name, *last_set_name, *mstr, *db;
  char *range_op, abuf[BUFSIZE];
  char *lasts = NULL;
  int i, first, dup, expand_flag = NO_EXPAND;
  hash_spec_t *hash_spec;

  if (strchr(name, ',') != NULL) {
    strtok_r(name, ",", &lasts);
    /* check if we are expanding a route-set */
    if ((set_name = strchr(name, ':')) != NULL)
      set_name++;
    else
      set_name = name;
    if (!strncasecmp(set_name, "rs-", 3))
      expand_flag = ROUTE_SET_EXPAND;
    else
      expand_flag = OTHER_EXPAND;
  }

  start_time = time(NULL);
  convert_toupper(name);

  stack = g_queue_new();
  hash_member_examined = g_hash_table_new_full(g_str_hash, g_str_equal, NULL,
                          (GDestroyNotify)HashMemberExaminedDestroy);
  ll_setlist = LL_Create(LL_DestroyFunction, free, NULL);

  mstr = rpsl_macro_expand_add(" ", name, irr, NULL);
  g_queue_push_head(stack, mstr);
  member_examined_ptr = irrd_malloc(sizeof(member_examined_hash_t));
  member_examined_ptr->key = strdup(name);
  g_hash_table_insert(hash_member_examined, member_examined_ptr->key,
                      member_examined_ptr);

  while (!g_queue_is_empty(stack)) {
    if (IRR.expansion_timeout > 0) {
      if ((time(NULL) - start_time) > IRR.expansion_timeout) {
        trace(ERROR, default_trace, "irr_set_expand6(): Set expansion timeout\n");
        sprintf(abuf, "Expansion maximum CPU time exceeded: %d seconds",
                IRR.expansion_timeout);
        irr_send_error(irr, abuf);
        goto getout;
      }
    }
    mstr = (char *) g_queue_pop_head(stack);
    /* might want to check the examined list to see if this set
       name has been examined already */
    first = 1;
    lasts = NULL;
    range_op = strtok_r(mstr, ",", &lasts);
    if (!strcmp(range_op, " "))
      range_op = NULL;
    set_name = strtok_r(NULL, ",", &lasts);
    irr_lock_all(irr); /* lock db's while searching */
    while ((db = strtok_r(NULL, ",", &lasts)) != NULL) {
      if ((database = find_database(db)) == NULL) {
        trace(ERROR, default_trace, "irr_set_expand6(): Database not found %s\n", db);
        sprintf(abuf, "Database not found: %s", db);
        irr_send_error(irr, abuf);
        goto getout;
      }
      make_setobj_key(abuf, set_name);
      if ((hash_spec = fetch_hash_spec (database, abuf, UNPACK)) != NULL) {
        first = 0;
        update_members_list(database, range_op, AF_INET6, expand_flag,
                            hash_member_examined, ll_setlist,
                            hash_spec->ll_1, stack, irr);
        mbrs_by_ref_set(database, range_op, AF_INET6, expand_flag,
                        ll_setlist, set_name, hash_spec->ll_2, irr);
        Delete_hash_spec (hash_spec);
      }
      if (first == 0)
        break;
    }
    irr_unlock_all(irr);
    free (mstr);
  }

  first = 1;
  dup = 0;
  i = 0;
  last_set_name = "";
  array = g_array_sized_new(FALSE, TRUE, sizeof(char*), ll_setlist->count);
  LL_ContIterate(ll_setlist, set_name) {
    g_array_append_val(array, set_name);
  }
  g_array_sort(array, (GCompareFunc)str_p_cmp);
  i = 0;
  while (i < ll_setlist->count) {
    set_name = g_array_index(array, char*, i++);
    if (!first) {
      /* since list is sorted, any duplicates should be consecutive */
      if (strcmp(last_set_name, set_name) == 0)
        dup = 1;
      else /* add a space before each item */
        irr_add_answer(irr, " ");
    }
    if (!dup) { /* only print this if not a duplicate */
      irr_add_answer(irr, "%s", set_name);
      last_set_name = set_name;
    } else
      dup = 0;
    first = 0;
  }
  g_array_free(array, TRUE);
  irr_send_answer(irr);
getout:
  LL_Destroy(ll_setlist);
  g_hash_table_destroy(hash_member_examined);
  g_queue_free(stack);
}

/* Resolve mbrs-by-ref for set_name.
 * With NO_EXPAND, the maintainer list itself is copied into ll_setlist.
 * Otherwise, for each maintainer in ll_mbr_by_ref, look up the objects whose
 * 'members-of:' references this set and add them via SL_Add(). */
void mbrs_by_ref_set (irr_database_t *database, char *range_op, u_short afi,
                      enum EXPAND_TYPE expand_flag, LINKED_LIST *ll_setlist,
                      char *set_name, LINKED_LIST *ll_mbr_by_ref,
                      irr_connection_t *irr) {
  char *member, *maint, key[BUFSIZE];
  hash_spec_t *hash_spec;

  if (ll_mbr_by_ref == NULL)
    return;

  if (expand_flag == NO_EXPAND) {
    LL_ContIterate (ll_mbr_by_ref, member) {
      LL_Add(ll_setlist, strdup (member));
    }
    return;
  }

  /* Find all the route or autnum's which reference the set name
   * (via 'members-of:'). */
  LL_ContIterate (ll_mbr_by_ref, maint) {
    make_spec_key (key, maint, set_name);
    if ((hash_spec = fetch_hash_spec (database, key, UNPACK)) != NULL) {
      LL_ContIterate (hash_spec->ll_1, member) {
        SL_Add (ll_setlist, member, range_op, afi, expand_flag, irr);
      }
      Delete_hash_spec (hash_spec);
    }
  }
}

/* Walk a set's 'members:' list.  Leaf members (prefixes, AS numbers when not
 * expanding) go straight to ll_setlist via SL_Add(); nested set names not yet
 * in hash_member_examined are pushed onto the work stack, with any ^range
 * operator split off and re-attached to the work-item string. */
void update_members_list (irr_database_t *database, char *range_op, u_short afi,
                          enum EXPAND_TYPE expand_flag,
                          GHashTable *hash_member_examined,
                          LINKED_LIST *ll_setlist, LINKED_LIST *ll_set_names,
                          GQueue *stack, irr_connection_t *irr) {
  char *member, *p, *r;
  char buffer[BUFSIZE], range_buf[512];
  int len = 0;
  member_examined_hash_t *member_examined_ptr;

  if (ll_set_names == NULL)
    return;

  LL_ContIterate (ll_set_names, member) {
    convert_toupper(member);
    /* #FIXME The following logic block is a inscrutable and needs documentation */
    if ((expand_flag == NO_EXPAND) || !chk_set_name (member)) {
      /* leaf member — add directly (afi 0 disables family filtering) */
      SL_Add (ll_setlist, member, range_op, 0, expand_flag, irr);
    } else { /* we have a set name */
      if (!g_hash_table_lookup(hash_member_examined, member)) {
        strcpy(range_buf, " "); /* initialize to empty range */
        r = member;
        /* Need to seperate the range op from the set name for
           rpsl_macro_expand_add() */
        if ((p = strchr (member, '^')) != NULL) {
          strncpy (buffer, member, p - member);
          buffer[p - member] = '\0';
          len = strlen(p);
          if (len < 512) /* don't overflow buffer */
            strcpy(range_buf,p);
          r = buffer; /* this is the set name without the range op */
        }
        if (range_op != NULL) { /* append existing range_op */
          if ( (len + strlen(range_op)) < 512 ) {
            if (p == NULL)
              range_buf[0] = '\0';
            strcat(range_buf, range_op);
          }
        }
        p = rpsl_macro_expand_add (range_buf, r, irr, database->name);
        g_queue_push_head(stack, p);
        /* mark examined before expansion so cycles terminate */
        member_examined_ptr = irrd_malloc(sizeof(member_examined_hash_t));
        member_examined_ptr->key = strdup(r);
        g_hash_table_insert(hash_member_examined, member_examined_ptr->key,
                            member_examined_ptr);
      }
    }
  }
} /* void update_members_list() */

/* a list of prefix range types */
enum PREFIX_RANGE_TYPE { INVALID_RANGE, EXCLUSIVE_RANGE, INCLUSIVE_RANGE, VALUE_RANGE };

/* Parse an RPSL range operator body (the text after '^'):
 *   "+" -> INCLUSIVE_RANGE, "-" -> EXCLUSIVE_RANGE,
 *   "n-m" or "n" -> VALUE_RANGE with *start/*end filled in.
 * *start/*end are untouched for +/- forms. */
enum PREFIX_RANGE_TYPE prefix_range_parse( char *range, unsigned int *start,
                                           unsigned int *end ) {
  char *p;

  p = range;
  if (*p == '+')
    return INCLUSIVE_RANGE;
  if (*p == '-')
    return EXCLUSIVE_RANGE;
  if (strchr(p, '-') != NULL) {
    if (sscanf(p, "%u-%u", start, end) != 2)
      return INVALID_RANGE;
    else
      return VALUE_RANGE;
  }
  if (sscanf(p, "%u", start) != 1)
    return INVALID_RANGE;
  else {
    *end = *start;
    return VALUE_RANGE;
  }
}

/*
 * Sorted and unique linked list add.  *p is the potential item to be added.
 * Also appends a range operator to *p.
 *
 * During ROUTE_SET_EXPAND an "ASnnn" member is resolved to the prefixes that
 * originate from that AS (recursing once per prefix).  Otherwise the member
 * is filtered by address family (afi; 0 = no filter), its existing ^range is
 * combined with the caller-supplied range_op, and the normalized result is
 * appended to ll_setlist.
 */
void SL_Add (LINKED_LIST *ll_setlist, char *member, char *range_op, u_short afi,
             enum EXPAND_TYPE expand_flag, irr_connection_t *irr) {
  char buffer[BUFSIZE], buf2[BUFSIZE], rangestr[32];
  char *q, *temp_ptr, *range_ptr;
  char *last = NULL;
  unsigned int biggest_range;
  hash_spec_t *hash_spec;
  irr_database_t *db;
  unsigned int bitlen;
  enum PREFIX_RANGE_TYPE range_op_type, prefix_range_type;
  unsigned int range_op_start, range_op_end, prefix_range_start, prefix_range_end;

  /* if performing a route-set expansion, check for AS numbers and lookup
   * route prefixes which list the AS as their origin */
  if ( expand_flag == ROUTE_SET_EXPAND && !strncasecmp(member, "AS", 2)) {
    make_gas_key(buffer, member + 2);
    make_6as_key(buf2, member + 2);
    LL_ContIterate (irr->ll_database, db) { /* search over all databases */
      /* first check for IPv4 prefixes */
      if ((afi == AF_INET) && (hash_spec = fetch_hash_spec(db, buffer, FAST)) != NULL) {
        if (hash_spec->len1 > 0) {
          q = strdup(hash_spec->gas_answer);
          temp_ptr = strtok_r(q, " ", &last);
          while (temp_ptr != NULL && *temp_ptr != '\0') {
            /* recurse so each prefix gets range handling below */
            SL_Add(ll_setlist, temp_ptr, range_op, afi, expand_flag, irr);
            temp_ptr = strtok_r(NULL, " ", &last);
          }
          free(q);
          Delete_hash_spec(hash_spec);
        }
      }
      /* now check for IPv6 prefixes */
      if ((afi == AF_INET6) && (hash_spec = fetch_hash_spec(db, buf2, FAST)) != NULL) {
        if (hash_spec->len1 > 0) {
          q = strdup(hash_spec->gas_answer);
          temp_ptr = strtok_r(q, " ", &last);
          while (temp_ptr != NULL && *temp_ptr != '\0') {
            SL_Add(ll_setlist, temp_ptr, range_op, afi, expand_flag, irr);
            temp_ptr = strtok_r(NULL, " ", &last);
          }
          free(q);
          Delete_hash_spec(hash_spec);
        }
      }
    }
    return;
  }

  /* address-family filter: ':' distinguishes IPv6 from IPv4 members */
  if (afi != 0) {
    if (afi == AF_INET6) {
      if (!strchr(member, ':'))
        return;
    } else if (strchr(member, ':'))
      return;
  }

  strcpy (buffer, member);
  if (range_op == NULL)
    goto add_member;
  if (*range_op != '^') { /* should start with a '^' */
    trace (ERROR, default_trace,
           "SL_Add(): range_op does not start with a '^' : %s\n", range_op);
    goto add_member;
  }

  /* apply each ^op in the (possibly stacked) range_op string in turn,
   * folding it into the prefix's own range and re-normalizing */
  range_ptr = strdup(range_op + 1);
  q = strtok_r(range_ptr, "^", &last);
  while (q != NULL && *q != '\0') {
    range_op_type = prefix_range_parse(q, &range_op_start, &range_op_end);
    if (range_op_type == INVALID_RANGE) {
      trace (ERROR, default_trace, "SL_Add(): range_op is invalid : %s\n", range_op);
      free(range_ptr);
      return;
    }
    /* should have a prefix length to be legal */
    if ( (temp_ptr = strchr(member, '/')) != NULL )
      bitlen = atoi(temp_ptr + 1);
    else {
      trace (ERROR, default_trace, "SL_Add(): prefix missing: %s\n", member);
      free(range_ptr);
      return;
    }
    if ( *buffer >= '0' && *buffer <= '9' && (strchr(buffer, ':') == NULL) ) /* check if IPv4 addr or IPv6 addr */
      biggest_range = 32;
    else
      biggest_range = 128;
    if ( (temp_ptr = strchr(buffer, '^')) == NULL ) /* check for range on prefix */
      prefix_range_start = (prefix_range_end = bitlen);
    else {
      prefix_range_type = prefix_range_parse(temp_ptr + 1, &prefix_range_start,
                                             &prefix_range_end);
      if (prefix_range_type == INVALID_RANGE) {
        trace (ERROR, default_trace, "SL_Add(): prefix range is invalid : %s\n", buffer);
        free(range_ptr);
        return;
      }
      *temp_ptr = 0; /* terminate string at range */
      if (prefix_range_type == EXCLUSIVE_RANGE) {
        prefix_range_start = bitlen + 1;
        prefix_range_end = biggest_range;
      } else if (prefix_range_type == INCLUSIVE_RANGE) {
        prefix_range_start = bitlen;
        prefix_range_end = biggest_range;
      }
    }
    /* combine the operator with the prefix's effective range */
    if (range_op_type == INCLUSIVE_RANGE) {
      range_op_start = prefix_range_start;
      range_op_end = biggest_range;
    } else if (range_op_type == EXCLUSIVE_RANGE) {
      range_op_start = prefix_range_start + 1;
      range_op_end = biggest_range;
    } else if (range_op_type == VALUE_RANGE) {
      if (range_op_end < prefix_range_start) {
        free(range_ptr);
        return; /* apply an less specific range to a more specific one */
      }
      if (prefix_range_start > range_op_start)
        range_op_start = prefix_range_start;
    }
    if (range_op_start > range_op_end) {
      free(range_ptr);
      return; /* specific range exceeds maximum, toss prefix */
    }
    /* re-render the combined range in its most compact RPSL form */
    if ( (bitlen == range_op_start) && (bitlen == range_op_end) )
      strcpy(rangestr,"");
    else if (range_op_end == range_op_start)
      sprintf(rangestr,"^%d", range_op_start);
    else if (range_op_end == biggest_range && range_op_start == bitlen)
      strcpy(rangestr,"^+");
    else if (range_op_end == biggest_range && range_op_start == (bitlen + 1))
      strcpy(rangestr,"^-");
    else
      sprintf(rangestr,"^%d-%d",range_op_start, range_op_end);
    strcat(buffer, rangestr);
    q = strtok_r(NULL, "^", &last);
  }
  free(range_ptr);
add_member:
  LL_Add(ll_setlist, strdup (buffer));
}

/* Build a heap-allocated work-item string "range,name[,dbname],db1,db2,..."
 * listing every database on the connection (dbname, when given, is placed
 * first and not repeated).  Caller frees the result. */
char *rpsl_macro_expand_add (char *range, char *name, irr_connection_t *irr,
                             char *dbname) {
  irr_database_t *database;
  char buffer[BUFSIZE];

  strcpy (buffer, range);
  strcat (buffer, ",");
  strcat (buffer, name);
  if (dbname != NULL) {
    strcat (buffer, ",");
    strcat (buffer, dbname);
  }
  LL_ContIterate (irr->ll_database, database) {
    if (dbname == NULL || strcasecmp (database->name, dbname)) {
      strcat (buffer, ",");
      strcat (buffer, database->name);
    }
  }
  return (strdup (buffer));
}

/* Heuristic: return 1 if name looks like a set name, 0 if it is a prefix
 * or a plain AS number. */
int chk_set_name (char *name) {

  if (name == NULL) /* sanity check ? */
    return 0;

  /* check for an IPv4 or IPv6 prefix */
  if (*name >= '0' && *name <= '9')
    return 0;

  /* check AS number */
  if (!strncasecmp (name, "as", 2) ) {
    /* Check for ':' as it could be a hierarchical set name */
    if ( *(name + 2) != '-' && !strchr(name,':'))
      return 0; /* Must be an AS number */
  }

  /* assume anything else is a set name */
  return 1;
}
#!/bin/bash -v
# Arch desktop bootstrap: install i3 desktop packages, stow dotfiles,
# and fetch a wallpaper.

##### INSTALL PKGS
sudo pacman --noconfirm --needed -S \
    dunst feh flameshot slock acpilight xorg xorg-xinit xorg-xmessage \
    xterm xclip xdo i3 picom

#### STOW THE PACKAGES
# Guard the cd (ShellCheck SC2164): if the dotfiles checkout is missing,
# bail out instead of running stow against whatever directory we are in.
cd /data/dotfiles || exit 1
stow dunst -t ~/
stow picom -t ~/
stow x -t ~/
stow i3 -t ~/
stow i3status -t ~/

#### Wallpaper
mkdir -p $HOME/.config/wallpapers
wget https://i.redd.it/e7wkv05iujg61.png --output-document=$HOME/.config/wallpapers/landscape.png
<reponame>andrefillypesilva/clever-weather<filename>src/app/shared/services/weather.service.ts import { Injectable } from '@angular/core'; import { HttpClient } from '@angular/common/http'; import { Observable } from 'rxjs'; import { pluck } from 'rxjs/operators'; // Interfaces import { Place } from 'src/app/models/interfaces/place.interface'; import { ConsolidatedWeather } from 'src/app/models/interfaces/consolidated-weather.interface'; @Injectable({ providedIn: 'root', }) export class WeatherService { constructor( private readonly http: HttpClient ) {} public getPlace(lat: number, long: number): Observable<Place[]> { return this.http.get<Place[]>(`/api/location/search/?lattlong=${lat},${long}`); } public getPlaceByName(title: string): Observable<Place[]> { return this.http.get<Place[]>(`/api/location/search/?query=${title}`); } public getWeather(woeid: number): Observable<ConsolidatedWeather[]> { return this.http.get<ConsolidatedWeather[]>(`/api/location/${woeid}/`) .pipe( pluck('consolidated_weather') ); } }