text
stringlengths
1
1.05M
#!/bin/bash # $1 - test name # $2 - time mark (in case of periodic logs copying) if [ -z $1 ]; then echo "Test name missing" logs_dir="LOGS/nomane" else if [ -z $2 ]; then logs_dir="LOGS/$1" else logs_dir="LOGS/$1/$2" fi # rm -rf $logs_dir fi echo "Creating log dir in workspace $logs_dir" mkdir -p $logs_dir if [ $? -ne 0 ]; then echo "Error creating log dir" fi echo "log_dir: $logs_dir" echo "maxscale_sshkey: $maxscale_000_keyfile" echo "maxscale_IP: $maxscale_000_network" if [ $maxscale_IP != "127.0.0.1" ] ; then ssh -i ${maxscale_000_keyfile} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o LogLevel=quiet ${maxscale_000_whoami}@${maxscale_000_network} "rm -rf logs; mkdir logs; ${maxscale_000_access_sudo} cp ${maxscale_log_dir}/*.log logs/; ${maxscale_000_access_sudo} cp /tmp/core* logs; ${maxscale_000_access_sudo} chmod 777 -R logs" scp -i ${maxscale_000_keyfile} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o LogLevel=quiet ${maxscale_000_whoami}@${maxscale_000_network}:logs/* $logs_dir if [ $? -ne 0 ]; then echo "Error copying Maxscale logs" fi scp -i ${maxscale_000_keyfile} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o LogLevel=quiet ${maxscale_000_whoami}@${maxscale_000_network}:$maxscale_cnf $logs_dir chmod a+r $logs_dir/* else sudo cp $maxscale_log_dir/*.log $logs_dir sudo cp /tmp/core* $logs_dir sudo cp $maxscale_cnf $logs_dir sudo chmod a+r $logs_dir/* fi if [ -z $logs_publish_dir ] ; then echo "logs are in workspace only" else echo "Logs publish dir is $logs_publish_dir" rsync -a --no-o --no-g LOGS $logs_publish_dir fi for i in `find $logs_dir -name 'core*'` do test -e $i && echo "Test failed: core files generated" && exit 1 done
#!/bin/bash # Copyright (c) 2018 Intel Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -ex MULTICLOUD_PLUGIN_ENDPOINT=http://172.16.77.40:9006/api/multicloud-ocata/v0/openstack-hudson-dc_RegionOne TOKEN=$(curl -v -s -H "Content-Type: application/json" -X POST -d '{ }' $MULTICLOUD_PLUGIN_ENDPOINT/identity/v3/auth/tokens 2>&1 | grep X-Subject-Token | sed "s/\r//g" | sed "s/^.*: //" | cat -v) #curl -v -s -H "Content-Type: application/json" -H "X-Auth-Token: $TOKEN" -X GET $MULTICLOUD_PLUGIN_ENDPOINT/image/v2/images PROJECT_ID=$(curl -v -s -H "Content-Type: application/json" -H "X-Auth-Token: $TOKEN" -X GET $MULTICLOUD_PLUGIN_ENDPOINT/identity/v3/projects 2>/dev/null | python -mjson.tool | grep -B5 "name.*\"admin" | grep '\"id\"' | cut -f4 -d'"') curl -v -s -H "Content-Type: application/json" -H "X-Auth-Token: $TOKEN" -X GET $MULTICLOUD_PLUGIN_ENDPOINT/identity/v3/projects/$PROJECT_ID
<reponame>Boscotiam/client-web-transfer package models; import com.fasterxml.jackson.databind.node.ObjectNode; import play.libs.Json; /** * Created by mac on 18/02/2021. */ public class Connection { private int code; private String message; private int userId; private String name; private String surname; private String consumerId; private String consumerSecret; private String profil; public Connection() { } public int getCode() { return code; } public void setCode(int code) { this.code = code; } public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } public int getUserId() { return userId; } public void setUserId(int userId) { this.userId = userId; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getSurname() { return surname; } public void setSurname(String surname) { this.surname = surname; } public String getConsumerId() { return consumerId; } public void setConsumerId(String consumerId) { this.consumerId = consumerId; } public String getConsumerSecret() { return consumerSecret; } public void setConsumerSecret(String consumerSecret) { this.consumerSecret = consumerSecret; } public String getProfil() { return profil; } public void setProfil(String profil) { this.profil = profil; } public ObjectNode toObjectNode() { ObjectNode objectNode = Json.newObject(); objectNode.put("code", code); objectNode.put("message", message); objectNode.put("userId", userId); objectNode.put("name", name); objectNode.put("surname", surname); objectNode.put("consumerId", consumerId); objectNode.put("consumerSecret", consumerSecret); objectNode.put("profil", profil); return objectNode; } public String toString(){ return toObjectNode().toString(); } }
<gh_stars>0 import { Provider, SuperfaceClient } from '@superfaceai/one-sdk'; import createDebug from 'debug'; import { GraphQLFieldResolver } from 'graphql'; import { DEBUG_PREFIX } from './constants'; const debug = createDebug(`${DEBUG_PREFIX}:onesdk`); let instance: SuperfaceClient; export interface PerformParams { profile: string; useCase: string; provider?: string; parameters?: Record<string, string>; input: Record<string, any>; } export function createInstance() { return new SuperfaceClient(); } export function getInstance(): SuperfaceClient { if (!instance) { instance = createInstance(); } return instance; } export async function perform(params: PerformParams) { const oneSdk = getInstance(); const profile = await oneSdk.getProfile(params.profile); const useCase = profile.getUseCase(params.useCase); let provider!: Provider; if (params.provider) { provider = await oneSdk.getProvider(params.provider); } return await useCase.perform(params.input, { provider, parameters: params.parameters, }); } export function createResolver( profile: string, useCase: string, ): GraphQLFieldResolver<any, any> { debug(`Creating resolver for ${profile}/${useCase}`); return async function oneSdkResolver(source: any, args: any): Promise<any> { debug(`Performing ${profile}/${useCase}`, { source, args }); try { const result = await perform({ profile, useCase, input: args?.input, provider: args?.options?.provider, parameters: args?.options?.parameters, }); debug('Perform result', result); if (result.isOk()) { return { result: result.value, }; } else { throw result.error; } } catch (err) { debug('Perform exception', err); throw err; } }; }
#pragma once #ifndef INPUT #define INPUT #include <glm/glm.hpp> #include <memory> namespace Graficas { /* * La clase Input provee una interfaz que permite al usuario hacer consultas sobre distintos dispositivos(teclado y mouse). * El motor internamente usa los metodos privados para actualizar cuando corresponda el estado de esta clase. */ class Input { public: /* * Enumerador que representa el estado del cursor. Hidden esconde la representacion visual del cursor al estar dentro * de la ventana de la aplicacion, mientras que Disabled como su nombre lo indica desabilita completamente el cursor. */ enum class CursorType { Hidden, Disabled, Normal }; friend class World; Input(); ~Input(); Input(const Input& input) = delete; Input& operator=(const Input& input) = delete; /* * Retorna verdadero si la tecla representada por keycode esta siendo presionada en este momento. */ bool IsKeyPressed(int keycode) const noexcept; /* * Retorna verdadero si el boton del mouse representado por button esta siendo presionado en este momento. */ bool IsMouseButtonPressed(int button) const noexcept; /* * Retorna un vector de dos dimensiones con la posicion en pixeles del mouse. * La posicion (0,0) corresponde a la esquina superior izquierda. */ glm::dvec2 GetMousePosition() const noexcept; /* * Retorna un vector de dos dimensiones con el offset de la rueda del mouse, usualmente unicamente la coordenada y sera distinta de cero. */ glm::dvec2 GetMouseWheelOffset() const noexcept; /* * Ajusta el tipo de cursor al tipo entregado */ void SetCursorType(CursorType type) noexcept; /* * Retorna verdadero si la combinacion de botones representada por chordcode esta siendo presionada en este momento. 
*/ bool IsChordActive(int* keycodes) noexcept; /* * Funcion llamada cada iteracion del motor para actualizar el estado de los eventos de input */ void Update() noexcept; void StartUp() noexcept; void ShutDown() noexcept; private: class InputImplementation; std::unique_ptr<InputImplementation> p_Impl; }; } #endif
<filename>src/middleware/useChecksumSecret.js<gh_stars>0 function useChecksumSecret(secret) { return (req, _res, next) => { req.appShared.checksumSecret = secret; next(); }; } module.exports = useChecksumSecret;
<reponame>thr-consulting/thr-addons<filename>packages/unzipper/src/index.ts<gh_stars>1-10 export {default as unzipper} from './unzipper'; export type {OnFileCallback} from './unzipper';
package org.joeffice.presentation; /** Localizable strings for {@link org.joeffice.presentation}. */ @javax.annotation.Generated(value="org.netbeans.modules.openide.util.NbBundleProcessor") class Bundle { /** * @return <i>Slide Notes</i> * @see SlideNotesTopComponent */ static String CTL_SlideNotesAction() { return org.openide.util.NbBundle.getMessage(Bundle.class, "CTL_SlideNotesAction"); } /** * @return <i>Slide Notes Window</i> * @see SlideNotesTopComponent */ static String CTL_SlideNotesTopComponent() { return org.openide.util.NbBundle.getMessage(Bundle.class, "CTL_SlideNotesTopComponent"); } /** * @return <i>Slide Thumbnails</i> * @see SlideThumbnailsTopComponent */ static String CTL_SlideThumbnailsAction() { return org.openide.util.NbBundle.getMessage(Bundle.class, "CTL_SlideThumbnailsAction"); } /** * @return <i>Slide Thumbnails Window</i> * @see SlideThumbnailsTopComponent */ static String CTL_SlideThumbnailsTopComponent() { return org.openide.util.NbBundle.getMessage(Bundle.class, "CTL_SlideThumbnailsTopComponent"); } /** * @return <i>Slides</i> * @see SlidesTopComponent */ static String CTL_SlidesAction() { return org.openide.util.NbBundle.getMessage(Bundle.class, "CTL_SlidesAction"); } /** * @return <i>Slides Window</i> * @see SlidesTopComponent */ static String CTL_SlidesTopComponent() { return org.openide.util.NbBundle.getMessage(Bundle.class, "CTL_SlidesTopComponent"); } /** * @return <i>This is a Slide Notes window</i> * @see SlideNotesTopComponent */ static String HINT_SlideNotesTopComponent() { return org.openide.util.NbBundle.getMessage(Bundle.class, "HINT_SlideNotesTopComponent"); } /** * @return <i>This is a Slide Thumbnails window</i> * @see SlideThumbnailsTopComponent */ static String HINT_SlideThumbnailsTopComponent() { return org.openide.util.NbBundle.getMessage(Bundle.class, "HINT_SlideThumbnailsTopComponent"); } /** * @return <i>This is a Slides window</i> * @see SlidesTopComponent */ static String HINT_SlidesTopComponent() { return 
org.openide.util.NbBundle.getMessage(Bundle.class, "HINT_SlidesTopComponent"); } /** * @return <i>Powerpoint 2007 / 2010</i> * @see PptxDataObject */ static String LBL_Pptx_LOADER() { return org.openide.util.NbBundle.getMessage(Bundle.class, "LBL_Pptx_LOADER"); } private void Bundle() {} }
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) CD $DIR/.. find . -name "*.js" -not -path "./node_modules/*" | xargs node_modules/jshint/bin/jshint if [ $? -eq 0 ]; then node cl.js $@ else echo "Please fix jshint failures." fi
<filename>spec/unit/edge_set_spec.rb require 'spec_helper' describe 'EdgeSet' do let(:edge_set) { BipartiteGraph::EdgeSet.new } let(:edge1) { BipartiteGraph::Edge.new('0C0', '1C0') } let(:edge2) { BipartiteGraph::Edge.new('0C0', '1C1') } let(:edge3) { BipartiteGraph::Edge.new('1C0', '2C0') } let(:edge4) { BipartiteGraph::Edge.new('1C0', '2C1') } let(:edge5) { BipartiteGraph::Edge.new('1C1', '2C1') } let(:edge6) { BipartiteGraph::Edge.new('1C1', '2C2') } before do edge_set << edge1 edge_set << edge2 edge_set << edge3 edge_set << edge4 edge_set << edge5 edge_set << edge6 end describe "enumerablility" do it "(for example) can be converted to an array" do expect(edge_set.to_a).to match_array([edge1, edge2, edge3, edge4, edge5, edge6]) end it "(for example) can be mapped" do expect(edge_set.map(&:from)).to match_array(%w(0C0 0C0 1C0 1C0 1C1 1C1)) end it "(for example) can do an any?" do expect(edge_set.any? {|e| e.to == '2C2'}).to be true end end describe "length" do it "will give its length" do expect(edge_set.length).to eq(6) end end describe "filtering" do let(:filtered_edge_set) { edge_set.from('1C1').not_to('2C1') } it "returns an edge set" do expect(filtered_edge_set).to be_a BipartiteGraph::EdgeSet end it "filters appropriately" do expect(filtered_edge_set.to_a).to match_array([edge6]) end end end
<gh_stars>0 def cond(test, when_true, when_false): if test: return when_true() else: return when_false()
<gh_stars>0 $(function(){ var input = $('input#s'); var divInput = $('div.input'); var width = divInput.width(); var outerWidth = divInput.parent().width() - (divInput.outerWidth() - width) - 28; var submit = $('#searchSubmit'); var txt = input.val(); input.bind('focus', function() { if(input.val() === txt) { input.val(''); } $(this).animate({color: '#000'}, 300); // text color $(this).parent().animate({ width: outerWidth + 'px', backgroundColor: '#fff', // background color paddingRight: '43px' }, 300, function() { if(!(input.val() === '' || input.val() === txt)) { if(!($.browser.msie && $.browser.version < 9)) { submit.fadeIn(300); } else { submit.css({display: 'block'}); } } }).addClass('focus'); }).bind('blur', function() { $(this).animate({color: '#b4bdc4'}, 300); // text color $(this).parent().animate({ width: width + 'px', backgroundColor: '#e8edf1', // background color paddingRight: '15px' }, 300, function() { if(input.val() === '') { input.val(txt) } }).removeClass('focus'); if(!($.browser.msie && $.browser.version < 9)) { submit.fadeOut(100); } else { submit.css({display: 'none'}); } }).keyup(function() { if(input.val() === '') { if(!($.browser.msie && $.browser.version < 9)) { submit.fadeOut(300); } else { submit.css({display: 'none'}); } } else { if(!($.browser.msie && $.browser.version < 9)) { submit.fadeIn(300); } else { submit.css({display: 'block'}); } } }); });
#!/bin/sh # The MIT License (MIT) # # Copyright (c) 2017 Eficode Oy # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. set -- "$@" -- "$TIMEOUT" "$QUIET" "$PROTOCOL" "$HOST" "$PORT" "$result" TIMEOUT=15 QUIET=0 # The protocol to make the request with, either "tcp" or "http" PROTOCOL="tcp" echoerr() { if [ "$QUIET" -ne 1 ]; then printf "%s\n" "$*" 1>&2; fi } usage() { exitcode="$1" cat << USAGE >&2 Usage: $cmdname host:port|url [-t timeout] [-- command args] -q | --quiet Do not output any status messages -t TIMEOUT | --timeout=timeout Timeout in seconds, zero for no timeout -- COMMAND ARGS Execute command with args after the test finishes USAGE exit "$exitcode" } wait_for() { case "$PROTOCOL" in tcp) if ! command -v nc >/dev/null; then echoerr 'nc command is missing!' exit 1 fi ;; wget) if ! command -v wget >/dev/null; then echoerr 'nc command is missing!' 
exit 1 fi ;; esac while :; do case "$PROTOCOL" in tcp) nc -z "$HOST" "$PORT" > /dev/null 2>&1 ;; http) wget --timeout=1 -q "$HOST" -O /dev/null > /dev/null 2>&1 ;; *) echoerr "Unknown protocol '$PROTOCOL'" exit 1 ;; esac result=$? if [ $result -eq 0 ] ; then if [ $# -gt 7 ] ; then for result in $(seq $(($# - 7))); do result=$1 shift set -- "$@" "$result" done TIMEOUT=$2 QUIET=$3 PROTOCOL=$4 HOST=$5 PORT=$6 result=$7 shift 7 exec "$@" fi exit 0 fi if [ "$TIMEOUT" -le 0 ]; then break fi TIMEOUT=$((TIMEOUT - 1)) sleep 1 done echo "Operation timed out" >&2 exit 1 } while :; do case "$1" in http://*|https://*) HOST="$1" PROTOCOL="http" shift 1 ;; *:* ) HOST=$(printf "%s\n" "$1"| cut -d : -f 1) PORT=$(printf "%s\n" "$1"| cut -d : -f 2) shift 1 ;; -q | --quiet) QUIET=1 shift 1 ;; -q-*) QUIET=0 echoerr "Unknown option: $1" usage 1 ;; -q*) QUIET=1 result=$1 shift 1 set -- -"${result#-q}" "$@" ;; -t | --timeout) TIMEOUT="$2" shift 2 ;; -t*) TIMEOUT="${1#-t}" shift 1 ;; --timeout=*) TIMEOUT="${1#*=}" shift 1 ;; --) shift break ;; --help) usage 0 ;; -*) QUIET=0 echoerr "Unknown option: $1" usage 1 ;; *) QUIET=0 echoerr "Unknown argument: $1" usage 1 ;; esac done if ! [ "$TIMEOUT" -ge 0 ] 2>/dev/null; then echoerr "Error: invalid timeout '$TIMEOUT'" usage 3 fi case "$PROTOCOL" in tcp) if [ "$HOST" = "" -o "$PORT" = "" ]; then echoerr "Error: you need to provide a host and port to test." usage 2 fi ;; http) if [ "$HOST" = "" ]; then echoerr "Error: you need to provide a host to test." usage 2 fi ;; esac wait_for "$@"
const { prefix, token, api } = require("./config.json"); var Database = require('better-sqlite3'); var db = new Database('./database/db.db', { verbose: console.log }); const { Client, Intents, Collection } = require('discord.js-12'); const DiscordBot = new Client({ intents: [Intents.FLAGS.GUILDS, Intents.FLAGS.GUILD_MESSAGES,Intents.FLAGS.GUILD_MEMBERS] }); DiscordBot.Database = Database; DiscordBot.dbsql = db; DiscordBot.shuffle = require('./utils/shuffle.js').shuffle; DiscordBot.pubgAPI = require('./utils/pubg_api.js'); DiscordBot.ranks = require('./utils/ranks.js'); DiscordBot.config = require("./config.json"); const fs = require("fs"); DiscordBot.commands = new Collection(); const commandFiles = fs.readdirSync('./commands/').filter(f => f.endsWith('.js')) for (const file of commandFiles) { const props = require(`./commands/${file}`) console.log(`${file} loaded`) DiscordBot.commands.set(props.help.name, props) } const eventFiles = fs.readdirSync('./events/').filter(f => f.endsWith('.js')) for (const file of eventFiles) { const event = require(`./events/${file}`) if(event.once) { DiscordBot.once(event.name, (...args) => event.execute(...args, DiscordBot)) } else { DiscordBot.on(event.name, (...args) => event.execute(...args, DiscordBot)) } } DiscordBot.on("message", async Message => { if(Message.author.DiscordBot) return; if(Message.channel.type === "dm") return; let MessageArray = Message.content.split(" "); let cmd = MessageArray[0]; let args = MessageArray.slice(1); if(!cmd.startsWith(prefix)) return; let commandfile = DiscordBot.commands.get(cmd.slice(prefix.length)); if(commandfile) { commandfile.run(DiscordBot,Message,args); } else { Message.content = "!help"; MessageArray = Message.content.split(" "); cmd = MessageArray[0]; args = MessageArray.slice(1); commandfile = DiscordBot.commands.get(cmd.slice(prefix.length)); if(commandfile) { commandfile.run(DiscordBot,Message,args); } } }); DiscordBot.login(token);
package com.zyf.algorithm.bean; /** * Created by zyf on 2020/6/22. */ public class RandomListNode { public int label; public RandomListNode next, random; public RandomListNode(int x) { this.label = x; } }
#!/bin/bash EPHEMERAL_HDFS=/root/ephemeral-hdfs # Set hdfs url to make it easier HDFS_URL="hdfs://$PUBLIC_DNS:9000" echo "export HDFS_URL=$HDFS_URL" >> ~/.bash_profile pushd /root/spark-ec2/ephemeral-hdfs > /dev/null source ./setup-slave.sh for node in $SLAVES $OTHER_MASTERS; do echo $node ssh -t -t $SSH_OPTS root@$node "/root/spark-ec2/ephemeral-hdfs/setup-slave.sh" & sleep 0.3 done wait /root/spark-ec2/copy-dir $EPHEMERAL_HDFS/conf NAMENODE_DIR=/mnt/ephemeral-hdfs/dfs/name if [ -f "$NAMENODE_DIR/current/VERSION" ] && [ -f "$NAMENODE_DIR/current/fsimage" ]; then echo "Hadoop namenode appears to be formatted: skipping" else echo "Formatting ephemeral HDFS namenode..." $EPHEMERAL_HDFS/bin/hdfs namenode -format fi echo "Starting ephemeral HDFS..." # This is different depending on version. case "$HADOOP_MAJOR_VERSION" in 1) $EPHEMERAL_HDFS/bin/start-dfs.sh ;; 2) $EPHEMERAL_HDFS/sbin/start-dfs.sh ;; yarn) DFS_USER_DEFS='\ export HDFS_NAMENODE_USER=root\ export HDFS_DATANODE_USER=root\ export HDFS_SECONDARYNAMENODE_USER=root\ ' sed -i -e "/\/usr\/bin\/env bash/a $DFS_USER_DEFS" $EPHEMERAL_HDFS/sbin/start-dfs.sh sed -i -e "/\/usr\/bin\/env bash/a $DFS_USER_DEFS" $EPHEMERAL_HDFS/sbin/stop-dfs.sh $EPHEMERAL_HDFS/sbin/start-dfs.sh --config $EPHEMERAL_HDFS/conf YARN_USER_DEFS='\ YARN_RESOURCEMANAGER_USER=root\ HADOOP_SECURE_DN_USER=root\ YARN_NODEMANAGER_USER=root\ ' sed -i -e "/\/usr\/bin\/env bash/a $YARN_USER_DEFS" $EPHEMERAL_HDFS/sbin/start-yarn.sh sed -i -e "/\/usr\/bin\/env bash/a $YARN_USER_DEFS" $EPHEMERAL_HDFS/sbin/stop-yarn.sh echo "Starting YARN" $EPHEMERAL_HDFS/sbin/start-yarn.sh --config $EPHEMERAL_HDFS/conf ;; *) echo "ERROR: Unknown Hadoop version" return -1 esac popd > /dev/null
ALTER PROCEDURE usp_GetNext @channelId NVARCHAR(255), @rankIncrement FLOAT, @sentByUserId NVARCHAR(255), @userName NVARCHAR(255) OUTPUT, @userId NVARCHAR(255) OUTPUT, @ranking NVARCHAR(255) OUTPUT AS SET NOCOUNT ON; BEGIN TRANSACTION DECLARE @currentRank FLOAT; DECLARE @rankingInternalId INT; DECLARE @cobuInternalTable TABLE (User_internal_id INT, Internal_id INT); INSERT INTO @cobuInternalTable SELECT cobu.User_internal_id, cobu.Internal_id FROM[Channels] c INNER JOIN [ChannelsOtterBrassUser] cobu ON c.Internal_id = cobu.Channel_internal_id WHERE c.id = @channelId SELECT TOP(1) @userName=obu.Name, @userId=obu.Id, @currentRank=ur.Ranking, @rankingInternalId=ur.Internal_id FROM ( SELECT cobuInt.User_internal_id, cobuInt.Internal_id FROM @cobuInternalTable cobuInt LEFT JOIN [ChannelsOtterBrassUserOOF] cobuoof -- We want the ones that are either Active or None (maybe they have not been added as OOF ever) ON cobuInt.Internal_id = cobuoof.ChannelsOtterBrassUser_internal_id WHERE cobuoof.oofStatus = 0 OR cobuoof.oofStatus IS NULL -- TODO: Change this for a table make life easier on CRUD operations. ) AS r INNER JOIN [UserRanking] ur ON ur.Internal_id = r.Internal_id INNER JOIN [OtterBrassUser] obu ON obu.Internal_id = r.User_internal_id WHERE obu.Id != @sentByUserId ORDER BY Ranking, obu.Name ASC SET @currentRank = @currentRank + @rankIncrement; SET @ranking=@currentRank; UPDATE [UserRanking] SET Ranking= @currentRank WHERE Internal_id=@rankingInternalId; IF @@ERROR <> 0 BEGIN -- Rollback the transaction ROLLBACK -- Raise an error and return RAISERROR ('Error in while updating the ranking.', 16, 1) RETURN END COMMIT GO
<gh_stars>0 /* eslint-disable linebreak-style */ import { getUrlParams } from './utils'; export function getMenuMockData(req, res, u) { let url = u; if (!url || Object.prototype.toString.call(url) !== '[object String]') { url = req.url; // eslint-disable-line } const params = getUrlParams(url); const dataSource = [ { smMenuName: '基础数据管理', smUrl: 'baseData', smIcon: 'database', children: [ { smMenuName: '考试数据管理', smUrl: 'examData', }, { smMenuName: '考试计划数据管理', smUrl: 'examPlanData', }, { smMenuName: '准考证数据管理', smUrl: 'licenseData', }, { smMenuName: '工作人员数据管理', smUrl: 'workManData', }, { smMenuName: '考点数据管理', smUrl: 'placeData', }, { smMenuName: '数据包管理', smUrl: 'examDataData', }, { smMenuName: '科目数据管理', smUrl: 'subjectData', }, ], }, { smMenuName: '硬件设备管理', smUrl: 'machineData', smIcon: 'usb', children: [ { smMenuName: 'U盾使用管理', smUrl: 'ukeyUse', }, { smMenuName: 'U盾管理', smUrl: 'ukey', }, { smMenuName: 'U盾软件包管理', smUrl: 'ukeyProgram', }, { smMenuName: '硬件设备清单管理', smUrl: 'billPlace', }, { smMenuName: '硬件设备管理', smUrl: 'machine', }, ], }, { smMenuName: '皮肤管理', smUrl: 'skinData', smIcon: 'skin', children: [ { smMenuName: '模板管理', smUrl: 'template', }, { smMenuName: '模板使用管理', smUrl: 'skinUse', }, { smMenuName: '皮肤管理', smUrl: 'skinPage', }, ], }, { smMenuName: '消息管理', smUrl: 'msgData', smIcon: 'message', children: [ { smMenuName: '短信模板管理', smUrl: 'templatePage', }, { smMenuName: '短信发送任务', smUrl: 'taskPage', }, { smMenuName: '短信发送记录', smUrl: 'sendRecordPage', }, ], }, { smMenuName: '人像识别管理', smUrl: 'faceData', smIcon: 'safety', children: [ { smMenuName: '人像数据识别管理', smUrl: 'dataPage', }, { smMenuName: '人像识别终端数据管理', smUrl: 'clientInfoPage', }, ], }, { smMenuName: '系统权限管理', smUrl: 'permission', smIcon: 'safety', children: [ { smMenuName: '单位数据管理', smUrl: 'unitData', }, ], }]; const result = { status: '1', msg: '成功', dataMain: { list: dataSource, }, }; if (res && res.json) { res.json(result); } else { return result; } }
SELECT salary FROM ( SELECT DISTINCT salary FROM salary ORDER BY salary DESC LIMIT 3 ) s ORDER BY salary ASC LIMIT 1;
parallel --jobs 6 < ./results/exp_iterations/run-0/lustre_4n_6t_6d_1000f_617m_20i/jobs/jobs_n2.txt
/* Copyright 2010 Smartmobili SARL * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef CGSHADING_H_ #define CGSHADING_H_ #include <CoreGraphics/CGBase.h> #include <CoreGraphics/CGColorSpace.h> #include <CoreGraphics/CGFunction.h> #include <CoreGraphics/CGGeometry.h> #include <CoreFoundation/CFBase.h> typedef struct CGShading *CGShadingRef; CG_EXTERN CFTypeID CGShadingGetTypeID(void); CG_EXTERN CGShadingRef CGShadingCreateAxial(CGColorSpaceRef space, CGPoint start, CGPoint end, CGFunctionRef function, bool extendStart, bool extendEnd); CG_EXTERN CGShadingRef CGShadingCreateRadial(CGColorSpaceRef space, CGPoint start, CGFloat startRadius, CGPoint end, CGFloat endRadius, CGFunctionRef function, bool extendStart, bool extendEnd); CG_EXTERN CGShadingRef CGShadingRetain(CGShadingRef shading); CG_EXTERN void CGShadingRelease(CGShadingRef shading) CG_AVAILABLE_STARTING(__MAC_10_2, __IPHONE_2_0); #endif /* CGSHADING_H_ */
#!/bin/bash # # If this script is not run in the directory it is placed, # variable THIS_SCRIPT_DIRPATH must be set to an absolute dirpath. THIS_SCRIPT_DIRPATH=$(pwd) THIS_PARENT_DIRPATH=$(dirname ${THIS_SCRIPT_DIRPATH}) VTDM_CFG_PATH="${THIS_SCRIPT_DIRPATH}/vtdm.cfg.sh" # # if [ -z "${3}" ]; then echo "Usage:" echo " diff.sh <filepath1> <filepath2> <delta_filepath>" exit -1 fi # VTDM_FILEPATH1="${1}" VTDM_FILEPATH2="${2}" VTDM_DELTA_FILEPATH="${3}" # if [ ! -f "${VTDM_FILEPATH1}" ]; then echo "The filepath1: \"${VTDM_FILEPATH1}\" does not exist!" echo echo "Usage:" echo " diff.sh <filepath1> <filepath2> <delta_filepath>" exit -1 fi if [ ! -f "${VTDM_FILEPATH2}" ]; then echo "The filepath2: \"${VTDM_FILEPATH2}\" does not exist!" echo echo "Usage:" echo " diff.sh <filepath1> <filepath2> <delta_filepath>" exit -1 fi # # if [ ! -f "${VTDM_CFG_PATH}" ]; then echo echo "The file: \"${VTDM_CFG_PATH}\" does not exist!" echo "This file works as configuration that must be exist." echo exit -1; fi # # import the configured values source ${VTDM_CFG_PATH} # # VTDM_IMPL_TAG="--impl" VTDM_DIFF_OP_TAG="--diff_op" VTDM_LIBLZMA_SO_TAG="--liblzma_so" VTDM_LIBBZ2_SO_TAG="--libbz2_so" VTDM_CHECKSUM_ALGORITHM_TAG="--checksum_algorithm" VTDM_MAX_CHUNK_SIZE_TAG="--max_chunk_size" VTDM_DATA_GRAM_SIZE_TAG="--data_gram_size" VTDM_HASH_WINDOW_SIZE_TAG="--hash_window_size" VTDM_CONTINUE_MISMATCH_LIMIT_TAG="--continue_mismtach_limit" VTDM_MAX_MISMATCH_PERCENTAGE_TAG="--max_mismatch_percentage" VTDM_MAX_IDENTICAL_FIRST_IN_MATCH_TAG="--max_identical_first_in_match" VTDM_JMP_IDENTICAL_BLOCK_IN_MATCH_TAG="--jmp_identical_block_in_match" VTDM_NEED_KEEP_INTERIM_TEMP_FILES_TAG="--need_keep_interim_temp_files" VTDM_FILEPATH1_TAG="--filepath1" VTDM_FILEPATH2_TAG="--filepath2" VTDM_DELTA_FILEPATH_TAG="--delta_filepath" # VTDM_FPARAMS="\ ${VTDM_IMPL_TAG} ${VTDM_IMPL} \ ${VTDM_DIFF_OP_TAG} ${VTDM_DIFF_OP} \ ${VTDM_LIBLZMA_SO_TAG} ${VTDM_LIBLZMA_SO} \ ${VTDM_LIBBZ2_SO_TAG} ${VTDM_LIBBZ2_SO} \ 
${VTDM_CHECKSUM_ALGORITHM_TAG} ${VTDM_CHECKSUM_ALGORITHM} \ ${VTDM_MAX_CHUNK_SIZE_TAG} ${VTDM_MAX_CHUNK_SIZE} \ ${VTDM_DATA_GRAM_SIZE_TAG} ${VTDM_DATA_GRAM_SIZE} \ ${VTDM_HASH_WINDOW_SIZE_TAG} ${VTDM_HASH_WINDOW_SIZE} \ ${VTDM_CONTINUE_MISMATCH_LIMIT_TAG} ${VTDM_CONTINUE_MISMATCH_LIMIT} \ ${VTDM_MAX_MISMATCH_PERCENTAGE_TAG} ${VTDM_MAX_MISMATCH_PERCENTAGE} \ ${VTDM_MAX_IDENTICAL_FIRST_IN_MATCH_TAG} ${VTDM_MAX_IDENTICAL_FIRST_IN_MATCH} \ ${VTDM_JMP_IDENTICAL_BLOCK_IN_MATCH_TAG} ${VTDM_JMP_IDENTICAL_BLOCK_IN_MATCH} \ ${VTDM_NEED_KEEP_INTERIM_TEMP_FILES_TAG} ${VTDM_NEED_KEEP_INTERIM_TEMP_FILES} \ ${VTDM_FILEPATH1_TAG} ${VTDM_FILEPATH1} \ ${VTDM_FILEPATH2_TAG} ${VTDM_FILEPATH2} \ ${VTDM_DELTA_FILEPATH_TAG} ${VTDM_DELTA_FILEPATH} \ " # VTDM_LOG_LEVEL_TAG="--log.level" VTDM_SYSLOG_IDENT_TAG="--syslog.ident" VTDM_SYSLOG_IDENT="${VTDM_IMPL}" VTDM_LPARAMS="${VTDM_LOG_LEVEL_TAG} ${VTDM_LOG_LEVEL} ${VTDM_SYSLOG_IDENT_TAG} ${VTDM_SYSLOG_IDENT}" # VTDM_FLAG="-d" # LD_LIBRARY_PATH_SAVED=${LD_LIBRARY_PATH} if [ -z "${LD_LIBRARY_PATH}" ]; then export LD_LIBRARY_PATH=${THIS_PARENT_DIRPATH}/lib else export LD_LIBRARY_PATH=${THIS_PARENT_DIRPATH}/lib:${LD_LIBRARY_PATH} fi # THE_CMD="${THIS_PARENT_DIRPATH}/bin/vtdiffrun" # ${THE_CMD} ${VTDM_FLAG} ${VTDM_LPARAMS} ${VTDM_FPARAMS} RETVAL="$?" echo if [ "$RETVAL" == "0" ]; then if [ -f "${VTDM_DELTA_FILEPATH}" ]; then echo "The generated delta filepath:" echo " \"${VTDM_DELTA_FILEPATH}\"" else echo "The command execute successfuly." echo "No generated delta filepath, due to the files are identical." fi else echo "Something wrong, check the related log required." fi echo # export LD_LIBRARY_PATH=${LD_LIBRARY_PATH_SAVED} #
<reponame>thitranthanh/Achilles<filename>achilles-core/src/main/java/info/archinnov/achilles/query/slice/SelectDSL.java
/*
 * Copyright (C) 2012-2014 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package info.archinnov.achilles.query.slice;

import static info.archinnov.achilles.query.slice.SliceQueryProperties.SliceType;
import info.archinnov.achilles.internal.metadata.holder.EntityMeta;
import info.archinnov.achilles.internal.persistence.operations.SliceQueryExecutor;

/**
 * Entry point of the slice-query SELECT DSL.
 *
 * Each builder method hands the accumulated state (executor, entity class,
 * metadata, slice type) to the next DSL stage and delegates the partition
 * components to that stage's internal setter, so this class itself holds no
 * query state beyond construction-time fields.
 *
 * @param <TYPE> the mapped entity type the query is built for
 */
public class SelectDSL<TYPE> {

    // Executor that will eventually run the built slice query.
    private final SliceQueryExecutor sliceQueryExecutor;
    // Entity class the query targets.
    private final Class<TYPE> entityClass;
    // Entity metadata (partition/clustering component definitions).
    private final EntityMeta meta;
    // Discriminates SELECT from other slice operations.
    private final SliceType sliceType;

    // Protected: instances are created by the enclosing DSL machinery, not by callers.
    protected SelectDSL(SliceQueryExecutor sliceQueryExecutor, Class<TYPE> entityClass, EntityMeta meta, SliceType sliceType) {
        this.sliceQueryExecutor = sliceQueryExecutor;
        this.entityClass = entityClass;
        this.meta = meta;
        this.sliceType = sliceType;
    }

    /**
     *
     * Start the Select DSL with provided partition components
     *
     * <pre class="code"><code class="java">
     *
     *  manager.sliceQuery(ArticleRating.class)
     *      .forSelect()
     *      .withPartitionComponents(articleId)
     *
     * </code></pre>
     *
     * Generated CQL3 query:
     *
     * <br/>
     *  SELECT * FROM article_rating WHERE article_id=...
     *
     * @return slice DSL
     */
    public SelectFromPartition<TYPE> withPartitionComponents(Object... partitionKeyComponents) {
        final SelectFromPartition<TYPE> selectFromPartition = new SelectFromPartition<>(sliceQueryExecutor, entityClass, meta, sliceType);
        // Delegate validation/storage of the partition key to the next stage.
        selectFromPartition.withPartitionComponentsInternal(partitionKeyComponents);
        return selectFromPartition;
    }

    /**
     *
     * Start the Select DSL with provided partition components IN
     *
     * <pre class="code"><code class="java">
     *
     *  manager.sliceQuery(MessageEntity.class)
     *      .forSelect()
     *      .withPartitionComponents(10L)
     *      .andPartitionComponentsIN(2013, 2014)
     *
     * </code></pre>
     *
     * Generated CQL3 query:
     *
     * <br/>
     *  SELECT * FROM messages WHERE user_id=10 AND year IN (2013,2014)
     *
     * @return slice DSL
     */
    public SelectWithPartition<TYPE> withPartitionComponentsIN(Object... partitionKeyComponents) {
        final SelectWithPartition<TYPE> selectWithPartition = new SelectWithPartition<>(sliceQueryExecutor, entityClass, meta, sliceType);
        // Delegate the IN components to the next stage.
        selectWithPartition.withPartitionComponentsINInternal(partitionKeyComponents);
        return selectWithPartition;
    }
}
module.exports = () => ({options, res}) => new Promise((resolve, reject) => { var body = [] res .on('data', (chunk) => body.push(chunk)) .on('end', () => { body = Buffer.concat(body) resolve({options, res, body}) }) .on('error', reject) })
# Bootstrap Django data depending on the deployment flavor:
# - normal deploys (IS_REVIEW_APP unset): create a default superuser and
#   register the periodic tasks;
# - review apps: load the canned fixtures instead.
if [[ -z "${IS_REVIEW_APP}" ]]; then
  # Create the 'admin' superuser; email comes from $EMAIL_ADMIN in the environment.
  ./manage.py shell -c "from django.contrib.auth import get_user_model; User = get_user_model(); User.objects.create_superuser('admin', '$EMAIL_ADMIN', 'admin')"
  ./manage.py populate-periodic-tasks
else
  ./manage.py loaddata fixtures/fixtures.json
fi
'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = require('react'); var _react2 = _interopRequireDefault(_react); var _propTypes = require('prop-types'); var _propTypes2 = _interopRequireDefault(_propTypes); var _DataCell = require('./DataCell'); var _DataCell2 = _interopRequireDefault(_DataCell); var _ComponentCell = require('./ComponentCell'); var _ComponentCell2 = _interopRequireDefault(_ComponentCell); var _HeaderCell = require('./HeaderCell'); var _HeaderCell2 = _interopRequireDefault(_HeaderCell); var _dataSheet = require('./utils/dataSheet'); var _utils = require('./utils/utils'); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? 
call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } // Utils var FixedDataSheet = function (_PureComponent) { _inherits(FixedDataSheet, _PureComponent); function FixedDataSheet(props) { _classCallCheck(this, FixedDataSheet); var _this = _possibleConstructorReturn(this, (FixedDataSheet.__proto__ || Object.getPrototypeOf(FixedDataSheet)).call(this, props)); _this.onMouseDown = _this.onMouseDown.bind(_this); _this.onMouseUp = _this.onMouseUp.bind(_this); _this.onMouseOver = _this.onMouseOver.bind(_this); _this.onDoubleClick = _this.onDoubleClick.bind(_this); _this.onContextMenu = _this.onContextMenu.bind(_this); _this.handleKey = _this.handleKey.bind(_this); _this.handleCopy = _this.handleCopy.bind(_this); _this.handlePaste = _this.handlePaste.bind(_this); _this.handleTableScroll = _this.handleTableScroll.bind(_this); _this.pageClick = _this.pageClick.bind(_this); _this.onChange = _this.onChange.bind(_this); _this.lastFixedColumn = _this.getLastFixedColumn(props.headerData); _this.defaultState = { start: {}, end: {}, selecting: false, forceEdit: false, editing: {}, reverting: {}, clear: {}, scrollTop: 0, scrollLeft: 0 }; _this.state = _this.defaultState; _this.removeAllListeners = _this.removeAllListeners.bind(_this); return _this; } _createClass(FixedDataSheet, [{ key: 'componentWillUnmount', value: function componentWillUnmount() { this.removeAllListeners(); this.dgDom && this.dgDom.removeEventListener('scroll', this.handleTableScroll); } }, { key: 'componentDidMount', value: function componentDidMount() { 
this.dgDom.addEventListener('scroll', this.handleTableScroll); } }, { key: 'componentDidUpdate', value: function componentDidUpdate(prevProps, prevState) { var prevEnd = prevState.end; if (!(0, _utils.isEmptyObj)(this.state.end) && !(this.state.end.i === prevEnd.i && this.state.end.j === prevEnd.j)) { this.props.onSelect && this.props.onSelect(this.props.data[this.state.end.i][this.state.end.j]); } } }, { key: 'componentWillReceiveProps', value: function componentWillReceiveProps(nextProps) { this.lastFixedColumn = this.getLastFixedColumn(nextProps.headerData); } }, { key: 'removeAllListeners', value: function removeAllListeners() { document.removeEventListener('keydown', this.handleKey); document.removeEventListener('mousedown', this.pageClick); document.removeEventListener('mouseup', this.onMouseUp); document.removeEventListener('copy', this.handleCopy); document.removeEventListener('paste', this.handlePaste); } }, { key: 'getLastFixedColumn', value: function getLastFixedColumn(headerData) { var lastI = headerData.length - 1; return headerData && headerData[lastI] && headerData[lastI].reduce(function (prev, cell, i) { return cell.fixed ? 
i : prev; }, null); } }, { key: 'pageClick', value: function pageClick(e) { if (!this.dgDom.contains(e.target)) { this.setState(this.defaultState); this.removeAllListeners(); } } }, { key: 'handleCopy', value: function handleCopy(e) { if ((0, _utils.isEmptyObj)(this.state.editing)) { e.clipboardData.setData('text/plain', (0, _dataSheet.handleCopyLogic)(e, this.props, this.state)); } } }, { key: 'handlePaste', value: function handlePaste(e) { if ((0, _utils.isEmptyObj)(this.state.editing)) { var _props = this.props, onChange = _props.onChange, onPaste = _props.onPaste; var _handlePasteLogic = (0, _dataSheet.handlePasteLogic)(e, this.props, this.state), pastedData = _handlePasteLogic.pastedData, end = _handlePasteLogic.end, changedCells = _handlePasteLogic.changedCells; this.setState({ end: end, editing: {} }, function () { if (onPaste) { onPaste(pastedData); } else { changedCells.forEach(function (c) { return onChange(c.cell, c.i, c.j, c.value); }); } }); } } }, { key: 'handleKey', value: function handleKey(e) { var onChange = this.props.onChange; var _handleKeyLogic = (0, _dataSheet.handleKeyLogic)(e, this.props, this.state), newState = _handleKeyLogic.newState, cleanCells = _handleKeyLogic.cleanCells; newState && this.setState(newState, function () { cleanCells && cleanCells.forEach(function (c) { return onChange(c.cell, c.i, c.j, ''); }); }); } /** * Handle table scroll event. Setting the left position (of the fixed columns) at the same * as the main container DOM scrollLeft will make it track the horizontal movement. The * same happens for the top to simulate the fixed header. 
* * @param {Event} e Event info object * @returns {void} */ }, { key: 'handleTableScroll', value: function handleTableScroll(e) { this.setState({ scrollTop: this.dgDom.scrollTop, scrollLeft: this.dgDom.scrollLeft }); } }, { key: 'onContextMenu', value: function onContextMenu(evt, i, j) { var _props2 = this.props, onContextMenu = _props2.onContextMenu, data = _props2.data; if (onContextMenu) { onContextMenu(evt, data[i][j], i, j); } } }, { key: 'onDoubleClick', value: function onDoubleClick(i, j) { if (!this.props.data[i][j].readOnly) { this.setState({ editing: { i: i, j: j }, forceEdit: true, clear: {} }); } } }, { key: 'onMouseDown', value: function onMouseDown(i, j) { var editing = (0, _utils.isEmptyObj)(this.state.editing) || this.state.editing.i !== i || this.state.editing.j !== j ? {} : this.state.editing; this.setState({ selecting: true, start: { i: i, j: j }, end: { i: i, j: j }, editing: editing, forceEdit: false }); // Keep listening to mouse if user releases the mouse (dragging outside) document.addEventListener('mouseup', this.onMouseUp); // Listen for any keyboard presses (there is no input so must attach to document) document.addEventListener('keydown', this.handleKey); // Listen for any outside mouse clicks document.addEventListener('mousedown', this.pageClick); // Copy paste event handler document.addEventListener('copy', this.handleCopy); document.addEventListener('paste', this.handlePaste); } }, { key: 'onMouseOver', value: function onMouseOver(i, j) { if (this.state.selecting && (0, _utils.isEmptyObj)(this.state.editing)) { this.setState({ end: { i: i, j: j } }); } } }, { key: 'onMouseUp', value: function onMouseUp() { this.setState({ selecting: false }); document.removeEventListener('mouseup', this.onMouseUp); } }, { key: 'onChange', value: function onChange(i, j, val) { this.props.onChange(this.props.data[i][j], i, j, val); this.setState({ editing: {} }); } }, { key: 'parseStyleSize', value: function parseStyleSize(dimension) { return typeof 
dimension === 'number' ? dimension + 'px' : dimension; } }, { key: 'buildHeader', value: function buildHeader() { var _this2 = this; var _props3 = this.props, headerData = _props3.headerData, valueRenderer = _props3.valueRenderer, attributesRenderer = _props3.attributesRenderer, keyFn = _props3.keyFn; var scrollLeft = this.state.scrollLeft; var rows = headerData.map(function (row, i) { var key = 'header_' + i; return _react2.default.createElement( 'tr', { key: keyFn ? keyFn(key) : key }, row.map(function (cell, j) { var isLastFixed = cell.fixed && _this2.lastFixedColumn === j; var className = [cell.className, isLastFixed && 'last', isLastFixed && scrollLeft && 'scrolling'].filter(function (cn) { return cn; }).join(' '); return _react2.default.createElement(_HeaderCell2.default, { key: cell.key ? cell.key : j, className: className, row: i, col: j, colSpan: cell.colSpan, rowSpan: cell.rowSpan, width: _this2.parseStyleSize(cell.width), overflow: cell.overflow, value: valueRenderer(cell, i, j, true), component: cell.component, attributes: attributesRenderer ? attributesRenderer(cell, i, j, true) : {}, fixed: cell.fixed, left: cell.fixed ? _this2.parseStyleSize(scrollLeft) : null }); }) ); }); return _react2.default.createElement( 'thead', null, rows ); } }, { key: 'buildBody', value: function buildBody() { var _this3 = this; var _props4 = this.props, data = _props4.data, dataRenderer = _props4.dataRenderer, valueRenderer = _props4.valueRenderer, attributesRenderer = _props4.attributesRenderer, keyFn = _props4.keyFn; var _state = this.state, reverting = _state.reverting, editing = _state.editing, clear = _state.clear, start = _state.start, end = _state.end, scrollLeft = _state.scrollLeft; var rows = data.map(function (row, i) { return _react2.default.createElement( 'tr', { key: keyFn ? 
keyFn(i) : i }, row.map(function (cell, j) { var isLastFixed = cell.fixed && _this3.lastFixedColumn === j; var className = [cell.className, isLastFixed && 'last', isLastFixed && scrollLeft && 'scrolling'].filter(function (cn) { return cn; }).join(' '); var props = { key: cell.key ? cell.key : j, className: className, row: i, col: j, selected: (0, _utils.isCellSelected)(start, end, i, j), onMouseDown: _this3.onMouseDown, onDoubleClick: _this3.onDoubleClick, onMouseOver: _this3.onMouseOver, onContextMenu: _this3.onContextMenu, editing: (0, _utils.cellStateComparison)(editing, i, j), reverting: (0, _utils.cellStateComparison)(reverting, i, j), colSpan: cell.colSpan, width: _this3.parseStyleSize(cell.width), overflow: cell.overflow, value: valueRenderer(cell, i, j, false), attributes: attributesRenderer ? attributesRenderer(cell, i, j, false) : {}, fixed: cell.fixed, left: cell.fixed ? _this3.parseStyleSize(scrollLeft) : null }; if (cell.disableEvents) { props.onMouseDown = function () {}; props.onDoubleClick = function () {}; props.onMouseOver = function () {}; props.onContextMenu = function () {}; } if (cell.component) { return _react2.default.createElement(_ComponentCell2.default, _extends({}, props, { forceComponent: cell.forceComponent || false, component: cell.component })); } return _react2.default.createElement(_DataCell2.default, _extends({}, props, { data: dataRenderer ? 
dataRenderer(cell, i, j) : null, clear: (0, _utils.cellStateComparison)(clear, i, j), rowSpan: cell.rowSpan, onChange: _this3.onChange, readOnly: cell.readOnly })); }) ); }); return _react2.default.createElement( 'tbody', null, rows ); } }, { key: 'render', value: function render() { var _this4 = this; var _props5 = this.props, className = _props5.className, overflow = _props5.overflow, width = _props5.width, height = _props5.height; var scrollTop = this.state.scrollTop; var fullCN = ['data-grid', className, overflow, scrollTop && 'scrolling'].filter(function (c) { return c; }).join(' '); var style = { width: this.parseStyleSize(width), height: this.parseStyleSize(height) }; var header = this.buildHeader(); var body = this.buildBody(); return _react2.default.createElement( 'div', { ref: function ref(r) { return _this4.dgDom = r; }, className: 'data-grid-wrapper fixed', style: style }, _react2.default.createElement( 'table', { className: 'dtg-virtual-header ' + fullCN, style: { top: scrollTop } }, header ), _react2.default.createElement( 'table', { className: 'dtg-main ' + fullCN }, header, body ) ); } }]); return FixedDataSheet; }(_react.PureComponent); exports.default = FixedDataSheet; FixedDataSheet.propTypes = { data: _propTypes2.default.array.isRequired, headerData: _propTypes2.default.array.isRequired, width: _propTypes2.default.string.isRequired, height: _propTypes2.default.string.isRequired, className: _propTypes2.default.string, overflow: _propTypes2.default.oneOf(['wrap', 'nowrap', 'clip']), onChange: _propTypes2.default.func.isRequired, onContextMenu: _propTypes2.default.func, valueRenderer: _propTypes2.default.func.isRequired, dataRenderer: _propTypes2.default.func, parsePaste: _propTypes2.default.func }; FixedDataSheet.defaultProps = { width: '800px', height: '400px' };
class BaseModel:
    """Common base for the demo animals: stores a name and an age.

    ``make_sound`` is a no-op hook that each concrete subclass overrides
    with its own species-specific print.
    """

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def make_sound(self):
        """Hook for subclasses; the base implementation does nothing."""
        return None


class Dog(BaseModel):
    """A dog; adds a breed attribute."""

    def __init__(self, name, age, breed):
        super().__init__(name, age)
        self.breed = breed

    def make_sound(self):
        print("Woof!")


class Cat(BaseModel):
    """A cat; adds a color attribute."""

    def __init__(self, name, age, color):
        super().__init__(name, age)
        self.color = color

    def make_sound(self):
        print("Meow!")


class Bird(BaseModel):
    """A bird; adds a species attribute."""

    def __init__(self, name, age, species):
        super().__init__(name, age)
        self.species = species

    def make_sound(self):
        print("Chirp!")


# Example usage
dog = Dog("Buddy", 3, "Golden Retriever")
dog.make_sound()  # Output: Woof!

cat = Cat("Whiskers", 5, "Gray")
cat.make_sound()  # Output: Meow!

bird = Bird("Polly", 2, "Parrot")
bird.make_sound()  # Output: Chirp!
#!/bin/bash
# /usr/libexec/oci-kvm-config.sh
# Copyright (c) 2017, 2019 Oracle and/or its affiliates. All rights reserved.
# Licensed under the Universal Permissive License v 1.0 as shown at http://oss.oracle.com/licenses/upl.
#
# Configure SR-IOV virtual functions on every NIC that supports them:
# bring the link up if needed, set the VF count, switch the embedded
# bridge to VEPA mode, wait for the VF netdevs to appear and set their MTU.

defaultMTU=9000
defaultMaxWait=240

# To get the default number of VFs we use the number of CPU siblings which
# corresponds to the BM model
# BM.Standard1.36 have 36 siblings: 1 + 35 = 36 total allowable vNics
# BM.Standard1.52 have 52 siblings: 2 + 50 = 52 total allowable vNics
# (NOTE: Above is PhysicalNics + vNics)
# Since we do not support assigning the vNic on the physical Nic to guests
# the number of VFs end up being 35 on 1.36 and 50 on 2.52
declare -i siblings=$(head -11 /proc/cpuinfo \
                      | grep 'siblings' \
                      | awk -F: '{print $2}' \
                      | sed 's/ //g')
defaultNumVFs=$((siblings - 1))
((siblings > 36)) && defaultNumVFs=$((siblings / 2 - 1))
[[ -z "${defaultNumVFs}" ]] && defaultNumVFs=16

# Environment overrides; fall back to the computed defaults on unset or
# out-of-range values (MTU below the IPv6 minimum of 1280 is rejected).
declare -i numVFs=${NUM_VFS:-${defaultNumVFs}}
((numVFs == 0)) && numVFs=${defaultNumVFs}
declare -i vfMTU=${MTU:-${defaultMTU}}
((vfMTU < 1280)) && vfMTU=${defaultMTU}
declare -i maxWait=${MAX_WAIT:-${defaultMaxWait}}
# BUG FIX: was ${defaultmaxWait} (wrong case), which expanded to empty and
# left maxWait unset when MAX_WAIT was below the minimum.
((maxWait < 5)) && maxWait=${defaultMaxWait}

echo "Default MTU for interfaces: ${vfMTU}"
echo "Default max virtual function count for interfaces: ${numVFs}"

netSysPath=/sys/class/net
for nic in ${netSysPath}/*
do
  numVFDevPath=${nic}/device/sriov_numvfs
  # Only NICs exposing sriov_numvfs support virtual functions.
  if test -f "${numVFDevPath}"
  then
    nic_name=$(basename ${nic})
    is_up=$(/bin/cat ${nic}/carrier 2>/dev/null)
    # BUG FIX: was [ "is_up" == 0 ] — the literal string "is_up" never equals
    # "0", so a down link with readable carrier was never brought up.
    if [ $? -ne 0 ] || [ "$is_up" == "0" ]
    then
      # Read failed or carrier is '0': the link is down; bring it up.
      echo "Bringing ${nic_name} link up"
      /sbin/ip link set ${nic_name} up
      if [ $? -ne 0 ]
      then
        echo "ERROR: Failed to bring up ${nic_name}" >&2
        exit 1
      fi
    fi
    echo "setting ${numVFs} as number of VFs for ${nic}"
    echo "${numVFs}" >${numVFDevPath}
    echo "setting hwmode node to vepa for ${nic}"
    /sbin/bridge link set dev ${nic_name} hwmode vepa
    vfNum=0
    echo "Waiting for VFs to appear"
    while ((vfNum < numVFs))
    do
      vfNetDir="${nic}/device/virtfn${vfNum}/net/"
      # maxWait is a shared budget (in 0.25s ticks) across all VFs.
      while ((maxWait > 0)) && ! test -d ${vfNetDir}
      do
        sleep 0.25
        ((maxWait--))
      done
      if ! test -d ${vfNetDir}
      then
        echo "ERROR: Virtual Function ${vfNum} never appeared!" >&2
        exit 1
      fi
      vfName="$(ls -1 ${vfNetDir} | head -1)"
      echo "Setting default MTU on VF ${vfName}"
      [[ -n "${vfName}" ]] && /sbin/ip link set dev ${vfName} mtu ${vfMTU}
      ((vfNum++))
    done
  fi
done
echo "Calling /usr/libexec/oci-kvm-upgrade"
/usr/bin/python3 /usr/libexec/oci-kvm-upgrade
#!/bin/bash # Copyright 2020-2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ if [ $# != 2 ] && [ $# != 3 ] then echo "Usage: sh run_distribute_train.sh [MINDRECORD_FILE] [RANK_TABLE] [PRETRAINED_BACKBONE]" echo " or: sh run_distribute_train.sh [MINDRECORD_FILE] [RANK_TABLE]" exit 1 fi get_real_path(){ if [ "${1:0:1}" == "/" ]; then echo "$1" else echo "$(realpath -m $PWD/$1)" fi } current_exec_path=$(pwd) echo ${current_exec_path} dirname_path=$(dirname "$(pwd)") echo ${dirname_path} export PYTHONPATH=${dirname_path}:$PYTHONPATH SCRIPT_NAME='train.py' rm -rf ${current_exec_path}/device* ulimit -c unlimited MINDRECORD_FILE=$(get_real_path $1) RANK_TABLE=$(get_real_path $2) PRETRAINED_BACKBONE='' if [ $# == 3 ] then PRETRAINED_BACKBONE=$(get_real_path $3) if [ ! -f $PRETRAINED_BACKBONE ] then echo "error: PRETRAINED_PATH=$PRETRAINED_BACKBONE is not a file" exit 1 fi fi echo $MINDRECORD_FILE echo $RANK_TABLE echo $PRETRAINED_BACKBONE export RANK_TABLE_FILE=$RANK_TABLE export RANK_SIZE=8 echo 'start training' for((i=0;i<=$RANK_SIZE-1;i++)); do echo 'start rank '$i mkdir ${current_exec_path}/device$i cd ${current_exec_path}/device$i || exit export RANK_ID=$i dev=`expr $i + 0` export DEVICE_ID=$dev python ${dirname_path}/${SCRIPT_NAME} \ --mindrecord_path=$MINDRECORD_FILE \ --pretrained=$PRETRAINED_BACKBONE > train.log 2>&1 & done echo 'running'
package com.jensen.draculadaybyday.sql_lite.DateConstraintArg;

import android.os.Parcel;
import android.os.Parcelable;
import android.util.Log;

import java.time.LocalDateTime;

/**
 * Date constraint matching an exact date/time: contributes a "column = ?"
 * clause to a SQLite query, bound to the wrapped LocalDateTime.
 * Parcelable so it can be passed between Android components.
 */
public class ExactDateConstraintArg extends DateConstraintArg {

    public ExactDateConstraintArg(LocalDateTime dateTime) {
        // localDateTime is inherited from DateConstraintArg.
        this.localDateTime = dateTime;
    }

    // Format template for the SQL fragment; %s is filled with the column name
    // by the caller, '?' is the bound parameter slot.
    public String getSQLText() {
        return "%s = ?";
    }

    @Override
    public String toString() {
        return "Exact date";
    }

    //region Parcelable
    public static final Parcelable.Creator<ExactDateConstraintArg> CREATOR = new Parcelable.Creator<ExactDateConstraintArg>() {
        public ExactDateConstraintArg createFromParcel(Parcel in) {
            return new ExactDateConstraintArg(in);
        }

        public ExactDateConstraintArg[] newArray(int size) {
            return new ExactDateConstraintArg[size];
        }
    };

    public int describeContents() {
        return 0;
    }

    // NOTE(review): if writeSerializable throws, e.getMessage() can be null and
    // Log.d may reject a null message — consider logging the exception object;
    // verify against the base class before changing.
    public void writeToParcel(Parcel out, int flags) {
        try {
            out.writeSerializable(this.localDateTime);
        } catch (Exception e) {
            Log.d("DateConstraint", e.getMessage());
        }
    }

    // Reconstructs the constraint from a Parcel (used by CREATOR).
    private ExactDateConstraintArg(Parcel in) {
        this.localDateTime = (LocalDateTime) in.readSerializable();
    }
    //endregion
}
import markovify

# Build a Markov chain model from the corpus in data.txt.
with open("data.txt") as f:
    data = f.read()

model = markovify.Text(data)

# Generate the lyrics.
# BUG FIX: markovify's make_sentence() returns None when it cannot build a
# sentence that passes its overlap checks; the original code appended those
# None values to the list. Skip failed attempts instead.
lyrics = []
for _ in range(20):
    sentence = model.make_sentence()
    if sentence is not None:
        lyrics.append(sentence)

print(lyrics)
# Build the Dockerfile in the current directory into the image cpp_compile:1.1.0.
docker build -t cpp_compile:1.1.0 .
/**
 * Adobe Illustrator artboard exporter (ExtendScript).
 *
 * Installs ExportImageError, ExportTypes and Exporter on `global`.
 * Exporter walks the active document's artboards and writes each one out
 * as SVG, PNG or JPG via doc.exportFile().
 *
 * Fixes vs. the previous revision:
 *  - `! x instanceof T` precedence bugs (the negation bound to x, so the
 *    instanceof test was always `false instanceof T` === false) in
 *    export() and insureFile();
 *  - the nested doExport() helper referenced `this`, which is not the
 *    Exporter inside a plain function call — it now closes over `self`;
 *  - removed the unused fixFileNames() helper (only referenced from
 *    commented-out code).
 */
!(function(global, FileList) {

    var ExportImageError = function(message) {
        this.name = 'ExportImageError';
        this.message = message;
    };
    // NOTE(review): sharing Error.prototype means mutations leak onto Error;
    // ExtendScript (ES3) has no Object.create, so left as-is deliberately.
    ExportImageError.prototype = Error.prototype;

    var ExportTypes = {
        JPG : 'JPG',
        SVG : 'SVG',
        PNG : 'PNG'
    };

    var Exporter = function() {
        // Operates on whatever document is active when constructed.
        this.doc = app.activeDocument;
    }

    /**
     * Export a range of artboards into a single SVG file.
     * @param theFile  destination file (path or File)
     * @param theRange artboard range string, e.g. "1-3"; defaults to ''
     */
    Exporter.prototype.exportRangeAsSVG = function(theFile, theRange) {
        if (typeof theRange !== 'string') {
            theRange = '';
        }

        var options = new ExportOptionsSVG();
        options.embedRasterImages = false;
        options.cssProperties = SVGCSSPropertyLocation.PRESENTATIONATTRIBUTES;
        options.fontSubsetting = SVGFontSubsetting.None;
        options.documentEncoding = SVGDocumentEncoding.UTF8;
        options.saveMultipleArtboards = true;
        options.artboardRange = theRange;
        options.coordinatePrecision = 3;

        this.doc.exportFile(new File(theFile), ExportType.SVG, options);
    }

    /**
     * Export every artboard individually.
     * @param theFolder destination folder (path or Folder); created if missing
     * @param theType   one of ExportTypes ('SVG' | 'PNG' | 'JPG')
     * @param sizes     scale percentages (ignored for SVG)
     */
    Exporter.prototype.export = function(theFolder, theType, sizes) {
        try {
            // Captured for the nested helper: a plain nested function call has
            // its own `this`, not the Exporter instance.
            var self = this;

            var func = 'exportAs' + theType.toUpperCase();

            if (this[func] === undefined) return;
            if (! (this[func] instanceof Function)) return;

            // BUG FIX: was `! theFolder instanceof Folder`, which always
            // evaluated to false, so string paths were never wrapped/created.
            if (! (theFolder instanceof Folder)) {
                theFolder = new Folder(theFolder);

                if (! theFolder.exists) {
                    try {
                        theFolder.create();
                    }
                    catch(e) {
                        throw new Error('Folder ' + theFolder.name + ' does not exist and could not be created.');
                    }
                }
            }

            theType = theType.toUpperCase();

            if (theType === ExportTypes.SVG) {
                // SVG is vector: a single pass at 100%, no scale suffix.
                doExport(theFolder, 100, '');
            }
            else {
                // Raster formats: one pass per requested scale, suffixed
                // '@<scale/100>' (e.g. '@2.0' for 200%).
                for (var i = 0; i < sizes.length; i++) {
                    var div = Number(sizes[i] / 100).toFixed(1);
                    doExport(theFolder, sizes[i], '@' + div);
                }
            }

            // Walk artboards back-to-front, derive the output name from the
            // artboard name ('Prefix_name-parts_suffix' -> 'name-parts'),
            // and dispatch to the chosen exportAsXXX method.
            function doExport(theFolder, scale, sfx) {
                var artboard;

                for (var i = self.doc.artboards.length - 1; i >= 0; i--) {
                    self.doc.artboards.setActiveArtboardIndex(i);
                    artboard = self.doc.artboards[i];

                    var parts = artboard.name.split('_');
                    var theName = parts.slice(1, parts.length - 1).join('-');

                    console.info('[Export Name]', theName);

                    var theFile = new File(
                        theFolder.fsName + "/" + theName + sfx + "." + theType.toLowerCase()
                    );

                    // BUG FIX: was this[func].call(this, ...) with the wrong
                    // `this` binding inside the nested function.
                    self[func].call(self, theFile, scale);
                }
            }
        }
        catch(e) {
            throw e;
        }
    }

    /** Coerce a path string into a File; pass Files through unchanged. */
    Exporter.prototype.insureFile = function(theFile) {
        console.info('theFile instanceof File : ' + (theFile instanceof File));
        // BUG FIX: was `! theFile instanceof File` (always false), so path
        // strings were returned unwrapped.
        if (! (theFile instanceof File)) {
            return new File(theFile);
        }
        return theFile;
    }

    /** Export the whole document as a single SVG file. */
    Exporter.prototype.exportAsSVG = function(theFile) {
        theFile = this.insureFile(theFile);

        var options = new ExportOptionsSVG();
        options.embedRasterImages = false;
        options.cssProperties = SVGCSSPropertyLocation.PRESENTATIONATTRIBUTES;
        options.fontSubsetting = SVGFontSubsetting.None;
        options.documentEncoding = SVGDocumentEncoding.UTF8;
        options.coordinatePrecision = 4;

        this.doc.exportFile(new File(theFile), ExportType.SVG, options);
    }

    /** Export the active artboard as PNG-24 at the given scale (percent). */
    Exporter.prototype.exportAsPNG = function(theFile, scale) {
        theFile = this.insureFile(theFile);

        var options = new ExportOptionsPNG24();
        options.antiAliasing = true;
        options.transparency = false;
        options.artBoardClipping = true;
        options.verticalScale = scale;
        options.horizontalScale = scale;

        this.doc.exportFile(new File(theFile), ExportType.PNG24, options);
    }

    /** Export the active artboard as JPEG at the given scale (percent). */
    Exporter.prototype.exportAsJPG = function(theFile, scale) {
        theFile = this.insureFile(theFile);

        var options = new ExportOptionsJPEG();
        options.antiAliasing = true;
        options.artBoardClipping = true;
        options.horizontalScale = scale;
        options.verticalScale = scale;

        this.doc.exportFile(new File(theFile), ExportType.JPEG, options);
    }

    global.ExportImageError = ExportImageError;
    global.ExportTypes = ExportTypes;
    global.Exporter = Exporter;

    return Exporter;

})(this, FileList);
<gh_stars>0
package com.srini.learning.algods.udemy.ds.array;

/**
 * Placeholder for a custom array implementation (Udemy data-structures
 * course exercise). No members implemented yet.
 */
public class MyArray {
}
#!/bin/bash set -eo pipefail dir="$(dirname "$(readlink -f "$BASH_SOURCE")")" image="$1" # Build a client image with cgi-fcgi for testing clientImage="librarytest/monica-fpm-run:fcgi-client" docker build -t "$clientImage" - > /dev/null <<'EOF' FROM debian:stretch-slim RUN set -x && apt-get update && apt-get install -y libfcgi0ldbl && rm -rf /var/lib/apt/lists/* ENTRYPOINT ["cgi-fcgi"] EOF dbImage='mysql:8.0' # ensure the dbImage is ready and available if ! docker image inspect "$dbImage" &> /dev/null; then docker pull "$dbImage" > /dev/null fi # Create an instance of the container-under-test mysqlCid="$(docker run -d \ -e MYSQL_RANDOM_ROOT_PASSWORD=true \ -e MYSQL_DATABASE=monica \ -e MYSQL_USER=homestead \ -e MYSQL_PASSWORD=secret \ "$dbImage")" trap "docker rm -vf $mysqlCid > /dev/null" EXIT cid="$(docker run -d \ --link "$mysqlCid":mysql \ -e DB_HOST=mysql \ "$image")" trap "docker rm -vf $cid $mysqlCid > /dev/null" EXIT fcgi-request() { local method="$1" local url="$2" local requestUri="$3" local queryString= if [[ "$url" == *\?* ]]; then queryString="${url#*\?}" url="${url%%\?*}" fi docker run --rm -i --link "$cid":fpm \ -e REQUEST_METHOD="$method" \ -e SCRIPT_NAME="$url" \ -e SCRIPT_FILENAME=/var/www/html/public/"${url#/}" \ -e QUERY_STRING="$queryString" \ -e REQUEST_URI="$requestUri" \ "$clientImage" \ -bind -connect fpm:9000 } # Make sure that PHP-FPM is listening and ready . "$dir/../../retry.sh" --tries 30 'fcgi-request GET /index.php' > /dev/null 2>&1 # Check that we can request /register and that it contains the pattern "Welcome" somewhere fcgi-request GET '/index.php' register | grep -i "Welcome" > /dev/null
<filename>__tests__/Domain/DatetimeRange.spec.ts<gh_stars>0
import DatetimeRange from "@app/Domain/DatetimeRange";
import each from "jest-each";

// Table-driven test for DatetimeRange.contains.
// Columns: [begin, end, aDatetime, expectedResult].
// Covers: before begin (false), exactly begin (true), inside (true),
// exactly end (true) — i.e. the range is inclusive at both bounds —
// and after end (false).
each([
  [
    new Date("2020-01-01T00:00:00+09:00"),
    new Date("2020-01-03T00:00:00+09:00"),
    new Date("2019-12-31T00:00:00+09:00"),
    false,
  ],
  [
    new Date("2020-01-01T00:00:00+09:00"),
    new Date("2020-01-03T00:00:00+09:00"),
    new Date("2020-01-01T00:00:00+09:00"),
    true,
  ],
  [
    new Date("2020-01-01T00:00:00+09:00"),
    new Date("2020-01-03T00:00:00+09:00"),
    new Date("2020-01-02T00:00:00+09:00"),
    true,
  ],
  [
    new Date("2020-01-01T00:00:00+09:00"),
    new Date("2020-01-03T00:00:00+09:00"),
    new Date("2020-01-03T00:00:00+09:00"),
    true,
  ],
  [
    new Date("2020-01-01T00:00:00+09:00"),
    new Date("2020-01-03T00:00:00+09:00"),
    new Date("2020-01-04T00:00:00+09:00"),
    false,
  ],
]).test(
  "contains/指定の日時が日時範囲に含まれていればtrue、さもなくばfalse",
  (
    begin: Date,
    end: Date,
    aDatetime: Date,
    expectedResult: boolean,
  ) => {
    const dateTimeRange = new DatetimeRange(begin, end);
    expect(dateTimeRange.contains(aDatetime)).toBe(expectedResult);
  },
);
<reponame>rotationalio/whisper<filename>web/src/components/AboutUs.tsx<gh_stars>1-10
import { Box, Typography } from "@material-ui/core";
import { useModal } from "contexts";
import { useServerStatus } from "contexts/serverStatusContext";
import React from "react";
import { ModalType } from "utils/enums/modal";
import Badge from "./Badge";
import Modal from "./Modal";

/**
 * "About Whisper" modal: static project description plus a footer showing
 * the server's host, health status badge and version (from the
 * server-status context).
 */
const AboutUs: React.FC = () => {
  const { state, dispatch } = useModal();
  const [status] = useServerStatus();
  // Maps the server-reported status string to a badge color.
  // NOTE(review): key "maintainance" is misspelled — it must match the exact
  // string the status endpoint returns ("maintenance"?); verify against the
  // API before renaming, since an unmatched status yields an undefined color.
  const StatusColor = { ok: "green", maintainance: "yellow", unhealthy: "red" };

  const handleClose = () => dispatch({ type: ModalType.HIDE_MODAL });

  return (
    <Modal open={state.modalType === ModalType.SHOW_ABOUT_US_MODAL} onClose={handleClose}>
      <>
        <div>
          <h3>About Whisper</h3>
          <p>
            The Whisper service is an internal helper tool used at <a href="https://rotational.io">Rotational Labs</a>{" "}
            to quickly share secrets, configurations, environment files, credentials, certificates, and more. Whisper
            is designed to accelerate our own internal software engineering practice and is comprised of an API
            service that is accessed by both a web UI and a command line application. There are many tools like
            Whisper, but this one is ours!
          </p>
          <p>
            To download the CLI application, report bugs or issues, or learn more about Whisper, please see the
            README.md file in the Whisper GitHub repository:{" "}
            <a href="https://github.com/rotationalio/whisper">rotationalio/whisper</a>.
          </p>
          <p>
            Although Whisper is an internal tool at Rotational, We&apos;ve made the code open source and are happy to
            have general contributions that enhance the project (particularly if you&apos;re a member of the
            Rotational Engineering Team!) We&apos;ve made our releases and the code freely available under the{" "}
            <a href="https://github.com/rotationalio/whisper/blob/main/LICENSE">Apache License 2.0</a> and we&apos;d
            feel privileged if you used Whisper in your own organization. Please note, however, that Rotational Labs
            makes no guarantees about the security of this software project and provides all code and binaries as is
            for general use. Use with common sense and at your own risk!
          </p>
          <p>
            If you&apos;re a Rotational customer and are interested in Whisper, please let us know, we&apos;d be happy
            to deploy it for you as a single-tenant service. If you&apos;re not a Rotational customer but are
            interested, please get in touch with us at <a href="mailto:<EMAIL>"><EMAIL></a>.
          </p>
        </div>
        <footer>
          <Box display="flex" flexDirection="column" alignItems="center">
            {/* Status badge and version render only when both fields are present. */}
            <Box display="flex" alignItems="center" gridGap=".5rem" marginBottom=".3rem">
              {status.status && status.version && (
                <>
                  <Typography variant="caption">
                    {status.status && (
                      <span>
                        <Badge content={`${status?.host || "server"} status`} color={StatusColor[status?.status]} />
                      </span>
                    )}
                  </Typography>
                  <Typography variant="caption">
                    version: <span style={{ fontWeight: "bold" }}>{status?.version}</span>
                  </Typography>
                </>
              )}
            </Box>
            <Typography>
              Made with &spades; by <a href="https://rotational.io/">Rotational Labs</a>
            </Typography>
          </Box>
        </footer>
      </>
    </Modal>
  );
};

export default AboutUs;
import '../img/icon-16.png';
import '../img/icon-19.png';
import '../img/icon-32.png';
import '../img/icon-38.png';
import '../img/icon-48.png';
import '../img/icon-128.png';
import '../css/options.css';

import I18n from './utils/i18n';
import OptionManager from './utils/option-manager';

// Default option values used until the user saves something.
const defaults = {
  like_what: 'subscribed',
  like_when: 'instantly',
  disabled: false,
};

const optionManager = new OptionManager(defaults);
const i18n = new I18n();

// Issue body template for the bug-report link; the debug log is appended
// below the separator.
const bugReportTemplate = `
<!--
  Thanks for reporting! A debug log is already attached.
  If you have any other info that might be helpful, please write above the line.
-->

__________________________

### Log

`;

i18n.populateText();

/**
 * Populate the form fields from the saved options and refresh the
 * "report an issue" link with the current debug log attached.
 */
async function loadOptions() {
  const options = await optionManager.get();

  for (const field of document.querySelectorAll('input')) {
    if (!Object.prototype.hasOwnProperty.call(options, field.name)) continue;

    const value = options[field.name];
    if (field.type === 'radio' || field.type === 'checkbox') {
      field.checked = field.value === value;
    } else {
      field.value = value;
    }
  }

  chrome.storage.sync.get({ log: '[no log found]' }, ({ log }) => {
    // Add options state to report issue link
    const reportLink = document.querySelector('#report-link');
    const url = `https://github.com/austencm/youtube-auto-like/issues/new?labels=bug&body=${encodeURIComponent(bugReportTemplate + log)}`;
    reportLink.setAttribute('href', url);
  });
}

/** Persist the current form state, then re-load it and show save progress. */
async function handleOptionsChange(e) {
  const updated = {};

  // Extract form data
  for (const [name, value] of new FormData(e.currentTarget).entries()) {
    updated[name] = value;
  }

  setStatus('saving...');
  await optionManager.set(updated);
  await loadOptions();
  setTimeout(() => setStatus('saved'), 300);
}

/** Show a short status message next to the form. */
function setStatus(status = '') {
  document.querySelector('.status').innerText = status;
}

loadOptions();

// When the user changes an option, save it
document
  .querySelector('#options-form')
  .addEventListener('change', handleOptionsChange);
// Webpack configuration: development build of ./main.js into ./dist/main.js.
module.exports = {
  entry: './main.js',
  // Fast rebuilds with usable stack traces; not suitable for production.
  devtool: 'cheap-module-source-map',
  output: {
    path: `${__dirname}/dist`,
    filename: 'main.js',
  },
  module: {
    rules: [{
      // Transpile all JS through Babel.
      test: /\.js$/,
      loader: 'babel-loader',
    }, {
      // SCSS pipeline: sass -> postcss -> css -> injected <style> tags.
      test: /\.scss$/,
      use: [
        'style-loader',
        {
          loader: 'css-loader',
          options: {
            // Run one loader (postcss) on @imported files before css-loader.
            importLoaders: 1,
            // NOTE(review): `minimize` was removed from css-loader v1+;
            // confirm the pinned css-loader version still accepts it.
            minimize: true,
          },
        },
        'postcss-loader',
        'sass-loader',
      ],
    }],
  },
  mode: 'development',
  optimization: {
    // minimize: true
  },
};
# Launch training (flag semantics inferred from the flag names — confirm
# against main.py's argument parser):
#   --model-forward-with-file-name  forward pass also receives the file name
#   --num-workers 3                 data-loader worker processes
#   --epochs 100                    max epochs
#   --no-best-epochs 50             presumably early-stop patience — TODO confirm
#   --sampler block_shuffle_by_length  batch by similar lengths, shuffle blocks
#   --lr 0.0003 / --lr-decay-factor 0.5 / --lr-scheduler-type 1  LR schedule
#   --seed 100000                   fixed RNG seed for reproducibility
# stdout -> log_train, stderr -> log_err.
python main.py --model-forward-with-file-name --num-workers 3 --epochs 100 --no-best-epochs 50 --batch-size 64 --sampler block_shuffle_by_length --lr-decay-factor 0.5 --lr-scheduler-type 1 --lr 0.0003 --seed 100000 > log_train 2>log_err
#include <vector> #include <fftw3.h> std::vector<std::vector<double>> calculateDCT(const std::vector<std::vector<double>>& inputImage, int width, int height) { // Initialize FFTW plans and arrays fftw_plan plan; int n = width * height; double* in = (double*) fftw_malloc(sizeof(double) * n); double* out = (double*) fftw_malloc(sizeof(double) * n); fftw_complex* inDCT = (fftw_complex*) fftw_malloc(sizeof(fftw_complex) * n); fftw_complex* outDCT = (fftw_complex*) fftw_malloc(sizeof(fftw_complex) * n); // Create plan for forward DCT plan = fftw_plan_r2r_2d(width, height, in, out, FFTW_REDFT10, FFTW_REDFT10, FFTW_ESTIMATE); // Populate input array with image data for (int i = 0; i < height; i++) { for (int j = 0; j < width; j++) { in[i * width + j] = inputImage[i][j]; } } // Execute the plan to compute the DCT fftw_execute(plan); // Copy the computed DCT coefficients to the output array for (int i = 0; i < height; i++) { for (int j = 0; j < width; j++) { inDCT[i * width + j][0] = out[i * width + j]; inDCT[i * width + j][1] = 0.0; // Imaginary part is zero for real input } } // Clean up and free memory fftw_destroy_plan(plan); fftw_free(in); fftw_free(out); // Convert the DCT coefficients to a 2D vector for output std::vector<std::vector<double>> dctCoefficients(height, std::vector<double>(width, 0.0)); for (int i = 0; i < height; i++) { for (int j = 0; j < width; j++) { dctCoefficients[i][j] = inDCT[i * width + j][0]; } } // Free memory for DCT coefficients fftw_free(inDCT); fftw_free(outDCT); return dctCoefficients; }
import service from 'Src/infra/http';

/**
 * Thin authentication wrapper over the shared HTTP client.
 */
const LoginService = {
  /**
   * POST the given credentials to `/login`.
   *
   * @param name     account name
   * @param password plain-text password (sent in the request body)
   * @returns the raw HTTP client promise
   *          NOTE(review): typed Promise<any> — consider a typed response
   *          shape once the login payload is pinned down.
   */
  login(name: string, password: string): Promise<any> {
    return service.post('/login', {
      name,
      password,
    });
  },
};

export default LoginService;
#!/bin/bash

# Copyright 2017 Johns Hopkins University (Shinji Watanabe)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)

# Builds per-utterance scp files (features, tokenized text, ids, shapes,
# language/category/speaker info) from a Kaldi-style data directory and a
# dictionary, then merges them into a single JSON via merge_scp2json.py.

echo "$0 $*" >&2 # Print the command line for logging
. ./path.sh

nj=1
cmd=run.pl
nlsyms=""
lang=""
feat="" # feat.scp
oov="<unk>"
bpecode=""
bpe_dropout=0
allow_one_column=false
verbose=0
trans_type=char
filetype=""
preprocess_conf=""
category=""
out="" # If omitted, write in stdout
text=""
multilingual=false

help_message=$(cat << EOF
Usage: $0 <data-dir> <dict>
e.g. $0 data/train data/lang_1char/train_units.txt
Options:
  --nj <nj>                                        # number of parallel jobs
  --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs.
  --feat <feat-scp>                                # feat.scp or feat1.scp,feat2.scp,...
  --oov <oov-word>                                 # Default: <unk>
  --out <outputfile>                               # If omitted, write in stdout
  --filetype <mat|hdf5|sound.hdf5>                 # Specify the format of feats file
  --preprocess-conf <json>                         # Apply preprocess to feats when creating shape.scp
  --verbose <num>                                  # Default: 0
EOF
)
. utils/parse_options.sh

if [ $# != 2 ]; then
    echo "${help_message}" 1>&2
    exit 1;
fi

set -euo pipefail

dir=$1
dic=$2
# Work in a throw-away temp dir that is removed even on error/interrupt.
tmpdir=$(mktemp -d ${dir}/tmp-XXXXX)
trap 'rm -rf ${tmpdir}' EXIT

# Default transcript location inside the data dir unless overridden.
if [ -z ${text} ]; then
    text=${dir}/text
fi

# 1. Create scp files for inputs
#    These are not necessary for decoding mode, and make it as an option
input=
if [ -n "${feat}" ]; then
    # ${feat} may be a comma-separated list of feature scp files; each one
    # becomes its own input_<i> group.
    _feat_scps=$(echo "${feat}" | tr ',' ' ' )
    read -r -a feat_scps <<< $_feat_scps
    num_feats=${#feat_scps[@]}

    for (( i=1; i<=num_feats; i++ )); do
        feat=${feat_scps[$((i-1))]}
        mkdir -p ${tmpdir}/input_${i}
        input+="input_${i} "
        cat ${feat} > ${tmpdir}/input_${i}/feat.scp

        # Dump in the "legacy" style JSON format
        if [ -n "${filetype}" ]; then
            awk -v filetype=${filetype} '{print $1 " " filetype}' ${feat} \
                > ${tmpdir}/input_${i}/filetype.scp
        fi

        feat_to_shape.sh --cmd "${cmd}" --nj ${nj} \
            --filetype "${filetype}" \
            --preprocess-conf "${preprocess_conf}" \
            --verbose ${verbose} ${feat} ${tmpdir}/input_${i}/shape.scp
    done
fi

# 2. Create scp files for outputs
mkdir -p ${tmpdir}/output
if [ -n "${bpecode}" ]; then
    if [ ${multilingual} = true ]; then
        # remove a space before the language ID
        paste -d " " <(awk '{print $1}' ${text}) <(cut -f 2- -d" " ${text} \
            | spm_encode --model=${bpecode} --output_format=piece --alpha=${bpe_dropout} | cut -f 2- -d" ") \
            > ${tmpdir}/output/token.scp
    else
        paste -d " " <(awk '{print $1}' ${text}) <(cut -f 2- -d" " ${text} \
            | spm_encode --model=${bpecode} --output_format=piece --alpha=${bpe_dropout}) \
            > ${tmpdir}/output/token.scp
    fi
elif [ -n "${nlsyms}" ]; then
    text2token.py -s 1 -n 1 -l ${nlsyms} ${text} --trans_type ${trans_type} > ${tmpdir}/output/token.scp
else
    text2token.py -s 1 -n 1 ${text} --trans_type ${trans_type} > ${tmpdir}/output/token.scp
fi
# Map token symbols to integer ids using the dictionary (OOVs -> ${oov}).
< ${tmpdir}/output/token.scp utils/sym2int.pl --map-oov ${oov} -f 2- ${dic} > ${tmpdir}/output/tokenid.scp
# +2 comes from CTC blank and EOS
vocsize=$(tail -n 1 ${dic} | awk '{print $2}')
odim=$(echo "$vocsize + 2" | bc)
< ${tmpdir}/output/tokenid.scp awk -v odim=${odim} '{print $1 " " NF-1 "," odim}' > ${tmpdir}/output/shape.scp

cat ${text} > ${tmpdir}/output/text.scp

# 3. Create scp files for the others
mkdir -p ${tmpdir}/other
if [ ${multilingual} == true ]; then
    # Derive the language ID from the trailing "-<lang>" suffix of the utt id.
    awk '{
        n = split($1,S,"[-]");
        lang=S[n];
        print $1 " " lang
    }' ${text} > ${tmpdir}/other/lang.scp
elif [ -n "${lang}" ]; then
    awk -v lang=${lang} '{print $1 " " lang}' ${text} > ${tmpdir}/other/lang.scp
fi

if [ -n "${category}" ]; then
    awk -v category=${category} '{print $1 " " category}' ${dir}/text \
        > ${tmpdir}/other/category.scp
fi
cat ${dir}/utt2spk > ${tmpdir}/other/utt2spk.scp

# 4. Merge scp files into a JSON file
opts=""
if [ -n "${feat}" ]; then
    intypes="${input} output other"
else
    intypes="output other"
fi
for intype in ${intypes}; do
    # Skip groups that produced no scp files at all.
    if [ -z "$(find "${tmpdir}/${intype}" -name "*.scp")" ]; then
        continue
    fi

    if [ ${intype} != other ]; then
        opts+="--${intype%_*}-scps "
    else
        opts+="--scps "
    fi

    for x in "${tmpdir}/${intype}"/*.scp; do
        k=$(basename ${x} .scp)
        if [ ${k} = shape ]; then
            opts+="shape:${x}:shape "
        else
            opts+="${k}:${x} "
        fi
    done
done

if ${allow_one_column}; then
    opts+="--allow-one-column true "
else
    opts+="--allow-one-column false "
fi

if [ -n "${out}" ]; then
    opts+="-O ${out}"
fi
merge_scp2json.py --verbose ${verbose} ${opts}

rm -fr ${tmpdir}
<reponame>hyena/mutgos_server<gh_stars>0 /* * message_AuthenticationRequest.cpp */ #include <string> #include "osinterface/osinterface_OsTypes.h" #include "message_MessageFactory.h" #include "utilities/json_JsonUtilities.h" #include "clientmessages/message_ClientMessage.h" #include "message_AuthenticationRequest.h" namespace { // Static registration const bool CLIENT_AUTHENTICATION_REQUEST_FACTORY_REG = mutgos::message::MessageFactory::register_message( mutgos::message::CLIENTMESSAGE_AUTHENTICATION_REQUEST, mutgos::message::AuthenticationRequest::make_instance); const static std::string PLAYER_NAME_KEY = "name"; const static std::string PLAYER_PASSWORD_KEY = "password"; const static std::string PLAYER_SITE_ID_KEY = "site"; const static std::string PLAYER_RECONNECT_KEY = "isReconnect"; const static std::string WINDOW_SIZE_KEY = "windowSize"; } namespace mutgos { namespace message { // ---------------------------------------------------------------------- AuthenticationRequest::AuthenticationRequest(void) : ClientMessage(CLIENTMESSAGE_AUTHENTICATION_REQUEST), player_site_id(0), player_reconnect(false), window_size(0) { } // ---------------------------------------------------------------------- AuthenticationRequest::AuthenticationRequest( const AuthenticationRequest &rhs) : ClientMessage(rhs), player_name(rhs.player_name), player_password(rhs.player_password), player_site_id(rhs.player_site_id), player_reconnect(rhs.player_reconnect), window_size(rhs.window_size) { } // ---------------------------------------------------------------------- AuthenticationRequest::~AuthenticationRequest() { } // ---------------------------------------------------------------------- ClientMessage *AuthenticationRequest::make_instance(void) { return new AuthenticationRequest(); } // ---------------------------------------------------------------------- ClientMessage *AuthenticationRequest::clone(void) const { return new AuthenticationRequest(*this); } // 
---------------------------------------------------------------------- bool AuthenticationRequest::save( json::JSONRoot &root, json::JSONNode &node) const { return false; } // ---------------------------------------------------------------------- bool AuthenticationRequest::restore(const json::JSONNode &node) { bool success = ClientMessage::restore(node); success = json::get_key_value( PLAYER_NAME_KEY, node, player_name) and success; success = json::get_key_value( PLAYER_PASSWORD_KEY, node, player_password) and success; success = json::get_key_value( PLAYER_SITE_ID_KEY, node, player_site_id) and success; success = json::get_key_value( PLAYER_RECONNECT_KEY, node, player_reconnect) and success; success = json::get_key_value( WINDOW_SIZE_KEY, node, window_size) and success; return success; } } }
<filename>App/src/main/java/com/honyum/elevatorMan/base/ListItemCallback.java package com.honyum.elevatorMan.base; /** * Created by Star on 2017/6/10. */ public interface ListItemCallback<T> { void performItemCallback(T data); }
def bubble_sort(list):
    """Sort a list of comparable items in ascending order, in place.

    Uses bubble sort with two standard optimizations: stop as soon as a
    full pass makes no swap, and shrink the scanned prefix each pass
    (after pass k the k largest items are already in their final slots).

    Note: the parameter is named ``list`` for backward compatibility; it
    shadows the builtin inside this function.

    :param list: mutable sequence to sort (mutated in place)
    :return: the same (now sorted) sequence
    """
    n = len(list)
    swapped = True
    while swapped:
        swapped = False
        # Compare each adjacent pair in the still-unsorted prefix.
        for index in range(n - 1):
            if list[index] > list[index + 1]:
                list[index], list[index + 1] = list[index + 1], list[index]
                swapped = True
        # The largest remaining element has bubbled to position n-1.
        n -= 1
    return list

# Example
result = bubble_sort([5, 12, 43, 25, 1])
print(result)
// eslint-disable-next-line import/no-extraneous-dependencies
import axios from 'axios';
// eslint-disable-next-line import/no-extraneous-dependencies
import pino from 'pino';

const logger = pino({ level: process.env.LOG_LEVEL || 'info', prettyPrint: true, colorize: true });

/**
 * Fluent builder around axios: chain url()/method()/body()/params()/headers()
 * and finish with send(). Each setter returns `this`; send() returns the
 * axios response (the error response for HTTP-level failures).
 */
class Requests {
  constructor() {
    this.options = {};
  }

  /** Set the request URL. */
  url(url) {
    this.options = { ...this.options, url };
    return this;
  }

  /** Set the HTTP method (e.g. 'get', 'post'). */
  method(method) {
    this.options = { ...this.options, method };
    return this;
  }

  /** Set the request body (shallow-copied so later mutation of the source object is safe). */
  body(data) {
    const { ...object } = data;
    this.options = { ...this.options, data: object };
    return this;
  }

  /** Set the query-string parameters. */
  params(params) {
    this.options = { ...this.options, params };
    return this;
  }

  /** Set the JWT Authorization header; pass null for an unauthenticated request. */
  headers(token) {
    const headers = (token === null) ? null : { Authorization: `JWT ${token}` };
    // const headers = { Authorization: `JWT ${token}` };
    this.options = { ...this.options, headers };
    return this;
  }

  /**
   * Execute the built request, logging request and response bodies.
   *
   * @param nameRequest label used to tag the log lines
   * @returns the axios response; for HTTP errors (4xx/5xx) the error
   *          response is returned instead of throwing. Errors without any
   *          HTTP response (network failure, timeout) are re-thrown.
   */
  async send(nameRequest) {
    logger.info(`${nameRequest} request: method is ${this.options.method}, url is ${this.options.url}, body is ${JSON.stringify(this.options.data, null, 4)}`);
    try {
      const response = await axios({
        ...this.options,
      });
      logger.info(`${nameRequest} response: status is ${response.status}, body is ${JSON.stringify(response.data, null, 4)}`);
      return response;
    } catch (e) {
      // Fix: axios errors without an HTTP response (DNS/network failure,
      // timeout) carry no `e.response`; the old code crashed here with a
      // TypeError. Re-throw the original error for those cases.
      if (!e.response) {
        logger.error(`${nameRequest} failed without a response: ${e.message}`);
        throw e;
      }
      logger.info(`${nameRequest} response: status is ${e.response.status}, body is ${JSON.stringify(e.response.data, null, 4)}`);
      return e.response;
    }
  }
}

export default Requests;
package com.twu.biblioteca.options;

import com.twu.biblioteca.Biblioteca;
import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Created by gdias on 7/31/15.
 *
 * Unit tests for AbstractOption: equals() semantics, the overridable
 * execute() hook, and showDetails() formatting. A throw-away anonymous
 * subclass (getConcreteOption) stands in for a concrete option.
 */
public class AbstractOptionTest {

    @Test
    public void equals_ShouldReturnFalse_WhenTheParameterIsNotAnOption() throws Exception {
        AbstractOption optionA = getConcreteOption(1, "List books");
        // Any non-AbstractOption object must compare unequal.
        String fakeOption = "AHuahaa";

        assertFalse(optionA.equals(fakeOption));
    }

    @Test
    public void equals_ShouldReturnTrue_WhenOptionsAreTheSame() throws Exception {
        AbstractOption optionA = getConcreteOption(1, "List books");
        // Same reference — reflexive equality.
        AbstractOption optionB = optionA;

        assertTrue(optionA.equals(optionB));
    }

    @Test
    public void equals_ShouldReturnFalse_WhenOptionsAreDifferent() throws Exception {
        // Same name, different id — should be unequal.
        AbstractOption optionA = getConcreteOption(1, "List books");
        AbstractOption optionB = getConcreteOption(2, "List books");

        assertFalse(optionA.equals(optionB));
    }

    @Test
    public void execute_ShouldOverrideExecuteMethod() throws Exception {
        AbstractOption overridenOption = getConcreteOption(0, "");
        // The anonymous subclass below returns "" from execute().
        assertEquals("", overridenOption.execute(new Biblioteca()));
    }

    @Test
    public void showDetails_ShouldReturnIdAndName() throws Exception {
        AbstractOption option = getConcreteOption(1, "List books");
        assertEquals("1 - List books", option.showDetails());
    }

    /** Builds a minimal concrete AbstractOption whose execute() returns "". */
    private AbstractOption getConcreteOption(int id, String name) {
        AbstractOption optionA = new AbstractOption(id, name) {
            @Override
            public String execute(Biblioteca biblioteca) {
                return "";
            }
        };
        return optionA;
    }
}
package aggregate

import (
	"log"
	"net/http"
	"reflect"

	"github.com/a-zara-n/Matchlock/src/domain/value"
)

// HTTPMessages bundles an HTTP request/response pair together with an
// editable copy of the request and a flag recording whether it was edited.
type HTTPMessages struct {
	value.Identifier
	IsEdit      bool
	Request     *Request
	EditRequest *Request
	Response    *Response
}

// HTTPDataDefinitionByJSON is the JSON data definition used over WebSocket.
type HTTPDataDefinitionByJSON struct {
	Identifier        string `json:"Identifier"`
	RequestMethod     string `json:"Method"`
	RequestPath       string `json:"Path"`
	RequestProto      string `json:"Proto"`
	RequestHost       string `json:"Host"`
	RequestHeaders    string `json:"Header"`
	RequestParam      string `json:"Param"`
	RequestEditMethod string `json:"EditMethod"`
	RequestEditPath   string `json:"EditPath"`
	RequestEditProto  string `json:"EditProto"`
	RequestEditHost   string `json:"EditHost"`
	RequestEditHeaders string `json:"EditHeader"`
	RequestEditParam  string `json:"EditParam"`
	ResponseHeaders   string `json:"ResHeader"`
	Body              string `json:"ReqBody"`
}

// NewHTTPMessage creates a fresh, empty HTTPMessages value.
func NewHTTPMessage() HTTPMessages {
	return HTTPMessages{
		Identifier:  value.Identifier{},
		IsEdit:      false,
		Request:     &Request{},
		EditRequest: &Request{},
		Response:    &Response{},
	}
}

// IsEdited reports whether the edit copy differs from the original request
// (header values, request info, or body data) and latches h.IsEdit to true
// when a difference is found.
func (h *HTTPMessages) IsEdited() bool {
	var flag bool
	// Compare every header key present on the edited request.
	// NOTE(review): the two sides are read differently
	// (h.Request.Header.Get vs h.EditRequest.Header.Header.Get) — confirm
	// both resolve to the same underlying header lookup.
	for _, key := range h.EditRequest.Header.GetKeys() {
		if h.Request.Header.Get(key) != h.EditRequest.Header.Header.Get(key) {
			flag = true
		}
	}
	if flag || !reflect.DeepEqual(h.Request.Info, h.EditRequest.Info) || !reflect.DeepEqual(h.Request.Data, h.EditRequest.Data) {
		log.Println("変更が発生しました")
		h.IsEdit = true
		return true
	}
	return false
}

// SetRequest stores the original request and derives the identifier from its URL.
func (h *HTTPMessages) SetRequest(req *http.Request) {
	h.Identifier.Set(req.URL.String())
	h.Request = NewHTTPRequestByRequest(req)
}

// SetEditedRequest stores the (possibly user-modified) request copy.
func (h *HTTPMessages) SetEditedRequest(req *http.Request) {
	h.EditRequest = NewHTTPRequestByRequest(req)
}

// SetResponse stores the response.
func (h *HTTPMessages) SetResponse(resp *http.Response) {
	h.Response = NewHTTPResponseByResponse(resp)
}

// FetchRequest rebuilds an *http.Request from the stored original request.
func (h *HTTPMessages) FetchRequest() *http.Request {
	return h.Request.GetHTTPRequestByRequest()
}

// FetchEditRequest rebuilds an *http.Request from the edited request copy.
func (h *HTTPMessages) FetchEditRequest() *http.Request {
	return h.EditRequest.GetHTTPRequestByRequest()
}

// FetchResponse rebuilds an *http.Response from the stored response.
func (h *HTTPMessages) FetchResponse() *http.Response {
	return h.Response.GetHTTPRequestByResponse()
}
/**
 * Copyright (c) 2018, salesforce.com, inc.
 * All rights reserved.
 * SPDX-License-Identifier: BSD-3-Clause
 * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
 */
package com.demandware.carbonj.service.engine;

import java.io.IOException;
import java.util.List;

import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.demandware.carbonj.service.events.EventsLogger;
import com.demandware.carbonj.service.db.TimeSeriesStore;
import com.demandware.carbonj.service.db.model.Series;
import com.demandware.carbonj.service.db.util.SystemTime;
import com.google.gson.Gson;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;

/**
 * Servlet to serve series data to graphite.
 *
 * Supported query parameters: target (metric name or "random"), from/until
 * (epoch seconds), now (override of current time), format ("json" or
 * pickle by default), randomTest (pick a random metric and a 24h window).
 */
public class GraphiteSeriesDataServlet
    extends HttpServlet
{
    private Logger LOG = LoggerFactory.getLogger( GraphiteSeriesDataServlet.class );

    // Optional bean: doGet() fails fast with a RuntimeException when absent.
    @Autowired(required = false) private TimeSeriesStore store;

    @Autowired @Qualifier("queryBlacklist") private MetricList queryBlacklist;

    // NOTE(review): optional bean but used unguarded in the blacklist branch
    // of doGet() — NPE if the bean is absent and a blacklisted query arrives.
    @Autowired(required = false) @Qualifier("CarbonjEventsLogger") EventsLogger logger;

    @Override
    public void init( ServletConfig config )
        throws ServletException
    {
        super.init( config );
        // wire spring beans referenced by this servlet
        WebApplicationContext ctx =
            WebApplicationContextUtils.getWebApplicationContext( config.getServletContext() );
        if ( ctx != null )
        {
            ctx.getAutowireCapableBeanFactory().autowireBean( this );
        }
        LOG.info( GraphiteSeriesDataServlet.class.getSimpleName() + " initialized." );
    }

    @Override
    public void destroy()
    {
    }

    @Override
    protected void doGet( HttpServletRequest req, HttpServletResponse res )
        throws IOException
    {
        if( store == null )
        {
            throw new RuntimeException( "Time Series store is not configured." );
        }

        String format = req.getParameter("format");
        String target = req.getParameter( "target" );
        String from = req.getParameter( "from" );
        String until = req.getParameter( "until" );
        String nowText = req.getParameter("now");
        // "now" defaults to wall-clock time but can be pinned for testing.
        int now = SystemTime.nowEpochSecond();
        if( nowText != null )
        {
            now = Integer.parseInt( nowText );
        }

        boolean randomTest = req.getParameter("randomTest") != null;
        boolean json = "json".equals( format );

        if( json )
        {
            res.setContentType( "application/json" );
        }
        else
        {
            res.setContentType( "application/pickle" );
        }
        res.setHeader("Cache-Control","no-cache");
        res.setHeader("Pragma","no-cache");

        // randomTest fills in missing target/time-range with a random metric
        // and roughly the last 24 hours.
        if( randomTest )
        {
            if( target == null )
            {
                target = store.selectRandomMetric().name;
            }

            if( from == null && until == null)
            {
                until = String.valueOf(now);
                from = String.valueOf(now - 24 * 60 * 60 + 120);
            }
        }

        // NOTE(review): NPE when the "target" parameter is missing and
        // randomTest is not set — consider "random".equals(target).
        if( target.equals("random") )
        {
            target = store.selectRandomMetric().name;
        }

        if( queryBlacklist.match(target) )
        {
            String msg = String.format("Query pattern [%s] is blacklisted.", target);
            LOG.warn(msg);
            logger.log(new BlackListedQueryEvent(target, from, until, nowText));
            throw new RuntimeException(msg);
        }

        // NOTE(review): from/until are parsed unchecked below —
        // NumberFormatException (or NPE when absent) surfaces as a 500.
        if( json )
        {
            List<Series> series = store.fetchSeriesData( new Query(target, Integer.parseInt( from ),
                Integer.parseInt( until ), now, System.currentTimeMillis()) );
            Gson gson = new Gson();
            res.getWriter().write( gson.toJson( series ) );
            res.getWriter().close();
        }
        else
        {
            // Stream pickled series directly to the response; always close
            // the stream so partial output is flushed even on failure.
            ResponseStream seriesStream = new GraphitePickler( false, res.getOutputStream() );
            try
            {
                store.streamSeriesData(new Query(target, Integer.parseInt(from), Integer.parseInt(until),
                    now, System.currentTimeMillis()), seriesStream);
            }
            finally
            {
                seriesStream.close();
            }
        }
    }
}
/*
 *
 */
package jnlp.sample.util.log;

import java.util.Map;
import java.util.ResourceBundle;
import java.util.TreeMap;

import javax.servlet.ServletConfig;

/**
 * <P>Copyright 2008 as per GPLv2</P>
 *
 * Default {@link LoggerManager}: holds one template logger built from the
 * servlet config and hands out per-name clones of it, cached (case-
 * insensitively) by logger name.
 *
 * @author <NAME>.
 * @since Feb 23, 2009 2:42:20 PM
 */
public class DefaultLoggerManager implements LoggerManager {
    public DefaultLoggerManager ()
    {
        super();
    }

    // Template logger; created once by initLogger() and cloned per name.
    private DefaultLogger    _defaultLogger;
    /*
     * @see jnlp.sample.util.log.LoggerManager#initLogger(javax.servlet.ServletConfig, java.util.ResourceBundle)
     */
    @Override
    public void initLogger (ServletConfig config, ResourceBundle resources)
    {
        // Idempotent: only the first call creates the template logger.
        if (null == _defaultLogger)
            _defaultLogger = new DefaultLogger(config, resources);
    }

    // Case-insensitive cache of named logger clones.
    private final Map<String,DefaultLogger>    _loggersMap=new TreeMap<String,DefaultLogger>(String.CASE_INSENSITIVE_ORDER);
    /*
     * @see jnlp.sample.util.log.LoggerManager#getLogger(java.lang.String)
     *
     * Returns the default logger for a null/empty name, otherwise a cached
     * clone of the default logger carrying the requested name. Throws
     * IllegalStateException when initLogger() was never called.
     */
    @Override
    public Logger getLogger (String loggerName)
    {
        DefaultLogger    l=_defaultLogger;
        if ((loggerName != null) && (loggerName.length() > 0))
        {
            synchronized(_loggersMap)
            {
                if ((l=_loggersMap.get(loggerName)) != null)
                    return l;

                if (_defaultLogger != null)
                {
                    try
                    {
                        l = _defaultLogger.clone();
                    }
                    catch(CloneNotSupportedException e)
                    {
                        throw new UnsupportedOperationException("getLogger(" + loggerName + ") failed to clone");
                    }
                    l.setLoggerName(loggerName);
                    _loggersMap.put(loggerName, l);
                }
            }
        }

        if (null == l)
            throw new IllegalStateException("getLogger(" + loggerName + ") no logger initializer");
        return l;
    }
}
#!/bin/sh set -e UNSIGNED=$1 SIGNATURE=$2 ARCH=x86_64 ROOTDIR=dist BUNDLE=${ROOTDIR}/Darkpaycoin-Qt.app TEMPDIR=signed.temp OUTDIR=signed-app if [ -z "$UNSIGNED" ]; then echo "usage: $0 <unsigned app> <signature>" exit 1 fi if [ -z "$SIGNATURE" ]; then echo "usage: $0 <unsigned app> <signature>" exit 1 fi rm -rf ${TEMPDIR} && mkdir -p ${TEMPDIR} tar -C ${TEMPDIR} -xf ${UNSIGNED} tar -C ${TEMPDIR} -xf ${SIGNATURE} if [ -z "${PAGESTUFF}" ]; then PAGESTUFF=${TEMPDIR}/pagestuff fi if [ -z "${CODESIGN_ALLOCATE}" ]; then CODESIGN_ALLOCATE=${TEMPDIR}/codesign_allocate fi for i in `find ${TEMPDIR} -name "*.sign"`; do SIZE=`stat -c %s ${i}` TARGET_FILE=`echo ${i} | sed 's/\.sign$//'` echo "Allocating space for the signature of size ${SIZE} in ${TARGET_FILE}" ${CODESIGN_ALLOCATE} -i ${TARGET_FILE} -a ${ARCH} ${SIZE} -o ${i}.tmp OFFSET=`${PAGESTUFF} ${i}.tmp -p | tail -2 | grep offset | sed 's/[^0-9]*//g'` if [ -z ${QUIET} ]; then echo "Attaching signature at offset ${OFFSET}" fi dd if=$i of=${i}.tmp bs=1 seek=${OFFSET} count=${SIZE} 2>/dev/null mv ${i}.tmp ${TARGET_FILE} rm ${i} echo "Success." done mv ${TEMPDIR}/${ROOTDIR} ${OUTDIR} rm -rf ${TEMPDIR} echo "Signed: ${OUTDIR}"
from typing import List
import heapq


class MinPathSum:
    """Minimum path sum from the top-left to the bottom-right of a grid,
    moving only down or right, computed with Dijkstra's algorithm.

    (Plain DP would suffice for down/right moves; Dijkstra is kept for
    interface compatibility.)
    """

    def __init__(self, grid: List[List[int]]):
        self.grid = grid

    def minPathSum(self, grid: List[List[int]]) -> int:
        """Return the minimal sum of cell values along a down/right path
        from grid[0][0] to grid[m-1][n-1], inclusive of both endpoints.

        Fixes vs. the previous version: the starting cell's value was
        omitted from the path sum (the queue was seeded with distance 0 and
        the distance table was initialized at the bogus index [0][1]), so
        every answer was short by grid[0][0]. The distance matrix was also
        one row/column larger than needed.
        """
        m, n = len(grid), len(grid[0])
        distance = [[float('inf')] * n for _ in range(m)]
        distance[0][0] = grid[0][0]  # the path cost includes the start cell
        pq = [(grid[0][0], 0, 0)]  # (accumulated cost, row, column)
        directions = [(1, 0), (0, 1)]  # down and right movements

        while pq:
            dist, r, c = heapq.heappop(pq)
            if r == m - 1 and c == n - 1:  # reached bottom-right cell
                return dist
            if dist > distance[r][c]:  # stale entry — a shorter path exists
                continue
            for dr, dc in directions:
                nr, nc = r + dr, c + dc
                if 0 <= nr < m and 0 <= nc < n:
                    new_dist = dist + grid[nr][nc]
                    if new_dist < distance[nr][nc]:
                        distance[nr][nc] = new_dist
                        heapq.heappush(pq, (new_dist, nr, nc))

        return -1  # unreachable for a non-empty grid, kept for safety
#!/usr/bin/env bash # FishTank - Gnome Terminal color scheme install script for Base24 # Iterm2B24 [[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="Base 24 FishTank" [[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="base-24-fishtank" [[ -z "$DCONF" ]] && DCONF=dconf [[ -z "$UUIDGEN" ]] && UUIDGEN=uuidgen dset() { local key="$1"; shift local val="$1"; shift if [[ "$type" == "string" ]]; then val="'$val'" fi "$DCONF" write "$PROFILE_KEY/$key" "$val" } # Because dconf still doesn't have "append" dlist_append() { local key="$1"; shift local val="$1"; shift local entries="$( { "$DCONF" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val" echo "'$val'" } | head -c-1 | tr "\n" , )" "$DCONF" write "$key" "[$entries]" } # Newest versions of gnome-terminal use dconf if which "$DCONF" > /dev/null 2>&1; then # Check that uuidgen is available type $UUIDGEN >/dev/null 2>&1 || { echo >&2 "Requires uuidgen but it's not installed. Aborting!"; exit 1; } [[ -z "$BASE_KEY_NEW" ]] && BASE_KEY_NEW=/org/gnome/terminal/legacy/profiles: if [[ -n "`$DCONF list $BASE_KEY_NEW/`" ]]; then if which "$UUIDGEN" > /dev/null 2>&1; then PROFILE_SLUG=`uuidgen` fi if [[ -n "`$DCONF read $BASE_KEY_NEW/default`" ]]; then DEFAULT_SLUG=`$DCONF read $BASE_KEY_NEW/default | tr -d \'` else DEFAULT_SLUG=`$DCONF list $BASE_KEY_NEW/ | grep '^:' | head -n1 | tr -d :/` fi DEFAULT_KEY="$BASE_KEY_NEW/:$DEFAULT_SLUG" PROFILE_KEY="$BASE_KEY_NEW/:$PROFILE_SLUG" # Copy existing settings from default profile $DCONF dump "$DEFAULT_KEY/" | $DCONF load "$PROFILE_KEY/" # Add new copy to list of profiles dlist_append $BASE_KEY_NEW/list "$PROFILE_SLUG" # Update profile values with theme options dset visible-name "'$PROFILE_NAME'" dset palette "['#03063c', '#c60049', '#abf157', '#fdcd5e', '#525fb8', '#976f81', '#968662', '#eceffc', '#6c5a30', '#d94a8a', '#daffa8', '#fee6a8', '#b1bdf9', '#fda4cc', '#a4bc86', '#f6ffec']" dset background-color "'#222436'" dset foreground-color "'#ccc9c9'" dset bold-color "'#ccc9c9'" dset bold-color-same-as-fg 
"true" dset cursor-colors-set "true" dset cursor-background-color "'#ccc9c9'" dset cursor-foreground-color "'#03063c'" dset use-theme-colors "false" dset use-theme-background "false" unset PROFILE_NAME unset PROFILE_SLUG unset DCONF unset UUIDGEN exit 0 fi fi # Fallback for Gnome 2 and early Gnome 3 [[ -z "$GCONFTOOL" ]] && GCONFTOOL=gconftool [[ -z "$BASE_KEY" ]] && BASE_KEY=/apps/gnome-terminal/profiles PROFILE_KEY="$BASE_KEY/$PROFILE_SLUG" gset() { local type="$1"; shift local key="$1"; shift local val="$1"; shift "$GCONFTOOL" --set --type "$type" "$PROFILE_KEY/$key" -- "$val" } # Because gconftool doesn't have "append" glist_append() { local type="$1"; shift local key="$1"; shift local val="$1"; shift local entries="$( { "$GCONFTOOL" --get "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val" echo "$val" } | head -c-1 | tr "\n" , )" "$GCONFTOOL" --set --type list --list-type $type "$key" "[$entries]" } # Append the Base24 profile to the profile list glist_append string /apps/gnome-terminal/global/profile_list "$PROFILE_SLUG" gset string visible_name "$PROFILE_NAME" gset string palette "#03063c:#c60049:#abf157:#fdcd5e:#525fb8:#976f81:#968662:#eceffc:#6c5a30:#d94a8a:#daffa8:#fee6a8:#b1bdf9:#fda4cc:#a4bc86:#f6ffec" gset string background_color "#222436" gset string foreground_color "#ccc9c9" gset string bold_color "#ccc9c9" gset bool bold_color_same_as_fg "true" gset bool cursor-colors-set "true" gset string cursor-background-color "'#ccc9c9'" gset string cursor-foreground-color "'#03063c'" gset bool use_theme_colors "false" gset bool use_theme_background "false" unset PROFILE_NAME unset PROFILE_SLUG unset DCONF unset UUIDGEN
#!/bin/bash set -e # Usage usage() { echo "Usage:" echo " ${0} -c <host> -p <port> -A <username> -P <password> -k <KEY> -u <USER>" exit 1 } # Constants SLEEP_TIME=10 while getopts "c:p:A:P:k:u:" opt; do case $opt in c) host=${OPTARG} ;; p) port=${OPTARG} ;; A) username=${OPTARG} ;; P) password=${OPTARG} ;; k) key=${OPTARG} ;; u) user=${OPTARG} ;; *) echo "Invalid parameter(s) or option(s)." usage ;; esac done if [ -z "${host}" ] || [ -z "${port}" ] || [ -z "${username}" ] || [ -z "${password}" ] || [ -z "${key}" ] || [ -z "${user}" ]; then echo "Parameters missing" usage fi echo "Testing Jenkins Connection & Key Presence" until curl --location --output /dev/null --silent --write-out "%{http_code}\\n" \ --user ${username}:${password} \ "http://${host}:${port}/jenkins/userContent/${key}" | grep "200" &> /dev/null do echo "Jenkins or key unavailable, sleeping for ${SLEEP_TIME}" sleep "${SLEEP_TIME}" done echo "Retrieving value: ${key}" ssh_key=$(curl --silent --request GET --user ${username}:${password} "http://${host}:${port}/jenkins/userContent/${key}") echo "Checking if \"${user}\" exists" if curl --location --output /dev/null --silent --write-out "%{http_code}\\n" "http://localhost:8080/gerrit/accounts/${user}" | grep "404" &> /dev/null; then echo "User does not exist: ${user}" exit 1 fi echo "*** Verify key already exists... Gerrit does not do this ..." # Download the stored key and decode from to UTF-8 using echo -e the -n switch from echo allows to remove the trailing \n that echo would add. # The decode part is necessary as Gerrit correctly encode the SSH key and as a result = sign is converted to \u003d stored_key=$(echo -e $(curl --user ${username}:${password} --silent http://localhost:8080/gerrit/a/accounts/${user}/sshkeys | grep "ssh_public_key" | awk '{split($0, a, ": "); print a[2]}' | sed 's/[",]//g')) [[ "$stored_key" == "$ssh_key" ]] && exit 0 || echo "****** Stored key is not same as downloaded, uploading it ..." 
echo "Uploading key to Gerrit user \"${user}\"" curl --request POST --user "${username}:${password}" --data "${ssh_key}" "http://localhost:8080/gerrit/a/accounts/${user}/sshkeys"
require 'timeout'

# Mixin for Eye process objects: pid-file persistence, identity checking
# (does the pid file still belong to the same process?), signalling, and
# command execution helpers.
module Eye::Process::System

  # Read the pid from the pid file; nil on any read/parse failure.
  def load_pid_from_file
    File.read(self[:pid_file_ex]).to_i
  rescue
    nil
  end

  # Write the current pid to the pid file; false when there is no pid.
  def save_pid_to_file
    if self.pid
      File.open(self[:pid_file_ex], 'w') do |f|
        f.write self.pid
      end
      true
    else
      false
    end
  end

  # Remove the pid file. With check_content: only when the file still holds
  # our own pid (avoids deleting a file another process has taken over).
  def clear_pid_file(check_content = false)
    return if check_content && self.pid && load_pid_from_file != self.pid
    info "delete pid_file: #{self[:pid_file_ex]}"
    File.unlink(self[:pid_file_ex])
    true
  rescue
    nil
  end

  # Pid-file change time; falls back to "now" when the file is unreadable.
  def pid_file_ctime
    File.ctime(self[:pid_file_ex])
  rescue
    Time.now
  end

  # Pid-file mtime, used as the process "identity" stamp; nil when missing.
  def get_identity
    File.mtime(self[:pid_file_ex])
  rescue Errno::ENOENT
    nil
  end

  # Compare the pid file's mtime with the process start time to detect a
  # stale pid file (pid reused by another process) or a touched file.
  # Returns :ok, :fail, :touched, or one of the :no_* diagnostics.
  def compare_identity(pid = self.pid)
    return :ok unless self[:check_identity]
    return :no_pid unless pid

    id = get_identity
    return :no_pid_file unless id

    st = Eye::SystemResources.start_time(pid)
    return :no_start_time unless st

    st1 = st.to_i
    id1 = id.to_i

    if (id1 - st1).abs > self[:check_identity_grace]
      args = Eye::SystemResources.args(pid)
      msg = "pid_file: '#{Eye::Utils.human_time2(id)}', process: '#{Eye::Utils.human_time2(st)}' (#{args})"
      # File older than the process => the pid was reused (:fail);
      # newer => someone touched the pid file (:touched).
      res = (id1 < st1) ? :fail : :touched
      warn "compare_identity: #{res}, #{msg}"
      res
    else
      :ok
    end
  end

  def process_really_running?
    process_pid_running?(self.pid)
  end

  def process_pid_running?(pid)
    Eye::System.pid_alive?(pid)
  end

  # Send a signal to our pid, logging the outcome; true on success.
  def send_signal(code)
    res = Eye::System.send_signal(self.pid, code)
    msg = "send_signal #{code} to <#{self.pid}>"
    msg += ", error<#{res[:error]}>" if res[:error]
    info msg
    res[:result] == :ok
  end

  # Poll the block every +step+ seconds until it returns truthy or the
  # accumulated elapsed time exceeds +timeout+ (then false).
  def wait_for_condition(timeout, step = 0.1, &_block)
    res = nil
    sumtime = 0

    loop do
      tm = Time.now
      res = yield # note that yield can block actor here and timeout can be overhead
      return res if res

      sleep step.to_f
      sumtime += (Time.now - tm)
      return false if sumtime > timeout
    end
  end

  # Run a command off-actor (defer) and notify on non-zero exit status.
  def execute(cmd, cfg = {})
    defer { Eye::System.execute cmd, cfg }.tap do |res|
      notify(:debug, "Bad exit status of command #{cmd.inspect}(#{res[:exitstatus].inspect})") if res[:exitstatus] != 0
    end
  end

  def daemonize(cmd, cfg = {})
    Eye::System.daemonize(cmd, cfg)
  end

  # Synchronous execute with a short default timeout, result logged.
  def execute_sync(cmd, opts = { timeout: 1.second })
    execute(cmd, self.config.merge(opts)).tap do |res|
      info "execute_sync `#{cmd}` with res: #{res}"
    end
  end

  # Fire-and-forget daemonized execute, result logged.
  def execute_async(cmd, opts = {})
    daemonize(cmd, self.config.merge(opts)).tap do |res|
      info "execute_async `#{cmd}` with res: #{res}"
    end
  end

  # Load the pid, retrying once after a short sleep; nil unless positive.
  def failsafe_load_pid
    pid = load_pid_from_file

    unless pid
      # this is can be symlink changed case
      sleep 0.1
      pid = load_pid_from_file
    end

    pid if pid && pid > 0
  end

  # Save the pid, logging (not raising) on failure; returns success flag.
  def failsafe_save_pid
    save_pid_to_file
    true
  rescue => ex
    log_ex(ex)
    false
  end

  # Resolve +path+ relative to the configured working directory.
  def expand_path(path)
    File.expand_path(path, self[:working_dir])
  end

end
<gh_stars>1-10
package io.github.talhahasanzia.annotation;

import io.github.talhahasanzia.constants.CharacterSet;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Field-level configuration for random string generation: how long the
 * generated value should be and which character classes it may draw from.
 * Retained at runtime so generators can read it reflectively.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface CharacterConfig {

    /**
     * Length of string to be generated, defaults to 8 characters.
     *
     * @return length
     */
    int length() default 8;

    /**
     * Describes the set of characters that will be used in string's random generation!
     * Must be one of the int constants declared in
     * {@link io.github.talhahasanzia.constants.CharacterSet}.
     *
     * @return character-set flag; defaults to {@code CharacterSet.ALL}
     */
    int characterSet() default CharacterSet.ALL;
}
#!/bin/sh
# Print a color-coded cheat sheet of the CFEngine 3 promise syntax pattern.
# Legend: RED = reserved word, BLUE = user's choice, GREEN = promise context,
# CYAN = punctuation.
#
# Fix: the color variables were assigned empty strings (the raw ANSI escape
# bytes were evidently lost), so the "color-coded" legend printed with no
# colors at all. Restore them portably with printf octal escapes.
B="$(printf '\033[34m')" # blue
G="$(printf '\033[32m')" # green
R="$(printf '\033[31m')" # red
Y="$(printf '\033[33m')" # yellow
C="$(printf '\033[36m')" # cyan
N="$(printf '\033[0m')"  # end color text, return to [N]ormal text
clear
cat <<EOF
${R}RED – CFEngine reserved word${N}
${B}BLUE – User's choice$N
${G}GREEN – Promise Context$N
${C}CYAN – Punctuation$N

# What is it? What is it for? What is it called?

${R}bundle agent|edit_line|server|monitor|common|...$N ${B}my_example_bundle$N
${C}{${N}

${R}files|processes|packages|commands|...${C}:${N} # Type of promise. What part of the system does it affect?

  ${G}(linux|Sunday)&(ITALY|webserver)${C}::${N} # Context. When/where the promise applies.

    ${C}"${B}object${C}"${N} # What is the affected object? (Promiser)
                 # Can be the name of or the pattern for names of
                 # system objects: files, processes, packages, commands,
                 # services, database objects, etc.
                 # Or can be a CFEngine internal object name, such as
                 # a class or a report.

      ${R}comment${N} ${C}=> "${B}The intention: to illustrate CF3 syntax pattern.",
      ${R}handle${N} ${C}=> "${B}syntax_pattern_example_1",
      ${R}attribute1${N} ${C}=> "${B}literal_value1",
      ${R}attribute2${N} ${C}=> "${B}\$(scalar_variable_name)",
      ${R}attribute3${N} ${C}=> { "${B}literal_value1${C}", "${B}literal_value2${C}", ${N}...${C} },
      ${R}attribute4${N} ${C}=> ${C}{ @(${B}list_variable_name${C}) },
      ${R}attribute5${N} ${C}=> ${B}group_of_promises${C},
      ${R}attribute6${N} ${C}=> ${B}group_of_promise_attributes${C},
      ${R}attribute7${N} ${C}=> ${B}function${C}(${N}...${C});
${C}}${N}
EOF
#!/bin/sh rm -i rahw.tgz cd .. GZIP=-9 tar cvzf rahw.tgz --sort=name --exclude=".*" --exclude="*.in" --exclude="make-defs.sh" --exclude="make-tar.sh" --exclude="runinfiles.sh" --exclude="head.txt" --exclude="stats.sh" --exclude="TODO" rahw/ #GZIP=-9 tar cvzf rahw.tgz --sort=name --exclude=".*" rahw/ mv rahw.tgz rahw/
#!/bin/bash # DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # locust -f $DIR/WeVoteLocust.py -H http://localhost:8000 -c 100 -r 10 -n 1000 --no-web --print-stats WeVoteLocust DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" locust -f $DIR/WeVoteLocust.py -H https://api.wevoteusa.org -c 100 -r 10 -n 1000 --no-web --print-stats WeVoteLocust
require_relative 'shared'

# Rack::Handler smoke spec driven by the shared examples in 'shared'.
# Skipped on JRuby because the suite relies on Kernel#fork.
Pork::API.describe Rack::Handler do
  # Minimal Rack app: always responds 200 with "OK".
  def app
    lambda do |_|
      [200, {}, ["OK\n"]]
    end
  end

  # Boot handler +name+ on +port+ inside a forked child and yield to the
  # caller's block. For webrick the block runs from #run's startup callback
  # and the server is shut down via SIGTERM (webrick's #run blocks);
  # other handlers get the block invoked directly.
  def run name, port, &block
    fork do
      if server = get(name)
        trap 'TERM' do
          server.shutdown
        end if name == 'webrick' # the way to stop webrick
        server.run(app, :Port => port){ block.call(name) }
      else
        # NOTE(review): unknown handler — the block runs with no server
        # started; presumably the shared examples tolerate this. Confirm.
        block.call
      end
      exit! # exit the child immediately, skipping at_exit hooks
    end
  end

  paste :shared
end unless RUBY_ENGINE == 'jruby'
// Extract the comma-separated argument list between the outermost parentheses
// of a stringified call, trimmed, with empty entries dropped.
const getParenthesisContent = str => {
  const inner = str.slice(str.indexOf('(') + 1, str.lastIndexOf(')'))
  const pieces = inner.split(',').map(piece => piece.trim())
  return pieces.filter(piece => piece !== '')
}

// Return the numeric value of `str` when it parses cleanly as a number,
// otherwise the string itself.
const convertToNumberMaybe = str => {
  const parsed = Number(str)
  return Number.isNaN(parsed) ? str : parsed
}

// Clamp floating-point noise by rounding to at most ten decimal places.
const roundToMaxTenDecimals = num => Number(num.toFixed(10))

// Build an {x, y} point with both coordinates rounded to ten decimals.
export const getCoordinate = (x, y) => ({
  x: roundToMaxTenDecimals(x),
  y: roundToMaxTenDecimals(y),
})

// Parse the arguments out of a function-call string,
// e.g. "rgb(255, 0, 0)" -> [255, 0, 0].
export const getFunctionArguments = functionAsString => {
  const rawArgs = getParenthesisContent(functionAsString)
  return rawArgs.map(arg => convertToNumberMaybe(arg))
}
#!/bin/sh set -e set -x mkdir -p build cd build cmake -DPostgreSQL_TYPE_INCLUDE_DIR=/usr/include/postgresql .. make -j 6
#!/bin/bash
# Report curl's timing breakdown (DNS lookup, TCP connect, TLS handshake,
# redirects, pre-transfer, first byte, total) for a URL.
#
# Fix: the target used to be the literal placeholder "WEBSITE", which made the
# script fail unless hand-edited; take the URL as the first argument instead.
url="${1:?Usage: $0 <website-url>}"

curl -s -w 'Testing Website Response Time for :%{url_effective}\n\nLookup Time:\t\t%{time_namelookup}\nConnect Time:\t\t%{time_connect}\nAppCon Time:\t\t%{time_appconnect}\nRedirect Time:\t\t%{time_redirect}\nPre-transfer Time:\t%{time_pretransfer}\nStart-transfer Time:\t%{time_starttransfer}\n\nTotal Time:\t\t%{time_total}\n' -o /dev/null "$url"
<reponame>DawChihLiou/ci-boilerplate<gh_stars>0
import React from 'react'
import Test from '../common/Test'

// Stateless functional component for the About page.
// NOTE(review): `Test` is imported but not used in this file — confirm with
// the rest of the module before removing the import.
const About = () => (
  <div className="text-center">
    <h1>This is About View</h1>
  </div>
)

export default About
#!/bin/sh sudo systemctl stop uwsgi-webapp.service pkill -f "/usr/local/bin/uwsgi --ini /etc/uwsgi/webapp.ini"
<reponame>kindraywind/SimpleBlackJack<filename>src/main/java/com/Card/Suit.java
package com.Card;

/**
 * The four French playing-card suits, each carrying the Unicode glyph used
 * when rendering a card.
 */
public enum Suit {
    HEARTS("♥"), SPADES("♠"), CLUBS("♣"), DIAMONDS("♦");

    // Unicode glyph for this suit.
    private String symbol;

    private Suit (String symbol) {
        this.symbol = symbol;
    }

    /**
     * @return the suit's Unicode symbol, e.g. "♥"
     */
    @Override
    public String toString() {
        return this.symbol;
    }
}
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

// Shared Jest fixtures for DynamoDB-backed tests.

// Handle to the mocked DocumentClient.batchWrite so individual tests can
// queue responses and inspect call arguments.
export const mockBatchWrite = jest.fn();

// Replace the real DynamoDB DocumentClient with a stub whose only member is
// the batchWrite mock above.
jest.mock('aws-sdk/clients/dynamodb', () => ({ DocumentClient: jest.fn(() => ({ batchWrite: mockBatchWrite })) }));

// Neutralize project SDK options so tests never pick up real credentials/regions.
jest.mock('../../lib/utils', () => ({ getAwsSdkOptions: jest.fn().mockReturnValue({}) }));

// Spies for asserting on warning/error logging.
export const consoleWarnSpy = jest.spyOn(console, 'warn');
export const consoleErrorSpy = jest.spyOn(console, 'error');
package main

import "fmt"

// main multiplies two fixed operands and prints the result.
func main() {
	first, second := 5, 7
	fmt.Printf("Product is %d", first*second)
}
########################################################################
# AwsEip is the +aws_eip+ terraform resource,
#
# {https://www.terraform.io/docs/providers/aws/r/eip.html Terraform Docs}
########################################################################
# Currently geo can't create EIPs - only codify existing ones
# It does this by requiring the '_public_ip' attribute and hard-coding the '_geo_id' to that
class GeoEngineer::Resources::AwsEip < GeoEngineer::Resource
  validate :validate_instance_or_network_interface
  validate -> { validate_required_attributes([:_public_ip]) }

  # _terraform_id is resolved lazily from the matched remote resource
  # (its allocation id); _geo_id is simply the user-supplied public IP.
  after :initialize, -> { _terraform_id -> { NullObject.maybe(remote_resource)._terraform_id } }
  after :initialize, -> { _geo_id -> { _public_ip } }

  # Can't associate both an instance and a network interface with an elastic IP.
  # Returns an array of error strings (empty when valid).
  def validate_instance_or_network_interface
    errors = []
    unless instance.nil? || network_interface.nil?
      # Fix: validation message previously read "and Elastic IP ... and EC2 instance".
      errors << "Must associate an Elastic IP with either an EC2 instance or a network interface"
    end
    errors
  end

  # EIPs do not support tags.
  def support_tags?
    false
  end

  # Always create within a VPC
  def vpc
    true
  end

  # Fetch all EIP addresses from AWS, decorating each with the ids
  # GeoEngineer uses for matching (_terraform_id = allocation id,
  # _geo_id = public ip).
  def self._fetch_remote_resources(provider)
    AwsClients.ec2(provider).describe_addresses['addresses'].map(&:to_h).map do |address|
      address[:_terraform_id] = address[:allocation_id]
      address[:_geo_id] = address[:public_ip]
      address
    end
  end
end
#!/bin/sh
# Render the broker config from its template, then launch ActiveMQ in the
# foreground with a fixed 1 GiB heap.
export ACTIVEMQ_DATADIR=/var/lib/activemq

# Substitute environment variables (e.g. ACTIVEMQ_DATADIR) into the config.
envsubst < activemq.xml.template > $ACTIVEMQ_HOME/conf/activemq.xml

# Fix: -Dcom.sun.management.jmxremote was accidentally passed twice; once is enough.
java -Xms1G -Xmx1G -Djava.util.logging.config.file=logging.properties \
  -Dcom.sun.management.jmxremote \
  -Djava.io.tmpdir=$ACTIVEMQ_HOME/tmp -Dactivemq.classpath=$ACTIVEMQ_HOME/conf \
  -Dactivemq.home=$ACTIVEMQ_HOME -Dactivemq.base=$ACTIVEMQ_HOME \
  -Dactivemq.conf=$ACTIVEMQ_HOME/conf -Dactivemq.data=$ACTIVEMQ_DATADIR -jar \
  $ACTIVEMQ_HOME/bin/activemq.jar start
<reponame>barkbay/elasticsearch-k8s-metrics-adapter
// Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE.txt file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

// Package monitoring tracks per-client success/failure counters for the
// metrics adapter, exposes them as Prometheus metrics, and serves a
// /readyz readiness endpoint based on consecutive-failure thresholds.
package monitoring

import (
	"encoding/json"
	"fmt"
	"net/http"
	"sync"

	"github.com/elastic/elasticsearch-adapter/pkg/client"
	"github.com/elastic/elasticsearch-adapter/pkg/config"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/promhttp"
	"k8s.io/klog/v2"
	"sigs.k8s.io/custom-metrics-apiserver/pkg/provider"
)

// defaultFailureThreshold is used when the readiness-probe configuration
// does not specify a failure threshold.
const defaultFailureThreshold = 3

// Prometheus instruments, all labelled by client name and metric type
// (custom/external).
var (
	clientErrors = promauto.NewCounterVec(prometheus.CounterOpts{
		Name: "client_errors_total",
		Help: "The total number of errors raised by a client",
	}, []string{"client", "type"})
	clientSuccess = promauto.NewCounterVec(prometheus.CounterOpts{
		Name: "client_success_total",
		Help: "The total number of successful call to a metrics server",
	}, []string{"client", "type"})
	metrics = promauto.NewGaugeVec(prometheus.GaugeOpts{
		Name: "metrics_count",
		Help: "The current number of metrics served by this metrics server",
	}, []string{"client", "type"})
)

// Counters holds per-client counts, keyed by client name, split by
// metric type.
type Counters struct {
	CustomMetrics   map[string]int `json:"customMetrics,omitempty"`
	ExternalMetrics map[string]int `json:"externalMetrics,omitempty"`
}

// NewCounters returns a Counters with both maps initialized (empty).
func NewCounters() *Counters {
	return &Counters{
		CustomMetrics:   make(map[string]int),
		ExternalMetrics: make(map[string]int),
	}
}

// NewServer builds a monitoring Server for the configured metric servers,
// pre-registering a zero success count for every client/metric-type pair so
// readiness can distinguish "never succeeded" from "unknown client".
func NewServer(adapterCfg *config.Config, port int, enablePrometheusMetrics bool) *Server {
	failureThreshold := adapterCfg.ReadinessProbe.FailureThreshold
	if failureThreshold == 0 {
		failureThreshold = defaultFailureThreshold
	}
	clientSuccesses := NewCounters()
	for _, clientCfg := range adapterCfg.MetricServers {
		if clientCfg.MetricTypes.HasType(config.CustomMetricType) {
			clientSuccesses.CustomMetrics[clientCfg.Name] = 0
		}
		if clientCfg.MetricTypes.HasType(config.ExternalMetricType) {
			clientSuccesses.ExternalMetrics[clientCfg.Name] = 0
		}
	}
	return &Server{
		lock:                    sync.RWMutex{},
		adapterCfg:              adapterCfg,
		monitoringPort:          port,
		enablePrometheusMetrics: enablePrometheusMetrics,
		clientFailures:          NewCounters(),
		clientSuccesses:         clientSuccesses,
		failureThreshold:        failureThreshold,
	}
}

// Server aggregates client health counters (guarded by lock) and serves the
// /metrics and /readyz HTTP endpoints.
// NOTE(review): enablePrometheusMetrics is stored but not consulted in this
// file — Start always registers /metrics; confirm intent.
type Server struct {
	lock                    sync.RWMutex
	adapterCfg              *config.Config
	monitoringPort          int
	failureThreshold        int
	enablePrometheusMetrics bool
	clientFailures          *Counters
	clientSuccesses         *Counters
}

// OnError records a failed call for the given client/metric type and bumps
// the Prometheus error counter.
func (m *Server) OnError(c client.Interface, metricType config.MetricType, err error) {
	clientName := c.GetConfiguration().Name
	m.lock.Lock()
	defer m.lock.Unlock()
	if metricType == config.CustomMetricType {
		m.clientFailures.CustomMetrics[clientName]++
	}
	if metricType == config.ExternalMetricType {
		m.clientFailures.ExternalMetrics[clientName]++
	}
	clientErrors.WithLabelValues(c.GetConfiguration().Name, string(metricType)).Inc()
}

// UpdateExternalMetrics records a successful external-metrics refresh for the
// client: consecutive failures reset to 0, success counter incremented, and
// the metrics_count gauge set to the size of the fetched set.
func (m *Server) UpdateExternalMetrics(c client.Interface, ems map[provider.ExternalMetricInfo]struct{}) {
	clientName := c.GetConfiguration().Name
	m.lock.Lock()
	defer m.lock.Unlock()
	// reset client failures as we got some metrics
	m.clientFailures.ExternalMetrics[clientName] = 0
	// increment success counters
	m.clientSuccesses.ExternalMetrics[clientName]++
	clientSuccess.WithLabelValues(c.GetConfiguration().Name, string(config.ExternalMetricType)).Inc()
	// update external metrics stats
	metrics.WithLabelValues(c.GetConfiguration().Name, string(config.ExternalMetricType)).Set(float64(len(ems)))
}

// UpdateCustomMetrics is the custom-metrics counterpart of UpdateExternalMetrics.
func (m *Server) UpdateCustomMetrics(c client.Interface, cms map[provider.CustomMetricInfo]struct{}) {
	clientName := c.GetConfiguration().Name
	m.lock.Lock()
	defer m.lock.Unlock()
	// reset client failures as we got some metrics
	m.clientFailures.CustomMetrics[clientName] = 0
	// increment success counters
	m.clientSuccesses.CustomMetrics[clientName]++
	clientSuccess.WithLabelValues(c.GetConfiguration().Name, string(config.CustomMetricType)).Inc()
	// update custom metrics stats
	metrics.WithLabelValues(c.GetConfiguration().Name, string(config.CustomMetricType)).Set(float64(len(cms)))
}

// Start registers the /metrics and /readyz handlers and serves them on the
// monitoring port. Blocks; the ListenAndServe error is deliberately ignored.
func (m *Server) Start() {
	http.Handle("/metrics", promhttp.Handler())
	http.Handle("/readyz", m)
	_ = http.ListenAndServe(fmt.Sprintf(":%d", m.monitoringPort), nil)
}

// ServeHTTP implements the /readyz endpoint: 200 when every configured client
// has fetched at least one initial metric set and is under the
// consecutive-failure threshold, 503 otherwise. Always writes a JSON body
// with the current counters.
func (m *Server) ServeHTTP(writer http.ResponseWriter, request *http.Request) {
	status := http.StatusOK
	m.lock.RLock()
	defer m.lock.RUnlock()
	for _, server := range m.adapterCfg.MetricServers {
		if customMetricsSuccess, hasCustomMetrics := m.clientSuccesses.CustomMetrics[server.Name]; hasCustomMetrics && customMetricsSuccess == 0 {
			status = http.StatusServiceUnavailable
			klog.Errorf("client %s has not retrieved an initial set of custom metrics yet", server.Name)
			break
		}
		if externalMetricsSuccess, hasExternalMetrics := m.clientSuccesses.ExternalMetrics[server.Name]; hasExternalMetrics && externalMetricsSuccess == 0 {
			status = http.StatusServiceUnavailable
			klog.Errorf("client %s has not retrieved an initial set of external metrics yet", server.Name)
			break
		}
		if m.clientFailures.CustomMetrics[server.Name] >= m.failureThreshold {
			status = http.StatusServiceUnavailable
			klog.Errorf("client %s got %d consecutive failures while retrieving custom metrics", server.Name, m.clientFailures.CustomMetrics[server.Name])
			break
		}
		if m.clientFailures.ExternalMetrics[server.Name] >= m.failureThreshold {
			status = http.StatusServiceUnavailable
			klog.Errorf("client %s got %d consecutive failures while retrieving external metrics", server.Name, m.clientFailures.ExternalMetrics[server.Name])
			break
		}
	}
	err := writeJSONResponse(writer, status, ClientsHealthResponse{ClientFailures: m.clientFailures, ClientOk: m.clientSuccesses})
	if err != nil {
		klog.Errorf("Failed to write monitoring JSON response: %v", err)
	}
}

// ClientsHealthResponse is the JSON body returned by /readyz.
type ClientsHealthResponse struct {
	ClientFailures *Counters `json:"consecutiveFailures,omitempty"`
	ClientOk       *Counters `json:"successTotal,omitempty"`
}

// writeJSONResponse marshals resp (tab-indented) and writes it with the given
// status code; a marshalling failure produces a 500 with no body.
func writeJSONResponse(w http.ResponseWriter, code int, resp interface{}) error {
	enc, err := json.MarshalIndent(resp, "", "\t")
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		return err
	}
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(code)
	_, err = w.Write(enc)
	if err != nil {
		return err
	}
	return nil
}
<filename>lang/py/cookbook/v2/source/cb2_18_9_exm_1.py
class cond(object):
    """Conditional-expression hack via slice syntax (cookbook idiom).

    Usage: ``cond[test : value_if_true : value_if_false]``.
    Indexing with a three-part slice packs the operands into a ``slice``
    object; note that, unlike a real ternary, BOTH branch expressions are
    evaluated before the lookup happens.
    """
    def __getitem__(self, sl):
        # sl.start is the test, sl.stop the "true" value, sl.step the "false" value.
        if sl.start:
            return sl.stop
        else:
            return sl.step
# Shadow the class with a singleton instance so callers write cond[...] directly.
cond = cond()
// NOTE(review): this appears to be generated Babel/ES5 output (helper shims
// like _classCallCheck and the sourceMappingURL trailer below). Do not
// hand-edit — change the ES6 source and rebuild.
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } var _Promise = typeof Promise === 'undefined' ? require('es6-promise').Promise : Promise; var _require = require('preact'), h = _require.h, Component = _require.Component; var AuthView = require('./AuthView'); var Browser = require('./Browser'); var LoaderView = require('./Loader'); var generateFileID = require('../../utils/generateFileID'); var getFileType = require('../../utils/getFileType'); var isPreviewSupported = require('../../utils/isPreviewSupported'); /** * Array.prototype.findIndex ponyfill for old browsers.
*/ function findIndex(array, predicate) { for (var i = 0; i < array.length; i++) { if (predicate(array[i])) return i; } return -1; } var CloseWrapper = function (_Component) { _inherits(CloseWrapper, _Component); function CloseWrapper() { _classCallCheck(this, CloseWrapper); return _possibleConstructorReturn(this, _Component.apply(this, arguments)); } CloseWrapper.prototype.componentWillUnmount = function componentWillUnmount() { this.props.onUnmount(); }; CloseWrapper.prototype.render = function render() { return this.props.children[0]; }; return CloseWrapper; }(Component); /** * Class to easily generate generic views for plugins * * * This class expects the plugin instance using it to have the following * accessor methods. * Each method takes the item whose property is to be accessed * as a param * * isFolder * @return {Boolean} for if the item is a folder or not * getItemData * @return {Object} that is format ready for uppy upload/download * getItemIcon * @return {Object} html instance of the item's icon * getItemSubList * @return {Array} sub-items in the item.
e.g a folder may contain sub-items * getItemName * @return {String} display friendly name of the item * getMimeType * @return {String} mime type of the item * getItemId * @return {String} unique id of the item * getItemRequestPath * @return {String} unique request path of the item when making calls to uppy server * getItemModifiedDate * @return {object} or {String} date of when last the item was modified * getItemThumbnailUrl * @return {String} */ module.exports = function () { /** * @param {object} instance of the plugin */ function ProviderView(plugin, opts) { _classCallCheck(this, ProviderView); this.plugin = plugin; this.Provider = plugin[plugin.id]; // set default options
var defaultOptions = { viewType: 'list', showTitles: true, showFilter: true, showBreadcrumbs: true // merge default options with the ones set by user
};this.opts = _extends({}, defaultOptions, opts); // Logic
this.addFile = this.addFile.bind(this); this.filterItems = this.filterItems.bind(this); this.filterQuery = this.filterQuery.bind(this); this.toggleSearch = this.toggleSearch.bind(this); this.getFolder = this.getFolder.bind(this); this.getNextFolder = this.getNextFolder.bind(this); this.logout = this.logout.bind(this); this.checkAuth = this.checkAuth.bind(this); this.handleAuth = this.handleAuth.bind(this); this.handleDemoAuth = this.handleDemoAuth.bind(this); this.sortByTitle = this.sortByTitle.bind(this); this.sortByDate = this.sortByDate.bind(this); this.isActiveRow = this.isActiveRow.bind(this); this.isChecked = this.isChecked.bind(this); this.toggleCheckbox = this.toggleCheckbox.bind(this); this.handleError = this.handleError.bind(this); this.handleScroll = this.handleScroll.bind(this); this.donePicking = this.donePicking.bind(this); this.cancelPicking = this.cancelPicking.bind(this); this.clearSelection = this.clearSelection.bind(this); // Visual
this.render = this.render.bind(this); this.clearSelection(); } ProviderView.prototype.tearDown = function tearDown() { // Nothing.
}; ProviderView.prototype._updateFilesAndFolders = function _updateFilesAndFolders(res, files, folders) { var _this2 = this; this.plugin.getItemSubList(res).forEach(function (item) { if (_this2.plugin.isFolder(item)) { folders.push(item); } else { files.push(item); } }); this.plugin.setPluginState({ folders: folders, files: files }); }; ProviderView.prototype.checkAuth = function checkAuth() { var _this3 = this; this.plugin.setPluginState({ checkAuthInProgress: true }); this.Provider.checkAuth().then(function (authenticated) { _this3.plugin.setPluginState({ checkAuthInProgress: false }); _this3.plugin.onAuth(authenticated); }).catch(function (err) { _this3.plugin.setPluginState({ checkAuthInProgress: false }); _this3.handleError(err); }); }; /** * Based on folder ID, fetch a new folder and update it to state * @param {String} id Folder id * @return {Promise} Folders/files in folder */ ProviderView.prototype.getFolder = function getFolder(id, name) { var _this4 = this; return this._loaderWrapper(this.Provider.list(id), function (res) { var folders = []; var files = []; var updatedDirectories = void 0; var state = _this4.plugin.getPluginState(); var index = findIndex(state.directories, function (dir) { return id === dir.id; }); if (index !== -1) { updatedDirectories = state.directories.slice(0, index + 1); } else { updatedDirectories = state.directories.concat([{ id: id, title: name || _this4.plugin.getItemName(res) }]); } _this4.username = _this4.username ?
_this4.username : _this4.plugin.getUsername(res); _this4._updateFilesAndFolders(res, files, folders); _this4.plugin.setPluginState({ directories: updatedDirectories }); }, this.handleError); }; /** * Fetches new folder * @param {Object} Folder * @param {String} title Folder title */ ProviderView.prototype.getNextFolder = function getNextFolder(folder) { var id = this.plugin.getItemRequestPath(folder); this.getFolder(id, this.plugin.getItemName(folder)); this.lastCheckbox = undefined; }; ProviderView.prototype.addFile = function addFile(file) { var tagFile = { id: this.providerFileToId(file), source: this.plugin.id, data: this.plugin.getItemData(file), name: this.plugin.getItemName(file) || this.plugin.getItemId(file), type: this.plugin.getMimeType(file), isRemote: true, body: { fileId: this.plugin.getItemId(file) }, remote: { serverUrl: this.plugin.opts.serverUrl, url: '' + this.Provider.fileUrl(this.plugin.getItemRequestPath(file)), body: { fileId: this.plugin.getItemId(file) }, providerOptions: this.Provider.opts } }; var fileType = getFileType(tagFile); // TODO Should we just always use the thumbnail URL if it exists?
if (fileType && isPreviewSupported(fileType)) { tagFile.preview = this.plugin.getItemThumbnailUrl(file); } this.plugin.uppy.log('Adding remote file'); try { this.plugin.uppy.addFile(tagFile); } catch (err) { // Nothing, restriction errors handled in Core
} }; ProviderView.prototype.removeFile = function removeFile(id) { var _plugin$getPluginStat = this.plugin.getPluginState(), currentSelection = _plugin$getPluginStat.currentSelection; this.plugin.setPluginState({ currentSelection: currentSelection.filter(function (file) { return file.id !== id; }) }); }; /** * Removes session token on client side.
*/ ProviderView.prototype.logout = function logout() { var _this5 = this; this.Provider.logout(location.href).then(function (res) { if (res.ok) { var newState = { authenticated: false, files: [], folders: [], directories: [] }; _this5.plugin.setPluginState(newState); } }).catch(this.handleError); }; ProviderView.prototype.filterQuery = function filterQuery(e) { var state = this.plugin.getPluginState(); this.plugin.setPluginState(_extends({}, state, { filterInput: e ? e.target.value : '' })); }; ProviderView.prototype.toggleSearch = function toggleSearch(inputEl) { var state = this.plugin.getPluginState(); this.plugin.setPluginState({ isSearchVisible: !state.isSearchVisible, filterInput: '' }); }; ProviderView.prototype.filterItems = function filterItems(items) { var _this6 = this; var state = this.plugin.getPluginState(); if (state.filterInput === '') { return items; } return items.filter(function (folder) { return _this6.plugin.getItemName(folder).toLowerCase().indexOf(state.filterInput.toLowerCase()) !== -1; }); }; ProviderView.prototype.sortByTitle = function sortByTitle() { var _this7 = this; var state = _extends({}, this.plugin.getPluginState()); var files = state.files, folders = state.folders, sorting = state.sorting; var sortedFiles = files.sort(function (fileA, fileB) { if (sorting === 'titleDescending') { return _this7.plugin.getItemName(fileB).localeCompare(_this7.plugin.getItemName(fileA)); } return _this7.plugin.getItemName(fileA).localeCompare(_this7.plugin.getItemName(fileB)); }); var sortedFolders = folders.sort(function (folderA, folderB) { if (sorting === 'titleDescending') { return _this7.plugin.getItemName(folderB).localeCompare(_this7.plugin.getItemName(folderA)); } return _this7.plugin.getItemName(folderA).localeCompare(_this7.plugin.getItemName(folderB)); }); this.plugin.setPluginState(_extends({}, state, { files: sortedFiles, folders: sortedFolders, sorting: sorting === 'titleDescending' ?
'titleAscending' : 'titleDescending' })); }; ProviderView.prototype.sortByDate = function sortByDate() { var _this8 = this; var state = _extends({}, this.plugin.getPluginState()); var files = state.files, folders = state.folders, sorting = state.sorting; var sortedFiles = files.sort(function (fileA, fileB) { var a = new Date(_this8.plugin.getItemModifiedDate(fileA)); var b = new Date(_this8.plugin.getItemModifiedDate(fileB)); if (sorting === 'dateDescending') { return a > b ? -1 : a < b ? 1 : 0; } return a > b ? 1 : a < b ? -1 : 0; }); var sortedFolders = folders.sort(function (folderA, folderB) { var a = new Date(_this8.plugin.getItemModifiedDate(folderA)); var b = new Date(_this8.plugin.getItemModifiedDate(folderB)); if (sorting === 'dateDescending') { return a > b ? -1 : a < b ? 1 : 0; } return a > b ? 1 : a < b ? -1 : 0; }); this.plugin.setPluginState(_extends({}, state, { files: sortedFiles, folders: sortedFolders, sorting: sorting === 'dateDescending' ? 'dateAscending' : 'dateDescending' })); }; ProviderView.prototype.sortBySize = function sortBySize() { var _this9 = this; var state = _extends({}, this.plugin.getPluginState()); var files = state.files, sorting = state.sorting; // check that plugin supports file sizes
if (!files.length || !this.plugin.getItemData(files[0]).size) { return; } var sortedFiles = files.sort(function (fileA, fileB) { var a = _this9.plugin.getItemData(fileA).size; var b = _this9.plugin.getItemData(fileB).size; if (sorting === 'sizeDescending') { return a > b ? -1 : a < b ? 1 : 0; } return a > b ? 1 : a < b ? -1 : 0; }); this.plugin.setPluginState(_extends({}, state, { files: sortedFiles, sorting: sorting === 'sizeDescending' ?
'sizeAscending' : 'sizeDescending' })); }; ProviderView.prototype.isActiveRow = function isActiveRow(file) { return this.plugin.getPluginState().activeRow === this.plugin.getItemId(file); }; ProviderView.prototype.isChecked = function isChecked(file) { var _plugin$getPluginStat2 = this.plugin.getPluginState(), currentSelection = _plugin$getPluginStat2.currentSelection; return currentSelection.some(function (item) { return item === file; }); }; /** * Adds all files found inside of specified folder. * * Uses separated state while folder contents are being fetched and * mantains list of selected folders, which are separated from files. */ ProviderView.prototype.addFolder = function addFolder(folder) { var _this10 = this; var folderId = this.providerFileToId(folder); var state = this.plugin.getPluginState(); var folders = state.selectedFolders || {}; if (folderId in folders && folders[folderId].loading) { return; } folders[folderId] = { loading: true, files: [] }; this.plugin.setPluginState({ selectedFolders: folders }); return this.Provider.list(this.plugin.getItemRequestPath(folder)).then(function (res) { var files = []; _this10.plugin.getItemSubList(res).forEach(function (item) { if (!_this10.plugin.isFolder(item)) { _this10.addFile(item); files.push(_this10.providerFileToId(item)); } }); state = _this10.plugin.getPluginState(); state.selectedFolders[folderId] = { loading: false, files: files }; _this10.plugin.setPluginState({ selectedFolders: folders }); var dashboard = _this10.plugin.uppy.getPlugin('Dashboard'); var message = void 0; if (files.length) { message = dashboard.i18n('folderAdded', { smart_count: files.length, folder: _this10.plugin.getItemName(folder) }); } else { message = dashboard.i18n('emptyFolderAdded'); } _this10.plugin.uppy.info(message); }).catch(function (e) { state = _this10.plugin.getPluginState(); delete state.selectedFolders[folderId]; _this10.plugin.setPluginState({ selectedFolders: state.selectedFolders }); _this10.handleError(e); }); };
/** * Toggles file/folder checkbox to on/off state while updating files list. * * Note that some extra complexity comes from supporting shift+click to * toggle multiple checkboxes at once, which is done by getting all files * in between last checked file and current one. */ ProviderView.prototype.toggleCheckbox = function toggleCheckbox(e, file) { e.stopPropagation(); e.preventDefault(); var _plugin$getPluginStat3 = this.plugin.getPluginState(), folders = _plugin$getPluginStat3.folders, files = _plugin$getPluginStat3.files; var items = this.filterItems(folders.concat(files)); // Shift-clicking selects a single consecutive list of items
// starting at the previous click and deselects everything else.
if (this.lastCheckbox && e.shiftKey) { var _currentSelection = void 0; var prevIndex = items.indexOf(this.lastCheckbox); var currentIndex = items.indexOf(file); if (prevIndex < currentIndex) { _currentSelection = items.slice(prevIndex, currentIndex + 1); } else { _currentSelection = items.slice(currentIndex, prevIndex + 1); } this.plugin.setPluginState({ currentSelection: _currentSelection }); return; } this.lastCheckbox = file; var _plugin$getPluginStat4 = this.plugin.getPluginState(), currentSelection = _plugin$getPluginStat4.currentSelection; if (this.isChecked(file)) { this.plugin.setPluginState({ currentSelection: currentSelection.filter(function (item) { return item !== file; }) }); } else { this.plugin.setPluginState({ currentSelection: currentSelection.concat([file]) }); } }; ProviderView.prototype.providerFileToId = function providerFileToId(file) { return generateFileID({ data: this.plugin.getItemData(file), name: this.plugin.getItemName(file) || this.plugin.getItemId(file), type: this.plugin.getMimeType(file) }); }; ProviderView.prototype.handleDemoAuth = function handleDemoAuth() { var state = this.plugin.getPluginState(); this.plugin.setPluginState({}, state, { authenticated: true }); }; ProviderView.prototype.handleAuth = function handleAuth() { var
_this11 = this; var authState = btoa(JSON.stringify({ origin: location.origin })); var link = this.Provider.authUrl() + '?state=' + authState; var authWindow = window.open(link, '_blank'); var noProtocol = function noProtocol(url) { return url.replace(/^(https?:|)\/\//, ''); }; var handleToken = function handleToken(e) { var allowedOrigin = new RegExp(noProtocol(_this11.plugin.opts.serverPattern)); if (!allowedOrigin.test(noProtocol(e.origin)) || e.source !== authWindow) { _this11.plugin.uppy.log('rejecting event from ' + e.origin + ' vs allowed pattern ' + _this11.plugin.opts.serverPattern); return; } authWindow.close(); window.removeEventListener('message', handleToken); _this11.Provider.setAuthToken(e.data.token); _this11._loaderWrapper(_this11.Provider.checkAuth(), _this11.plugin.onAuth, _this11.handleError); }; window.addEventListener('message', handleToken); }; ProviderView.prototype.handleError = function handleError(error) { var uppy = this.plugin.uppy; var message = uppy.i18n('uppyServerError'); uppy.log(error.toString()); uppy.info({ message: message, details: error.toString() }, 'error', 5000); }; ProviderView.prototype.handleScroll = function handleScroll(e) { var _this12 = this; var scrollPos = e.target.scrollHeight - (e.target.scrollTop + e.target.offsetHeight); var path = this.plugin.getNextPagePath ?
this.plugin.getNextPagePath() : null; if (scrollPos < 50 && path && !this._isHandlingScroll) { this.Provider.list(path).then(function (res) { var _plugin$getPluginStat5 = _this12.plugin.getPluginState(), files = _plugin$getPluginStat5.files, folders = _plugin$getPluginStat5.folders; _this12._updateFilesAndFolders(res, files, folders); }).catch(this.handleError).then(function () { _this12._isHandlingScroll = false; }); // always called
this._isHandlingScroll = true; } }; ProviderView.prototype.donePicking = function donePicking() { var _this13 = this; var _plugin$getPluginStat6 = this.plugin.getPluginState(), currentSelection = _plugin$getPluginStat6.currentSelection; var promises = currentSelection.map(function (file) { if (_this13.plugin.isFolder(file)) { return _this13.addFolder(file); } else { return _this13.addFile(file); } }); this._loaderWrapper(_Promise.all(promises), function () { _this13.clearSelection(); var dashboard = _this13.plugin.uppy.getPlugin('Dashboard'); if (dashboard) dashboard.hideAllPanels(); }, function () {}); }; ProviderView.prototype.cancelPicking = function cancelPicking() { this.clearSelection(); var dashboard = this.plugin.uppy.getPlugin('Dashboard'); if (dashboard) dashboard.hideAllPanels(); }; ProviderView.prototype.clearSelection = function clearSelection() { this.plugin.setPluginState({ currentSelection: [] }); }; // displays loader view while asynchronous request is being made.
ProviderView.prototype._loaderWrapper = function _loaderWrapper(promise, then, catch_) { var _this14 = this; promise.then(then).catch(catch_).then(function () { return _this14.plugin.setPluginState({ loading: false }); }); // always called.
this.plugin.setPluginState({ loading: true }); }; ProviderView.prototype.render = function render(state) { var _plugin$getPluginStat7 = this.plugin.getPluginState(), authenticated = _plugin$getPluginStat7.authenticated, checkAuthInProgress = _plugin$getPluginStat7.checkAuthInProgress, loading = _plugin$getPluginStat7.loading; if (loading) { return h( CloseWrapper, { onUnmount: this.clearSelection }, h(LoaderView, null) ); } if (!authenticated) { return h( CloseWrapper, { onUnmount: this.clearSelection }, h(AuthView, { pluginName: this.plugin.title, pluginIcon: this.plugin.icon, demo: this.plugin.opts.demo, checkAuth: this.checkAuth, handleAuth: this.handleAuth, handleDemoAuth: this.handleDemoAuth, checkAuthInProgress: checkAuthInProgress }) ); } var browserProps = _extends({}, this.plugin.getPluginState(), { username: this.username, getNextFolder: this.getNextFolder, getFolder: this.getFolder, filterItems: this.filterItems, filterQuery: this.filterQuery, toggleSearch: this.toggleSearch, sortByTitle: this.sortByTitle, sortByDate: this.sortByDate, logout: this.logout, demo: this.plugin.opts.demo, isActiveRow: this.isActiveRow, isChecked: this.isChecked, toggleCheckbox: this.toggleCheckbox, getItemId: this.plugin.getItemId, getItemName: this.plugin.getItemName, getItemIcon: this.plugin.getItemIcon, handleScroll: this.handleScroll, done: this.donePicking, cancel: this.cancelPicking, title: this.plugin.title, viewType: this.opts.viewType, showTitles: this.opts.showTitles, showFilter: this.opts.showFilter, showBreadcrumbs: this.opts.showBreadcrumbs, pluginIcon: this.plugin.icon, i18n: this.plugin.uppy.i18n }); return h( CloseWrapper, { onUnmount: this.clearSelection }, h(Browser, browserProps) ); }; return ProviderView; }();
//# sourceMappingURL=index.js.map
#!/bin/bash shopt -s extglob SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" if [ ! -f ~/.ccm/CURRENT ]; then echo "Unable to find an active ccm cluster" exit 2 fi if [ ! -f ${SCRIPT_PATH}/../target/ecaudit*.jar ]; then echo "No jar file found. Build project and try again." exit 3 fi CCM_CLUSTER_NAME=`cat ~/.ccm/CURRENT` echo "Installing ecAudit into ${CCM_CLUSTER_NAME}" CLUSTER_PATH=~/.ccm/${CCM_CLUSTER_NAME} mkdir -p ${CLUSTER_PATH}/lib rm -f ${CLUSTER_PATH}/lib/ecaudit.jar ln -s ${SCRIPT_PATH}/../target/ecaudit*.jar ${CLUSTER_PATH}/lib/ecaudit.jar grep -sq ecaudit.jar ${CLUSTER_PATH}/cassandra.in.sh if [ $? -ne 0 ]; then echo "CLASSPATH=\"\$CLASSPATH:${CLUSTER_PATH}/lib/ecaudit.jar\"" >> ${CLUSTER_PATH}/cassandra.in.sh echo "JVM_EXTRA_OPTS=\"\$JVM_EXTRA_OPTS -Dcassandra.custom_query_handler_class=com.ericsson.bss.cassandra.ecaudit.handler.AuditQueryHandler\"" >> ${CLUSTER_PATH}/cassandra.in.sh fi for NODE_PATH in ${CLUSTER_PATH}/node*; do sed -i 's/^authenticator:.*/authenticator: com.ericsson.bss.cassandra.ecaudit.auth.AuditPasswordAuthenticator/' ${NODE_PATH}/conf/cassandra.yaml sed -i 's/^authorizer:.*/authorizer: com.ericsson.bss.cassandra.ecaudit.auth.AuditAuthorizer/' ${NODE_PATH}/conf/cassandra.yaml sed -i 's/^role_manager:.*/role_manager: com.ericsson.bss.cassandra.ecaudit.auth.AuditRoleManager/' ${NODE_PATH}/conf/cassandra.yaml done
<reponame>tliang1/Java-Practice package main; /** * @author <NAME> * */ public class TestMyDate { public static void main(String[] args) { MyDate myDate1 = new MyDate(); MyDate myDate2 = new MyDate(34355555133101L); System.out.println("Current time is " + myDate1.displayTime()); System.out.println("The time for myDate2 is " + myDate2.displayTime()); } }
package utils

import (
	"net"
	"strings"

	"github.com/rancher/go-rancher-metadata/metadata"
	"github.com/rancher/log"
)

const (
	// hostLabelKeyword marks CNI config string values of the form
	// "__host_label__:<label-name>" that should be replaced with the value
	// of that label on the current host.
	hostLabelKeyword = "__host_label__"
)

// UpdateCNIConfigByKeywords takes in the given CNI config, replaces the rancher
// specific keywords with the appropriate values.
// The map is mutated in place and also returned; non-map inputs are returned
// unchanged. String values starting with hostLabelKeyword are replaced with
// the host's label value (or "" when the label is absent/empty); non-string
// values are processed recursively.
func UpdateCNIConfigByKeywords(config interface{}, host metadata.Host) interface{} {
	props, isMap := config.(map[string]interface{})
	if !isMap {
		return config
	}

	for aKey, aValue := range props {
		if v, isString := aValue.(string); isString {
			if strings.HasPrefix(v, hostLabelKeyword) {
				// Default to empty, then fill in the label value when present.
				props[aKey] = ""
				splits := strings.SplitN(v, ":", 2)
				if len(splits) > 1 {
					label := strings.TrimSpace(splits[1])
					labelValue := host.Labels[label]
					if labelValue != "" {
						props[aKey] = labelValue
					}
				}
			}
		} else {
			// Recurse into nested structures (maps); other types pass through.
			props[aKey] = UpdateCNIConfigByKeywords(aValue, host)
		}
	}

	return props
}

// GetBridgeInfo is used to figure out the bridge information from the
// CNI config of the network specified.
// It scans each CNI config entry looking for a "rancher-bridge" plugin and
// returns that entry's bridge name.
// NOTE(review): bridgeSubnet keeps the value from whichever entry was iterated
// over last before the break (Go map iteration order is unspecified), which is
// not necessarily the matching bridge entry — verify this is intended.
func GetBridgeInfo(network metadata.Network, host metadata.Host) (bridge string, bridgeSubnet string) {
	conf, _ := network.Metadata["cniConfig"].(map[string]interface{})
	for _, file := range conf {
		file = UpdateCNIConfigByKeywords(file, host)
		props, _ := file.(map[string]interface{})

		cniType, _ := props["type"].(string)
		checkBridge, _ := props["bridge"].(string)
		bridgeSubnet, _ = props["bridgeSubnet"].(string)

		if cniType == "rancher-bridge" && checkBridge != "" {
			bridge = checkBridge
			break
		}
	}
	return bridge, bridgeSubnet
}

// GetLocalNetworksAndRouters fetches networks and network containers
// related to that networks running in the current environment.
// It returns the networks usable in this environment (those carrying a CNI
// config) and a map of network UUID -> router container running on this host.
func GetLocalNetworksAndRouters(networks []metadata.Network, host metadata.Host, services []metadata.Service) ([]metadata.Network, map[string]metadata.Container) {
	localRouters := map[string]metadata.Container{}

	var cniDriverServices, unfilteredCniDriverServices []metadata.Service
	var networkService metadata.Service

	// Trick to select the primary service of the network plugin
	// stack
	// TODO: Need to check if it's needed for Calico?
	for _, service := range services {
		if service.Kind == "networkDriverService" {
			unfilteredCniDriverServices = append(unfilteredCniDriverServices, service)
		}
	}

	// When more than one (or zero) driver service is present, narrow the list
	// down to services literally named "cni-driver".
	if len(unfilteredCniDriverServices) != 1 {
		log.Debugf("found multiple cni driver services, filtering. unfilteredCniDriverServices=%v", unfilteredCniDriverServices)
		for _, service := range unfilteredCniDriverServices {
			if service.Name != "cni-driver" {
				continue
			}
			cniDriverServices = append(cniDriverServices, service)
		}
	} else {
		cniDriverServices = unfilteredCniDriverServices
	}
	log.Debugf("cniDriverServices=%v", cniDriverServices)

	if len(cniDriverServices) != 1 {
		log.Errorf("utils: error: expected one CNI driver service, but found: %v", len(cniDriverServices))
	}

	if len(cniDriverServices) > 0 {
		// Find the other service in the same stack as cniDriver
		for _, service := range services {
			if service.StackUUID == cniDriverServices[0].StackUUID && service.UUID != cniDriverServices[0].UUID && service.Name == service.PrimaryServiceName {
				networkService = service
				break
			}
		}
	}

	// Router containers are the network service's containers on this host,
	// keyed by the network they belong to.
	for _, aContainer := range networkService.Containers {
		if aContainer.HostUUID == host.UUID {
			localRouters[aContainer.NetworkUUID] = aContainer
		}
	}

	localNetworks := []metadata.Network{}
	for _, aNetwork := range networks {
		// Only networks in this host's environment with a CNI config qualify.
		if aNetwork.EnvironmentUUID != host.EnvironmentUUID {
			continue
		}
		_, ok := aNetwork.Metadata["cniConfig"].(map[string]interface{})
		if !ok {
			continue
		}
		// Hack to handle multiple networks in the environment during upgrade
		// Context: pre cni-driver service release to cni-driver release
		//_, ok = localRouters[aNetwork.UUID]
		//if !ok {
		//continue
		//}
		localNetworks = append(localNetworks, aNetwork)
	}

	log.Debugf("localNetworks=%v, localRouters=%v", localNetworks, localRouters)
	return localNetworks, localRouters
}

// GetLocalNetworksAndRoutersFromMetadata is used to fetch networks local to the current environment.
// Thin convenience wrapper: pulls networks, the current host, and services
// from the metadata client and delegates to GetLocalNetworksAndRouters.
func GetLocalNetworksAndRoutersFromMetadata(mc metadata.Client) ([]metadata.Network, map[string]metadata.Container, error) {
	networks, err := mc.GetNetworks()
	if err != nil {
		return nil, nil, err
	}
	host, err := mc.GetSelfHost()
	if err != nil {
		return nil, nil, err
	}
	services, err := mc.GetServices()
	if err != nil {
		return nil, nil, err
	}

	networks, routers := GetLocalNetworksAndRouters(networks, host, services)

	return networks, routers, nil
}

// IsContainerConsideredRunning function is used to test if the container is in any of
// the states that are considered running ("running", "starting", "stopping").
func IsContainerConsideredRunning(aContainer metadata.Container) bool {
	return (aContainer.State == "running" ||
		aContainer.State == "starting" ||
		aContainer.State == "stopping")
}

// IsIPInSubnet checks if the given IP address belongs to the given subnet.
// ip without subnet mask, subnet in CIDR format.
// NOTE(review): an unparseable ipStr makes net.ParseIP return nil, and
// Contains(nil) reports false rather than an error — confirm callers are
// fine with that.
func IsIPInSubnet(ipStr, subnetStr string) (bool, error) {
	_, subnet, err := net.ParseCIDR(subnetStr)
	if err != nil {
		return false, err
	}

	ip := net.ParseIP(ipStr)
	return subnet.Contains(ip), nil
}

// IsIPInSubnets checks if the given ip address is part of any of the given subnets.
// Invalid subnets are skipped (best-effort); returns true on the first match.
func IsIPInSubnets(ip string, subnets []string) (bool, error) {
	for _, subnet := range subnets {
		in, err := IsIPInSubnet(ip, subnet)
		if err != nil {
			continue
		}
		if in {
			return true, nil
		}
	}
	return false, nil
}
// Fetch the Uber products available at the given coordinates and log the
// rate-limit information carried in the response.
Uber.getProducts(lat, lon).then((response) => {
  console.log(response.rate_limit);
});
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import CcAmexSvg from '@rsuite/icon-font/lib/legacy/CcAmex';

// CcAmex: legacy "cc amex" (American Express card) icon component, built from
// the raw SVG via the shared createSvgIcon factory.
const CcAmex = createSvgIcon({
  as: CcAmexSvg,
  ariaLabel: 'cc amex',
  category: 'legacy',
  displayName: 'CcAmex'
});

export default CcAmex;
'use strict';

angular.module('myApp.communities', ['ngRoute','ngResource'])

// Community: $resource wrapper for the community REST endpoint.
// NOTE(review): backend URL is hard-coded to localhost:4000 — presumably a
// dev-only setting; confirm before deploying.
.factory('Community',function($resource) {
    return $resource('http://localhost:4000/community');
})

.controller('communitiesCtrl', function($scope, Community,$http,$route,$window,$location) {
    $scope.communityData = {};

    // Builds a community object from the form data and persists it.
    $scope.newCommunity = function() {
        // NOTE(review): reading checkboxes straight from the DOM bypasses
        // Angular data binding (ng-model would be idiomatic). Also, when
        // neither box is checked, is_community stays undefined; when both are
        // checked, the second assignment (false) wins — verify intent.
        var c1 = document.getElementById('c1'),
            c2 = document.getElementById('c2')
        if (c1.checked)
            $scope.communityData.is_community= true;
        if (c2.checked)
            $scope.communityData.is_community= false;
        $scope.community = new Community($scope.communityData);
        $scope.community.$save();

        /* $scope.go = function(param) {
            $location.path('#/communityProfile/'+);
        };*/
    }

    // Initial load: fetch all communities for the listing view.
    $scope.communities = Community.query();
});
#!/usr/bin/env node

// Launcher for the modul.io server.
// Uncomment this line to launch modul.io in dev mode
// process.env.NODE_ENV = 'dev';

var server = require('../server/modul.io.js');

server.start();
##
# Copyright (c) 2010-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##

set -e           # Break on error
shopt -s nullglob # Expand foo* to nothing if nothing matches

# Names of database backends that can be benchmarked.
BACKENDS=(filesystem postgresql)

# Location of the CalendarServer source. Will automatically be
# updated to the appropriate version, config edited to use the right
# backend, and PID files will be discovered beneath it.
SOURCE=~/Projects/CalendarServer/trunk

# The plist the server will respect.
CONF=$SOURCE/conf/caldavd-dev.plist

# Names of benchmarks we can run. Since ordering makes a difference to how
# benchmarks are split across multiple hosts, new benchmarks should be appended
# to this list, not inserted earlier on.
BENCHMARKS="find_calendars find_events event_move event_delete_attendee event_add_attendee event_change_date event_change_summary event_delete vfreebusy event bounded_recurrence unbounded_recurrence event_autoaccept bounded_recurrence_autoaccept unbounded_recurrence_autoaccept vfreebusy_vary_attendees"

# Custom scaling parameters for benchmarks that merit it. Be careful
# not to exceed the 99 user limit for benchmarks where the scaling
# parameter represents a number of users!
SCALE_PARAMETERS="--parameters find_events:1,10,100,1000,10000 --parameters vfreebusy_vary_attendees:1,9,30"

# Names of metrics we can collect.
STATISTICS=(HTTP SQL read write pagein pageout)

# Codespeed add-result location.
ADDURL=http://localhost:8000/result/add/

# Path to the config-extraction helper next to the current working directory.
# Fix: was $(PWD), which command-substitutes a nonexistent command named
# "PWD"; $(pwd) yields the current working directory as intended.
EXTRACT=$(pwd)/extractconf

# Change the config beneath $SOURCE to use a particular database backend.
function setbackend() {
    ./setbackend $SOURCE/conf/caldavd-test.plist $1 > $CONF
}

# Clean up $SOURCE, update to the specified revision, and build the
# extensions (in-place).
function update_and_build() {
    pushd $SOURCE
    stop
    # Remove everything svn does not track (including ignored files).
    svn st --no-ignore | grep '^[?I]' | cut -c9- | xargs rm -r
    svn up -r$1 .
    python setup.py build_ext -i
    popd
}

# Ensure that the required configuration file is present, exit if not.
function check_conf() {
    if [ ! -e $CONF ]; then
        echo "Configuration file $CONF is missing."
        exit 1
    fi
}

# Start a CalendarServer in the current directory. Only return after
# the specified number of slave processes have written their PID files
# (which is only a weak metric for "the server is ready to use").
function start() {
    NUM_INSTANCES=$1
    check_conf
    PIDDIR=$SOURCE/$($EXTRACT $CONF ServerRoot)/$($EXTRACT $CONF RunRoot)
    shift
    ./run -d $*
    while sleep 2; do
        # nullglob: expands to an empty array until PID files appear.
        instances=($PIDDIR/*instance*)
        if [ "${#instances[*]}" -eq "$NUM_INSTANCES" ]; then
            echo "instance pid files: ${instances[*]}"
            break
        fi
    done
}

# Stop the CalendarServer in the current directory. Only return after
# it has exited.
function stop() {
    if [ ! -e $CONF ]; then
        return
    fi
    PIDFILE=$SOURCE/$($EXTRACT $CONF ServerRoot)/$($EXTRACT $CONF RunRoot)/$($EXTRACT $CONF PIDFile)
    ./run -k || true
    while :; do
        pid=$(cat $PIDFILE 2>/dev/null || true)
        if [ ! -e $PIDFILE ]; then
            break
        fi
        # Fix: was `if ! $(kill -0 $pid)`, which executed kill's (empty)
        # stdout instead of testing kill's exit status directly.
        if ! kill -0 $pid; then
            break
        fi
        echo "Waiting for server to exit..."
        sleep 1
    done
}
<gh_stars>0 package test.backend.www.configuration; import org.apache.catalina.filters.CorsFilter; import org.springframework.boot.autoconfigure.web.WebMvcAutoConfiguration.WebMvcAutoConfigurationAdapter; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder; import com.fasterxml.jackson.databind.SerializationFeature; @Configuration @ComponentScan public class WebConfig extends WebMvcAutoConfigurationAdapter { @Bean(name = "corsFilter") public CorsFilter buildCorsFilter() { return new CorsFilter(); } @Bean public Jackson2ObjectMapperBuilder jacksonBuilder() { final Jackson2ObjectMapperBuilder builder = new Jackson2ObjectMapperBuilder(); builder.indentOutput(true); builder.featuresToDisable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); return builder; } }
#!/bin/sh
#
# Utility tools for building configure/packages by AntPickax
#
# Copyright 2018 Yahoo Japan Corporation.
#
# AntPickax provides utility tools for supporting autotools
# builds.
#
# These tools retrieve the necessary information from the
# repository and appropriately set the setting values of
# configure, Makefile, spec,etc file and so on.
# These tools were recreated to reduce the number of fixes and
# reduce the workload of developers when there is a change in
# the project configuration.
#
# For the full copyright and license information, please view
# the license file that was distributed with this source code.
#
# AUTHOR:   Takeshi Nakatani
# CREATE:   Fri, Apr 13 2018
# REVISION:
#

#
# Autobuild for debian package
#
func_usage()
{
	echo ""
	echo "Usage: $1 [-buildnum <build number>] [-nodebuild] [-rootdir] [-product <product name>] [-class <class name>] [-disttype <os/version>] [-y] [additional debuild options]"
	echo "       -buildnum                    specify build number for packaging(default 1)"
	echo "       -nodebuild                   stops before do debuild command."
	echo "       -rootdir                     layout \"debian\" directory for packaging under source top directory"
	echo "       -product                     specify product name(use PACKAGE_NAME in Makefile s default)"
	echo "       -class                       specify package class name(optional)"
	echo "       -disttype                    specify \"OS/version name\", ex: ubuntu/trusty"
	echo "       -y                           runs no interactive mode."
	echo "       additional debuild options   this script run debuild with \"-uc -us\", can specify additional options."
	echo "       -h                           print help"
	echo ""
}

#
# Probe dh_make for the package class it supports: newer dh_make replaced
# the "multi" class with "library".
#
func_get_default_class()
{
	dh_make -h 2>/dev/null | grep '\--multi' >/dev/null 2>&1
	if [ $? -eq 0 ]; then
		echo "multi"
	else
		echo "library"
	fi
}

PRGNAME=$(basename "$0")
MYSCRIPTDIR=$(dirname "$0")
MYSCRIPTDIR=$(cd "${MYSCRIPTDIR}" || exit 1; pwd)
SRCTOP=$(cd "${MYSCRIPTDIR}/.." || exit 1; pwd)
BUILDDEBDIR=${SRCTOP}/debian_build

#
# Check options
#
IS_DEBUILD=1
IS_INTERACTIVE=1
IS_ROOTDIR=0
DH_MAKE_AUTORUN_OPTION="-y"
BUILD_NUMBER=1
IS_OS_UBUNTU=0
OS_VERSION_NAME=
DEBUILD_OPT=""
PKGCLASSNAME=$(func_get_default_class)

while [ $# -ne 0 ]; do
	if [ "X$1" = "X" ]; then
		echo "WARNING: (null) option is specified, so skip this." 1>&2
	elif [ "X$1" = "X-h" ] || [ "X$1" = "X-help" ]; then
		func_usage "${PRGNAME}"
		exit 0
	elif [ "X$1" = "X-buildnum" ]; then
		shift
		if [ $# -eq 0 ]; then
			echo "ERROR: -buildnum option needs parameter." 1>&2
			exit 1
		fi
		BUILD_NUMBER=$1
	elif [ "X$1" = "X-nodebuild" ]; then
		IS_DEBUILD=0
		BUILD_NUMBER=
	elif [ "X$1" = "X-rootdir" ]; then
		IS_ROOTDIR=1
	elif [ "X$1" = "X-product" ]; then
		shift
		if [ $# -eq 0 ]; then
			echo "ERROR: -product option needs parameter." 1>&2
			exit 1
		fi
		PACKAGE_NAME=$1
	elif [ "X$1" = "X-class" ]; then
		shift
		if [ $# -eq 0 ]; then
			echo "ERROR: -class option needs parameter." 1>&2
			exit 1
		fi
		PKGCLASSNAME=$1
	elif [ "X$1" = "X-disttype" ]; then
		shift
		if [ $# -eq 0 ]; then
			echo "ERROR: -disttype option needs parameter." 1>&2
			exit 1
		fi
		OS_VERSION_NAME=$1
		# Strip the "ubuntu/" or "debian/" prefix (case-insensitive) and
		# remember which distribution family was requested.
		echo "${OS_VERSION_NAME}" | grep -i 'ubuntu' >/dev/null 2>&1
		if [ $? -eq 0 ]; then
			IS_OS_UBUNTU=1
			OS_VERSION_NAME=$(echo "${OS_VERSION_NAME}" | sed 's#[Uu][Bb][Uu][Nn][Tt][Uu]/##g')
		else
			echo "${OS_VERSION_NAME}" | grep -i 'debian' >/dev/null 2>&1
			if [ $? -ne 0 ]; then
				echo "ERROR: -disttype option parameter must be ubuntu or debian." 1>&2
				exit 1
			fi
			IS_OS_UBUNTU=0
			OS_VERSION_NAME=$(echo "${OS_VERSION_NAME}" | sed 's#[Dd][Ee][Bb][Ii][Aa][Nn]/##g')
		fi
	elif [ "X$1" = "X-y" ]; then
		IS_INTERACTIVE=0
		DH_MAKE_AUTORUN_OPTION="-y"
	else
		# Everything unrecognized is passed through to debuild.
		if [ "X${DEBUILD_OPT}" != "X" ]; then
			DEBUILD_OPT="${DEBUILD_OPT} $1"
		else
			DEBUILD_OPT="$1"
		fi
	fi
	shift
done

#
# Package name
#
if [ "X${PACKAGE_NAME}" = "X" ]; then
	PACKAGE_NAME=$(grep "^PACKAGE_NAME" "${SRCTOP}/Makefile" 2>/dev/null | awk '{print $3}' 2>/dev/null)
	if [ "X${PACKAGE_NAME}" = "X" ]; then
		echo "ERROR: no product name" 1>&2
		exit 1
	fi
fi

#
# Welcome message and confirming for interactive mode
#
if [ "${IS_INTERACTIVE}" -eq 1 ]; then
	echo "---------------------------------------------------------------"
	echo " Do you change these file and commit to github?"
	echo "  - ChangeLog    modify / add changes like dch tool format"
	echo "  - Git TAG      stamp git tag for release"
	echo "---------------------------------------------------------------"
	while true; do
		echo "Confirm: [y/n] " | tr -d '\n'
		read -r CONFIRM
		if [ "X${CONFIRM}" = "XY" ] || [ "X${CONFIRM}" = "Xy" ]; then
			break;
		elif [ "X${CONFIRM}" = "XN" ] || [ "X${CONFIRM}" = "Xn" ]; then
			echo "Bye..."
			exit 1
		fi
	done
	echo ""
fi

#
# Make dist package by make dist
#
"${SRCTOP}/autogen.sh" || exit 1
"${SRCTOP}/configure" "${CONFIGUREOPT}" || exit 1

PACKAGE_VERSION=$("${MYSCRIPTDIR}/make_variables.sh" -pkg_version)
# shellcheck disable=SC2034
PACKAGE_MAJOR_VER=$("${MYSCRIPTDIR}/make_variables.sh" -major_number)

echo "===== make dist: start =============================="
make dist || exit 1
echo "===== make dist: end =============================="

#
# Create debian package directory and change current
#
echo "===== prepare working directory: start ============="
if [ -f "${BUILDDEBDIR}" ]; then
	echo "ERROR: debian file exists, could not make debian directory." 1>&2
	exit 1
fi
if [ -d "${BUILDDEBDIR}" ]; then
	# (fixed typo: was "WANING")
	echo "WARNING: debian directory exists, remove and remake it." 1>&2
	rm -rf "${BUILDDEBDIR}" || exit 1
fi
mkdir "${BUILDDEBDIR}" || exit 1
cd "${BUILDDEBDIR}" || exit 1

#
# copy dist package and expand source files
#
cp "${SRCTOP}/${PACKAGE_NAME}-${PACKAGE_VERSION}.tar.gz" . || exit 1
tar xvfz "${PACKAGE_NAME}-${PACKAGE_VERSION}.tar.gz" || exit 1

#
# change current directory
#
EXPANDDIR="${BUILDDEBDIR}/${PACKAGE_NAME}-${PACKAGE_VERSION}"
cd "${EXPANDDIR}" || exit 1

#
# initialize debian directory
#
if [ "X${LOGNAME}" = "X" ] && [ "X${USER}" = "X" ]; then
	# [NOTE]
	# if run in docker container, Neither LOGNAME nor USER may be set in the environment variables.
	# dh_make needs one of these environments.
	#
	export USER="root"
	export LOGNAME="root"
fi
dh_make -f "${BUILDDEBDIR}/${PACKAGE_NAME}-${PACKAGE_VERSION}.tar.gz" --createorig --"${PKGCLASSNAME}" "${DH_MAKE_AUTORUN_OPTION}" || exit 1

#
# remove unnecessary template files
#
rm -rf "${EXPANDDIR}"/debian/*.ex "${EXPANDDIR}"/debian/*.EX "${EXPANDDIR}/debian/${PACKAGE_NAME}"-doc.* "${EXPANDDIR}"/debian/README.* "${EXPANDDIR}"/debian/docs "${EXPANDDIR}"/debian/*.install

#
# adding some lines into rules file
#
mv "${EXPANDDIR}/debian/rules" "${EXPANDDIR}/debian/rules.base"
head -1 "${EXPANDDIR}/debian/rules.base" > "${EXPANDDIR}/debian/rules" || exit 1
sed '/^#/d' "${EXPANDDIR}/debian/rules.base" | sed '/^$/{N; /^\n$/D;}' >> "${EXPANDDIR}/debian/rules" || exit 1
echo "" >> "${EXPANDDIR}/debian/rules" || exit 1
echo "# for ${PACKAGE_NAME} package" >> "${EXPANDDIR}/debian/rules" || exit 1
echo "override_dh_auto_install:" >> "${EXPANDDIR}/debian/rules" || exit 1
echo "	dh_auto_install --destdir=debian/${PACKAGE_NAME}" >> "${EXPANDDIR}/debian/rules" || exit 1
if [ "X${CONFIGUREOPT}" != "X" ]; then
	echo "" >> "${EXPANDDIR}/debian/rules" || exit 1
	echo "override_dh_auto_configure:" >> "${EXPANDDIR}/debian/rules" || exit 1
	echo "	dh_auto_configure -- ${CONFIGUREOPT}" >> "${EXPANDDIR}/debian/rules" || exit 1
fi
rm "${EXPANDDIR}/debian/rules.base"

#
# files for other
#
# All payload files install into the same directory; generate one .install
# line per file instead of repeating the echo 38 times (order preserved).
#
INSTALL_FILES="CREDIT \
	VERSION \
	certification.sh \
	command.sh \
	functions.sh \
	help.sh \
	options.sh \
	summary.sh \
	variables.sh \
	k8sapi.sh \
	configuration.sh \
	constvariables.sh \
	dbaas-k8s.config \
	k2hdkc.config \
	k2hr3.config \
	dbaas-k2hdkc-chmpxproc-wrap.sh \
	dbaas-k2hdkc.ini.templ \
	dbaas-k2hdkc-ini-update.sh \
	dbaas-k2hdkc-k2hr3-registration.sh \
	dbaas-k2hdkc-kustomization.yaml.templ \
	dbaas-k2hdkc-serverproc-wrap.sh \
	dbaas-k2hdkc-server.yaml.templ \
	dbaas-k2hdkc-slave.yaml.templ \
	dbaas-k2hdkc-variables-setup.sh \
	k2hr3-sa.yaml.templ \
	k2hr3-kustomization.yaml.templ \
	k2hr3-k2hr3api.yaml.templ \
	k2hr3-k2hr3app.yaml.templ \
	k2hr3-k2hdkc.yaml.templ \
	k2hr3-k2hdkc.ini.templ \
	k2hr3-k2hdkc-ini-update.sh \
	k2hr3-api-nodeport.yaml.templ \
	k2hr3-api-production.json.templ \
	k2hr3-api-wrap.sh \
	k2hr3-app-nodeport.yaml.templ \
	k2hr3-app-production.json.templ \
	k2hr3-app-wrap.sh \
	k2hr3-app-init.sh"
for _install_file in ${INSTALL_FILES}; do
	echo "src/libexec/database-k8s/${_install_file}	usr/libexec/k2hr3/database-k8s" >> "${EXPANDDIR}/debian/${PACKAGE_NAME}.install" || exit
done

#
# copy copyright
#
cp "${MYSCRIPTDIR}/copyright" "${EXPANDDIR}/debian/copyright" || exit 1

#
# copy control file
#
cp "${MYSCRIPTDIR}/control" "${EXPANDDIR}/debian/control" || exit 1

#
# copy changelog with converting build number
#
CHLOG_ORG_MENT=$(grep "^ --" ChangeLog | head -1)
CHLOG_NEW_MENT=$(grep "^ --" "${EXPANDDIR}/debian/changelog" | head -1)
if [ "X${BUILD_NUMBER}" = "X" ]; then
	if [ ${IS_OS_UBUNTU} -eq 1 ]; then
		sed -e "s/${CHLOG_ORG_MENT}/${CHLOG_NEW_MENT}/g" -e "s/ trusty;/ ${OS_VERSION_NAME};/g" < ChangeLog > "${EXPANDDIR}/debian/changelog" || exit 1
	else
		sed -e "s/${CHLOG_ORG_MENT}/${CHLOG_NEW_MENT}/g" -e 's/ trusty;/ unstable;/g' < ChangeLog > "${EXPANDDIR}/debian/changelog" || exit 1
	fi
else
	if [ ${IS_OS_UBUNTU} -eq 1 ]; then
		sed -e "s/${PACKAGE_VERSION}/${PACKAGE_VERSION}-${BUILD_NUMBER}/g" -e "s/${CHLOG_ORG_MENT}/${CHLOG_NEW_MENT}/g" -e "s/ trusty;/ ${OS_VERSION_NAME};/g" < ChangeLog > "${EXPANDDIR}/debian/changelog" || exit 1
	else
		sed -e "s/${PACKAGE_VERSION}/${PACKAGE_VERSION}-${BUILD_NUMBER}/g" -e "s/${CHLOG_ORG_MENT}/${CHLOG_NEW_MENT}/g" -e "s/ trusty;/ unstable;/g" < ChangeLog > "${EXPANDDIR}/debian/changelog" || exit 1
	fi
fi
if [ ! -f "${EXPANDDIR}/debian/compat" ]; then
	echo "9" > "${EXPANDDIR}/debian/compat"
fi
echo "===== prepare working directory: end ==============="

#
# change debian directory to source top directory
#
if [ ${IS_ROOTDIR} -eq 1 ]; then
	if [ -f "${SRCTOP}/debian" ]; then
		echo "ERROR: ${SRCTOP}/debian file exists, could not make debian directory." 1>&2
		exit 1
	fi
	if [ -d "${SRCTOP}/debian" ]; then
		echo "${SRCTOP}/debian directory exists, remove and remake it..." 1>&2
		rm -rf "${SRCTOP}/debian" || exit 1
	fi
	cp -rp "${EXPANDDIR}/debian" "${SRCTOP}/." || exit 1

	#
	# change current directory
	#
	cd "${SRCTOP}" || exit 1

	#
	# base directory is changed
	#
	BUILDDEBDIR=${SRCTOP}
fi

#
# Check stop before debuild(for manually)
#
if [ ${IS_DEBUILD} -ne 1 ]; then
	#
	# Not run debuild (this means just stop preparing the file)
	#
	echo "MESSAGE: ${PRGNAME} exits immediately before debuild is executed,"
	echo "         that is, it prepares only files and directories."
	echo "         By running \"debuild -uc -us(-tc -b)\", you can create"
	echo "         the debian package manually and find the created package"
	echo "         in \"${BUILDDEBDIR}/..\" directory."
	echo ""
	exit 0
fi

#
# Run debuild
#
echo "===== build package: start ========================="
debuild -us -uc || exit 1
echo "===== build package: end ==========================="

#
# Check and show debian package
#
ls "${BUILDDEBDIR}/${PACKAGE_NAME}_${PACKAGE_VERSION}-${BUILD_NUMBER}"*.deb >/dev/null 2>&1
if [ $? -eq 0 ]; then
	echo "===== show ${BUILDDEBDIR}/${PACKAGE_NAME}_${PACKAGE_VERSION}-${BUILD_NUMBER}*.deb package: start ====="
	dpkg -c "${BUILDDEBDIR}/${PACKAGE_NAME}_${PACKAGE_VERSION}-${BUILD_NUMBER}"*.deb
	echo ""
	dpkg -I "${BUILDDEBDIR}/${PACKAGE_NAME}_${PACKAGE_VERSION}-${BUILD_NUMBER}"*.deb
	echo "===== show ${BUILDDEBDIR}/${PACKAGE_NAME}_${PACKAGE_VERSION}-${BUILD_NUMBER}*.deb package: end ====="
fi

#
# finish
#
echo ""
echo "You can find ${PACKAGE_NAME} ${PACKAGE_VERSION}-${BUILD_NUMBER} version debian package in ${BUILDDEBDIR} directory."
echo ""

exit 0

#
# Local variables:
#	tab-width: 4
#	c-basic-offset: 4
# End:
# vim600: noexpandtab sw=4 ts=4 fdm=marker
# vim<600: noexpandtab sw=4 ts=4
#
import styled from "styled-components";
import { colors } from "renderer/style/theme";

// Flex wrapper that centers the app logo; the nested img rule sizes the logo.
// NOTE(review): `grid-area: logo` has no effect inside this flex container
// unless an ancestor grid assigns the area — verify it is still needed.
export const Container = styled.div`
  display: flex;
  align-items: center;
  justify-content: center;

  img {
    grid-area: logo;
    height: 1.2rem;
  }
`;

// Title text rendered next to the logo, separated by a thin vertical rule.
export const Title = styled.span`
  line-height: 26px;
  border-left: 1px solid ${colors.mutedTextDark};
  color: ${colors.title};
  margin-left: 1.1em;
  padding-left: 1.1em;
  font-size: 1.4em;
  font-weight: 600;
`;
package interpreter; import commands.*; import expressions.Expression; import java.util.Arrays; public class MyInterpreter { public MyInterpreter() { Utilities.setCommand("print", new PrintCommand()); Utilities.setCommand("return", new ReturnCommand()); Utilities.setCommand("sleep", new SleepCommand()); Utilities.setCommand("var", new DefineVarCommand()); Utilities.setCommand("=", new PlacementCommand()); Utilities.setCommand("while", new WhileCommand()); Utilities.setCommand("openDataServer", new OpenServerCommand()); Utilities.setCommand("connect", new ConnectCommand()); Utilities.setCommand("disconnect", new DisconnectCommand()); } public int start(String text) { Expression result = null; Parser parser = new Parser(); Parser.ParsedData info = parser.parse(Arrays.asList(text.split("\\n"))); if (!info.errors.isEmpty()){ System.out.println("List of Errors:"); System.out.println(info.errors.toString()); return -1; } while (!info.cmdQ.isEmpty()) { try { Command cmd = info.cmdQ.poll(); result = cmd.execute(info.argsQ.poll()); } catch (Exception e) { e.printStackTrace(); } } try { Thread.sleep(110); } catch (InterruptedException e) { e.printStackTrace(); } if (result != null) return (int) result.calculate(); else return -1; } public void stop() { Utilities.stop = true; } }
<reponame>adamsrsen/watchinsync<filename>pages/404.tsx
import {Component} from 'react'
import Head from 'next/head'
import CenteredContent from '../components/CenteredContent'
import Header from '../components/Header'
import User from '../objects/User'
import FadeAnimation from '../components/FadeAnimation'

// Props injected by the app shell: the current user and a setter that the
// shared Header uses to update it.
// NOTE(review): `Function` is an untyped escape hatch — the precise setter
// signature should be confirmed against the caller and narrowed.
interface Props {
  user: User
  setUser: Function
}

/**
 * Custom Next.js 404 page: standard head/header chrome around a
 * fade-in "Page not found" message.
 */
export default class Error404 extends Component<Props> {
  render() {
    return (
      <div>
        <Head>
          <title>404: Page not found - WatchInSync</title>
          <link rel="icon" href="/favicon.ico" />
        </Head>
        <Header user={this.props.user} setUser={this.props.setUser} />
        <FadeAnimation>
          <CenteredContent>
            <h2 className="title">404</h2>
            <p>Page not found</p>
          </CenteredContent>
        </FadeAnimation>
      </div>
    )
  }
}
package elasta.orm.delete.loader.impl;

import com.google.common.collect.ImmutableList;
import elasta.core.promise.intfs.Promise;
import elasta.orm.delete.loader.DependencyDataLoader;
import elasta.sql.core.ColumnToColumnMapping;
import elasta.sql.SqlDB;
import elasta.sql.core.*;
import elasta.orm.upsert.TableData;

import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * Loads rows of a dependent table that are linked to a parent row through an
 * intermediate relation (join) table, for use during cascading deletes.
 * <p>
 * Created by sohan on 3/6/2017.
 */
final public class IndirectDependencyDataLoaderImpl implements DependencyDataLoader {
    // Table whose rows (indirectly) depend on the parent row being deleted.
    final String dependentTable;
    // Link table joining the parent table to the dependent table.
    final String relationTable;
    // NOTE(review): stored and null-checked but never read anywhere in this
    // class — confirm whether this is dead state or needed by a caller.
    final ColumnToColumnMapping[] srcColumnMappings;
    // Mappings used both to join dependentTable->relationTable and to filter
    // the relation table by the parent row's key values.
    final ColumnToColumnMapping[] dstColumnMappings;
    // Primary-key columns of the dependent table (carried into each TableData).
    final String[] primaryColumns;
    // Additional columns to select from the dependent table.
    final String[] columns;
    final SqlDB sqlDB;

    public IndirectDependencyDataLoaderImpl(String dependentTable, String relationTable, ColumnToColumnMapping[] srcColumnMappings, ColumnToColumnMapping[] dstColumnMappings, String[] primaryColumns, String[] columns, SqlDB sqlDB) {
        Objects.requireNonNull(dependentTable);
        Objects.requireNonNull(relationTable);
        Objects.requireNonNull(srcColumnMappings);
        Objects.requireNonNull(dstColumnMappings);
        Objects.requireNonNull(primaryColumns);
        Objects.requireNonNull(columns);
        Objects.requireNonNull(sqlDB);
        this.dependentTable = dependentTable;
        this.relationTable = relationTable;
        this.srcColumnMappings = srcColumnMappings;
        this.dstColumnMappings = dstColumnMappings;
        this.primaryColumns = primaryColumns;
        this.columns = columns;
        this.sqlDB = sqlDB;
    }

    @Override
    public String dependentTable() {
        return dependentTable;
    }

    /**
     * Queries the dependent table joined with the relation table, filtered by
     * the parent row's key values, and wraps each result row in a TableData.
     */
    @Override
    public Promise<List<TableData>> load(TableData parentTableData) {
        final String alias = "t";
        final String relationTableAlias = "r";

        // WHERE criteria on the relation table: each dst column must equal the
        // corresponding value taken from the parent row.
        ImmutableList.Builder<SqlCriteria> listBuilder = ImmutableList.builder();

        for (ColumnToColumnMapping dstColumnMapping : dstColumnMappings) {
            listBuilder.add(
                new SqlCriteria(
                    dstColumnMapping.getDstColumn(),
                    parentTableData.getValues().getValue(dstColumnMapping.getSrcColumn()),
                    relationTableAlias
                )
            );
        }

        return sqlDB.query(
            sqlSelections(alias, primaryColumns, columns),
            new SqlFrom(dependentTable, alias),
            sqlJoins(relationTable, alias, relationTableAlias, dstColumnMappings),
            listBuilder.build()
        ).map(resultSet -> resultSet.getRows().stream().map(jo -> new TableData(dependentTable, primaryColumns, jo))
            .collect(Collectors.toList()));
    }

    // Builds the single INNER JOIN from the dependent table (parentAlias) to
    // the relation table (alias) over the dst column mappings.
    private List<SqlJoin> sqlJoins(String relationTable, String parentAlias, String alias, ColumnToColumnMapping[] dstColumnMappings) {

        ImmutableList.Builder<SqlJoinColumn> listBuilder = ImmutableList.builder();

        for (ColumnToColumnMapping dstColumnMapping : dstColumnMappings) {
            listBuilder.add(
                new SqlJoinColumn(
                    dstColumnMapping.getDstColumn(),
                    dstColumnMapping.getSrcColumn(),
                    Optional.of(parentAlias)
                )
            );
        }

        return Arrays.asList(
            new SqlJoin(
                JoinType.INNER_JOIN,
                relationTable,
                alias,
                listBuilder.build()
            )
        );
    }

    // SELECT list: primary-key columns first, then the extra columns, all
    // qualified with the dependent-table alias.
    private List<SqlSelection> sqlSelections(String alias, String[] primaryColumns, String[] columns) {

        ImmutableList.Builder<SqlSelection> listBuilder = ImmutableList.builder();

        for (String primaryColumn : primaryColumns) {
            listBuilder.add(
                new SqlSelection(primaryColumn, alias)
            );
        }

        for (String column : columns) {
            listBuilder.add(
                new SqlSelection(column, alias)
            );
        }

        return listBuilder.build();
    }
}
#########################################
# Summary
#########################################


@timer
def classify(text):
    """Classify ``text`` by running it through a cascade of classifiers.

    The query is preprocessed into several variants (typo-corrected,
    sanitized, named-entities substituted), each fed to a different
    classifier; the first classifier whose confidence exceeds 0.80 wins.

    Args:
        text: Raw input string to classify.

    Returns:
        Whatever ``Pipeline.first_with_probability_above`` yields for the
        first classification with probability above 0.80.
    """
    query = QueryFactory(text)
    # BUG FIX: the original split this fluent chain across lines with no
    # parentheses or backslashes, which is a SyntaxError in Python.
    # Wrapping the chain in parentheses enables implicit line continuation.
    pipeline = (
        Pipeline()
        .naive_bayes_classifier(query.typo_corrected)
        .string_similarity_classifier(query.sanitized)
        .naive_bayes_classifier(query.named_entities_substituted)
        .rnn_classifier(query.sanitized)
    )
    return pipeline.first_with_probability_above(.80)
<filename>matlab/reconstruction/grid3/grid3_AVS.cpp /************************************************************************** * GRID3_AVS.C * * Author: <NAME> * Date: 2011 apr 11 * Rev: 2011 apr 11 * * Summary: AVS module that interpolates 3-D non-cartesian samples onto a * cartesian grid. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. This code * is for research and academic purposes and is not intended for * clinical use. * **************************************************************************/ #include <avs/avs.h> #include <avs/port.h> #include <avs/field.h> #include <stdio.h> /* for printf etc. */ #include <assert.h> extern "C" { #include "threads.c" #include "grid_utils.c" } /* copy (memcpy) the data of an AVSfield_double to a dataArray_double */ dataArray_double *AVSfield_2_dataArray(AVSfield_double *in) { assert(in != NULL); /* copy input array dimensions, copy veclen as a new dimension */ unsigned long *dim = (unsigned long*) malloc( sizeof(unsigned long)*((in->ndim)+1) ); for(int i=0;i<(in->ndim);i++) dim[i+1] = in->dimensions[i]; dim[0] = in->veclen; /* allocate new dataArray_double */ dataArray_double *out = new_dataArray_double((in->ndim)+1, dim); free(dim); /* copy the data */ memcpy(out->data, in->data, sizeof(double)*out->num_elem); return(out); } /* copy (memcpy) the data of a dataArray_double to an AVSfield_double * -assumes 2vec input */ AVSfield_double *dataArray_2_AVSfield(dataArray_double *in) { assert(in != NULL); assert(in->dimensions[0] == 2); /* veclen */ /* copy input array dimensions, copy veclen as a new dimension */ int *dim = (int*) malloc( sizeof(int)*((in->nd)) ); for(int i=0;i<(in->nd)-1;i++) dim[i] = in->dimensions[i+1]; /* allocate new dataArray_double */ char field_str[100]; /* generate a data type description for AVS */ sprintf(field_str,"field %dD %d-vector uniform double", 3, 2); 
AVSfield_double *out = (AVSfield_double *) AVSdata_alloc(field_str, dim);/* let AVS allocate array */ free(dim); /* copy the data */ memcpy(out->data, in->data, sizeof(double)*(in->num_elem)); return(out); } /*///////////////////////////////////////////////////////////////////////////////////// */ /*///////////////////////////////////////////////////////////////////////////////////// */ /* BEGIN Module Compute Routine */ int module_compute( AVSfield *data, AVSfield *coords, AVSfield *weights, AVSfield *table, AVSfield **out, float *radiusFOVproduct, int width_in, float *OverSampFact, float *windowLength, float *taper_start, float *taper_length, int num_threads, int veclen, /* if data is not present */ int compute) { /* check to see if the body should run or not */ if(!compute) { /* don't signal downstream modules */ AVSmark_output_unchanged("spectrum"); return(1); } /* grid size */ int width = (float)width_in * (*OverSampFact); /* allocate kernel table */ unsigned long dim[1]; dim[0] = DEFAULT_KERNEL_TABLE_SIZE; dataArray_double *kern = (dataArray_double*) new_dataArray_double(1,dim); for(long i=0;i<kern->num_elem;i++) kern->data[i]=0.; loadGrid3Kernel(kern); /* make dataArray_double copies of input data */ dataArray_double *data_tmp = AVSfield_2_dataArray( (AVSfield_double*)data ); dataArray_double *crds_tmp = AVSfield_2_dataArray( (AVSfield_double*)coords ); dataArray_double *wght_tmp = NULL; if (weights != NULL) wght_tmp = AVSfield_2_dataArray( (AVSfield_double*)weights ); unsigned long dims[4]; dims[0] = 2; /* complex */ dims[1] = width; dims[2] = width; dims[3] = width; dataArray_double *out_tmp = new_dataArray_double(4,dims); /* choose either octant split gridding or single thread */ if(num_threads == 8) { /* GRID3_threaded */ /* 1) map threads */ dataArray_double *threadMask = NULL; dataArray_double *centerPts = NULL; int partsize = -1; double rfp_d = *radiusFOVproduct; partitionGrid_octants(crds_tmp, rfp_d, width, &threadMask, &centerPts, &partsize); /* 2) 
grid it */ grid3_threaded(data_tmp, crds_tmp, wght_tmp, out_tmp, kern, *radiusFOVproduct, *windowLength, num_threads, threadMask, centerPts, partsize); /* free tmp data */ free_dataArray_double(threadMask); free_dataArray_double(centerPts); } else { /* GRID3 non-threaded */ int nt = 1; int ct = 0; double rfp_d = *radiusFOVproduct; double win_d = *windowLength; grid3 ( &nt, &ct, data_tmp, crds_tmp, wght_tmp, &out_tmp, kern, NULL, NULL, &width, &rfp_d, &win_d ); } /* allocate output array */ if(*out != NULL) AVSfield_free((AVSfield*)*out); *out = (AVSfield *)dataArray_2_AVSfield(out_tmp); /* free kernel table */ free_dataArray_double(kern); free_dataArray_double(out_tmp); return 1; } /* END Module Compute Routine */ /*///////////////////////////////////////////////////////////////////////////////////// */ /*///////////////////////////////////////////////////////////////////////////////////// */ /*///////////////////////////////////////////////////////////////////////////////////// */ /* BEGIN Module Specification */ static int module_desc() { int in_port, out_port, param; /* Name and categorize the module. 
The name is automatically */ /* generated by the filename */ char filename[100]; sprintf(filename, "%s", __FILE__); /* MODULE_DATA MODULE_FILTER MODULE_MAPPER MODULE_RENDER */ AVSset_module_name(strtok(filename,"."), MODULE_MAPPER); /* INPUT PORTS ****************** */ in_port = AVScreate_input_port("data","field 2-vector double", REQUIRED); in_port = AVScreate_input_port("coords","field 3-vector double", REQUIRED); in_port = AVScreate_input_port("weighting","field 1-vector double", REQUIRED); in_port = AVScreate_input_port("kernel table in","field 1D 1-vector", OPTIONAL | INVISIBLE); /* END INPUT PORTS ************** */ /* OUTPUT PORTS ***************** */ out_port = AVScreate_output_port("gridded","field double"); /* END OUTPUT PORTS ************* */ /* PARAMETERS ******************* */ param = AVSadd_float_parameter("radius fov prod",DEFAULT_RADIUS_FOV_PRODUCT,0.001,FLOAT_UNBOUND); AVSconnect_widget(param, "typein_real"); param = AVSadd_parameter("width","integer",160,0,INT_UNBOUND); AVSconnect_widget(param, "typein_integer"); param = AVSadd_float_parameter ("OverSampFact", 1., 1., FLOAT_UNBOUND); AVSconnect_widget (param, "typein_real"); param = AVSadd_float_parameter("window length",1.0,.0001,FLOAT_UNBOUND); AVSconnect_widget(param, "typein_real"); param = AVSadd_float_parameter("taper start",0,0,100); AVSconnect_widget(param, "typein_real"); param = AVSadd_float_parameter("taper length",0,0,100); AVSconnect_widget(param, "typein_real"); param = AVSadd_parameter("number threads","integer",8,1,8); AVSconnect_widget(param, "typein_integer"); param = AVSadd_parameter("veclen","integer",2,1,2); AVSconnect_widget(param, "typein_integer"); param = AVSadd_parameter("compute","boolean",0,0,1); AVSconnect_widget(param, "toggle"); /* END PARAMETERS **************** */ /* send function pointer to compute module */ AVSset_compute_proc((AVS_FNCP)module_compute); /*to keep warning suppressed */ param = in_port = out_port = 0; return(1); } /* END Module Specification */ 
/*///////////////////////////////////////////////////////////////////////////////////// */ /* AVS module instantiation */ extern "C" { /* instantiate module */ void AVSinit_modules() { AVSmodule_from_desc( module_desc ); } }
/*
 * MIT License
 *
 * Copyright (c) 2021 <NAME>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package net.jamsimulator.jams.mips.memory.cache;

import net.jamsimulator.jams.mips.memory.Memory;

import java.util.Optional;

/**
 * Represents a cache layered on top of a {@link Memory}.
 */
public interface Cache extends Memory {

    /**
     * Returns the {@link CacheBuilder} of this cache.
     *
     * @return the {@link CacheBuilder} of this cache.
     */
    CacheBuilder<?> getBuilder();

    /**
     * Returns the amount of blocks inside this cache.
     *
     * @return the amount of blocks.
     */
    int getBlocksAmount();

    /**
     * Returns the size of each block inside this cache in words.
     *
     * @return the size of each block in words.
     */
    int getBlockSize();

    /**
     * Returns the size of each tag inside this cache in bits.
     *
     * @return the size of each tag in bits.
     */
    int getTagSize();

    /**
     * Returns a new {@link CacheStats} instance containing all
     * stats of this cache at this moment.
     * <p>
     * This instance won't be updated if any operation is made on the cache.
     *
     * @return the {@link CacheStats}.
     */
    CacheStats getStats();

    /**
     * Resets this cache.
     * <p>
     * This method also flushes any pending change.
     */
    void resetCache();

    /**
     * Writes to the next step memory any pending change.
     */
    void flush();

    /**
     * This method should be used only by undo operations.
     * <p>
     * Removes an operation and sets the given block at the given index.
     * If the parameter hit is true, this method removes one hit too.
     *
     * @param hit        whether a hit should be subtracted.
     * @param blockIndex the block index.
     * @param old        the old {@link CacheBlock}.
     */
    void undoOperation(boolean hit, int blockIndex, CacheBlock old);

    /**
     * This method should be used only by undo operations.
     * <p>
     * Sets the stats of this cache.
     *
     * @param operations the operations.
     * @param hits       the hits.
     */
    void forceStats(long operations, long hits);

    /**
     * Returns the block at the given index, if present.
     * <p>
     * This method should be used only for representation purposes. Modifying the cache blocks
     * would cause unpredictable behaviours.
     *
     * @param index the index of the block.
     * @return the block.
     */
    Optional<CacheBlock> getCacheBlock(int index);
}
import React, { useState } from 'react'; import { View, Text, TextInput, Button } from 'react-native'; const DietTracker = () => { const [goalList, setGoalList] = useState([]); const [goalText, setGoalText] = useState(''); const addGoal = () => { const newGoal = { goal: goalText }; setGoalList([...goalList, newGoal]); setGoalText(''); }; return ( <View> <TextInput value={goalText} onChangeText={setGoalText} placeholder="Add new diet goal" /> <Button title="Add Goal" onPress={addGoal} /> <View> {goalList.map(goal => ( <Text>{goal.goal}</Text> ))} </View> </View> ); }; export default DietTracker;
<!-- Account registration: basic identity details -->
<form id="registration" action="register.php" method="POST">
  <input type="text" name="name" required>
  <input type="email" name="email" required>
  <input type="password" name="password" required>
  <input type="text" name="phone" required>
  <input type="text" name="country" required>
  <input type="submit" value="Register">
</form>

<!-- Shipping address -->
<form id="shipping" action="shipping.php" method="POST">
  <input type="text" name="address" required>
  <input type="text" name="city" required>
  <input type="text" name="state" required>
  <input type="text" name="zip" required>
  <input type="submit" value="Submit Shipping Details">
</form>

<!-- Payment card details -->
<form id="payment" action="payment.php" method="POST">
  <input type="text" name="name" required>
  <input type="text" name="card_number" required>
  <input type="text" name="expiration_date" required>
  <input type="text" name="secure_code" required>
  <input type="text" name="billing_address" required>
  <input type="submit" value="Confirm Payment">
</form>

<!-- Communication preferences -->
<form id="preferences" action="preferences.php" method="POST">
  <!-- BUG FIX: the label's for="newsletter" had no matching id on the input,
       so clicking the label did not toggle the checkbox -->
  <input type="checkbox" id="newsletter" name="newsletter" value="yes" required>
  <label for="newsletter">Subscribe to Newsletter</label>
  <input type="submit" value="Submit Preferences">
</form>

<!-- Final confirmation -->
<form id="confirmation" action="confirm.php" method="POST">
  <!-- BUG FIX: id added so the label's for="tos" actually associates -->
  <input type="checkbox" id="tos" name="tos" value="yes" required>
  <label for="tos">Accept Terms of Service</label>
  <input type="submit" value="Confirm Registration">
</form>
package com.zlikun.spring.dto;

/**
 * Data-transfer object for user information.
 * <p>
 * NOTE(review): currently empty — no fields or accessors are declared;
 * confirm whether this is a placeholder awaiting fields or dead code.
 *
 * @author zlikun <<EMAIL>>
 * @date 2018/8/12 10:34
 */
public class UserInfo {
}
// NOTE(review): this looks like auto-generated Doxygen navigation data (a
// member index entry pointing into the corresponding .xhtml page) — confirm,
// and if so regenerate rather than editing by hand.
var structarmnn_1_1_layer_type_of_impl_3_01_layer_type_1_1_convert_fp32_to_fp16_01_4 =
[
    [ "Type", "structarmnn_1_1_layer_type_of_impl_3_01_layer_type_1_1_convert_fp32_to_fp16_01_4.xhtml#a910efd594415f2cb2ec6981a8890f3b8", null ]
];
import java.util.HashMap; import java.util.Map; class SceneState { private Map<String, Object> stateData; public SceneState() { stateData = new HashMap<>(); } public void saveState(Map<String, Object> sceneState) { stateData = new HashMap<>(sceneState); } public void releaseResources() { stateData.clear(); } public Map<String, Object> restoreState() { return new HashMap<>(stateData); } }
<reponame>community-boating/cbidb-public-web<gh_stars>1-10
import * as t from 'io-ts';
import APIWrapper from '@core/APIWrapper';
import { HttpMethod } from "@core/HttpMethod";

const path = "/static-yearly-data"

// TODO: original note read "shouldnt be jscon" — presumably "shouldn't be
// JSON"; confirm the intended response format before changing the validator.
/**
 * GET wrapper for the static yearly data endpoint. The response is validated
 * as `{ data: { rows: any[][] } }`; each inner array is one untyped row.
 */
export const apiw = new APIWrapper({
	path,
	type: HttpMethod.GET,
	resultValidator: t.type({
		data: t.type({
			rows: t.array(t.array(t.any))
		})
	}),
})
package graph.search;

import graph.api.GraphApi;
import graph.search.api.Search;

/**
 * {@link Search} implementation that answers queries straight from the
 * adjacency list of a single base vertex — no traversal is performed.
 */
public class DirectlySearch implements Search {

    private final GraphApi graph;
    private final Integer sourceVertex;

    public DirectlySearch(GraphApi graph, int baseVertex) {
        this.graph = graph;
        this.sourceVertex = baseVertex;
    }

    /** A vertex is "marked" iff it is directly adjacent to the base vertex. */
    @Override
    public boolean marked(int inputVertex) {
        return graph.getAdjacentVertices(sourceVertex).contains(inputVertex);
    }

    /** Number of vertices directly adjacent to the base vertex. */
    @Override
    public int count() {
        return graph.getAdjacentVertices(sourceVertex).size();
    }
}
<reponame>duanduan2288/golang // Copyright 2013 The StudyGolang Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // http://studygolang.com // Author:polaris <EMAIL> package model_test import ( "encoding/json" . "model" "testing" ) func TestNewUserLogin(t *testing.T) { user := NewUser() userList, err := user.FindAll() for _, tmpUser := range userList { t.Log(tmpUser.Name) t.Log("===") } if err == nil { t.Fatal(err) } } func testInsert(t *testing.T) { userLogin := NewUserLogin() userData := `{"uid":"1111","username":"poalris","email":"<EMAIL>","passwd":"<PASSWORD>"}` json.Unmarshal([]byte(userData), userLogin) // err := userLogin.Find() affectedNum, err := userLogin.Insert() if err != nil { t.Fatal(err) } t.Log(affectedNum) }
# Prints whether +word+ reads the same forwards and backwards.
def palindrome_check(word)
  if word == word.reverse
    puts "#{word} is a palindrome"
  else
    puts "#{word} is not a palindrome"
  end
end

word = "madam"
palindrome_check(word)