text
stringlengths
1
1.05M
const mysql = require('mysql2/promise'); const dbconfig = require('../config/database'); const pool = mysql.createPool(dbconfig); // ๋กœ๊ทธ์ธ ์ƒํƒœ์ธ์ง€ // ๋˜๋Š” // ๊ฐ ์œ ์ €์˜ ์ข…๋ฅ˜๋ฅผ ํ™•์ธ exports.isLoggedIn = (req, res, next) => { if (req.isAuthenticated()) { next(); } else { res.status(403).send('๋กœ๊ทธ์ธ์ด ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค'); } }; exports.isNotLoggedIn = (req, res, next) => { if (!req.isAuthenticated()) { next(); } else { res.redirect('/'); } }; exports.isAdmin = (req, res, next) => { if (req.user && req.user.type == 'admin') { next(); } else { res.render('error', { title: '์—๋Ÿฌ', message: '๊ด€๋ฆฌ์ž ๊ถŒํ•œ์ด ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค', error: {status: 403} }) // res.status(403).send('๊ด€๋ฆฌ์ž ๊ถŒํ•œ์ด ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค'); } }; exports.isFreelancer = (req, res, next) => { if (req.user && req.user.type == 'freelancer') { next(); } else { res.status(403).send('๋‹น์‹ ์€ ํ”„๋ฆฌ๋žœ์„œ๊ฐ€ ์•„๋‹™๋‹ˆ๋‹ค'); } }; exports.isClient = (req, res, next) => { if (req.user && req.user.type == 'client') { next(); } else { res.status(403).send('๋‹น์‹ ์€ ์˜๋ขฐ์ž๊ฐ€ ์•„๋‹™๋‹ˆ๋‹ค'); } }; // ํŒ€ ํŽ˜์ด์ง€๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๊ฒฝ์šฐ // ๋กœ๊ทธ์ธํ•œ ์‚ฌ์šฉ์ž๊ฐ€ ๊ทธ ํŒ€์˜ ํŒ€์žฅ์ธ์ง€ ํ™•์ธ exports.isMgr = async (req, res, next) => { const tname = req.params.tname; const conn = await pool.getConnection(async conn => conn); try { const [[exMgr]] = await conn.query( 'SELECT * FROM team WHERE mgr_id=? AND tname=?', [req.user.id, tname] ); conn.release(); if(exMgr) { next(); } else { res.status(403).send('๋‹น์‹ ์€ ํŒ€์žฅ์ด ์•„๋‹™๋‹ˆ๋‹ค'); } } catch (err) { conn.release(); console.log(req.params); console.error(err); res.status(403).send('Query error'); } }
<reponame>CSUFTitanRover/TitanRover2018 from socket import AF_INET, socket, SOCK_STREAM from threading import Thread import subprocess from time import sleep import smbus import time # for RPI version 1, use "bus = smbus.SMBus(0)" bus = smbus.SMBus(1) def acceptConnections(): while True: client, client_address = SERVER.accept() print("%s:%s has connected." % client_address) Thread(target=handle_client, args=(client, )).start() def StringToBytes(val): retVal = [] for c in val: retVal.append(ord(c)) return retVal def handle_reading_pressure(client, addr, value): block = bus.read_word_data(addr, value) client.send(block) def handle_activating_shock(client, addr, value): bus.write_i2c_block_data(int(addr), 0x00, StringToBytes(value)) client.send('Successfully activated shock ', value) def handle_client(client): while True: data = client.recv(4096) payload = data.split(',') addr = payload[0] value = payload[1] if payload[0] == '*': handle_reading_pressure(client, 30, value) if payload[0] == '+': handle_activating_shock(client, 30, value) clients = {} HOST = '' PORT = '6789' BUFSIZ = 4096 ADDR = (HOST, PORT) SERVER = socket(AF_INET, SOCK_STREAM) while True: try: SERVER.bind(ADDR) break except: subprocess.call( ' sudo lsof -t -i tcp:9090 | xargs kill -9', shell=True) if __name__ == "__main__": SERVER.listen(5) print("Waiting for connection...") ACCEPT_THREAD = Thread(target=acceptConnections) ACCEPT_THREAD.start() ACCEPT_THREAD.join() SERVER.close()
# ARGUMENT HANDLING ============================================================= if { [ "$1" = "-h" ] || [ "$1" = "--help" ]; }; then echo "Install libraries required to build the pharo VM under ubuntu. " exit 0; elif [ $# -gt 0 ]; then echo "--help/-h is the only argument allowed" exit 1; fi # INSTALL BUILD LIBRARIES ====================================================== sudo apt-get install cmake zip bash-completion ruby git xz-utils debhelper devscripts sudo apt-get install libc6-dev:i386 libasound2:i386 libasound2-dev:i386 libasound2-plugins:i386 libssl-dev:i386 libssl0.9.8:i386 libfreetype6-dev:i386 libx11-dev:i386 libsm-dev:i386 libice-dev:i386 sudo apt-get install build-essential gcc-multilib g++ # due to https://bugs.launchpad.net/ubuntu/+source/mesa/+bug/949606 we cannot directly install libgl1-mesa-dev:i386 sudo apt-get install libgl1-mesa-dev libgl1-mesa-glx:i386 sudo ln -s /usr/lib/i386-linux-gnu/mesa/libGL.so /usr/lib/i386-linux-gnu/libGL.so sudo ln -s /usr/lib/i386-linux-gnu/mesa/libGL.so.1 /usr/lib/i386-linux-gnu/mesa/libGL.so
<gh_stars>1-10 # -*- coding=utf-8 -*- import numpy as np import pandas as pd import tushare as ts def save_data_as_hdf5(): np.random.seed(50) data = np.random.randn(500000, 10) # print(data.shape) data = pd.DataFrame(data) print(data.head()) # 1- ๅฐ†ๆ•ฐๆฎๅญ˜ๅ‚จๅˆฐ HDF5 ไธญ hdf5 = pd.HDFStore('../data/random_number.h5', 'w') hdf5['data'] = data hdf5.close() # 2- ่ฏปๅ– HDF5 ๆ•ฐๆฎใ€‚็ฑปๅž‹ไธบ๏ผšpandas.io.pytables.HDFStore hdf5 = pd.HDFStore('../data/random_number.h5', 'r') data = hdf5['data'] print(data.head()) hdf5.close() if __name__ == '__main__': save_data_as_hdf5()
import { shallow } from 'enzyme'; import { createBrowserHistory } from 'history'; import * as React from 'react'; import { INITIAL_COMPANY_STATE } from '../../ducks/company'; import StockDetail, { IStockDetailProps } from './StockDetail'; jest.mock('../../components/StockChart', () => () => null); describe('StockDetail', () => { const symbol = 'TST'; const mockLoadCompany = jest.fn(); const mockMatchProp = { isExact: false, params: { symbol, }, path: `/stock/${symbol}`, url: `http://test.com/stock/${symbol}`, }; const props:IStockDetailProps = { company: INITIAL_COMPANY_STATE, history: createBrowserHistory(), loadCompany: mockLoadCompany, location: { hash: '', pathname: `/stock/${symbol}`, search: '', state: {}, }, match: mockMatchProp, }; const wrapper = shallow(<StockDetail {...props} />); it('renders without crashing', () => { expect(wrapper).toMatchSnapshot(); }); it('should try to load the company data for the symbol', () => { expect(mockLoadCompany).toBeCalledWith(symbol); }); it('should reload company data if symbol prop changes', () => { const newSymbol = 'CHANGE'; mockLoadCompany.mockClear(); wrapper.setProps({ match: { ...mockMatchProp, params: { symbol: newSymbol }, }, }); expect(mockLoadCompany).toBeCalledWith(newSymbol); }); });
<gh_stars>1-10 import { getBasicActionCreator } from './getBasicActionCreator'; import { requireActionType, requireSteps } from './require'; import { ActionCreatorsBag } from '../types/asyncCreator'; interface GetActionCreatorsBag { <T extends string, S extends string>(type: T, steps: S[]): ActionCreatorsBag<T, S>; } interface CreateAsyncActionCreator { <ActionType extends string, Steps extends string>( acc: ActionCreatorsBag<ActionType, Steps>, step: Steps ): ActionCreatorsBag<ActionType, Steps>; } export const getActionCreatorsBag: GetActionCreatorsBag = <T extends string, S extends string>( type: T, steps: S[], ): ActionCreatorsBag<T, S> => { requireActionType(type); requireSteps(steps); const createAsyncActionCreator: CreateAsyncActionCreator = (acc, step) => { const stepType = `${type}[${step}]`; return { ...acc, [step]: getBasicActionCreator(stepType) }; }; const emptyBag = {} as ActionCreatorsBag<T, S>; return steps.reduce(createAsyncActionCreator, emptyBag); };
"""The Hue Power constants.""" DOMAIN = "powercalc" DOMAIN_CONFIG = "config" DATA_CALCULATOR_FACTORY = "calculator_factory" CONF_CALIBRATE = "calibrate" CONF_CREATE_GROUP = "create_group" CONF_CREATE_ENERGY_SENSOR = "create_energy_sensor" CONF_CREATE_ENERGY_SENSORS = "create_energy_sensors" CONF_CREATE_UTILITY_METERS = "create_utility_meters" CONF_ENERGY_SENSOR_NAMING = "energy_sensor_naming" CONF_FIXED = "fixed" CONF_LINEAR = "linear" CONF_MODEL = "model" CONF_MANUFACTURER = "manufacturer" CONF_MODE = "mode" CONF_MULTIPLY_FACTOR = "multiply_factor" CONF_MULTIPLY_FACTOR_STANDBY = "multiply_factor_standby" CONF_MIN_WATT = "min_watt" CONF_MAX_WATT = "max_watt" CONF_POWER_SENSOR_NAMING = "power_sensor_naming" CONF_POWER = "power" CONF_MIN_POWER = "min_power" CONF_MAX_POWER = "max_power" CONF_WATT = "watt" CONF_STATES_POWER = "states_power" CONF_STANDBY_POWER = "standby_power" CONF_DISABLE_STANDBY_POWER = "disable_standby_power" CONF_STANDBY_USAGE = "standby_usage" CONF_DISABLE_STANDBY_USAGE = "disable_standby_usage" CONF_CUSTOM_MODEL_DIRECTORY = "custom_model_directory" CONF_UTILITY_METER_TYPES = "utility_meter_types" ATTR_CALCULATION_MODE = "calculation_mode" ATTR_ENTITIES = "entities" ATTR_INTEGRATION = "integration" ATTR_SOURCE_ENTITY = "source_entity" ATTR_SOURCE_DOMAIN = "source_domain" MODE_LUT = "lut" MODE_LINEAR = "linear" MODE_FIXED = "fixed" CALCULATION_MODES = [ MODE_FIXED, MODE_LINEAR, MODE_LUT, ] MANUFACTURER_DIRECTORY_MAPPING = { "IKEA of Sweden": "ikea", "Feibit Inc co. 
": "jiawen", "LEDVANCE": "ledvance", "MLI": "mueller-licht", "OSRAM": "osram", "Signify Netherlands B.V.": "signify", } MODEL_DIRECTORY_MAPPING = { "IKEA of Sweden": { "TRADFRI bulb E14 WS opal 400lm": "LED1536G5", "TRADFRI bulb GU10 WS 400lm": "LED1537R6", "TRADFRI bulb E27 WS opal 980lm": "LED1545G12", "TRADFRI bulb E27 WS clear 950lm": "LED1546G12", "TRADFRI bulb E27 opal 1000lm": "LED1623G12", "TRADFRI bulb E27 CWS opal 600lm": "LED1624G9", "TRADFRI bulb E14 W op/ch 400lm": "LED1649C5", "TRADFRI bulb GU10 W 400lm": "LED1650R5", "TRADFRI bulb E27 WS opal 1000lm": "LED1732G11", "TRADFRI bulb GU10 WW 400lm": "LED1837R5", "TRADFRI bulb E27 WW 806lm": "LED1836G9", }, "Signify Netherlands B.V.": {"440400982841": "LCT024"}, }
import numpy as np # Taken from https://raw.github.com/muzhig/ESOQ2/master/esoq2p1.py # ESOQ2.1 - Attitude EStimate OPtimization (<NAME>, 7/15/99). # Variation of Daniele Mortari's ESOQ2 (Paper AAS 97-167, AAS/AIAA # Space Flight Mechanics Meeting, Huntsville, AL, February 10-12, 1997), # with new singularity-avoidance and lambda_max computation logic. # # input: obs(3,n) - array of n observation unit vectors # ref(3,n) - array of n reference unit vectors # wt(n) - row array of n measurement weights # # The columns of obs and ref are assumed to be normalized. # No assumption is made about the normalization of the weights. # # output: q(4) - optimal quaternion # loss - optimized value of Wahba's loss function def esoq2p1(obs, ref, wt): lam = sum(wt) # zeroth order approximation to lambda_max B = np.array([obs[0, :]*wt, obs[1, :]*wt, obs[2, :]*wt]) B = B.dot(ref.T) trB = np.trace(B) diag = [B[0, 0], B[1, 1], B[2, 2], trB] # Optimal 180 deg rotation to avoid zero rotation angle singularity Bmin = min(diag) irot = diag.index(Bmin) if irot == 0: B[:, 1:3] *= -1 trB = 2 * Bmin - trB elif irot == 1: B[:, 0] *= -1 B[:, 2] *= -1 trB = 2 * Bmin - trB elif irot == 2: B[:, 0:2] *= -1 trB = 2 * Bmin - trB # Compute needed matrices and vectors S11 = 2 * B[0, 0] S23 = B[1, 2] + B[2, 1] S22 = 2 * B[1, 1] S31 = B[2, 0] + B[0, 2] S33 = 2 * B[2, 2] S12 = B[0, 1] + B[1, 0] z = np.array([B[1, 2] - B[2, 1], B[2, 0] - B[0, 2], B[0, 1] - B[1, 0]]) z12 = z[0] * z[0] z22 = z[1] * z[1] z32 = z[2] * z[2] wt_len_eq_2 = max(wt.shape) == 2 # max eigenvalue computation for two observation case if wt_len_eq_2: lam0 = lam trB2 = trB * trB Sz = np.array([[S11, S12, S31], [S12, S22, S23], [S31, S23, S33]]).dot(z) aa = trB2 - S22 * S33 + S23 * S23 - S11 * S33 + S31 * S31 - S22 * S11 + S12 * S12 bb = trB2 + z12 + z22 + z32 c2 = - aa - bb u = 2 * np.sqrt(aa * bb - Sz.T.dot(Sz)) lam = (np.sqrt(u - c2) + np.sqrt(- u - c2)) / 2 loss = lam0 - lam tml = trB - lam tpl = trB + lam M11 = tml * (S11 - tpl) - z12 
M23 = tml * S23 - z[1] * z[2] M22 = tml * (S22 - tpl) - z22 M31 = tml * S31 - z[2] * z[0] M33 = tml * (S33 - tpl) - z32 M12 = tml * S12 - z[0] * z[1] # Compute loss function and rotation axis e = np.array([M22 * M33 - M23 * M23, M11 * M33 - M31 * M31, M11 * M22 - M12 * M12]) dummy = np.max(np.abs(e)) if e[0] == dummy: e = np.array([e[0], M31 * M23 - M12 * M33, M12 * M23 - M31 * M22]) imax = 0 elif e[1] == dummy: e = np.array([M31 * M23 - M12 * M33, e[1], M12 * M31 - M11 * M23]) imax = 1 else: e = np.array([M12 * M23 - M31 * M22, M12 * M31 - M11 * M23, e[2]]) imax = 2 if not wt_len_eq_2: m1 = np.array([M11, M12, M31]) m2 = np.array([M12, M22, M23]) m3 = np.array([M31, M23, M33]) n1 = np.array([(S11 - 2 * lam), S12, S31]) n2 = np.array([S12, (S22 - 2 * lam), S23]) n3 = np.array([S31, S23, (S33 - 2 * lam)]) a = [m2, m3, m1][imax] b = [n3, n1, n2][imax] c = [m3, m1, m2][imax] d = [n2, n3, n1][imax] m = [m1, m2, m3][imax] n = [n1, n2, n3][imax] v = np.cross(a,b).T - np.cross(c, d).T loss = - (m.dot(e)) / (n.dot(e) + m.dot(v)) tml = tml + loss e = e + loss * v # Quaternion computation in rotated frame q = np.hstack((tml * e, -z.T.dot(e))) q = q / np.linalg.norm(q) # Undo rotation to get quaternion in input frame if irot == 0: q = np.array([-q[0], q[3], -q[2], q[1]]) elif irot == 1: q = np.array([-q[1], q[2], q[3], -q[0]]) elif irot == 2: q = np.array([-q[2], -q[1], q[0], q[3]]) return q, loss
package main import ( "fmt" "io" "os" "strings" "github.com/BurntSushi/toml" ) type SourceConfig struct { P4Port string `toml:"p4port"` P4User string `toml:"p4user"` P4Client string `toml:"p4client"` } type DestinationConfig struct { P4Port string `toml:"p4port"` P4User string `toml:"p4user"` ClientName string `toml:"new_client_name"` ClientRoot string `toml:"new_client_root"` ClientStream string `toml:"new_client_stream"` } type Config struct { Src SourceConfig `toml:"source"` Dst DestinationConfig `toml:"destination"` // save the file from which this config was loaded, for logging purposes filename string } func (c *Config) Filename() string { return c.filename } // Load helpers func loadConfigFromFile(path string) (Config, error) { f, err := os.Open(path) if err != nil { return Config{}, fmt.Errorf("Error opening '%s': %w", path, err) } defer f.Close() cfg, err := loadConfig(f) cfg.filename = path return cfg, err } func loadConfigFromString(s string) (Config, error) { return loadConfig(strings.NewReader(s)) } func loadConfig(r io.Reader) (Config, error) { var cfg Config if _, err := toml.NewDecoder(r).Decode(&cfg); err != nil { return Config{}, err } return cfg, nil }
package org.telegram.telegrambots.meta.api.methods.send; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.type.TypeReference; import org.telegram.telegrambots.meta.api.methods.BotApiMethod; import org.telegram.telegrambots.meta.api.objects.ApiResponse; import org.telegram.telegrambots.meta.api.objects.Message; import org.telegram.telegrambots.meta.api.objects.replykeyboard.ReplyKeyboard; import org.telegram.telegrambots.meta.exceptions.TelegramApiRequestException; import org.telegram.telegrambots.meta.exceptions.TelegramApiValidationException; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; /** * @author <NAME> * @version 4.7 * Use this method to send an animated emoji that will display a random value. On success, the sent Message is returned. */ public class SendDice extends BotApiMethod<Message> { private static final List<String> VALIDEMOJIS = Collections.unmodifiableList(Arrays.asList("\uD83C\uDFB2", "\uD83C\uDFAF", "\uD83C\uDFC0")); public static final String PATH = "sendDice"; private static final String CHATID_FIELD = "chat_id"; private static final String EMOJI_FIELD = "emoji"; private static final String DISABLENOTIFICATION_FIELD = "disable_notification"; private static final String REPLYTOMESSAGEID_FIELD = "reply_to_message_id"; private static final String REPLYMARKUP_FIELD = "reply_markup"; @JsonProperty(CHATID_FIELD) private String chatId; ///< Unique identifier for the target chat or username of the target channel (in the format @channelusername) /** * Emoji on which the dice throw animation is based. Currently, must be one of โ€œ๐ŸŽฒโ€, โ€œ๐ŸŽฏโ€, or โ€œ๐Ÿ€โ€. * Dice can have values 1-6 for โ€œ๐ŸŽฒโ€ and โ€œ๐ŸŽฏโ€, and values 1-5 for โ€œ๐Ÿ€โ€. Defauts to โ€œ๐ŸŽฒโ€ */ @JsonProperty(EMOJI_FIELD) private String emoji; @JsonProperty(DISABLENOTIFICATION_FIELD) private Boolean disableNotification; ///< Optional. Sends the message silently. 
Users will receive a notification with no sound. @JsonProperty(REPLYTOMESSAGEID_FIELD) private Integer replyToMessageId; ///< Optional. If the message is a reply, ID of the original message @JsonProperty(REPLYMARKUP_FIELD) private ReplyKeyboard replyMarkup; ///< Optional. JSON-serialized object for a custom reply keyboard public SendDice() { super(); } public String getChatId() { return chatId; } public SendDice setChatId(String chatId) { this.chatId = chatId; return this; } public SendDice setChatId(Long chatId) { this.chatId = chatId.toString(); return this; } public Integer getReplyToMessageId() { return replyToMessageId; } public SendDice setReplyToMessageId(Integer replyToMessageId) { this.replyToMessageId = replyToMessageId; return this; } public ReplyKeyboard getReplyMarkup() { return replyMarkup; } public SendDice setReplyMarkup(ReplyKeyboard replyMarkup) { this.replyMarkup = replyMarkup; return this; } public Boolean getDisableNotification() { return disableNotification; } public SendDice enableNotification() { this.disableNotification = false; return this; } public SendDice disableNotification() { this.disableNotification = true; return this; } public String getEmoji() { return emoji; } public SendDice setEmoji(String emoji) { this.emoji = emoji; return this; } @Override public String getMethod() { return PATH; } @Override public Message deserializeResponse(String answer) throws TelegramApiRequestException { try { ApiResponse<Message> result = OBJECT_MAPPER.readValue(answer, new TypeReference<ApiResponse<Message>>(){}); if (result.getOk()) { return result.getResult(); } else { throw new TelegramApiRequestException("Error sending dice", result); } } catch (IOException e) { throw new TelegramApiRequestException("Unable to deserialize response", e); } } @Override public void validate() throws TelegramApiValidationException { if (chatId == null) { throw new TelegramApiValidationException("ChatId parameter can't be empty", this); } if (emoji != null && 
!VALIDEMOJIS.contains(emoji)) { throw new TelegramApiValidationException("Only \uD83C\uDFB2, \uD83C\uDFAF or \uD83C\uDFC0 are allowed in Emoji field ", this); } if (replyMarkup != null) { replyMarkup.validate(); } } @Override public String toString() { return "SendDice{" + "chatId='" + chatId + '\'' + ", emoji='" + emoji + '\'' + ", disableNotification=" + disableNotification + ", replyToMessageId=" + replyToMessageId + ", replyMarkup=" + replyMarkup + '}'; } }
username="$USER" user="$(id -u)" default_image="darsh3/jupyterlab:latest" image="${1:-$default_image}" default_container_name="jupyterlab_darsh" container_name="${2:-$default_container_name}" docker run -it -d -p 8000:8000 --name $container_name \ --user=${user} \ -e USER=${username} \ --workdir="$HOME" \ --volume="$(pwd):$HOME" \ --volume="/etc/group:/etc/group:ro" \ --volume="/etc/passwd:/etc/passwd:ro" \ --volume="/etc/shadow:/etc/shadow:ro" \ --volume="/etc/sudoers.d:/etc/sudoers.d:ro" \ $image
# Iterate through the range of 10 for i in range(10): # Compare the number with 5 if i < 5: print("Smaller than 5") else: print("Greater than or equal to 5")
# Define the Task model with the next_available_submission method class Task(models.Model): # Other fields and methods for the Task model def next_available_submission(self, team): last_submission = Submission.objects.filter(task=self, team=team).order_by('-timestamp').first() if last_submission: next_submission_time = last_submission.timestamp + timedelta(minutes=15) # Assuming a 15-minute rate limit else: next_submission_time = timezone.now() # No previous submissions, so team can submit immediately return next_submission_time # Update the code snippet to use the next_available_submission method else: form = TeamForm(task_id=task.id, request=request) teams = request.user.teams.prefetch_related('users').filter(challenge=task.challenge) for team in teams: team.next_available_submission = task.next_available_submission(team) has_rate_limited_teams = any(team.next_available_submission > timezone.now() for team in teams)
#!/usr/bin/env bash set -eux # This test expects 7 loggable vars and 0 non-loggable ones. # If either mismatches it fails, run the ansible-playbook command to debug. [ "$(ansible-playbook no_log_local.yml -i ../../inventory -vvvvv "$@" | awk \ 'BEGIN { logme = 0; nolog = 0; } /LOG_ME/ { logme += 1;} /DO_NOT_LOG/ { nolog += 1;} END { printf "%d/%d", logme, nolog; }')" = "26/0" ] # deal with corner cases with no log and loops # no log enabled, should produce 6 censored messages [ "$(ansible-playbook dynamic.yml -i ../../inventory -vvvvv "$@" -e unsafe_show_logs=no|grep -c 'output has been hidden')" = "6" ] # no log disabled, should produce 0 censored [ "$(ansible-playbook dynamic.yml -i ../../inventory -vvvvv "$@" -e unsafe_show_logs=yes|grep -c 'output has been hidden')" = "0" ] # test no log for sub options [ "$(ansible-playbook no_log_suboptions.yml -i ../../inventory -vvvvv "$@" | grep -Ec '(MANPOWER|UNTAPPED|CONCERNED|MARLIN|FLICK)')" = "0" ] # test invalid data passed to a suboption [ "$(ansible-playbook no_log_suboptions_invalid.yml -i ../../inventory -vvvvv "$@" | grep -Ec '(SUPREME|IDIOM|MOCKUP|EDUCATED|FOOTREST|CRAFTY|FELINE|CRYSTAL|EXPECTANT|AGROUND|GOLIATH|FREEFALL)')" = "0" ]
import produce from "immer"; import { useRef, useState } from "react"; import { Contact } from "../models/Contact"; import { ScalarField, VectorField } from "./ContactFields"; import { todayISO } from "./utils/today"; import { SelectTag } from "./Select"; import { useAppDispatch, useAppSelector } from "./hooks"; import { actions } from "./stores/contacts"; function field(key: string, placeholder?: string, multiline = false) { return { key, placeholder: placeholder || key, multiline, }; } const CONTACT_FIELDS = { scalar: [ field("name"), // field("notes", "notes", true), field("place", "current location"), field("where", "where met"), field("work", "company / job"), field("link", "@username/url"), field("last", "last date"), ], advanced: [ field("gift", "gift idea"), field("food", "food preference"), field("family", "family info", true), ], vector: [ field("mtg", "meeting", true), field("likes", "interests / likings", true), ], select: [field("tag")], }; interface ContactProps { contact: Contact; } function ContactItem({ contact }: ContactProps) { const dispatch = useAppDispatch(); const tags = useAppSelector((store) => store.contacts.tags); const [editing, setEditing] = useState<Contact | null>( contact.__local ? contact : null ); const ref = useRef<HTMLLIElement>(null); const displayContact = editing || contact; const [showAdvanced, setAdvanced] = useState(false); const toggleAdvanced = () => { setAdvanced(!showAdvanced); }; const deleteContact = (contact: Contact) => dispatch(actions.deleteContact(contact)); const saveContact = (contact: Contact) => dispatch(actions.saveContact(contact)); const deleteMe = () => deleteContact(displayContact); const toggleEditing = () => setEditing((prev) => (prev ? 
null : contact)); const saveChanges = () => { if (editing) { saveContact(editing); toggleEditing(); } }; const fillToday = () => { setEditing( produce(displayContact, (c) => { c.last = todayISO(); }) ); }; return ( <li ref={ref} className={`contact-item card paper block split-v ${ editing ? "isEditing" : "notEditing" }`} onClick={editing ? undefined : toggleEditing} onKeyUp={(evt) => { if (!ref.current || evt.target !== ref.current) return; if (evt.key === "Return" && !editing) { saveChanges(); } else if (evt.key === "Escape" && editing) { toggleEditing(); } }} tabIndex={0} > <div className="editArea split-h"> <div className="left contact-single-items"> {CONTACT_FIELDS.scalar.map((args) => ( <ScalarField key={args.key} contact={displayContact} editing={!!editing} label={args.key} multiline={args.multiline} placeholder={args.placeholder} value={(displayContact[args.key] as string) || ""} save={saveChanges} setPending={setEditing} /> ))} {showAdvanced && CONTACT_FIELDS.advanced.map((args) => ( <ScalarField key={args.key} contact={displayContact} editing={!!editing} label={args.key} multiline={args.multiline} placeholder={args.placeholder} value={(displayContact[args.key] as string) || ""} save={saveChanges} setPending={setEditing} /> ))} </div> <div className="right contact-multi-items"> {showAdvanced && CONTACT_FIELDS.select.map((args) => ( <SelectTag key={args.key} contact={displayContact} label={args.key} editing={!!editing} setPending={setEditing} save={saveChanges} value={displayContact[args.key] as Array<string>} options={tags} /> ))} {CONTACT_FIELDS.vector.map((args) => ( <VectorField key={args.key} contact={displayContact} editing={!!editing} label={args.key} multiline={args.multiline} placeholder={args.placeholder} values={ Array.isArray(displayContact[args.key]) ? (displayContact[args.key] as string[]) : [] } save={saveChanges} setPending={setEditing} /> ))} </div> </div> {editing ? 
( <div className="buttonFooter split-h frost"> <div className="left buttonArea"> {!editing.__local && ( <button className="contact-button" onClick={deleteMe}> delete </button> )} </div> <div className="left buttonArea"> { <button className="contact-button" onClick={toggleAdvanced}> {!showAdvanced ? "more" : "less"} </button> } </div> <div className="right buttonArea"> <button className="contact-button" onClick={fillToday}> today! </button> <button className="contact-button" onClick={editing.__local ? deleteMe : toggleEditing} > cancel </button> <button className="contact-button" onClick={saveChanges}> save </button> </div> </div> ) : null} </li> ); } export function ContactList({ contacts }: { contacts: Contact[] }) { return ( <ul className="contact-list"> {contacts.map((c, i) => ( <ContactItem contact={c} key={c.key || i} /> ))} </ul> ); }
<filename>find.rb # encoding: utf-8 puts `find /`
<reponame>orhoj/concordium-desktop-wallet import React, { useState, useMemo } from 'react'; import { useLocation } from 'react-router-dom'; import PlusIcon from '@resources/svg/plus.svg'; import PickRecipient from '../PickRecipient'; import PickAmount from '../PickAmount'; import { getTransactionKindCost } from '~/utils/transactionCosts'; import { TransactionKindId, AddressBookEntry, Fraction } from '~/utils/types'; import locations from '~/constants/transferLocations.json'; import { TransferState } from '~/utils/transactionTypes'; import TransferView from '../TransferView'; import UpsertAddress from '../../UpsertAddress'; import { useAsyncMemo } from '~/utils/hooks'; import { nodeSupportsMemo } from '~/node/nodeHelpers'; import styles from './ExternalTransfer.module.scss'; interface Props { toConfirmTransfer( amount: string, recipient: AddressBookEntry, memo?: string ): void; exitFunction?(): void; exchangeRate?: Fraction; amountHeader: string; senderAddress: string; transactionKind: TransactionKindId; } /** * Controls the flow of creating an external transfer. */ export default function ExternalTransfer({ toConfirmTransfer, amountHeader, exchangeRate, exitFunction, senderAddress, transactionKind, }: Props) { const location = useLocation<TransferState>(); const allowMemo = useAsyncMemo(nodeSupportsMemo); const [subLocation, setSubLocation] = useState<string>( locations.pickAmount ); const [amount, setAmount] = useState<string>( location?.state?.amount ?? 
'0.00' ); // This is a string, to allows user input in GTU const [recipient, setRecipient] = useState<AddressBookEntry | undefined>( location?.state?.recipient ); const [memo, setMemo] = useState<string | undefined>(location?.state?.memo); const [shownMemoWarning, setShownMemoWarning] = useState<boolean>( !!location?.state?.memo ); const estimatedFee = useMemo( () => exchangeRate && getTransactionKindCost(transactionKind, exchangeRate, 1, memo), [exchangeRate, memo, transactionKind] ); function selectRecipient(entry: AddressBookEntry) { setRecipient(entry); setSubLocation(locations.pickAmount); } return ( <TransferView showBack={subLocation === locations.pickRecipient} exitOnClick={exitFunction} backOnClick={() => setSubLocation(locations.pickAmount)} > {subLocation === locations.pickAmount && ( <PickAmount recipient={recipient} header={amountHeader} defaultAmount={amount} memo={ allowMemo ? { defaultMemo: memo, setMemo, shownMemoWarning, setShownMemoWarning, } : undefined } estimatedFee={estimatedFee} transactionKind={transactionKind} toPickRecipient={( currentAmount: string, currentMemo?: string ) => { setMemo(currentMemo); setAmount(currentAmount); setSubLocation(locations.pickRecipient); }} toConfirmTransfer={( currentAmount: string, currentMemo?: string ) => { if (!recipient) { throw new Error('Unexpected missing recipient'); } toConfirmTransfer( currentAmount, recipient, currentMemo ); }} /> )} {subLocation === locations.pickRecipient && ( <> <div className="mH30"> <h3 className="textCenter">Select recipient</h3> <PickRecipient pickRecipient={selectRecipient} senderAddress={senderAddress} /> </div> <UpsertAddress clear className={styles.addRecipient} onSubmit={selectRecipient} > <PlusIcon /> </UpsertAddress> </> )} </TransferView> ); }
def count_different_bits(x, y): count = 0 for i in range(30, -1, -1): mask = 1 << i digitX = x & mask digitY = y & mask if digitX != digitY: count += 1 return count
<reponame>kuun/shipwheel<filename>src/main/java/org/ship/core/dao/engine/EngineDao.java<gh_stars>1-10 package org.ship.core.dao.engine; import org.apache.ibatis.annotations.Select; import org.ship.core.vo.engine.Engine; import java.util.Collection; /** * Created by wx on 2017/5/8. */ public interface EngineDao { @Select("select * from ship_engine") Collection<Engine> getNodes(); }
<gh_stars>0 package com.justinbenz.anytimefitnessbe.repositories; import com.justinbenz.anytimefitnessbe.models.UserRoles; import org.springframework.data.repository.CrudRepository; public interface UserRoleRepository extends CrudRepository<UserRoles, Long> { }
/** * */ package jframe.aliyun.service; /** * @author dzh * @date Feb 22, 2016 12:22:13 PM * @since 1.0 */ public interface MemcacheService { }
<reponame>codefinity/micro-continuum //Marked for deletion //Dependency Injection used Instead /*package com.codefinity.microcontinuum.identityaccess.application; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; public class ApplicationServiceRegistry implements ApplicationContextAware { private static ApplicationContext applicationContext; public static AccessApplicationService accessApplicationService() { return (AccessApplicationService) applicationContext.getBean("accessApplicationService"); } public static IdentityApplicationService identityApplicationService() { return (IdentityApplicationService) applicationContext.getBean("identityApplicationService"); } public static NotificationApplicationService notificationApplicationService() { return (NotificationApplicationService) applicationContext.getBean("notificationApplicationService"); } @Override public synchronized void setApplicationContext( ApplicationContext anApplicationContext) throws BeansException { if (ApplicationServiceRegistry.applicationContext == null) { ApplicationServiceRegistry.applicationContext = anApplicationContext; } } } */
#!/bin/sh primary_suffix=psw backup_suffix=backup max_age=15768000 add_details="" cmdargs="$0 [primary cert key] [backup cert key] [sub]?" example="$0 server.key server-backup.key sub" if [ -z "$1" ] && [ -z "$2" ]; then echo "$cmdargs" echo "Example: $example" exit 2; fi if [ "$3" = "sub" ]; then add_details="; includeSubdomains" fi primary=`openssl rsa -in $1 -outform der -pubout 2> /dev/null | openssl dgst -sha256 -binary 2> /dev/null | base64` backup=`openssl rsa -in $2 -outform der -pubout 2> /dev/null | openssl dgst -sha256 -binary 2> /dev/null | base64` echo "Public-Key-Pins 'pin-sha256=\"$primary\"; pin-sha256=\"$backup\"; max-age=$max_age$add_details'"
#!/bin/bash # Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). # You may not use this file except in compliance with the License. # A copy of the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "LICENSE.txt" file accompanying this file. # This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. # See the License for the specific language governing permissions and limitations under the License. source /opt/parallelcluster/pyenv/versions/cookbook_virtualenv/bin/activate && supervisorctl stop clustermgtd
#!/bin/bash try() { expected="$1" input="$2" ./zcc "$input" > tmp.s gcc -o tmp tmp.s ./tmp actual="$?" if [ "$actual" = "$expected" ]; then echo "$input => $actual" else echo "$expected expected, but got $actual" exit 1 fi } try 0 "return 0;" try 123 "return 123;" try 3 "return 1+2;" try 0 "return 11 + 22 - 33;" try 7 "return 1+2*3;" try 1 "return 4/2-1;" try 4 "return 8/4+2*1;" try 3 "return 3*(2-1);" try 6 "return (8+4)/2;" try 1 "return -1+2;" try 1 "return -1*+1/-1;" try 1 "return 0==0;" try 0 "return 0==1;" try 0 "return 0!=0;" try 1 "return 0!=1;" try 1 "return 0<=0;" try 0 "return 1<=0;" try 1 "return 0>=0;" try 0 "return 0>=1;" try 0 "return 0<0;" try 1 "return 0<1;" try 0 "return 0>0;" try 1 "return 1>0;" try 1 "return a=1;" try 3 "a=1; b=2; return a+b;" try 6 "a=1; b=2*3+4; return a+b/2;" echo OK
package br.com.matheuslino.pacman;

import br.com.matheuslino.pacman.game.LabyrinthObjectVisitor;

/**
 * A checkpoint placed inside the labyrinth. Tracks whether it has been
 * conquered and whether it is a special (vs. common) checkpoint, and
 * participates in the Visitor pattern via {@link #accept}.
 */
public class Checkpoint extends LabyrinthObject {

    private boolean conquered; // checkpoint state: conquered or not
    private boolean special;   // checkpoint kind: special or common

    /**
     * Creates a checkpoint at the given labyrinth coordinates.
     * Package-private: checkpoints are constructed by the game package only.
     */
    Checkpoint(int x, int y) {
        super(x, y);
        this.conquered = false; // checkpoint state (conquered or not)
        this.special = false;   // checkpoint type (special or common)
    }

    /** Visitor-pattern hook: dispatches to the visitor's Checkpoint overload. */
    @Override
    public void accept(LabyrinthObjectVisitor visitor) {
        visitor.visit(this);
    }

    public boolean isSpecial() {
        return special;
    }

    public void setSpecial(boolean special) {
        this.special = special;
    }

    public boolean isConquered() {
        return conquered;
    }

    public void setConquered(boolean conquered) {
        this.conquered = conquered;
    }

    // Package-private (default visibility) convenience method: marks this
    // checkpoint as conquered.
    void conquer() {
        this.setConquered(true);
    }
}
// Unit tests for jsdocx.Section: verifies the raw "w:sectPr" source node,
// the content hook, child factories (paragraph/table), and JSON/XML
// rendering, including the DOCX rule that section properties are injected
// into the last paragraph's properties when paragraphs exist.
import { assert } from 'chai'
import * as jsdocx from '../dist/jsdocx'

describe('#Section', () => {
    describe('#src', () => {
        it('should be equal to "w:sectPr"', () => {
            let s = new jsdocx.Section()
            assert.equal(s.hasOwnProperty('src'), true)
            assert.deepEqual(s.src, { 'w:sectPr': {} })
        })
    })
    describe('#contentHook', () => {
        it('should be equal to "w:sectPr" after creation', () => {
            let s = new jsdocx.Section()
            assert.equal(s.hasOwnProperty('contentHook'), true)
            assert.equal(s.contentHook, '["w:sectPr"]')
        })
    })
    describe('#addParagraph', () => {
        it('should add a valid format', () => {
            let s = new jsdocx.Section()
            let p = s.addParagraph()
            assert.equal(p instanceof jsdocx.Paragraph, true)
        })
    })
    describe('#addTable', () => {
        it('should add a valid table', () => {
            let s = new jsdocx.Section()
            let t = s.addTable()
            assert.equal(t instanceof jsdocx.Table, true)
        })
    })
    describe('#toJson', () => {
        it('should render "w:sectPr" normally if no paragraphs are added', () => {
            let s = new jsdocx.Section()
            s.addCols().setNum(2)
            assert.deepEqual(s.toJson(), {
                'w:sectPr': {
                    '#': [{
                        'w:cols': {
                            '@w:num': 2
                        }
                    }]
                }
            })
        })
        it('should insert "w:sectPr" tag inside an injected paragraph\'s props', () => {
            let s = new jsdocx.Section()
            s.addCols().setNum(2)
            let p1 = s.addParagraph()
            let p2 = s.addParagraph()
            // Section properties land inside a trailing paragraph's w:pPr.
            assert.deepEqual(s.toJson(), [
                {
                    'w:p': {
                    }
                },
                {
                    'w:p': {
                    }
                },
                {
                    'w:p': {
                        '#': [{
                            'w:pPr': {
                                '#': [
                                    {
                                        'w:sectPr': {
                                            '#': [{
                                                'w:cols': {
                                                    '@w:num': 2
                                                }
                                            }]
                                        }
                                    }
                                ]
                            }
                        }]
                    }
                }
            ])
        })
    })
    describe('#toXml', () => {
        it('should render simple subtree correctly', () => {
            let s = new jsdocx.Section()
            s.addParagraph().addRun().addText('Hello World!')
            s.addParagraph()
            assert.equal(s.toXml(), '<w:p><w:r><w:t xml:space="preserve">Hello World!</w:t></w:r></w:p><w:p></w:p><w:p><w:pPr><w:sectPr></w:sectPr></w:pPr></w:p>')
        })
        it('should respect previous paragraph\'s format', () => {
            let s = new jsdocx.Section()
            s.addCols().setNum(2)
            let p = s.addParagraph()
            p.addRun().addText('Hello World!')
            // Format will be injected before any other content.
            p.addFormat().addTabs().addTab()
            assert.equal(s.toXml(), '<w:p><w:pPr><w:tabs><w:tab/></w:tabs></w:pPr><w:r><w:t xml:space="preserve">Hello World!</w:t></w:r></w:p><w:p><w:pPr><w:sectPr><w:cols w:num="2"/></w:sectPr></w:pPr></w:p>')
        })
    })
})
// Demo: extract the even numbers from a fixed list and print both lists.
var list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

// Keep only the values with no remainder modulo 2.
var evens = list.filter((num) => num % 2 === 0);

console.log('Original:', list);
console.log('Even numbers:', evens);
/** @module */
// Redux thunk action creators for authentication: login, logout, signup.
// Each thunk POSTs a form to the API, dispatches a success/fail action
// object, and reports the outcome through a Node-style callback(error).

import {
  API_URL,
} from "../utils/authUtils";

import {
  FETCHING_LOGIN,
  LOGIN_SUCCESSFUL,
  LOGIN_FAIL,
  FETCHING_LOGOUT,
  LOGOUT_SUCCESSFUL,
  LOGOUT_FAIL,
  FETCHING_SIGNUP,
  SIGNUP_SUCCESSFUL,
  SIGNUP_FAIL,
} from "../constants/AuthActions";

// NOTE(review): these are plain action OBJECTS (not creator functions);
// they are dispatched directly, e.g. dispatch(fetchingLogin).
export const fetchingLogin = ({
  type: FETCHING_LOGIN,
});

export const loginSuccessful = ({
  type: LOGIN_SUCCESSFUL,
});

export const loginFail = ({
  type: LOGIN_FAIL,
});

/**
 * This callback receives an error or null after logging in
 * @callback loginCallback
 * @param {Error} error Holds server response status and message
 */

/**
 * Logs in the user by sending a POST request with the user's email and password to the server
 * @function
 * @param {object} credentials User credentials
 * @param {string} crendentials.email User email
 * @param {string} crendentials.password User password
 * @param {loginCallback} callback - A post callback after attempting to log in
 */
export const fetchLogin = ({ email, password }, callback) => (dispatch, getState) => {
  dispatch(fetchingLogin);

  let url = process.env.REACT_APP_API_URL + "api/login";

  let form = new FormData();
  form.append("email", email);
  form.append("password", password);

  fetch(url, {
    method: "POST",
    mode: "cors",
    // Include cookies so the server can set/refresh the session cookie.
    credentials: "include",
    headers: {
    },
    body: form,
  })
    .then(res => {
      // Pair the HTTP status with the parsed JSON body for the next step.
      return res.json()
        .then((data) => ({
          status: res.status,
          body: data,
        }))
    })
    .then((response) => {
      let error;
      if (response.status === 200) {
        error = null;
        dispatch(loginSuccessful);
      } else {
        error = Error("Login failed - " + response.status + " status code. Server message: " + response.body.message);
        dispatch(loginFail);
      }
      callback(error);
    })
    .catch((error) => {
      // NOTE(review): network/parse failures are only logged; callback is
      // never invoked on this path — confirm whether that is intended.
      dispatch(loginFail);
      console.log(error)
    });
};

export const fetchingLogout = ({
  type: FETCHING_LOGOUT,
});

export const logoutSuccessful = ({
  type: LOGOUT_SUCCESSFUL,
});

export const logoutFail = ({
  type: LOGOUT_FAIL,
});

/**
 * This callback receives an error or null after logging out
 * @callback logoutCallback
 * @param {Error} error Holds server response status and message
 */

/**
 * Logs out the user by sending a POST request with the user's session id (stored in cookie)
 * @function
 * @param {logoutCallback} callback - A post callback after attempting to log out
 */
export const fetchLogout = (callback) => (dispatch, getState) => {
  dispatch(fetchingLogout);

  let url = process.env.REACT_APP_API_URL + "api/logout";

  fetch(url, {
    method: "POST",
    mode: "cors",
    // The session id cookie is the only credential sent.
    credentials: "include",
    headers: {
    },
  })
    .then(res => {
      return res.json()
        .then((data) => ({
          status: res.status,
          body: data,
        }))
    })
    .then((response) => {
      let error;
      if (response.status === 200) {
        error = null;
        dispatch(logoutSuccessful);
      } else {
        error = Error("Logout failed - " + response.status + " status code. Server message: " + response.body.message);
        dispatch(logoutFail);
      }
      callback(error);
    })
    .catch((error) => {
      // NOTE(review): same as fetchLogin — callback not called on rejection.
      dispatch(logoutFail);
      console.log(error);
    })
};

export const fetchingSignup = ({
  type: FETCHING_SIGNUP,
});

export const signupSuccessful = ({
  type: SIGNUP_SUCCESSFUL,
});

export const signupFail = ({
  type: SIGNUP_FAIL,
});

/**
 * This callback receives an error or null after signing up
 * @callback signupCallback
 * @param {Error} error Holds server response status and message
 */

/**
 * Creates an account for the user by sending a POST request with the user's email and password
 * @function
 * @param {signupCallback} callback - A post callback after attempting to sign up
 */
export const fetchSignup = ({
  email,
  password,
  confirmPassword,
}, callback) => async (dispatch, getState) => {
  dispatch(fetchingSignup);

  let url = process.env.REACT_APP_API_URL + "api/signup";

  let form = new FormData();
  form.append("email", email);
  form.append("password", password);
  form.append("confirm_password", confirmPassword);

  fetch(url, {
    method: "POST",
    mode: "cors",
    credentials: "include",
    headers: {
    },
    body: form,
  })
    .then(res => {
      return res.json()
        .then((data) => ({
          status: res.status,
          body: data,
        }))
    })
    .then((response) => {
      let error;
      if (response.status === 200) {
        error = null;
        dispatch(signupSuccessful);
      } else {
        error = Error("Signup failed: " + response.status + " status code. Server message: " + response.body.message);
        dispatch(signupFail);
      }
      callback(error);
    })
    .catch((error) => {
      dispatch(signupFail);
      console.log(error)
    });
}
// Tests for the GTM reports API property endpoints (ip-availability and
// traffic). HTTP calls are intercepted with gock mocks that return canned
// Akamai GTM report JSON; no real network traffic occurs.
package reportsgtm

import (
	"testing"

	"gopkg.in/h2non/gock.v1"

	"github.com/stretchr/testify/assert"
)

//
// Important note: The test cases enclosed piggyback on the objects created in the configgtm test cases
//
// TODO: Add tests for Opt args
//
//

var GtmTestProperty = "testproperty"

// Verify GetIpStatusPerProperty. Property and domain names hardcoded. Should pass, e.g. no API errors and property returned
// Depends on CreateProperty
func TestGetIpStatusProperty(t *testing.T) {

	defer gock.Off()

	mock := gock.New("https://akaa-baseurl-xxxxxxxxxxx-xxxxxxxxxxxxx.luna.akamaiapis.net/gtm-api/v1/reports/ip-availability/domains/gtmtest.akadns.net/properties/" + GtmTestProperty)
	mock.
		Get("/gtm-api/v1/reports/ip-availability/domains/gtmtest.akadns.net/properties/"+GtmTestProperty).
		HeaderPresent("Authorization").
		Reply(200).
		SetHeader("Content-Type", "application/json").
		BodyString(`{
			"metadata": {
				"domain": "gtmtest.akadns.net",
				"property": "testproperty",
				"start" : "2017-02-23T21:00:00Z",
				"end" : "2017-03-23T22:00:00Z",
				"uri": "https://akaa-xxxxxxxxxxxxxxxx-xxxxxxxxxxxxxxxx.luna.akamaiapis.net/gtm-api/v1/reports/ip-availability/domains/gtmtest.akadns.net/properties/testproperty"
			},
			"dataRows": [
				{
					"timestamp": "2017-02-23T21:42:35Z",
					"cutOff": 112.5,
					"datacenters": [
						{
							"datacenterId": 3132,
							"nickname": "Winterfell",
							"trafficTargetName": "Winterfell - 1.2.3.4",
							"IPs": [
								{
									"ip": "1.2.3.4",
									"score": 75.0,
									"handedOut": true,
									"alive": true
								}
							]
						},
						{
							"datacenterId": 3133,
							"nickname": "Braavos",
							"trafficTargetName": "Braavos - 1.2.3.5",
							"IPs": [
								{
									"ip": "1.2.3.5",
									"score": 85.0,
									"handedOut": true,
									"alive": true
								}
							]
						}
					]
				},
				{
					"timestamp": "2017-03-23T21:42:35Z",
					"cutOff": 112.5,
					"datacenters": [
						{
							"datacenterId": 3132,
							"nickname": "Winterfell",
							"trafficTargetName": "Winterfell - 1.2.3.4",
							"IPs": [
								{
									"ip": "1.2.3.4",
									"score": 115.0,
									"handedOut": false,
									"alive": false
								}
							]
						},
						{
							"datacenterId": 3133,
							"nickname": "Braavos",
							"trafficTargetName": "Braavos - 1.2.3.5",
							"IPs": [
								{
									"ip": "1.2.3.5",
									"score": 75.0,
									"handedOut": true,
									"alive": true
								}
							]
						}
					]
				}
			],
			"links": [
				{
					"rel": "self",
					"href": "https://akaa-xxxxxxxxxxxxxxxx-xxxxxxxxxxxxxxxx.luna.akamaiapis.net/gtm-api/v1/reports/ip-availability/domains/gtmtest.akadns.net/properties/testproperty"
				}
			]
		}`)

	Init(config)

	optArgs := make(map[string]string)
	optArgs["start"] = "2017-02-23T21:00:00Z"
	optArgs["end"] = "2017-03-23T22:00:00Z"

	testPropertyIpStatus, err := GetIpStatusPerProperty(gtmTestDomain, GtmTestProperty, optArgs)
	assert.NoError(t, err)
	assert.Equal(t, testPropertyIpStatus.DataRows[0].Datacenters[0].DatacenterId, 3132)
	assert.Equal(t, testPropertyIpStatus.Metadata.Domain, gtmTestDomain)
	assert.Equal(t, testPropertyIpStatus.Metadata.Property, GtmTestProperty)
}

/*
// TestGetIpStatus with mostRecent flag
func TestGetIpStatusPropertyRecent(t *testing.T) {

        config, _ := edgegrid.InitEdgeRc("", "default")
        Init(config)

        // add mock ...

        optArgs := make(map[string]string)
        optArgs["mostRecent"] = "true"
        domDCs, err := configgtm.ListDatacenters(GtmObjectTestDomain)
        assert.NoError(t, err, "Failure retrieving DCs")
        if len(domDCs) > 0 {
                optArgs["datacenterId"] = strconv.Itoa(domDCs[0].DatacenterId)
                fmt.Println("dcid: "+optArgs["datacenterId"])
        }
        testPropertyIpStatus, err := GetIpStatusPerProperty(GtmObjectTestDomain, GtmTestProperty, optArgs)
        assert.NoError(t, err)

        json, err := json.MarshalIndent(testPropertyIpStatus, "", "    ")
        if err == nil {
                fmt.Println(string(json))
        } else {
                t.Fatal("PropertyIP Status retrival failed. " + err.Error())
        }
}
*/

// Verify GetTrafficPerProperty. Domain name and property name hardcoded.
func TestGetTrafficPerProperty(t *testing.T) {

	defer gock.Off()

	mock := gock.New("https://akaa-baseurl-xxxxxxxxxxx-xxxxxxxxxxxxx.luna.akamaiapis.net/gtm-api/v1/reports/traffic/domains/gtmtest.akadns.net/properties/" + GtmTestProperty)
	mock.
		Get("/gtm-api/v1/reports/traffic/domains/gtmtest.akadns.net/properties/"+GtmTestProperty).
		HeaderPresent("Authorization").
		Reply(200).
		SetHeader("Content-Type", "application/json").
		BodyString(`{
			"metadata": {
				"domain": "gtmtest.akadns.net",
				"property": "testproperty",
				"start": "2016-11-24T01:40:00Z",
				"end": "2016-11-24T01:50:00Z",
				"interval": "FIVE_MINUTE",
				"uri": "https://akaa-xxxxxxxxxxxxxxxx-xxxxxxxxxxxxxxxx.luna.akamaiapis.net/gtm-api/v1/reports/traffic/domains/gtmtest.akadns.net/properties/testproperty?start=2016-11-23T00:00:00Z&2016-11-24T01:50:00Z"
			},
			"dataRows": [
				{
					"timestamp": "2016-11-24T01:40:00Z",
					"datacenters": [
						{
							"datacenterId": 3130,
							"nickname": "Winterfell",
							"trafficTargetName": "Winterfell - 1.2.3.4",
							"requests": 34,
							"status": "1"
						}
					]
				},
				{
					"timestamp": "2016-11-24T01:45:00Z",
					"datacenters": [
						{
							"datacenterId": 3130,
							"nickname": "Winterfell",
							"trafficTargetName": "Winterfell - 1.2.3.4",
							"requests": 45,
							"status": "1"
						}
					]
				}
			],
			"links": [
				{
					"rel": "self",
					"href": "https://akaa-xxxxxxxxxxxxxxxx-xxxxxxxxxxxxxxxx.luna.akamaiapis.net/gtm-api/v1/reports/traffic/domains/gtmtest.akadns.net/properties/testproperty?start=2016-11-23T00:00:00Z&2016-11-24T01:50:00Z"
				}
			]
		}`)

	Init(config)

	optArgs := make(map[string]string)
	optArgs["start"] = "2016-11-24T01:40:00Z"
	optArgs["end"] = "2016-11-24T01:50:00Z"

	testPropertyTraffic, err := GetTrafficPerProperty(gtmTestDomain, GtmTestProperty, optArgs)
	assert.NoError(t, err)
	assert.Equal(t, testPropertyTraffic.DataRows[0].Datacenters[0].DatacenterId, 3130)
	assert.Equal(t, testPropertyTraffic.Metadata.Domain, gtmTestDomain)
	assert.Equal(t, testPropertyTraffic.Metadata.Property, GtmTestProperty)
}

// Verify failed case for GetProperty. Should pass, e.g. no API errors and domain not found
func TestGetBadTrafficPerProperty(t *testing.T) {

	defer gock.Off()

	mock := gock.New("https://akaa-baseurl-xxxxxxxxxxx-xxxxxxxxxxxxx.luna.akamaiapis.net/gtm-api/v1/reports/traffic/domains/gtmtest.akadns.net/properties/badproperty")
	mock.
		Get("/gtm-api/v1/reports/traffic/domains/gtmtest.akadns.net/properties/badproperty").
		HeaderPresent("Authorization").
		Reply(404).
		SetHeader("Content-Type", "application/json").
		BodyString(`{
		}`)

	Init(config)

	optArgs := make(map[string]string)

	// A 404 from the API must surface as a non-nil error to the caller.
	_, err := GetTrafficPerProperty(gtmTestDomain, "badproperty", optArgs)
	assert.Error(t, err)
}
import requests
from bs4 import BeautifulSoup

# Keywords to search for; each triggers one Google search request.
keywords = ["web crawler", "python", "programming"]

for keyword in keywords:
    # Let requests build the query string so spaces and special characters
    # are URL-encoded (the original f-string sent the raw keyword, producing
    # an invalid URL for multi-word keywords like "web crawler").
    page = requests.get(
        "https://www.google.com/search",
        params={"q": keyword},
        timeout=10,  # avoid hanging forever on a stalled connection
    )
    page.raise_for_status()

    # Parse the response using BeautifulSoup.
    soup = BeautifulSoup(page.content, "html.parser")

    # Each organic result is wrapped in a <div class="g"> container.
    for item in soup.find_all("div", attrs={"class": "g"}):
        title_tag = item.find("h3")
        link_tag = item.find("a")
        # Guard against containers without a title/anchor (ads, layout divs):
        # the original called .text on the result of find() directly, which
        # raises AttributeError when the tag is missing.
        if title_tag is None or link_tag is None:
            continue
        print(f"Title: {title_tag.text}")
        print(f"Link: {link_tag.get('href')}")
        print()
// Vuex module holding application-wide constants fetched once from the API.
// NOTE(review): `axios` is used below but never imported in this file — it
// is presumably registered globally (e.g. window.axios) elsewhere; confirm.
import { OK } from "../util";

const state = {
  const: null
};

const getters = {
  // Each getter returns the named constant group, or [] until loaded.
  db: state => (state.const ? state.const["DB"] : []),
  project: state => (state.const ? state.const["PROJECT"] : []),
  skill: state => (state.const ? state.const["SKILL"] : []),
  all: state => (state.const ? state.const : [])
};

const mutations = {
  // Store the constants payload fetched from the API.
  setConst(state, constants) {
    state.const = constants;
  }
};

const actions = {
  // Fetch constants from the API; returns true on success, false otherwise.
  async ref(context) {
    const response = await axios.get("/api/const");
    const constants = response.data || null;

    if (response.status === OK) {
      context.commit("setConst", constants);
      return true;
    }

    // Record the HTTP status in the shared error module.
    context.commit("error/setCode", response.status, { root: true });
    return false;
  }
};

export default {
  namespaced: true,
  state,
  getters,
  mutations,
  actions
};
'use strict';

// Development-environment configuration for the Faculty Recruitment System.
// Exposes the local MongoDB connection string, app metadata, and the
// OneLogin SAML single-sign-on endpoints used by passport-saml.
module.exports = {
  db: 'mongodb://localhost/faculty-recruitment-system-dev',
  app: {
    title: 'Faculty Recruitment System - Development Environment',
    description: 'Application for Candidates, Faculty and Staff of DUSON',
    keywords: 'Duke, Duke University, FRS, Faculty Recruitment'
  },
  saml: {
    // Route the IdP posts the SAML assertion back to.
    path: '/auth/saml/callback',
    entryPoint: 'https://duson.onelogin.com/trust/saml2/http-post/sso/503330',
    callbackURL: 'http://localhost:3000/auth/saml/callback',
    issuer: 'frs-passport-saml'
  }
};
#!/bin/bash set -e mkdir -p "$HOME"/.npm-global npm config set prefix "\${HOME}/.npm-global" echo "export NO_SUDO=true" >> "$HOME"/.zprofile-local
from django.shortcuts import render
from rest_framework import viewsets,status
from .serializers import *
from rest_framework.response import Response
from rest_framework.decorators import api_view, action
from .models import *
import random


class QuestionViewSet(viewsets.ModelViewSet):
    """CRUD viewset for Question plus two quiz-style custom GET actions:
    getQuestion (multiple-choice options) and getAnswer (answer check).
    """

    queryset = Question.objects.all()
    serializer_class = QuestionSerializer

    @action(detail=False, methods=['GET'])
    def getQuestion(self, request, *arg, **kwargs):
        """Return four shuffled answer choices for question ``q_id``:
        the correct answer plus three distinct answers drawn at random
        from other questions.

        Expects ``q_id`` as a GET query parameter (integer id).
        """
        q_id = int(request.GET['q_id'])
        question = self.get_queryset()
        #print(question.count())
        #print(question[q_id].answer)
        question_list = []
        # Seed the choice list with the correct answer for q_id.
        question_list.append(Question.objects.get(id=q_id).answer)
        # NOTE(review): if fewer than 4 distinct answers exist in the table,
        # this loop never terminates — confirm the data guarantees >= 4.
        while(len(question_list)<4):
            index = random.randrange(0,question.count())
            # Only accept answers from other questions, without duplicates.
            if question[index].id != q_id and (question[index].answer not in question_list):
                question_list.append(question[index].answer)
        # Shuffle so the correct answer is not always choice 1.
        random.shuffle(question_list)
        return Response({
            'question1': question_list[0],
            'question2': question_list[1],
            'question3': question_list[2],
            'question4': question_list[3],
        }, status=200)

    @action(detail=False, methods=['GET'])
    def getAnswer(self, request, *arg, **kwargs):
        """Compare the user's submitted answer against the stored answer.

        Expects ``id`` and ``user_answer`` GET parameters. Returns 200 on a
        correct answer, 401 otherwise.
        NOTE(review): comparison relies on Question.__str__ returning the
        answer text, and 401 (unauthorized) is an odd status for "wrong
        answer" — confirm clients depend on it before changing.
        """
        question_id = request.GET['id']
        user_answer = request.GET['user_answer']
        print("id ", question_id)
        print("고른 답 ", user_answer)
        answers = str(Question.objects.get(id=question_id))
        print("답 ", answers)
        if(str(user_answer) == str(answers)):
            return Response({"정답"}, status=200)
        else:
            return Response({"오답"}, status=401)
#!/bin/bash # Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== set -e set -x source tensorflow/tools/ci_build/release/common.sh install_ubuntu_16_python_pip_deps python3.9 # Update bazel install_bazelisk # Export required variables for running pip.sh export OS_TYPE="UBUNTU" export CONTAINER_TYPE="CPU" export TF_PYTHON_VERSION='python3.9' # Run configure. export PYTHON_BIN_PATH=$(which ${TF_PYTHON_VERSION}) yes "" | "$PYTHON_BIN_PATH" configure.py # Get the default test targets for bazel. source tensorflow/tools/ci_build/build_scripts/DEFAULT_TEST_TARGETS.sh # Export optional variables for running pip.sh export TF_BUILD_FLAGS="--config=release_cpu_linux" export TF_TEST_FLAGS="--define=no_tensorflow_py_deps=true --test_lang_filters=py --test_output=errors --verbose_failures=true --keep_going --test_env=TF2_BEHAVIOR=1" export TF_TEST_TARGETS="${DEFAULT_BAZEL_TARGETS} -//tensorflow/lite/... " export TF_PIP_TESTS="test_pip_virtualenv_non_clean test_pip_virtualenv_clean" export TF_TEST_FILTER_TAGS='-no_oss,-oss_serial,-no_oss_py39,-v1only' #export IS_NIGHTLY=0 # Not nightly; uncomment if building from tf repo. export TF_PROJECT_NAME="tensorflow_cpu" export TF_PIP_TEST_ROOT="pip_test" ./tensorflow/tools/ci_build/builds/pip_new.sh
import { diffWordsWithSpace } from 'diff'
import objectInspect from 'object-inspect'

import RunnableStats from './runnable'
import { Argument } from '../types'
import { pad, color, colorLines } from '../utils'
import { AssertionError } from 'assert'

// Cap on how much of an expected/actual value is rendered into a diff.
const maxStringLength = 2048

// Shape of the test event payload received from the runner.
export interface Test {
    type: 'test:start' | 'test:pass' | 'test:fail' | 'test:retry' | 'test:pending' | 'test:end'
    title: string
    parent: string
    fullTitle: string
    pending: boolean
    file?: string
    duration?: number
    cid: string
    specs: string[]
    uid: string
    pendingReason?: string
    error?: Error
    errors?: Error[]
    retries?: number
    argument?: string | Argument
}

// A single protocol command/result captured during the test.
interface Output {
    command: string
    params: any
    method: 'PUT' | 'POST' | 'GET' | 'DELETE'
    endpoint: string
    body: {}
    result: {
        value: string | null
    }
    sessionId: string
    cid: string
    type: 'command' | 'result'
}

/**
 * TestStats class
 * captures data on a test.
 */
export default class TestStats extends RunnableStats {
    uid: string
    cid: string
    title: string
    currentTest?: string
    fullTitle: string
    output: Output[]
    argument?: string | Argument
    retries?: number
    parent: string
    /**
     * initial test state is pending
     * the state can change to the following: passed, skipped, failed
     */
    state: 'pending' | 'passed' | 'skipped' | 'failed'
    pendingReason?: string
    errors?: Error[]
    error?: Error

    constructor(test: Test) {
        super('test')
        this.uid = RunnableStats.getIdentifier(test)
        this.cid = test.cid
        this.title = test.title
        this.fullTitle = test.fullTitle
        this.output = []
        this.argument = test.argument
        this.retries = test.retries
        this.parent= test.parent

        /**
         * initial test state is pending
         * the state can change to the following: passed, skipped, failed
         */
        this.state = 'pending'
    }

    /** Marks the test passed and records its end time (via complete()). */
    pass() {
        this.complete()
        this.state = 'passed'
    }

    /** Marks the test skipped, remembering why it was skipped. */
    skip(reason: string) {
        this.pendingReason = reason
        this.state = 'skipped'
    }

    /**
     * Marks the test failed and normalizes its errors.
     * Iterates through all errors to check if they're a type of 'AssertionError',
     * and formats it if so. Otherwise, just leaves error as is.
     */
    fail(errors?: Error[]) {
        this.complete()
        this.state = 'failed'

        const formattedErrors = errors?.map((err: Error) => (
            /**
             * only format if error object has either an "expected" or "actual" property set
             */
            ((err as AssertionError).expected || (err as AssertionError).actual) &&
            /**
             * and if they aren't already formated, e.g. in Jasmine
             */
            (err.message && !err.message.includes('Expected: ') && !err.message.includes('Received: '))
                ? this._stringifyDiffObjs(err as AssertionError)
                : err
        ))

        this.errors = formattedErrors
        // Keep the first error as the primary one for reporters.
        if (formattedErrors && formattedErrors.length) {
            this.error = formattedErrors[0]
        }
    }

    /**
     * Builds a new Error whose message appends a colorized, line-numbered
     * word diff of the assertion's actual vs expected values, preserving
     * the original stack trace.
     */
    private _stringifyDiffObjs (err: AssertionError) {
        const inspectOpts = { maxStringLength }
        const expected = objectInspect(err.expected, inspectOpts)
        const actual = objectInspect(err.actual, inspectOpts)

        // Colorize added (expected) vs removed (actual) word fragments.
        let msg = diffWordsWithSpace(actual, expected)
            .map((str) => (
                str.added
                    ? colorLines('diff added inline', str.value)
                    : str.removed
                        ? colorLines('diff removed inline', str.value)
                        : str.value
            ))
            .join('')

        // linenos
        const lines = msg.split('\n')
        if (lines.length > 4) {
            const width = String(lines.length).length
            msg = lines
                .map(function(str: string, i: number) {
                    return pad(String(++i), width) + ' |' + ' ' + str
                })
                .join('\n')
        }

        // legend
        msg = `\n${color('diff removed inline', 'actual')} ${color('diff added inline', 'expected')}\n\n${msg}\n`

        // indent
        msg = msg.replace(/^/gm, '      ')

        const newError = new Error(err.message + msg)
        newError.stack = err.stack
        return newError
    }
}
<?xml version="1.0"?> <library> <book> <title>Harry Potter and the Philosopher's Stone</title> <price>9.99</price> </book> <book> <title>The Lord of the Rings</title> <price>12.99</price> </book> <book> <title>The Catcher in the Rye</title> <price>7.99</price> </book> </library>
// Package roundtrip provides a ProxyRoundTripper decorator and a factory
// that derives new transports from a template http.Transport.
package roundtrip

import (
	"net/http"
)

// NewJunoRoundTripper wraps the given ProxyRoundTripper in a
// junoRoundTripper that also captures http.DefaultTransport.
func NewJunoRoundTripper(p ProxyRoundTripper) ProxyRoundTripper {
	return &junoRoundTripper{
		p: p,
		d: http.DefaultTransport,
	}
}

type junoRoundTripper struct {
	p ProxyRoundTripper
	// NOTE(review): d is assigned but never read in this file — it may be
	// used by code elsewhere in the package, or be dead; confirm.
	d http.RoundTripper
}

// RoundTrip delegates directly to the wrapped ProxyRoundTripper.
func (d *junoRoundTripper) RoundTrip(r *http.Request) (*http.Response, error) {
	return d.p.RoundTrip(r)
}

// CancelRequest delegates directly to the wrapped ProxyRoundTripper.
func (d *junoRoundTripper) CancelRequest(r *http.Request) {
	d.p.CancelRequest(r)
}

// FactoryImpl builds ProxyRoundTrippers by shallow-copying selected fields
// of a template http.Transport.
type FactoryImpl struct {
	Template *http.Transport
}

// New returns a fresh ProxyRoundTripper derived from the template.
// NOTE(review): expectedServerName is currently ignored, and TLS-related
// template fields (e.g. TLSClientConfig) are not copied into the new
// transport — confirm whether that is intentional.
func (t *FactoryImpl) New(expectedServerName string) ProxyRoundTripper {
	newTransport := &http.Transport{
		Dial:                t.Template.Dial,
		DisableKeepAlives:   t.Template.DisableKeepAlives,
		MaxIdleConns:        t.Template.MaxIdleConns,
		IdleConnTimeout:     t.Template.IdleConnTimeout,
		MaxIdleConnsPerHost: t.Template.MaxIdleConnsPerHost,
		DisableCompression:  t.Template.DisableCompression,
	}
	return NewJunoRoundTripper(newTransport)
}
<gh_stars>0 package com.mh.controltool2.exceptions.invoke; public class ParamDataIsEmptyException extends RuntimeException { }
// Test publisher: periodically sends randomized platform commands (color,
// flash, sensor reading, meta, ...) to an MQTT broker using the platjson
// message builders. Configuration is via the constants below.
'use strict'

const options = require('../options.js'),
    mqtt = require('mqtt'),
    client = mqtt.connect(options.mqtt_broker, {username:options.mqtt_username, password:options.mqtt_password}),
    platjson = require('../platjson'),
    tenant = 'tenant',
    base = 'base',
    interval = 5000,     // ms between published commands
    minPlat = 100,       // valid platform id range when plat == 'random'
    maxPlat = 200,
    threshold = 50,
    brightness = 50,
    pixels = 24,
    plat = '100', //can be random, too
    newPlat = '100', //can be random, too
    mode = '4' //can be random, too, but excludes 10 (EEPROM_UPDATE)
    ;

// Returns a random [r, g, b] triplet, each channel in 0..255.
var getRandomRGB = function() {
    var r = Math.round(Math.random()*255);
    var g = Math.round(Math.random()*255);
    var b = Math.round(Math.random()*255);
    return [r,g,b];
}

// Resolves the target platform id: either the fixed `plat` constant or a
// random id within [minPlat, maxPlat].
var getPlat = function() {
    if (plat == 'random') {
        var p;
        while (p == undefined || p < minPlat || p > maxPlat) {
            p = Math.round(Math.random()*maxPlat);
        }
        return p;
    } else {
        return parseInt(plat, 10);
    }
}

// Each test* builder below returns a platjson command payload for one mode.
var testPlatformColor = function(platID) {
    var rgb = getRandomRGB();
    return platjson.platformColor(platID, rgb[0], rgb[1], rgb[2]);
};

var testPlatformSequenceColor = function(platID) {
    var rgb = getRandomRGB();
    return platjson.platformSequenceColor(platID, rgb[0], rgb[1], rgb[2]);
};

var testPlatformFlash = function(platID) {
    var rgb = getRandomRGB();
    return platjson.platformFlash(platID, rgb[0], rgb[1], rgb[2], 100); //100*10ms duration
};

var testPlatformFadePixels = function(platID) {
    var rgb = getRandomRGB();
    return platjson.platformFadePixels(platID, rgb[0], rgb[1], rgb[2], 10); //100*10ms duration
};

var testRotateColor = function(platID) {
    var rgb = getRandomRGB();
    return platjson.rotateColor(platID, rgb[0], rgb[1], rgb[2], 15); //100ms delay between
};

var testPlatformSequenceCleanupColor = function(platID) {
    var rgb = getRandomRGB();
    return platjson.platformSequenceCleanupColor(platID, rgb[0], rgb[1], rgb[2]);
};

// Builds one random RGB triplet per pixel (24 pixels).
var testPlatformIndividualColor = function(platID) {
    var triplets = [];
    for (var i = 0; i < 24; i++) {
        var r = Math.round(Math.random()*255);
        var g = Math.round(Math.random()*255);
        var b = Math.round(Math.random()*255);
        triplets[triplets.length] = [r,g,b];
    }
    return platjson.platformIndividualColor(platID, triplets);
};

var testPlatformSensorReading = function(platID) {
    return platjson.platformSensorReading(platID);
};

// Writes new meta values to the platform; note mode 10 degrades EEPROM.
var testUpdateMeta = function(platID) {
    var id = (newPlat == 'random') ? Math.round(Math.random()*255) : parseInt(newPlat, 10);
    var b = (brightness == 'random') ? Math.round(Math.random()*255) : parseInt(brightness, 10);
    return platjson.updateMeta(platID, id, threshold, b, pixels);
};

var testRequestMeta = function(platID) {
    return platjson.requestMeta(platID);
};

// Picks a random mode from the table below, excluding 10 (EEPROM_UPDATE)
// and any undefined mode numbers, and builds its payload.
var getRandomPayloadWithoutEEPROM = function(platID){
    var randomMode;
    while (randomMode == undefined) {
        var mode = Math.round(Math.random()*255);
        if (mode != 10 && modes[mode+''] != undefined) randomMode = mode + '';
    }
    return modes[randomMode](platID);
}

// Mode number -> payload builder.
const modes = {
    '0' : testPlatformColor,
    '1' : testPlatformSequenceColor,
    '2' : testPlatformSequenceCleanupColor,
    '3' : testPlatformIndividualColor,
    '4' : testPlatformSensorReading,
    '5' : testPlatformFlash,
    '6' : testRotateColor,
    '7' : testPlatformFadePixels,
    '10' : testUpdateMeta, //DEGRADES EEPROM
    '11' : testRequestMeta
};

// On (re)connect, publish one command every `interval` ms to
// <username>/<tenant>/<base>/<platID>/command.
client.on('connect', function () {
    console.log("(re)connected to " + options.mqtt_broker);
    setInterval(function() {
        var platID = getPlat();
        var topic = options.mqtt_username + '/' + tenant + '/' + base + '/' + platID + '/command';
        var payload = JSON.stringify((mode == 'random') ? getRandomPayloadWithoutEEPROM(platID) : modes[mode](platID));
        console.log(topic + ' > ' + payload);
        client.publish(topic, payload);
    }, interval);
});
/*
 * Copyright (c) 2015 IBM Corporation and others.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.brunel.data.diagram;

import org.brunel.data.Data;
import org.brunel.data.Dataset;
import org.brunel.data.modify.Filter;
import org.brunel.data.io.CSV;
import org.junit.Assert;
import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Tests Hierarchical.makeByNestingFields: building trees from a small CSV
 * dataset nested by zero, one, or two fields, and verifying node keys
 * survive filtering.
 */
public class TestHierarchical {

    // Five rows, nest candidates A and B, numeric fields C and D.
    private static final String csv = Data.join(new String[]{
            "A,B,C,D",
            "a,x,1,4",
            "b,x,2,3",
            "c,y,1,2",
            "c,x,2,1",
            "c,y,5,1",
    }, "\n");

    private static final Dataset simple = Dataset.make(CSV.read(csv));

    @Test
    public void testPreservesKeys() {
        // Drop the 'a' row, then nest by A; keys must index the ORIGINAL data.
        Dataset trimmed = Filter.transform(simple, "A !is a");
        Node data = Hierarchical.makeByNestingFields(trimmed, null, "A").root;
        Assert.assertEquals("((0-1) (1-1 2-1 3-1))", dumpTree(data));

        assertEquals("", data.key);                             // top level has unknown key

        Node[] children = (Node[]) data.children;               // the top level children (b and c groups)
        assertEquals(2, children.length);
        assertEquals("-b", children[0].key);
        assertEquals("-c", children[1].key);

        Node[] group_b = (Node[]) children[0].children;         // 'b' group
        assertEquals(1, group_b.length);
        assertEquals(2, group_b[0].key);                        // 'b' item has index 2 in the original data

        Node[] group_c = (Node[]) children[1].children;         // 'c' group
        assertEquals(3, group_c.length);
        assertEquals(3, group_c[0].key);
        assertEquals(4, group_c[1].key);
        assertEquals(5, group_c[2].key);
    }

    @Test
    public void testOneLevel() {
        // Nesting by a single field; values summed from D.
        Node data = Hierarchical.makeByNestingFields(simple, "D", "A").root;
        Assert.assertEquals("((0-4) (1-3) (2-2 3-1 4-1))", dumpTree(data));
        data = Hierarchical.makeByNestingFields(simple, "D", "B").root;
        Assert.assertEquals("((0-4 1-3 3-1) (2-2 4-1))", dumpTree(data));
    }

    @Test
    public void testTwoLevels() {
        // Nesting by A then B; inner keys concatenate the path segments.
        Node data = Hierarchical.makeByNestingFields(simple, "D", "A", "B").root;
        Assert.assertEquals("(((0-4)) ((1-3)) ((2-2 4-1) (3-1)))", dumpTree(data));
        Node level1Inner = ((Node[]) data.children)[0];
        Node level2Inner = ((Node[]) level1Inner.children)[0];
        Node leaf = ((Node[]) level2Inner.children)[0];
        assertEquals("", data.key);
        assertEquals("-a", level1Inner.key);
        assertEquals("-a-x", level2Inner.key);
        assertEquals(1, leaf.key);
    }

    @Test
    public void testZeroLevel() {
        // No nesting fields: a flat list of leaves.
        Node data = Hierarchical.makeByNestingFields(simple, "D").root;
        Assert.assertEquals("(0-4 1-3 2-2 3-1 4-1)", dumpTree(data));
    }

    // recursive output of the tree: "<row>-<value>" per leaf, children in parens
    private String dumpTree(Node node) {
        String s = "";
        if (node.row != null || node.value > 0)
            s += node.row + "-" + Data.format(node.value, false);
        Node[] children = (Node[]) node.children;
        if (children != null) {
            s += "(";
            for (Node n : children) {
                if (n != children[0]) s += " ";
                s += dumpTree(n);
            }
            s += ")";
        }
        return s;
    }
}
<reponame>jianglong0156/chromium.src // Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // StatusController handles all counter and status related number crunching and // state tracking on behalf of a SyncSession. // // The most important feature of StatusController is the // ScopedModelSafeGroupRestriction. Some of its functions expose per-thread // state, and can be called only when the restriction is in effect. For // example, if GROUP_UI is set then the value returned from // commit_id_projection() will be useful for iterating over the commit IDs of // items that live on the UI thread. // // Other parts of its state are global, and do not require the restriction. // // NOTE: There is no concurrent access protection provided by this class. It // assumes one single thread is accessing this class for each unique // ModelSafeGroup, and also only one single thread (in practice, the // SyncerThread) responsible for all "shared" access when no restriction is in // place. Thus, every bit of data is to be accessed mutually exclusively with // respect to threads. // // StatusController can also track if changes occur to certain parts of state // so that various parts of the sync engine can avoid broadcasting // notifications if no changes occurred. #ifndef SYNC_SESSIONS_STATUS_CONTROLLER_H_ #define SYNC_SESSIONS_STATUS_CONTROLLER_H_ #include <map> #include <vector> #include "base/logging.h" #include "base/stl_util.h" #include "base/time.h" #include "sync/internal_api/public/sessions/model_neutral_state.h" #include "sync/sessions/ordered_commit_set.h" namespace syncer { namespace sessions { class StatusController { public: explicit StatusController(const ModelSafeRoutingInfo& routes); ~StatusController(); // ClientToServer messages. 
const ModelTypeSet updates_request_types() const { return model_neutral_.updates_request_types; } void set_updates_request_types(ModelTypeSet value) { model_neutral_.updates_request_types = value; } const sync_pb::ClientToServerResponse& updates_response() const { return model_neutral_.updates_response; } sync_pb::ClientToServerResponse* mutable_updates_response() { return &model_neutral_.updates_response; } // Changelog related state. int64 num_server_changes_remaining() const { return model_neutral_.num_server_changes_remaining; } const OrderedCommitSet::Projection& commit_id_projection( const sessions::OrderedCommitSet &commit_set) { DCHECK(group_restriction_in_effect_) << "No group restriction for projection."; return commit_set.GetCommitIdProjection(group_restriction_); } // Various conflict counters. int num_encryption_conflicts() const; int num_hierarchy_conflicts() const; int num_server_conflicts() const; // Aggregate sum of all conflicting items over all conflict types. int TotalNumConflictingItems() const; // Number of successfully applied updates. int num_updates_applied() const; int num_server_overwrites() const; // Returns the number of updates received from the sync server. int64 CountUpdates() const; // Returns true if the last download_updates_command received a valid // server response. bool download_updates_succeeded() const { return model_neutral_.last_download_updates_result == SYNCER_OK; } // Returns true if the last updates response indicated that we were fully // up to date. This is subtle: if it's false, it could either mean that // the server said there WAS more to download, or it could mean that we // were unable to reach the server. If we didn't request every enabled // datatype, then we can't say for sure that there's nothing left to // download: in that case, this also returns false. 
bool ServerSaysNothingMoreToDownload() const; ModelSafeGroup group_restriction() const { return group_restriction_; } base::Time sync_start_time() const { // The time at which we sent the first GetUpdates command for this sync. return sync_start_time_; } bool HasBookmarkCommitActivity() const { return ActiveGroupRestrictionIncludesModel(BOOKMARKS); } const ModelNeutralState& model_neutral_state() const { return model_neutral_; } SyncerError last_get_key_result() const; // Download counters. void set_num_server_changes_remaining(int64 changes_remaining); void increment_num_updates_downloaded_by(int value); void increment_num_tombstone_updates_downloaded_by(int value); void increment_num_reflected_updates_downloaded_by(int value); // Update application and conflict resolution counters. void increment_num_updates_applied_by(int value); void increment_num_encryption_conflicts_by(int value); void increment_num_hierarchy_conflicts_by(int value); void increment_num_server_conflicts(); void increment_num_local_overwrites(); void increment_num_server_overwrites(); // Commit counters. void increment_num_successful_commits(); void increment_num_successful_bookmark_commits(); void set_num_successful_bookmark_commits(int value); // Server communication status tracking. void set_sync_protocol_error(const SyncProtocolError& error); void set_last_get_key_result(const SyncerError result); void set_last_download_updates_result(const SyncerError result); void set_commit_result(const SyncerError result); // A very important flag used to inform frontend of need to migrate. void set_types_needing_local_migration(ModelTypeSet types); void UpdateStartTime(); void set_debug_info_sent(); bool debug_info_sent() const; private: friend class ScopedModelSafeGroupRestriction; // Check whether a particular model is included by the active group // restriction. 
bool ActiveGroupRestrictionIncludesModel(ModelType model) const { if (!group_restriction_in_effect_) return true; ModelSafeRoutingInfo::const_iterator it = routing_info_.find(model); if (it == routing_info_.end()) return false; return group_restriction() == it->second; } ModelNeutralState model_neutral_; // Used to fail read/write operations on state that don't obey the current // active ModelSafeWorker contract. bool group_restriction_in_effect_; ModelSafeGroup group_restriction_; const ModelSafeRoutingInfo routing_info_; base::Time sync_start_time_; DISALLOW_COPY_AND_ASSIGN(StatusController); }; // A utility to restrict access to only those parts of the given // StatusController that pertain to the specified ModelSafeGroup. class ScopedModelSafeGroupRestriction { public: ScopedModelSafeGroupRestriction(StatusController* to_restrict, ModelSafeGroup restriction) : status_(to_restrict) { DCHECK(!status_->group_restriction_in_effect_); status_->group_restriction_ = restriction; status_->group_restriction_in_effect_ = true; } ~ScopedModelSafeGroupRestriction() { DCHECK(status_->group_restriction_in_effect_); status_->group_restriction_in_effect_ = false; } private: StatusController* status_; DISALLOW_COPY_AND_ASSIGN(ScopedModelSafeGroupRestriction); }; } // namespace sessions } // namespace syncer #endif // SYNC_SESSIONS_STATUS_CONTROLLER_H_
#! /bin/bash export HOST_UID=jogamp export HOST_IP=10.1.0.122 export HOST_RSYNC_ROOT=PROJECTS/JogAmp export TARGET_UID=jogamp export TARGET_IP=jautab03 export TARGET_ADB_PORT=5555 export TARGET_ROOT=/data/projects export BUILD_DIR=../build-android-armv6 if [ -e /opt-linux-x86/android-sdk-linux_x86 ] ; then export ANDROID_HOME=/opt-linux-x86/android-sdk-linux_x86 export PATH=$ANDROID_HOME/platform-tools:$PATH fi # # orig android: # export LD_LIBRARY_PATH /system/lib # export BOOTCLASSPATH /system/framework/core.jar:/system/framework/bouncycastle.jar:/system/framework/ext.jar:/system/framework/framework.jar:/system/framework/android.policy.jar:/system/framework/services.jar:/system/framework/core-junit.jar # #TSTCLASS=com.jogamp.gluegen.test.junit.generation.Test1p1JavaEmitter #TSTCLASS=com.jogamp.gluegen.test.junit.generation.Test1p2ProcAddressEmitter #TSTCLASS=com.jogamp.common.GlueGenVersion TSTCLASS=jogamp.android.launcher.LauncherUtil # am start -a android.intent.action.MAIN -n com.jogamp.common/jogamp.common.os.android.GluegenVersionActivity LOGFILE=`basename $0 .sh`.log # -Djava.class.path=lib/junit.jar:/usr/share/ant/lib/ant.jar:/usr/share/ant/lib/ant-junit.jar:$BUILD_DIR/gluegen.jar:$BUILD_DIR/test/build/gluegen-test.jar \ # -Djava.class.path=lib/ant-junit-all.apk:$BUILD_DIR/gluegen-rt.apk \ # -Djava.library.path=/system/lib:$TARGET_ROOT/gluegen/make/$BUILD_DIR/obj:$BUILD_DIR/test/build/natives \ RSYNC_EXCLUDES="--delete-excluded \ --exclude 'build-x86*/' --exclude 'build-linux*/' --exclude 'build-win*/' --exclude 'build-mac*/' \ --exclude 'classes/' --exclude 'src/' --exclude '.git/' --exclude '*-java-src.zip' \ --exclude 'gensrc/' --exclude 'doc/' --exclude 'jnlp-files' --exclude 'archive/' \ --exclude 'android-sdk/' --exclude 'resources/' --exclude 'scripts/' \ --exclude 'stub_includes/' --exclude 'nbproject/' --exclude '*.log' --exclude '*.zip' --exclude '*.7z' \ --exclude 'make/lib/external/'" echo "#! 
/system/bin/sh" > $BUILD_DIR/gluegen-targetcommand.sh # export BOOTCLASSPATH=/system/framework/core.jar:/system/framework/bouncycastle.jar:/system/framework/ext.jar:/system/framework/framework.jar:/system/framework/android.policy.jar:/system/framework/services.jar ; \ echo "\ rsync -av --delete --delete-after $RSYNC_EXCLUDES \ $HOST_UID@$HOST_IP::$HOST_RSYNC_ROOT/gluegen \ $TARGET_ROOT ; \ cd $TARGET_ROOT/gluegen/make ; \ export LD_LIBRARY_PATH=/system/lib:$TARGET_ROOT/gluegen/make/$BUILD_DIR/obj:$TARGET_ROOT/gluegen/make/$BUILD_DIR/test/build/natives ; \ dalvikvm \ -Xjnigreflimit:2000 \ -cp ../make/lib/ant-junit-all.apk:$BUILD_DIR/jogamp-android-launcher.apk:$BUILD_DIR/gluegen.apk:$BUILD_DIR/test/build/gluegen-test.apk \ -Djogamp.debug.JNILibLoader=true \ -Djogamp.debug.NativeLibrary=true \ -Djogamp.debug.NativeLibrary.Lookup=true \ -Djogamp.debug.ProcAddressHelper=true \ com.android.internal.util.WithFramework \ $TSTCLASS \ " >> $BUILD_DIR/gluegen-targetcommand.sh chmod ugo+x $BUILD_DIR/gluegen-targetcommand.sh adb connect $TARGET_IP:$TARGET_ADB_PORT adb -s $TARGET_IP:$TARGET_ADB_PORT push $BUILD_DIR/gluegen-targetcommand.sh $TARGET_ROOT/gluegen-targetcommand.sh adb -s $TARGET_IP:$TARGET_ADB_PORT shell su -c $TARGET_ROOT/gluegen-targetcommand.sh 2>&1 | tee $LOGFILE
// Copyright Epic Games, Inc. All Rights Reserved. #include "HideAndseekwithAI.h" #include "Modules/ModuleManager.h" IMPLEMENT_PRIMARY_GAME_MODULE( FDefaultGameModuleImpl, HideAndseekwithAI, "HideAndseekwithAI" );
# repo: vdice/epicodus-health_tracker (dataset residue token removed)
require 'rails_helper'

# Feature spec for editing an existing food item via the UI.
describe 'update a food item process' do
  before do
    @food = FactoryGirl.create(:food)
  end

  it 'can update a new food item' do
    visit food_path(@food)
    click_on 'Edit'
    # The name typed here must match the content asserted below; the
    # anonymized '<NAME>' placeholder broke that link, so the spec could
    # never pass. Restored from the expectation two lines down.
    fill_in 'Name', :with => 'Honeycrisp Apple'
    fill_in 'Count', :with => '90'
    click_on 'Update Food'
    expect(page).to have_content 'Honeycrisp Apple'
    expect(page).to have_content '90'
  end

  it 'throw errors if a field is empty' do
    visit food_path(@food)
    click_on 'Edit'
    fill_in 'Name', :with => ''
    click_on 'Update Food'
    # Blank name should fail validation and re-render with error messages.
    expect(page).to have_content 'errors'
  end
end
<gh_stars>0 #ifndef output_h_ #define output_h_ #ifdef DIGI_NAIVE #include <vector> #elif defined DIGI_CUDA || defined DIGI_ALPAKA || defined DIGI_CUPLA || defined DIGI_KOKKOS || defined DIGI_ONEAPI #include "GPUSimpleVector.h" #endif #include "pixelgpudetails.h" struct alignas(128) Output { uint16_t xx[pixelgpudetails::MAX_FED_WORDS]; uint16_t yy[pixelgpudetails::MAX_FED_WORDS]; uint16_t adc[pixelgpudetails::MAX_FED_WORDS]; uint16_t moduleInd[pixelgpudetails::MAX_FED_WORDS]; uint16_t clus[pixelgpudetails::MAX_FED_WORDS]; uint32_t digi[pixelgpudetails::MAX_FED_WORDS]; uint32_t rawIdArr[pixelgpudetails::MAX_FED_WORDS]; #ifdef DIGI_NAIVE std::vector<PixelErrorCompact> err; #elif defined DIGI_CUDA || defined DIGI_ALPAKA || defined DIGI_CUPLA || defined DIGI_KOKKOS || defined DIGI_ONEAPI PixelErrorCompact err_d[pixelgpudetails::MAX_FED_WORDS]; GPU::SimpleVector<PixelErrorCompact> err; #endif }; #endif // output_h_
module Testkit
  module Backend
    # Accepts TCP connections from the Testkit driver and hands each client
    # socket to a CommandProcessor, which is pumped until the driver
    # disconnects.
    class Runner
      def initialize(port)
        @selector = NIO::Selector.new
        @server = TCPServer.new(port)
        # Bug fix: the original message interpolated an empty expression
        # ("Listening on #{}:#{port}"), printing "Listening on :PORT".
        puts "Listening on port #{port}"
        monitor = @selector.register(@server, :r)
        monitor.value = proc { accept }
      end

      # Drive the selector loop forever, dispatching readiness callbacks.
      def run
        loop do
          @selector.select { |monitor| monitor.value.call }
        end
      end

      # Accept one client and process its commands synchronously (blocking)
      # until the processor reports completion, then close the socket.
      def accept
        socket = @server.accept
        _, port, host = socket.peeraddr
        puts "*** #{host}:#{port} connected"

        @command_processor = CommandProcessor.new(socket)
        while @command_processor.process(blocking: true) do
        end
        socket.close
      end

      # Non-blocking, selector-driven variant of command processing.
      # Currently unused by #accept, which processes synchronously instead.
      def handle_client(client_socket, command_processor)
        puts "handling client"
        command_processor.process
        puts "finished handling client"
      rescue StandardError => e
        _, port, host = client_socket.peeraddr
        puts "*** #{host}:#{port} disconnected"
        @selector.deregister(client_socket)
        client_socket.close
      end
    end
  end
end
-- Return the opinion text of every review rated from 3 to 4 (inclusive).
SELECT opinion
FROM reviews
WHERE rating BETWEEN 3 AND 4;
<filename>packages/xlayers/src/app/editor/code-editor/editor-container/codegen/vue/vue.service.spec.ts import { TestBed, inject } from '@angular/core/testing'; import { VueCodeGenService } from './vue.service'; describe('VueCodeGenService', () => { beforeEach(() => { TestBed.configureTestingModule({ providers: [VueCodeGenService] }); }); it('should be created', inject([VueCodeGenService], (service: VueCodeGenService) => { expect(service).toBeTruthy(); })); });
# GetResponseAPI.podspec
# (dataset residue token "<filename>..." removed — it is not valid Ruby)
Pod::Spec.new do |s|
  s.name             = "GetResponseAPI"
  s.version          = "0.0.1"
  s.summary          = "This library is an API wrapper for the GetResponse API v3.0"
  s.description      = <<-DESC
                        GetResponseAPI is an API wrapper for the GetResponse API 3.0 that allows you to use basic API methods such as listning, adding, and removing contacts as well as fetching the list of your campaigns.
                       DESC
  # TODO(review): author name/email are anonymized placeholders — restore
  # the real values before publishing.
  s.author           = { "<NAME>" => "<EMAIL>" }
  s.homepage         = "https://github.com/GetResponse/iOS-Developer-Kit"
  s.license          = { :type => 'MIT', :file => 'LICENSE.TXT' }
  s.platform         = :ios, '8.2'
  s.ios.deployment_target = '8.2'
  s.source           = { :git => "https://github.com/GetResponse/iOS-Developer-Kit.git", :tag => s.version.to_s }
  s.source_files     = 'GetResponseAPI', 'GetResponseAPI/**/*.{h,m}'
  s.resource_bundles = { 'GetResponseAPI' => [] }
  s.requires_arc     = true
  s.dependency 'AFNetworking', '~> 2.0'
  s.dependency 'JSONModel', '~> 1.0'
  s.dependency 'AFNetworkActivityLogger'
end
#include <iostream> #include <string> #include <map> #include <algorithm> using namespace std; // function to find the most occuring word from the given text string findCommonWord(string text) { map<string, int> word_map; string temp = ""; // traversing the text for(char c : text) { // checking for alpha numerical character if(isalnum(c)) temp += c; else { // storing the word in map word_map[temp] += 1; temp = ""; } } string most_occuring_word; int occurrence = 0; // finding the word with most occurance for(auto word : word_map) { if(word.second > occurrence) { most_occuring_word = word.first; occurrence = word.second; } } return most_occuring_word; } int main() { string text = "This is a sentence with a lot of words."; cout << "Most common word: " << findCommonWord(text); return 0; }
load test_helpers setup() { cd ./tests/help } @test "help: should create .lets dir" { run lets printf "%s\n" "${lines[@]}" [[ $status == 0 ]] [[ -d .lets ]] } @test "help: run 'lets' as is" { run lets printf "%s\n" "${lines[@]}" [[ $status = 0 ]] [[ "${lines[0]}" = "A CLI command runner" ]] [[ "${lines[1]}" = "Usage:" ]] [[ "${lines[2]}" = " lets [flags]" ]] [[ "${lines[3]}" = " lets [command]" ]] [[ "${lines[4]}" = "Available Commands:" ]] [[ "${lines[5]}" = " bar Print bar" ]] [[ "${lines[6]}" = " foo Print foo" ]] [[ "${lines[7]}" = " help Help about any command" ]] [[ "${lines[8]}" = "Flags:" ]] [[ "${lines[9]}" = " -E, --env stringToString set env variable for running command KEY=VALUE (default [])" ]] [[ "${lines[10]}" = " --exclude stringArray run all but excluded command(s) described in cmd as map" ]] [[ "${lines[11]}" = " -h, --help help for lets" ]] [[ "${lines[12]}" = " --only stringArray run only specified command(s) described in cmd as map" ]] [[ "${lines[13]}" = " --upgrade upgrade lets to latest version" ]] [[ "${lines[14]}" = " -v, --version version for lets" ]] [[ "${lines[15]}" = 'Use "lets [command] --help" for more information about a command.' 
]] } @test "help: run 'lets help'" { run lets printf "%s\n" "${lines[@]}" [[ $status = 0 ]] [[ "${lines[0]}" = "A CLI command runner" ]] [[ "${lines[1]}" = "Usage:" ]] [[ "${lines[2]}" = " lets [flags]" ]] [[ "${lines[3]}" = " lets [command]" ]] [[ "${lines[4]}" = "Available Commands:" ]] [[ "${lines[5]}" = " bar Print bar" ]] [[ "${lines[6]}" = " foo Print foo" ]] [[ "${lines[7]}" = " help Help about any command" ]] [[ "${lines[8]}" = "Flags:" ]] [[ "${lines[9]}" = " -E, --env stringToString set env variable for running command KEY=VALUE (default [])" ]] [[ "${lines[10]}" = " --exclude stringArray run all but excluded command(s) described in cmd as map" ]] [[ "${lines[11]}" = " -h, --help help for lets" ]] [[ "${lines[12]}" = " --only stringArray run only specified command(s) described in cmd as map" ]] [[ "${lines[13]}" = " --upgrade upgrade lets to latest version" ]] [[ "${lines[14]}" = " -v, --version version for lets" ]] [[ "${lines[15]}" = 'Use "lets [command] --help" for more information about a command.' ]] }
<gh_stars>1-10 // Code generated by MockGen. DO NOT EDIT. // Source: identity_svc.go // Package auth is a generated GoMock package. package auth import ( x509 "crypto/x509" gomock "github.com/golang/mock/gomock" reflect "reflect" ) // MockIdentityService is a mock of IdentityService interface type MockIdentityService struct { ctrl *gomock.Controller recorder *MockIdentityServiceMockRecorder } // MockIdentityServiceMockRecorder is the mock recorder for MockIdentityService type MockIdentityServiceMockRecorder struct { mock *MockIdentityService } // NewMockIdentityService creates a new mock instance func NewMockIdentityService(ctrl *gomock.Controller) *MockIdentityService { mock := &MockIdentityService{ctrl: ctrl} mock.recorder = &MockIdentityServiceMockRecorder{mock} return mock } // EXPECT returns an object that allows the caller to indicate expected use func (m *MockIdentityService) EXPECT() *MockIdentityServiceMockRecorder { return m.recorder } // MspID mocks base method func (m *MockIdentityService) MspID() (string, error) { ret := m.ctrl.Call(m, "MspID") ret0, _ := ret[0].(string) ret1, _ := ret[1].(error) return ret0, ret1 } // MspID indicates an expected call of MspID func (mr *MockIdentityServiceMockRecorder) MspID() *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "MspID", reflect.TypeOf((*MockIdentityService)(nil).MspID)) } // CreatorID mocks base method func (m *MockIdentityService) CreatorID() (string, error) { ret := m.ctrl.Call(m, "CreatorID") ret0, _ := ret[0].(string) ret1, _ := ret[1].(error) return ret0, ret1 } // CreatorID indicates an expected call of CreatorID func (mr *MockIdentityServiceMockRecorder) CreatorID() *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreatorID", reflect.TypeOf((*MockIdentityService)(nil).CreatorID)) } // Cert mocks base method func (m *MockIdentityService) Cert() (*x509.Certificate, error) { ret := m.ctrl.Call(m, "Cert") ret0, _ := ret[0].(*x509.Certificate) ret1, _ := 
ret[1].(error) return ret0, ret1 } // Cert indicates an expected call of Cert func (mr *MockIdentityServiceMockRecorder) Cert() *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Cert", reflect.TypeOf((*MockIdentityService)(nil).Cert)) } // CertID mocks base method func (m *MockIdentityService) CertID() (string, error) { ret := m.ctrl.Call(m, "CertID") ret0, _ := ret[0].(string) ret1, _ := ret[1].(error) return ret0, ret1 } // CertID indicates an expected call of CertID func (mr *MockIdentityServiceMockRecorder) CertID() *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CertID", reflect.TypeOf((*MockIdentityService)(nil).CertID)) } // GetAttribute mocks base method func (m *MockIdentityService) GetAttribute(attrName string) (AttributeValue, error) { ret := m.ctrl.Call(m, "GetAttribute", attrName) ret0, _ := ret[0].(AttributeValue) ret1, _ := ret[1].(error) return ret0, ret1 } // GetAttribute indicates an expected call of GetAttribute func (mr *MockIdentityServiceMockRecorder) GetAttribute(attrName interface{}) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAttribute", reflect.TypeOf((*MockIdentityService)(nil).GetAttribute), attrName) }
/*
 * Copyright 2014-2015 <NAME> <http://www.terems.org/>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.terems.webz.base;

import org.terems.webz.WebzDestroyable;

/**
 * Basic implementation of {@code WebzDestroyable} which does nothing upon {@link #destroy()}.
 * Convenience base class for subclasses that have no resources to release;
 * subclasses with cleanup work should override {@link #destroy()}.
 **/
public abstract class BaseWebzDestroyable implements WebzDestroyable {

	/** Do nothing by default... **/
	@Override
	public void destroy() {
	}

}
#! /usr/bin/env bash

# Copyright Project Contour Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Tears down the kind cluster used for Contour integration tests.
# KIND and CLUSTERNAME may be overridden via the environment.

set -o pipefail
set -o errexit
set -o nounset

readonly KIND=${KIND:-kind}
readonly CLUSTERNAME=${CLUSTERNAME:-contour-integration}

# Delete the named kind cluster.
kind::cluster::delete() {
    ${KIND} delete cluster --name "${CLUSTERNAME}"
}

# Delete existing kind cluster
kind::cluster::delete
<reponame>IanGClifton/MoreEssentials package com.iangclifton.moreessentials.lesson2; import android.content.Context; import android.graphics.Color; import android.hardware.Sensor; import android.hardware.SensorEvent; import android.hardware.SensorEventListener; import android.hardware.SensorManager; import android.os.Bundle; import android.support.v4.app.Fragment; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.TextView; import com.iangclifton.moreessentials.R; /** * Fragment for running the Lesson 2 examples. * * For any foreground usage, you almost always want to register your listener * in onResume and unregister it in onPause. We moved code to onAttach and * onDetach in order to demonstrate background batching easily, but you * would typically create a service for listening to sensor events in the * background. * * @author <NAME> */ public class Lesson2Fragment extends Fragment implements SensorEventListener { private static final String TAG = "Lesson2Fragment"; private static final int REPORT_LATENCY = 1000 * 1000 * 5; private Sensor mAccelerometer; private Sensor mLightSensor; private TextView mTextView; private View mEmptyView; public Lesson2Fragment() { // Required empty public constructor } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Inflate the layout for this fragment View rootView = inflater.inflate(R.layout.fragment_lesson2, container, false); mTextView = (TextView) rootView.findViewById(R.id.text); mEmptyView = rootView.findViewById(R.id.emptyView); return rootView; } @Override public void onDestroyView() { super.onDestroyView(); mTextView = null; mEmptyView = null; } @Override public void onAttach(Context context) { super.onAttach(context); SensorManager sensorManager = (SensorManager) getActivity().getSystemService(Context.SENSOR_SERVICE); mLightSensor = 
sensorManager.getDefaultSensor(Sensor.TYPE_LIGHT); if (mLightSensor == null) { Log.d(TAG, "No light sensor available"); } else { sensorManager.registerListener(this, mLightSensor, SensorManager.SENSOR_DELAY_UI); } mAccelerometer = sensorManager.getDefaultSensor(Sensor.TYPE_LINEAR_ACCELERATION); if (mAccelerometer == null) { Log.d(TAG, "No accelerometer"); } else { // sensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_UI); sensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_UI, REPORT_LATENCY); } } @Override public void onDetach() { super.onDetach(); SensorManager sensorManager = (SensorManager) getActivity().getSystemService(Context.SENSOR_SERVICE); if (mLightSensor != null) { sensorManager.unregisterListener(this, mLightSensor); } if (mAccelerometer != null) { sensorManager.unregisterListener(this, mAccelerometer); } } @Override public void onResume() { super.onResume(); // SensorManager sensorManager = (SensorManager) getActivity().getSystemService(Context.SENSOR_SERVICE); // mLightSensor = sensorManager.getDefaultSensor(Sensor.TYPE_LIGHT); // if (mLightSensor == null) { // Log.d(TAG, "No light sensor available"); // } else { // sensorManager.registerListener(this, mLightSensor, SensorManager.SENSOR_DELAY_UI); // } // // mAccelerometer = sensorManager.getDefaultSensor(Sensor.TYPE_LINEAR_ACCELERATION); // if (mAccelerometer == null) { // Log.d(TAG, "No accelerometer"); // } else { //// sensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_UI); // sensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_UI, REPORT_LATENCY); // } } @Override public void onPause() { super.onPause(); // SensorManager sensorManager = (SensorManager) getActivity().getSystemService(Context.SENSOR_SERVICE); // if (mLightSensor != null) { // sensorManager.unregisterListener(this, mLightSensor); // } // if (mAccelerometer != null) { // sensorManager.unregisterListener(this, 
mAccelerometer); // } } @Override public void onSensorChanged(SensorEvent event) { if (event.sensor == mLightSensor) { // Light reading float lux = event.values[0]; // mTextView.setText(lux + "lx; max value: " + mLightSensor.getMaximumRange()); } else { // Accelerometer reading // Log.d(TAG, "Accelerometer values: " + event.values[0] + " " + event.values[1] + " " + event.values[2]); float sensorReading = Math.abs(Math.min(event.values[0], 1)); int red = (int) (255 * sensorReading); sensorReading = Math.abs(Math.min(event.values[1], 1)); int green = (int) (255 * sensorReading); sensorReading = Math.abs(Math.min(event.values[2], 1)); int blue = (int) (255 * sensorReading); mEmptyView.setBackgroundColor(Color.rgb(red, green, blue)); // Log.d(TAG, "New color: " + Color.rgb(red, green, blue)); } } @Override public void onAccuracyChanged(Sensor sensor, int accuracy) { Log.d(TAG, "onAccuracyChanged called: " + sensor + "; " + accuracy); } }
<reponame>skarpushin/swingpm package ru.skarpushin.swingpm.base; import org.summerb.approaches.validation.ValidationError; import ru.skarpushin.swingpm.collections.ListEx; public interface HasValidationErrorsListEx { ListEx<ValidationError> getValidationErrors(); }
#!/bin/bash
# Updates the `active` permission of an EOS account so that, in addition to
# the given public key, the account's own eosio.code permission may act —
# required for the account's deployed contract to send inline actions.
#
# Usage: $0 [account] [public_key]
#   account defaults to "proxytoken"; public_key defaults to the key below.
ACCOUNT=${1:-proxytoken}
PKEY=${2:-EOS6MRyAjQq8ud7hVNYcfnVPJqcVpscN5So8BhtHuGYqET5GDW5CV}

# Signed with the account's owner authority; sent via the get-scatter node.
cleos -u https://nodes.get-scatter.com set account permission $ACCOUNT active \
    '{"threshold": 1,"keys": [{"key": "'$PKEY'","weight": 1}],"accounts": [{"permission":{"actor":"'$ACCOUNT'","permission":"eosio.code"},"weight":1}]}' owner
/*package yimei.jss.rule.workcenter.basic; import yimei.jss.jobshop.OperationOption; import yimei.jss.jobshop.WorkCenter; import yimei.jss.rule.AbstractRule; import yimei.jss.rule.RuleType; import yimei.jss.simulation.state.SystemState; *//** * Created by fzhang on 18/04/18. * Average cost in queue. * The priority of this method should be the lowest average cost in the queue. *//* public class LAC extends AbstractRule { public LAC(RuleType t) { name = "\"LAC\""; this.type = t; } @Override public double priority(OperationOption op, WorkCenter workCenter, SystemState systemState) { return workCenter.getAverageCostInQueue(); } } */
// Test fixture for the (stage-3 era) decorators proposal, applied to a
// static private getter. The decorator wraps the getter to add 1 and
// records its context object on the class via an initializer.
function dec(get, context) {
  // Runs after class definition: stash the decorator context under
  // e.g. Foo['#aContext'] so the assertions below can inspect it.
  context.addInitializer(function() {
    this[context.name + 'Context'] = context;
  });

  // Replacement getter: original result + 1.
  return function () {
    return get.call(this) + 1;
  }
}

class Foo {
  static value = 1;

  @dec
  static get #a() {
    return this.value;
  }

  static getA() {
    return this.#a;
  }
}

const aContext = Foo['#aContext'];

// context.access.get and the decorated getter must agree, and both must
// reflect the +1 wrapper over the live Foo.value.
expect(aContext.access.get.call(Foo)).toBe(2);
expect(Foo.getA()).toBe(2);

Foo.value = 123;
expect(aContext.access.get.call(Foo)).toBe(124);
expect(Foo.getA()).toBe(124);

// Shape of the decorator context for a static private getter.
expect(aContext.name).toBe('#a');
expect(aContext.kind).toBe('getter');
expect(aContext.isStatic).toBe(true);
expect(aContext.isPrivate).toBe(true);
expect(typeof aContext.addInitializer).toBe('function');
expect(typeof aContext.setMetadata).toBe('function');
expect(typeof aContext.getMetadata).toBe('function');
# format is database name | schema name | stream name | privilege | true/false for with_grant_option terraform import snowflake_stream_grant.example 'dbName|schemaName|streamName|SELECT|false'
<filename>src/if.cpp #include <string> #include <map> #include <vector> #include "../include/node.h" #include "../include/variables.hpp" #include "../include/usefull.h" #include "../include/visitor.h" #include "../include/types.h" #include "../include/main.hpp" #include "../include/code_execution.h" #include "../include/paterns.h" #include "../include/paterns_debug.h" #include "../include/types.h" #include "../include/types_check.h" #include "../include/conversion.h" #include "../include/keywords.h" string normal_code_execution(vector<Node *> ast, bool function, map<string, Mtmc_variable *> &variable_function, int recursive, bool loop, vector<string> reference) { int index = 0; string what_to_return = ""; while (index != ast.size()) { Node *instruction = ast[index]; current_reference = instruction->ref; threaded_references.push(current_reference); if (instruction->value == "expr") { Mtmc_variable *result; if (function) { // Si on se trouve dans l'execution d'une fonction result = compute_function(instruction->children[0], true, true, variable_function, recursive, reference); // Des calculs chiants } else { result = compute(instruction->children[0], true, false, reference); } // Dans le cas ou l'utilisateur assigne cette valeur ร  une variable if (instruction->children.size() > 1 and instruction->children[1]->value == "->") { if (instruction->children[2]->children.size() == 3) // pour les assigantions des listes genre '3 -> liste*3' { // Normalement, sa se prรฉsente soit liste*index soit liste*[index] // On crรฉe donc un sous noeud qui contient une fois la liste, pour pouvoir utiliser la fonction de calcule Node *n = new Node(); n->children.push_back(instruction->children[2]->children[0]); Mtmc_variable *list_to_mod; if (function) { list_to_mod = compute_function(n, false, function, variable_function, recursive, reference); } else { list_to_mod = compute(n, false, false, reference); } if (list_to_mod->type != "list") { string err = "the type of the list being 
modified must be 'list', not '" + list_to_mod->type + "'"; Error("execution", err); } // Pareil n = new Node(); n->children.push_back(instruction->children[2]->children[2]); Mtmc_variable *list_index; if (function) { list_index = compute_function(n, false, true, variable_function, recursive, reference); } else { list_index = compute(n, false, false, reference); } if (list_index->type == "int") { Mtmc_listed_assigne(*(int64_t *)list_index->content, (Mtmc_listed *)list_to_mod->content, result); } else if (list_index->type == "list") { Mtmc_listed *list_indexL = (Mtmc_listed *)list_index->content; Mtmc_variable *index = list_indexL->begin->next->content; if (index->type == "none") { Error("not implemented", "indexing with a content still waiting to be evaluated (include/visitor.h/visitor/asignation dans une liste)"); } else if (index->type != "int") { string err = "the index of the list must be of type 'int' not type '" + index->type + "'"; Error("execution", err); } int64_t index_int = int64_t(*(int64_t *)index->content); // Cast vers du int (l'index) Mtmc_listed_assigne(index_int, (Mtmc_listed *)list_to_mod->content, result); } else { string err = "the index of the list must be of type 'int' or 'list' not type '" + list_index->type + "'"; Error("execution", err); } } else if (instruction->children[2]->children.size() == 1) { // Asignation du nom ร  la variable if (!is_object_attribute(instruction->children[2]->children[0]->value)) { string name = instruction->children[2]->children[0]->value; // Asignation du nom ร  la variable if (function) { if (variable_exist_function(name, variable_function)) { variable_override_function(result, variable_function, name); } else variable_asignement_function(result, variable_function, name); } else if (variable_exist(name)) { variable_override(result, name); } variable_asignement(result, name); } else { if (!function) { vector<string> names_of_objects = object_atribute_separator(instruction->children[2]->children[0]->value); if 
(!variable_exist(names_of_objects[0])) { string err = "unknown variable '" + names_of_objects[0] + "'"; Error("execution", err); } Mtmc_variable *n = variable_get_value(names_of_objects[0]); if (!is_typed_non_predefined(n->type)) { string err = "type '" + n->type + "' has no attribute '" + names_of_objects[1] + "'"; Error("execution", err); } Mtmc_class_variable *n_prime = (Mtmc_class_variable *)n->content; if (!n_prime->variable_exists(names_of_objects[1])) { string err = "object '" + names_of_objects[0] + "' has no attribute '" + names_of_objects[1] + "'"; Error("execution", err); } Mtmc_variable *r = new Mtmc_variable(); r->type = result->type; r->content = result->content; string name = names_of_objects[1]; n_prime->variable_override(r, name); } else { vector<string> names_of_objects = object_atribute_separator(instruction->children[2]->children[0]->value); if (!variable_exist_function(names_of_objects[0], variable_function)) { string err = "unknown variable '" + names_of_objects[0] + "'"; Error("execution", err); } Mtmc_variable *n = variable_get_value_function(names_of_objects[0], variable_function); if (!is_typed_non_predefined(n->type)) { string err = "type '" + n->type + "' has no attribute '" + names_of_objects[1] + "'"; Error("execution", err); } Mtmc_class_variable *n_prime = (Mtmc_class_variable *)n->content; if (!n_prime->variable_exists(names_of_objects[1])) { string err = "object '" + names_of_objects[0] + "' has no attribute '" + names_of_objects[1] + "'"; Error("execution", err); } Mtmc_variable *r = new Mtmc_variable(); r->type = result->type; r->content = result->content; string name = names_of_objects[1]; n_prime->variable_override(r, name); } } } } } else if (instruction->value == "keyword") { Node *args; vector<string> s_args; if (instruction->children.size() > 1) { // Il y a des mot-clรฉs qui n'ont pas d'argument ou qui peuvent ne pas en avoir : break, continue, return args = instruction->children[1]; s_args = childs_value(args); if 
(function) { compute_function(args, false, false, variable_function, recursive, reference); } else compute(args, false, false, reference); } if (instruction->children[0]->value == "push") { keyword_push(s_args, reference); } else if (instruction->children[0]->value == "convert") { keyword_convert(s_args); } else if (instruction->children[0]->value == "length") { keyword_length(s_args); } else if (instruction->children[0]->value == "type") { keyword_type(s_args); } else if (instruction->children[0]->value == "return") { if (function) { what_to_return = "return"; threaded_references.pop(); break; } } else if (instruction->children[0]->value == "break") { if (loop) { what_to_return = "break"; threaded_references.pop(); break; } else { Error("execution", "'break' keyword can only be used in a loop"); } } else { string err = "unknown keyword `" + instruction->children[0]->value + "` \nMaybe the keyword is not usable in the current state"; Error("execution", err); } } else if (instruction->value == "if_statement") { Node *condition = instruction->children[1]->children[0]; if (!function) { Mtmc_variable *condition_value = compute(condition, true, function, reference); string result; if (!Mtmc_variable_type_check(condition_value, "bool")) { Error("execution", "condition of an if statement needs to be a boolean"); } string cond = *(string *)condition_value->content; if (cond == "true") { vector<Node *> paterns = recognize_paternes(instruction->children[2], true); result = normal_code_execution(paterns, 0, variable_function, 0, loop, reference); // Qui correspond au {} } else if (instruction->children.size() > 3 and instruction->children[3]->value == "else") { vector<Node *> paterns = recognize_paternes(instruction->children[4], true); result = normal_code_execution(paterns, 0, variable_function, 0, loop, reference); // Qui correspond au {} du else statement } if (result == "return") { what_to_return = "return"; threaded_references.pop(); break; } else if (result == "break") 
{ what_to_return = "break"; threaded_references.pop(); break; } } else { Mtmc_variable *condition_value = compute_function(condition, true, function, variable_function, recursive, reference); string result; if (!Mtmc_variable_type_check(condition_value, "bool")) { Error("execution", "condition of an if statement needs to be a boolean"); } string cond = *(string *)condition_value->content; if (cond == "true") { vector<Node *> paterns = recognize_paternes(instruction->children[2], true); result = normal_code_execution(paterns, function, variable_function, 0, loop, reference); // Qui correspond au {} } else if (instruction->children.size() > 3 and instruction->children[3]->value == "else") { vector<Node *> paterns = recognize_paternes(instruction->children[4], true); result = normal_code_execution(paterns, function, variable_function, 0, loop, reference); // Qui correspond au {} du else statement } if (result == "return") { what_to_return = "return"; threaded_references.pop(); break; } else if (result == "break") { what_to_return = "break"; threaded_references.pop(); break; } } } else if (instruction->value == "for_loop") { vector<Node *> paterns = recognize_paternes(instruction->children[2], debug); vector<Node *> args_for_loop = get_listed_values(instruction->children[1]->children[0]); string ret = for_loop(paterns, function, variable_function, recursive, args_for_loop, reference); if (ret == "return") { what_to_return = "return"; threaded_references.pop(); break; } } output_display_pile(); manage_system_pile(); index++; threaded_references.pop(); } return what_to_return; }
package runtime

import "time"

// Counter type discriminators. Passed to Get to select how a counter's
// value is recorded/interpreted.
const (
	CounterTypeInterval = 0 // timing interval (e.g. produced by BeginTiming)
	CounterTypeLastValue = 1 // keeps only the most recent value (see Last)
	CounterTypeStatistics = 2 // statistical aggregate of values (see Stats)
	CounterTypeTimestamp = 3 // a point in time (see Timestamp / TimestampNow)
	CounterTypeIncrement = 4 // monotonically incremented count (see Increment)
)

// ICounters is a component that records named performance counters.
// Implementations embed the generic IComponent lifecycle interface.
type ICounters interface{
	IComponent

	// Reset clears the counter with the given name.
	Reset(name string)
	// ResetAll clears every counter.
	ResetAll()
	// Dump flushes/saves the current counters; returns an error on failure.
	Dump() error
	// GetAll returns all currently known counters.
	GetAll() []*Counter
	// Get returns (creating if needed — TODO confirm against implementation)
	// the counter with the given name and type.
	Get(name string, typ int) *Counter
	// BeginTiming starts an interval measurement for the named counter.
	BeginTiming(name string) *Timing
	// Stats folds value into the named statistics counter.
	Stats(name string, value float32)
	// Last records value as the most recent value of the named counter.
	Last(name string, value float32)
	// TimestampNow records the current time in the named counter.
	TimestampNow(name string)
	// Timestamp records the given time in the named counter.
	Timestamp(name string, value time.Time)
	// IncrementOne increments the named counter by 1.
	IncrementOne(name string)
	// Increment increments the named counter by value.
	Increment(name string, value int)
}
<reponame>WalterHu/DemonCat
package org.spongycastle.tls.crypto.impl.bc;

import org.spongycastle.crypto.DSA;
import org.spongycastle.crypto.params.DSAPrivateKeyParameters;
import org.spongycastle.crypto.signers.DSASigner;
import org.spongycastle.crypto.signers.HMacDSAKCalculator;
import org.spongycastle.tls.SignatureAlgorithm;

/**
 * Implementation class for generation of the raw DSA signature type using the BC light-weight API.
 */
public class BcTlsDSASigner
    extends BcTlsDSSSigner
{
    /**
     * @param crypto the crypto environment used to create digest instances
     * @param privateKey the DSA private key used to produce signatures
     */
    public BcTlsDSASigner(BcTlsCrypto crypto, DSAPrivateKeyParameters privateKey)
    {
        super(crypto, privateKey);
    }

    /**
     * Creates the underlying DSA signer, using an HMAC-based k-calculator
     * (deterministic nonce generation) over the requested hash algorithm.
     */
    protected DSA createDSAImpl(short hashAlgorithm)
    {
        return new DSASigner(new HMacDSAKCalculator(crypto.createDigest(hashAlgorithm)));
    }

    /** Identifies this signer as plain DSA for TLS signature-algorithm negotiation. */
    protected short getSignatureAlgorithm()
    {
        return SignatureAlgorithm.dsa;
    }
}
package request

// AlipayPassTemplateUpdateMethod is the Alipay open-API method name for
// updating a pass template.
const AlipayPassTemplateUpdateMethod = "alipay.pass.template.update"

// AlipayPassTemplateUpdateRequest is the request payload for
// alipay.pass.template.update.
type AlipayPassTemplateUpdateRequest struct {
	// TplId identifies the template to update.
	TplId string `json:"tpl_id"`
	// TplContent is the new template content (format defined by the
	// Alipay API — presumably a JSON document; confirm against the docs).
	TplContent string `json:"tpl_content"`
}
import tensorflow as tf
# NOTE(review): T5Tokenizer/T5ForConditionalGeneration normally live in the
# `transformers` package, not tensorflow_addons — confirm this import path
# actually resolves in this project's environment.
from tensorflow_addons.text.t5 import T5Tokenizer, T5ForConditionalGeneration

# Load the pretrained tokenizer and model once at module import time.
tokenizer = T5Tokenizer.from_pretrained('t5-base')
model = T5ForConditionalGeneration.from_pretrained('t5-base')


def predict_response(user_input):
    """Generate a model response for ``user_input`` using the T5 model.

    :param user_input: prompt text to feed the model
    :return: decoded response string produced by ``model.generate``
    """
    # Tokenize the input as TensorFlow tensors. The original code called
    # ``.to('cpu')`` here, which is a PyTorch tensor method and raises an
    # AttributeError on the TensorFlow tensors returned by
    # ``return_tensors='tf'`` — removed.
    input_ids = tokenizer.encode(user_input, return_tensors='tf')

    # generate() returns a batch of sequences; decode the first (only) one.
    # TF eager tensors have no ``.squeeze()`` method (the original code's
    # ``output.squeeze()`` would fail), so index the batch instead.
    output = model.generate(input_ids=input_ids)
    response = tokenizer.decode(output[0])
    return response


# call function
print(predict_response('What is your name?'))
#pragma once

#include <memory>

// Forward declarations of the core archgraph types together with the
// shared-pointer aliases used throughout the codebase. Keeping only
// declarations here lets headers depend on the aliases without pulling
// in the full class definitions.
namespace archgraph
{

class Operation;
using OpPtr = std::shared_ptr<Operation>;  // shared handle to an Operation

class Geometry;
using GeoPtr = std::shared_ptr<Geometry>;  // shared handle to a Geometry

class Rule;
using RulePtr = std::shared_ptr<Rule>;     // shared handle to a Rule

class Function;
using FuncPtr = std::shared_ptr<Function>; // shared handle to a Function

class Variant;
using VarPtr = std::shared_ptr<Variant>;   // shared handle to a Variant

}
<filename>src/main/java/org/rs2server/rs2/domain/service/impl/SkillsServiceImpl.java
package org.rs2server.rs2.domain.service.impl;

import org.rs2server.rs2.domain.service.api.SkillsService;

/**
 * Empty implementation of {@link SkillsService}.
 * <p>
 * NOTE(review): this is a stub — either the interface declares no methods
 * yet or they are implemented elsewhere; confirm against SkillsService.
 *
 * @author tommo
 */
public class SkillsServiceImpl implements SkillsService {

}
# Build and install util-linux for the SCUDUM cross-compilation environment.
# Skip entirely when cross-building is disabled.
[ "$SCUDUM_CROSS" == "0" ] && exit 0 || true

# Fail fast on any error (-e) and disable hashing of command locations (+h).
# Moved BEFORE the download/extract steps: in the original, a failed wget or
# tar did not abort the script, and configure/make then ran against a stale
# or missing tree.
set -e +h

VERSION=${VERSION-2.35.2}
VERSION_MAJOR=${VERSION_MAJOR-2.35}

# Fetch the release tarball from kernel.org.
wget --content-disposition "http://www.kernel.org/pub/linux/utils/util-linux/v$VERSION_MAJOR/util-linux-$VERSION.tar.xz"

# Unpack into a clean source tree; paths quoted in case VERSION ever
# contains shell metacharacters.
rm -rf "util-linux-$VERSION" && tar -Jxf "util-linux-$VERSION.tar.xz"
rm -f "util-linux-$VERSION.tar.xz"

cd "util-linux-$VERSION"

# hwclock expects this state directory to exist at runtime.
mkdir -pv /var/lib/hwclock

# Configure a minimal build: login/su/runuser and python/systemd support
# are all disabled for this environment.
./configure\
 --prefix="$PREFIX"\
 --disable-chfn-chsh\
 --disable-login\
 --disable-nologin\
 --disable-su\
 --disable-setpriv\
 --disable-runuser\
 --disable-pylibmount\
 --without-python\
 --without-systemd\
 --without-systemdsystemunitdir

make && make install
const express = require("express");
const router = express.Router();
const userController = require("../controllers/user");

// Route table: [HTTP method, path, controller action].
// Entries are registered in their original declaration order.
const routes = [
  // Page views
  ["get", "/register", "getRegister"], // Register page
  ["get", "/album", "getAlbum"], // Album details
  ["get", "/browse", "getIndex"], // Browse page
  ["get", "/artist", "getArtist"], // Artist details page
  ["get", "/search", "getSearch"], // Search page
  ["get", "/yourmusic", "getYourMusic"], // Your music page
  ["get", "/playlist", "getPlaylistView"], // Single playlist details page
  ["get", "/profile", "getProfile"], // User profile view
  ["get", "/updateDetails", "getupdateDetails"], // Edit user profile view

  // Auth
  ["post", "/login", "login"], // Login submit
  ["post", "/register", "register"], // Register user

  // AJAX calls
  ["get", "/getPlaylist", "getPlaylist"], // Random playlist songs
  ["post", "/getSongByID", "getSongByID"], // Song details by ID
  ["post", "/getArtistByID", "getArtistByID"], // Artist details by ID
  ["post", "/getAlbumByID", "getAlbumByID"], // Album details by ID
  ["post", "/updatePlays", "updatePlays"], // Bump play count on playback
  ["post", "/createPlaylist", "createPlaylist"], // Create new playlist
  ["post", "/deletePlaylist", "deletePlaylist"], // Delete user playlist
  ["post", "/addToPlaylist", "addToPlaylist"], // Add song to playlist
  ["post", "/deleteFromPlaylist", "deleteFromPlaylist"], // Remove song from playlist
  ["post", "/logout", "logout"], // User logout
  ["post", "/updateUserEmail", "updateUserEmail"], // Change email address
  ["post", "/changeUserPassword", "changeUserPassword"], // Change password
];

for (const [method, path, action] of routes) {
  router[method](path, userController[action]);
}

module.exports = router;
// Locations of the bot's configuration and language files in the GitHub repo.
const links = {
  github_base: "https://raw.githubusercontent.com/Lila-Kuhlt/Lila-Kuh-bot/main/",
  config: "config/config-template.json",
  lang_base: "src/lang/",
};

// Fetch a JSON document from the repository's raw-content base URL.
async function fetchRepoJson(path) {
  const response = await fetch(links.github_base + path);
  return response.json();
}

/** Download and parse the bot's template configuration. */
export async function get_config() {
  return fetchRepoJson(links.config);
}

/**
 * Download every language file listed in `config_json.lang_paths`.
 * Languages whose file cannot be fetched/parsed are logged and omitted.
 */
export async function get_lang_jsons(config_json) {
  const langJsons = {};
  for (const [langKey, langPath] of Object.entries(config_json.lang_paths)) {
    try {
      langJsons[langKey] = await fetchRepoJson(links.lang_base + langPath);
    } catch (e) {
      console.error("[Error] Could not get lang_file for lang " + langKey);
      console.error(e);
    }
  }
  return langJsons;
}
<filename>project/plugins.sbt
// Resolve plugins from the Sonatype releases repository.
resolvers += "sonatype-releases" at "https://oss.sonatype.org/content/repositories/releases/"

// sbt-assembly: builds a single fat JAR of the project and its dependencies.
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.3")
import { combineReducers } from "redux"

import { auth } from "./auth/reducer"
// The book module exports its reducer as `books`; rename on import so the
// store slice is keyed `book`.
import { books as book } from "./book/reducer"

// Root reducer: combines the feature reducers into the single store shape
// { auth, book }.
export const rootReducer = combineReducers({
  auth,
  book
})
def detect_plagiarism(source_text):
    """Heuristically decide whether ``source_text`` looks plagiarized.

    The text is split into five-word chunks, and every chunk is compared
    against every chunk of every document in the known-sources ``database``.
    As soon as any comparison reaches the similarity threshold, the text is
    flagged as plagiarized.

    :param source_text: string representing the text from a source document
    :return: True if a close match is found in the database, False otherwise
    """
    # Minimum similarity for a pair of chunks to count as a match.
    threshold = 0.9

    # Short-circuits on the first sufficiently similar pair, scanning in
    # the same order as a triple nested loop would.
    return any(
        calculate_similarity(chunk, document_chunk) >= threshold
        for chunk in get_five_word_chunks(source_text)
        for document in database
        for document_chunk in document.chunks
    )
#!/bin/bash # Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Execute the following commands on the primary cluster kubectl get -n cert-manager secrets/apigee-ca -o json | jq -r '.data."tls.crt"' | base64 -d > tls.crt kubectl get -n cert-manager secrets/apigee-ca -o json | jq -r '.data."tls.key"' | base64 -d > tls.key
#!/bin/bash # Copyright 2015 The Kythe Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -e TEST_NAME="test_modules" . ./kythe/cxx/extractor/testdata/test_common.sh . ./kythe/cxx/extractor/testdata/skip_functions.sh KYTHE_OUTPUT_DIRECTORY="${OUT_DIR}" \ "./${EXTRACTOR}" --with_executable "/dummy/bin/g++" \ -fmodules \ -fmodule-map-file="kythe/cxx/extractor/testdata/modfoo.modulemap" \ -I./kythe/cxx/extractor \ ./kythe/cxx/extractor/testdata/modules.cc [[ $(ls -1 "${OUT_DIR}"/*.kzip | wc -l) -eq 1 ]] INDEX_PATH=$(ls -1 "${OUT_DIR}"/*.kzip) "${KINDEX_TOOL}" -canonicalize_hashes -suppress_details -explode "${INDEX_PATH}" # Remove lines that will change depending on the machine the test is run on. skip_inplace "-target" 1 "${INDEX_PATH}_UNIT" skip_inplace "signature" 0 "${INDEX_PATH}_UNIT" sed "s|TEST_CWD|${PWD}/|" "${BASE_DIR}/modules.UNIT" | \ skip "-target" 1 | skip "signature" 0 | diff -u - "${INDEX_PATH}_UNIT"
#!/usr/bin/env bats IPVSCTL="$(dirname $BATS_TEST_FILENAME)/../dist/ipvsctl" if [ ! -x "${IPVSCTL}" ]; then IPVSCTL=$(which ipvsctl) if [ ! -x "${IPVSCTL}" ]; then echo ERROR unable to find ipvsctl in local dist or in path exit 1 fi fi ASSSD=fixtures/apply-single-service-single-destination.yaml @test "when i apply with invalid allowed actions, it must fail" { run $IPVSCTL apply --allowed-actions=inv -f $ASSSD [ "$status" -ne 0 ] } @test "given a simple model and a changeset, when i apply with no allowed actions, it must fail" { ipvsadm -C run $IPVSCTL apply --allowed-actions= -f $ASSSD [ "$status" -ne 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with missing allowed actions, it must fail (no add service allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=us,ds,ad,ud,dd -f fixtures/changeset-destination-5.yaml [ "$status" -ne 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with necessary allowed actions, it must pass (add service allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=as,us,ds,ad,ud,dd -f fixtures/changeset-destination-5.yaml [ "$status" -eq 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with missing allowed actions, it must fail (no update service allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=as,ds,ad,ud,dd -f fixtures/changeset-destination-5.yaml [ "$status" -ne 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with necessary allowed actions, it must pass (update service allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=as,us,ds,ad,ud,dd -f fixtures/changeset-destination-5.yaml [ "$status" -eq 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with missing allowed actions, it must fail (no delete service allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply 
--allowed-actions=as,us,ad,ud,dd -f fixtures/changeset-destination-6.yaml [ "$status" -ne 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with necessary allowed actions, it must pass (delete service allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=as,us,ds,ad,ud,dd -f fixtures/changeset-destination-6.yaml [ "$status" -eq 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with missing allowed actions, it must fail (no update destination allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=as,ad,ds,dd -f fixtures/changeset-destination-1.yaml [ "$status" -ne 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with necessary allowed actions, it must pass (update destination allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=us,ud -f fixtures/changeset-destination-1.yaml [ "$status" -eq 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with missing allowed actions, it must fail (no add destination allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=as,us,ds,ud,dd -f fixtures/changeset-destination-2.yaml [ "$status" -ne 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with necessary allowed actions, it must pass (add destination allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=us,ud,ad -f fixtures/changeset-destination-2.yaml [ "$status" -eq 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with missing allowed actions, it must fail (no delete destination allowed)" { ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=as,us,ds,ud -f fixtures/changeset-destination-3.yaml [ "$status" -ne 0 ] ipvsadm -C } @test "given a simple model and a changeset, when i apply with necessary allowed actions, it must pass (delete destination allowed)" { 
ipvsadm -C $IPVSCTL apply -f $ASSSD run $IPVSCTL apply --allowed-actions=us,dd -f fixtures/changeset-destination-3.yaml [ "$status" -eq 0 ] ipvsadm -C }
#!/bin/bash

# Build one or more of the project's Docker images, one per OS directory.
#
# Usage: build.bash <container_type...>
#
# NOTE(review): the colour variables (RED, BLUE, PURPLE, NC) are not defined
# in this script — presumably they are exported by the calling environment;
# confirm, otherwise the escape sequences expand to nothing.

if [[ $# -lt 1 || "${1}" == -h || "${1}" == --help ]]; then
  echo 'Usage: build.bash <container_type...>'
  echo ''
  echo '  Arguments'
  echo '    - container_type  : The OS to be installed. One of [ubuntu|centos|fedora]'
else
  # Collect all candidate container directories. GNU find requires global
  # options such as -mindepth to precede tests like -type (the original
  # order "-type d -mindepth 1" triggers a warning), so -mindepth goes first.
  containers="$(find . -mindepth 1 -type d | tr '\n' ' ')"
  # Strip the leading "./" from each directory name.
  containers="${containers//\.\//}"
  for next_container in "${@}"; do
    if [[ "${containers}" == *"${next_container}"* ]]; then
      [[ -d "${next_container}/" ]] || echo -e "${RED}Unable to find directory: ${next_container}/${NC}"
      echo ''
      echo -e "${PURPLE}Building ${BLUE}[${next_container}] ... ${NC}"
      echo ''
      # Build only when the directory actually exists.
      [[ -d "${next_container}/" ]] && docker build -t "yorevs/hhs-${next_container}" "${next_container}/"
    else
      echo "${RED}Invalid container type: \"${next_container}\". Please use one of [${containers}] ! ${NC}"
    fi
  done
fi
<reponame>BotTech/scala-hedgehog-spines
package com.lightbend.hedgehog.generators

import com.lightbend.hedgehog.generators.CharGenerators._
import com.lightbend.hedgehog.implicits.RangeImplicits._
import hedgehog.{Gen, Range}
import org.scalactic.TripleEquals._

import CollectionGenerators._

// Generators for Strings with various whitespace/trimming properties,
// built on top of the character generators in CharGenerators.
object StringGenerators {

  // We have to limit the string to approximately 1,500 characters otherwise it overflows the stack,
  // especially when debugging.
  // See https://github.com/hedgehogqa/scala-hedgehog/issues/47.
  private[generators] val MaxStringLength = 1500

  // 99 is chosen to guarantee that as Size(1) we get an empty string.
  // It could be bigger but generating Strings is slow.
  private[generators] val ReasonableStringLength = 99

  /**
   * Generates a String of characters from the given generator.
   * <p>
   * Negative values in the `range` are treated as a length of 0.
   * </p>
   *
   * @param gen generator for the characters in the String
   * @param range the range for the length of the String
   * @return A generator of Strings.
   */
  def genString(gen: Gen[Char], range: Range[Int]): Gen[String] =
    genList(gen, maxRange(range)).map(_.mkString)

  /**
   * Generates a String of characters from the given generator by concatenating the String's together.
   * <p>
   * Negative values in the `range` are treated as a length of 0.
   * </p>
   *
   * @param gen generator for the characters in the String
   * @param range the range for the length of the String
   * @return A generator of Strings.
   */
  def genConcatenatedString(gen: Gen[String], range: Range[Int]): Gen[String] =
    genJoined[Char, Iterable, Iterable](gen.map(_.toIterable), maxRange(range)).map(_.iterator.mkString)

  // Clamp any requested length range into [0, MaxStringLength] to avoid the
  // stack overflow described above.
  private def maxRange(range: Range[Int]): Range[Int] = range.clamp(0, MaxStringLength)

  /**
   * Generates a String of Unicode characters.
   * <p>
   * Negative values in the `range` are treated as a length of 0.
   * </p>
   *
   * @param range the range for the length of the String
   * @return A generator of Unicode Strings.
   */
  def genUnicodeString(range: Range[Int]): Gen[String] = genString(Gen.unicode, range)

  /**
   * Generates a String of Unicode characters.
   * <p>
   * The length of the String scales linearly with the size of the generator.
   * </p>
   *
   * @return A generator of Unicode Strings.
   */
  def genAnyUnicodeString: Gen[String] = genUnicodeString(Range.linear(0, ReasonableStringLength))

  /**
   * Generates a non-empty String of Unicode characters.
   * <p>
   * The length of the String scales linearly with the size of the generator.
   * </p>
   *
   * @return A generator of non-empty Unicode Strings.
   */
  def genAnyNonEmptyUnicodeString: Gen[String] = genUnicodeString(Range.linear(1, ReasonableStringLength))

  /**
   * Generates a String which contains only whitespace characters.
   * <p>
   * Negative values in the `range` are treated as a length of 0.
   * </p>
   *
   * @param range the range for the length of the String
   * @return A generator of whitespace Strings.
   */
  def genWhitespaceString(range: Range[Int]): Gen[String] = genString(genWhitespaceChar, range)

  /**
   * Generates a String which is either empty or contains only blank characters.
   * <p>
   * Negative values in the `range` are treated as a length of 0.
   * </p>
   *
   * @param range the range for the length of the String
   * @return A generator of blank Strings.
   */
  def genBlankString(range: Range[Int]): Gen[String] = genString(genBlankChar, range)

  /**
   * Generates a String which is either empty or contains at least one non-whitespace character.
   * <p>
   * Negative values in the `range` are treated as a length of 0.
   * </p>
   *
   * @param range the range for the length of the String
   * @return A generator of non-whitespace Strings.
   */
  def genNonWhitespaceString(range: Range[Int]): Gen[String] =
    genMaybeEmpty(genNonWhitespaceBlankString(genNonWhitespaceChar, _), range)

  /**
   * Generates a String which contains at least one non-blank character.
   * <p>
   * Non-positive values in the `range` are treated as a length of 1.
   * </p>
   *
   * @param range the range for the length of the String
   * @return A generator of non-blank Strings.
   */
  def genNonBlankString(range: Range[Int]): Gen[String] = genNonWhitespaceBlankString(genNonBlankChar, range)

  // Builds a Unicode string and guarantees at least one character from `gen`
  // is inserted into it.
  private def genNonWhitespaceBlankString(gen: Gen[Char], range: Range[Int]): Gen[String] =
    genStringWithIndel(genUnicodeString(range), gen, 1)

  // TODO: Add test and docs.
  // Takes a base string and overwrites a run of characters (at least `min`)
  // at a generated position with characters from `insert`, preserving the
  // overall length (see the assertion below).
  def genStringWithIndel(base: Gen[String], insert: Gen[Char], min: Int): Gen[String] =
    for {
      str <- base
      pos <- Gen.int(Range.linear(1, str.length))
      toInsert <- genString(insert, Range.linear(min, math.max(min, str.length - pos + 1)))
    } yield {
      val prefix = str.take(pos - 1)
      val result = prefix + toInsert + str.drop(prefix.length).drop(toInsert.length)
      // Length is preserved, except for the empty-base/min==1 case which
      // produces a single inserted character.
      assert(result.length === str.length || result.length === 1 && str.length === 0 && min === 1)
      result
    }

  /**
   * Generates a String which contains at least one leading or trailing whitespace character.
   * <p>
   * Non-positive values in the `range` are treated as a length of 1.
   * </p>
   *
   * @param range the range for the length of the String
   * @return A generator of non-trimmed Strings.
   */
  def genNotTrimmedString(range: Range[Int]): Gen[String] =
    for {
      length <- Gen.int(range.clamp(1, MaxStringLength))
      leading <- genWhitespaceString(Range.linear(0, length))
      // If there is no leading whitespace there must be at least one
      // trailing whitespace character.
      trailingMin = if (leading.length === 0) 1 else 0
      trailing <- genWhitespaceString(Range.linear(trailingMin, length - leading.length))
      middle <- genNonWhitespaceString(Range.singleton(length - leading.length - trailing.length))
    } yield s"$leading$middle$trailing"

  /**
   * Generates a String which does not contain any leading or trailing whitespace characters.
   * <p>
   * Negative values in the `range` are treated as a length of 0.
   * </p>
   *
   * @param range the range for the length of the String
   * @return A generator of trimmed Strings.
   */
  def genTrimmedString(range: Range[Int]): Gen[String] =
    for {
      str <- genUnicodeString(range)
      first <- genNonWhitespaceChar.map(_.toString)
      last <- genNonWhitespaceChar.map(_.toString)
    } yield {
      // Force the first and last characters to be non-whitespace; the
      // interior of the base string is kept as-is.
      if (str.isEmpty) str
      else if (str.length === 1) first
      else first + str.tail.init + last
    }

  /**
   * Generates a String which empty or contains at least one leading or trailing whitespace character.
   * <p>
   * Negative values in the `range` are treated as a length of 0.
   * </p>
   *
   * @param range the range for the length of the String
   * @return A generator of nonsense Strings.
   */
  def genNonsenseString(range: Range[Int]): Gen[String] = genMaybeEmpty(genNotTrimmedString, range)

  /**
   * Generates a String which is not empty and does not contain any leading or trailing whitespace characters.
   * <p>
   * Non-positive values in the `range` are treated as a length of 1.
   * </p>
   *
   * @param range the range for the length of the String
   * @return A generator of sensible Strings.
   */
  def genSensibleString(range: Range[Int]): Gen[String] = genTrimmedString(range.clampMin(1))

  // TODO: Test and docs.
  // Wraps `gen` so that, when the range permits a zero length, the result is
  // sometimes the empty string (via .option); otherwise delegates to `gen`.
  def genMaybeEmpty(gen: Range[Int] => Gen[String], range: Range[Int]): Gen[String] =
    Gen.sized { size =>
      // Derive the effective (min, max) bounds at the current size.
      val (min, max) =
        if (range.linear) {
          val (x, y) = range.bounds(size)
          if (x <= y) (x, y) else (y, x)
        } else (range.min, range.max)
      if (min > 0) gen(range)
      else if (max <= 0) Gen.constant("")
      else gen(range).option.map(_.getOrElse(""))
    }
}
#!/bin/bash

# Launcher for DQN-family experiments: selects the hyperparameter preset for
# a given paper ("nature", "doubleq", ...) or demo, then invokes the Torch
# entry point (th main.lua) with the matching flags. Remaining CLI arguments
# are passed through via "$@" so individual flags can be overridden.

# Switch to script directory
cd `dirname -- "$0"`

# Specify paper/hyperparameters
if [ -z "$1" ]; then
  echo "Please enter paper, e.g. ./run nature"
  echo "Atari Choices: nature|doubleq|duel|prioritised|priorduel|persistent|bootstrap|recurrent|async-nstep|async-a3c"
  echo "Catch Choices: demo|demo-async|demo-async-a3c"
  echo "Example Choices: demo-grid"
  exit 0
else
  PAPER=$1
  shift
fi

# Specify game (demos run on Catch/GridWorld and need no game argument)
if ! [[ "$PAPER" =~ demo ]]; then
  if [ -z "$1" ]; then
    echo "Please enter game, e.g. ./run nature breakout"
    exit 0
  else
    GAME=$1
    shift
  fi
fi

# Async presets spawn their own worker threads, so restrict OpenMP to one.
if [[ "$PAPER" =~ async ]]; then
  echo "Async mode specified, setting OpenMP threads to 1"
  export OMP_NUM_THREADS=1
fi

if [ "$PAPER" == "demo" ]; then
  # Catch demo
  th main.lua -gpu 0 -zoom 4 -hiddenSize 32 -optimiser adam -steps 500000 -learnStart 50000 -tau 4 -memSize 50000 -epsilonSteps 10000 -valFreq 10000 -valSteps 6000 -bootstraps 0 -memPriority rank -PALpha 0 "$@"
elif [ "$PAPER" == "nature" ]; then
  # Nature
  th main.lua -env rlenvs.Atari -modelBody models.Atari -game $GAME -cudnn true -height 84 -width 84 -colorSpace y -duel false -bootstraps 0 -epsilonEnd 0.1 -tau 10000 -doubleQ false -PALpha 0 -eta 0.00025 -gradClip 0 "$@"
elif [ "$PAPER" == "doubleq" ]; then
  # Double-Q (tuned)
  th main.lua -env rlenvs.Atari -modelBody models.Atari -game $GAME -cudnn true -height 84 -width 84 -colorSpace y -duel false -bootstraps 0 -PALpha 0 -eta 0.00025 -gradClip 0 "$@"
elif [ "$PAPER" == "duel" ]; then
  # Duel (eta is apparently lower but not specified in paper)
  # Note from Tom Schaul: Tuned DDQN hyperparameters are used
  th main.lua -env rlenvs.Atari -modelBody models.Atari -game $GAME -cudnn true -height 84 -width 84 -colorSpace y -bootstraps 0 -PALpha 0 -eta 0.00025 "$@"
elif [ "$PAPER" == "prioritised" ]; then
  # Prioritised (rank-based)
  th main.lua -env rlenvs.Atari -modelBody models.Atari -game $GAME -cudnn true -height 84 -width 84 -colorSpace y -duel false -bootstraps 0 -memPriority rank -alpha 0.7 -betaZero 0.5 -PALpha 0 -gradClip 0 "$@"
elif [ "$PAPER" == "priorduel" ]; then
  # Duel with rank-based prioritised experience replay (in duel paper)
  th main.lua -env rlenvs.Atari -modelBody models.Atari -game $GAME -cudnn true -height 84 -width 84 -colorSpace y -bootstraps 0 -memPriority rank -alpha 0.7 -betaZero 0.5 -PALpha 0 "$@"
elif [ "$PAPER" == "persistent" ]; then
  # Persistent
  th main.lua -env rlenvs.Atari -modelBody models.Atari -game $GAME -cudnn true -height 84 -width 84 -colorSpace y -duel false -bootstraps 0 -epsilonEnd 0.1 -tau 10000 -doubleQ false -eta 0.00025 -gradClip 0 "$@"
elif [ "$PAPER" == "bootstrap" ]; then
  # Bootstrap
  th main.lua -env rlenvs.Atari -modelBody models.Atari -game $GAME -cudnn true -height 84 -width 84 -colorSpace y -duel false -tau 10000 -PALpha 0 -eta 0.00025 -gradClip 0 "$@"
elif [ "$PAPER" == "recurrent" ]; then
  # Recurrent (note that evaluation methodology is different)
  th main.lua -env rlenvs.Atari -modelBody models.Atari -game $GAME -cudnn true -height 84 -width 84 -colorSpace y -histLen 10 -duel false -bootstraps 0 -recurrent true -memSize 400000 -memSampleFreq 1 -epsilonEnd 0.1 -tau 10000 -doubleQ false -PALpha 0 -optimiser adadelta -eta 0.1 "$@"
# Async modes
elif [ "$PAPER" == "demo-async" ]; then
  # N-Step Q-learning Catch demo
  th main.lua -zoom 4 -async NStepQ -eta 0.00025 -momentum 0.99 -bootstraps 0 -batchSize 5 -hiddenSize 32 -doubleQ false -duel false -optimiser adam -steps 15000000 -tau 4 -memSize 20000 -epsilonSteps 10000 -valFreq 10000 -valSteps 6000 -bootstraps 0 -PALpha 0 "$@"
elif [ "$PAPER" == "demo-async-a3c" ]; then
  # A3C Catch demo
  th main.lua -zoom 4 -async A3C -eta 0.0007 -momentum 0.99 -bootstraps 0 -batchSize 5 -hiddenSize 32 -doubleQ false -duel false -optimiser adam -steps 15000000 -tau 4 -memSize 20000 -epsilonSteps 10000 -valFreq 10000 -valSteps 6000 -bootstraps 0 -PALpha 0 -entropyBeta 0 "$@"
elif [ "$PAPER" == "async-nstep" ]; then
  # Steps for "1 day" = 80 * 1e6; for "4 days" = 1e9
  th main.lua -env rlenvs.Atari -modelBody models.Atari2013 -hiddenSize 256 -game $GAME -height 84 -width 84 -colorSpace y -async NStepQ -bootstraps 0 -batchSize 5 -momentum 0.99 -rmsEpsilon 0.1 -steps 80000000 -duel false -tau 40000 -optimiser sharedRmsProp -epsilonSteps 4000000 -doubleQ false -PALpha 0 -eta 0.0007 -gradClip 0 "$@"
elif [ "$PAPER" == "async-a3c" ]; then
  th main.lua -env rlenvs.Atari -modelBody models.Atari2013 -hiddenSize 256 -game $GAME -height 84 -width 84 -colorSpace y -async A3C -bootstraps 0 -batchSize 5 -momentum 0.99 -rmsEpsilon 0.1 -steps 80000000 -duel false -tau 40000 -optimiser sharedRmsProp -epsilonSteps 4000000 -doubleQ false -PALpha 0 -eta 0.0007 -gradClip 0 "$@"
# Examples
elif [ "$PAPER" == "demo-grid" ]; then
  # GridWorld
  th main.lua -env examples/GridWorldVis -modelBody examples/GridWorldNet -histLen 1 -async A3C -zoom 4 -hiddenSize 32 -optimiser adam -steps 400000 -tau 4 -memSize 20000 -valFreq 10000 -valSteps 6000 -doubleQ false -duel false -bootstraps 0 -PALpha 0 "$@"
else
  # Unrecognised preset name
  echo "Invalid options"
fi
/* * Copyright (c) 2018, CiBO Technologies, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.cibo.evilplot.demo import com.cibo.evilplot.colors._ import com.cibo.evilplot.geometry._ import com.cibo.evilplot.numeric._ import com.cibo.evilplot.{geometry, plot} import com.cibo.evilplot.plot._ import com.cibo.evilplot.plot.aesthetics.DefaultTheme.{DefaultFonts, DefaultTheme} import com.cibo.evilplot.plot.aesthetics.Theme import com.cibo.evilplot.plot.components.{Legend, Marker, Position} import com.cibo.evilplot.plot.renderers._ import scala.util.Random /** A number of examples of Evil Plotting */ object DemoPlots { implicit val theme: Theme = DefaultTheme.copy( fonts = DefaultFonts .copy(tickLabelSize = 14, legendLabelSize = 14, fontFace = "'Lato', sans-serif") ) val plotAreaSize: Extent = Extent(1000, 600) lazy val histogram: Drawable = { val data = (0.0 to 3 by .25) ++ (3.0 to 5 by .05) ++ (5.0 to 8 by 1.0) plot .Histogram(data, 10) .standard() .xbounds(-75, 225) .ybounds(0, 15) .vline(3.5, HTMLNamedColors.blue) .render(plotAreaSize) } lazy val legendFeatures: Drawable = { val allYears = (2007 to 2013).toVector val data = Seq.fill(150)( Point3d(Random.nextDouble(), Random.nextDouble(), allYears(Random.nextInt(allYears.length)))) val customCategoricalLegend = Legend( Position.Right, LegendContext( Seq(Rect(10), Rect(20), Rect(30)), Seq(Text("one"), Text("two"), Text("three")) ), LegendRenderer.vertical(), x = 0, y = 0.3 ) val customGradientLegend = Legend( Position.Top, LegendContext( theme.colors.stream.slice(1, 4).map(Rect(10) filled _), Seq(Text("one"), Text("two"), Text("three")), LegendStyle.Gradient ), LegendRenderer.vertical(), x = 0.5, y = 0 ) ScatterPlot( data = data, pointRenderer = Some(PointRenderer.colorByCategory(data, { x: Point3d[Int] => x.z })) ).standard() .overlayLegend(x = 0.95, y = 0.8) .component(customCategoricalLegend) .component(customGradientLegend) .bottomLegend(labels = Some(allYears.map(_ + " AD"))) .render(plotAreaSize) } lazy val barChart: Drawable = { val percentChange = Seq[Double](-10, 5, 12, 68, -22) val 
labels = Seq("one", "two", "three", "four", "five") def labeledByColor(implicit theme: Theme) = new BarRenderer { def render(plot: Plot, extent: Extent, category: Bar): Drawable = { val rect = Rect(extent) val value = category.values.head val positive = HEX("#4c78a8") val negative = HEX("#e45756") val color = if (value >= 0) positive else negative Align .center( rect filled color, Text(s"$value%", fontFace = theme.fonts.fontFace, size = 20).filled(theme.colors.label)) .group } } BarChart .custom(percentChange.map(Bar.apply), spacing = Some(20), barRenderer = Some(labeledByColor)) .standard(xLabels = labels) .hline(0) .render(plotAreaSize) } lazy val axesTesting: Drawable = { val points = Seq(Point(1, 1), Point(1.5, 1.1), Point(2.5, 1.5), Point(2.9, 2.5), Point(3, 3)) val filler = Seq("Lorem", "ipsum", "dolor", "sit", "amet", "consectetur") LinePlot(points) // Note discrete axes are still "banded/boxed" such that ticks don't point to values, but to the center of a // band for that value .discreteAxis( Seq("foo", "bar", "baz"), Seq(1d, 2, 10), Position.Bottom, updatePlotBounds = false, align = 0) .discreteAxis( filler, filler.indices.map(_.toDouble), Position.Right, updatePlotBounds = false, align = 0.5) .continuousAxis( plot => plot.xbounds, Position.Top, tickRenderer = Some( TickRenderer.axisTickRenderer( Position.Top, rotateText = 315 ))) .continuousAxis(_ => Bounds(0, 100000), Position.Left, updatePlotBounds = false) .xGrid() .yGrid() .frame() .render(Extent(400, 300)) } lazy val clusteredBarChart: Drawable = { val data = Seq[Seq[Double]]( Seq(1, 2, 3), Seq(4, 5, 6), Seq(3, 4, 1), Seq(2, 3, 4) ) BarChart .clustered( data, labels = Seq("one", "two", "three"), clusterSpacing = Some(25) ) .title("Clustered Bar Chart Demo") .xAxis(Seq("a", "b", "c", "d")) .yAxis() .frame() .bottomLegend() .render(plotAreaSize) } lazy val stackedBarChart: Drawable = { val data = Seq[Seq[Double]]( Seq(1, 2, 3), Seq(4, 5, 6), Seq(3, 4, 1), Seq(2, 3, 4) ) BarChart .stacked( data, labels 
= Seq("one", "two", "three") ) .title("Stacked Bar Chart Demo") .xAxis(Seq("a", "b", "c", "d")) .yAxis() .frame() .bottomLegend() .render(plotAreaSize) } lazy val clusteredStackedBarChart: Drawable = { val data = Seq[Seq[Seq[Double]]]( Seq(Seq(1, 2, 3), Seq(4, 5, 6)), Seq(Seq(3, 4, 1), Seq(2, 3, 4)) ) BarChart .clusteredStacked( data, labels = Seq("one", "two", "three"), clusterSpacing = Some(25) ) .title("Clustered Stacked Bar Chart Demo") .standard(Seq("Category 1", "Category 2")) .xLabel("Category") .yLabel("Level") .rightLegend() .render(plotAreaSize) } lazy val functionPlot: Drawable = { val Seq(one, two, three) = theme.colors.stream.take(3) Overlay( FunctionPlot.series(x => x * x, "y = x\u00B2", one, xbounds = Some(Bounds(-1, 1))), FunctionPlot.series(x => math.pow(x, 3), "y = x\u00B3", two, xbounds = Some(Bounds(-1, 1))), FunctionPlot.series(x => math.pow(x, 4), "y = x\u2074", three, xbounds = Some(Bounds(-1, 1))) ).title("A bunch of polynomials.") .overlayLegend() .standard() .render(plotAreaSize) } lazy val boxPlot: Drawable = { val data = Seq.fill(10)(Seq.fill(Random.nextInt(30))(Random.nextDouble())) val series = Seq.fill(10)(Random.nextInt(2)) BoxPlot(data, boxRenderer = Some(BoxRenderer.colorBy(series).withMeanLine())) .standard(xLabels = (1 to 10).map(_.toString)) .rightLegend() .render(plotAreaSize) } lazy val clusteredBoxPlot: Drawable = { val data = Seq.fill(3)(Seq.fill(3)(Seq.fill(Random.nextInt(30))(Random.nextDouble()))) val series = Seq.fill(3)(Seq(0, 1, 2)).flatten BoxPlot .clustered( data, boxRenderer = Some(BoxRenderer.colorBy(series)), spacing = Some(10.0), clusterSpacing = Some(60.0) ) .standard(xLabels = (1 to 3).map(_.toString)) .rightLegend() .render(plotAreaSize) } case class Record(value: Double) lazy val simpleGroupedPlot: Drawable = { val continuousData = Seq.fill(60)(Record(Math.random() * 100)) val colorBar = ScaledColorBar(ColorGradients.magma, 0, 100) val gradientFn = GradientUtils.multiGradient(ColorGradients.magma, 0, 100, 
GradientMode.Linear) val groupPlot = BinnedPlot.continuous[Record]( continuousData, _.continuousBins(_.value, numBins = 30), legendContext = LegendContext.continuousGradientFromColorBar(colorBar) )( _.histogram(Some(ContinuousBinRenderer.custom({ case (context, bin) => val extent = context.extent Rect(extent.width, extent.height).filled(gradientFn(bin.bounds.midpoint)) }))) ) groupPlot .standard() .xLabel("Value") .rightLegend() .render(plotAreaSize) } lazy val simpleContinuousPlot: Drawable = { val continuousData = Seq.fill(60)(Math.random() * 100) val histogramPlot = BinnedPlot.continuous[Double]( // creates a histogram continuousData, _.continuousBins(identity) )(_.histogram()) histogramPlot.standard() .xLabel("x") .yLabel("y") .rightLegend() .render(plotAreaSize) } lazy val simpleCartesianPlot: Drawable = { val points = Seq.fill(150)(Point(Random.nextDouble() * 2, Random.nextDouble())) :+ Point( 0.0, 0.0) :+ Point(1.0, 0.0) :+ Point(0.0, 1.0) :+ Point(1.0, 1.0) val pointData = points.sortBy(_.x).map(thing => Point3d(thing.x, thing.y, Math.random())) CartesianPlot(pointData)( // creates a scatter plot _.scatter({ pt: Point3d[Double] => if (pt.z > 0.6) { Text("\uD83D\uDC10", size = 20).translate(-10, -10) } else { Style(Disc.centered(2), fill = RGB.random) } }) ).standard() .xLabel("x") .yLabel("y") .trend(1, 0) .rightLegend() .render(plotAreaSize) } lazy val scatterPlot: Drawable = { val points = Seq.fill(150)( Point3d(Random.nextDouble(), Random.nextDouble(), Random.nextDouble())) :+ Point3d( 0.0, 0.0, Random.nextDouble()) ScatterPlot( points, pointRenderer = Some(PointRenderer.depthColor[Point3d[Double]]( x => x.z, points.map(_.z).min, points.map(_.z).max, Some(ContinuousColoring .gradient3(HTMLNamedColors.green, HTMLNamedColors.yellow, HTMLNamedColors.red)), None )) ).standard() .xLabel("x") .yLabel("y") .trend(1, 0) .rightLegend() .render(plotAreaSize) } lazy val marginalHistogram: Drawable = { import com.cibo.evilplot.plot._ import 
com.cibo.evilplot.plot.renderers._ // Make up some data... val allYears = (2007 to 2013).toVector val data = Seq.fill(150)( Point3d(Random.nextDouble(), Random.nextDouble(), allYears(Random.nextInt(allYears.length)))) val xhist = Histogram(data.map(_.x), bins = 50) val yhist = Histogram(data.map(_.y), bins = 40) ScatterPlot( data = data, pointRenderer = Some(PointRenderer.colorByCategory(data, { x: Point3d[Int] => x.z })) ).topPlot(xhist) .rightPlot(yhist) .standard() .xLabel("x") .yLabel("y") .trend(1, 0, color = RGB(45, 45, 45), lineStyle = LineStyle.DashDot) .overlayLegend(x = 0.95, y = 0.8) .render(plotAreaSize) } lazy val pieChart: Drawable = { val data = Seq("one" -> 1.5, "two" -> 3.5, "three" -> 2.0) PieChart(data).rightLegend().render(plotAreaSize) } lazy val contourPlot: Drawable = { import com.cibo.evilplot.plot._ import scala.util.Random val data = Seq.fill(100)(Point(Random.nextDouble() * 20, Random.nextDouble() * 20)) ContourPlot(data) .standard() .xbounds(0, 20) .ybounds(0, 20) .render(plotAreaSize) } lazy val linePlot: Drawable = { val data = (0 to 5) .map(_.toDouble) .zip( Seq( 0.0, 0.1, 0.0, 0.1, 0.0, 0.1 )) .map(Point.tupled) LinePlot( data ).ybounds(0, .12) .yAxis() .xGrid() .yGrid() .frame() .render(plotAreaSize) } lazy val heatmap: Drawable = { val data = Seq[Seq[Double]]( Seq(1, 2, 3, 4), Seq(5, 6, 7, 8), Seq(9, 8, 7, 6) ) val coloring = ContinuousColoring.gradient3( HTMLNamedColors.dodgerBlue, HTMLNamedColors.crimson, HTMLNamedColors.dodgerBlue) Heatmap(data, Some(coloring)) .title("Heatmap Demo") .xAxis() .yAxis() .rightLegend() .render(plotAreaSize) } lazy val facetedPlot: Drawable = { val years = 2007 to 2013 val datas: Seq[Seq[Point]] = years.map(_ => Seq.fill(Random.nextInt(20))(Point(Random.nextDouble(), Random.nextDouble()))) val plot1 = Overlay(ScatterPlot.series(datas(0), "2010", HTMLNamedColors.red)) val plot2 = Overlay( ScatterPlot.series(datas(0), "2010", HTMLNamedColors.red), ScatterPlot.series(datas(1), "2011", 
HTMLNamedColors.blue) ) val plot3 = Overlay( ScatterPlot.series(datas(0), "2010", HTMLNamedColors.red), ScatterPlot.series(datas(1), "2011", HTMLNamedColors.blue), ScatterPlot.series(datas(2), "2012", HTMLNamedColors.green), ScatterPlot.series(datas(3), "2013", HTMLNamedColors.teal) ) val plot4 = Overlay( ScatterPlot.series(datas(0), "2010", HTMLNamedColors.red) ) Facets(Seq(Seq(plot1, plot2), Seq(plot3, plot4))) .standard() .xLabel("x") .yLabel("y") .trend(1.0, 0) .topLabels(Seq("A", "B")) .hline(0.6) .vline(0.6) .title("Facet Demo") .rightLegend() .rightLabels(Seq("before", "after")) .render(Extent(600, 400)) } lazy val crazyPlot: Drawable = { import com.cibo.evilplot.plot._ import com.cibo.evilplot.plot.renderers._ // Make up some data... val allYears = (2007 to 2013).toVector val data = Seq.fill(150)( Point3d( 6 * Random.nextDouble(), Random.nextDouble(), allYears(Random.nextInt(allYears.length)))) val xhist = Histogram(data.map(_.x), bins = 50) val yhist = Histogram(data.map(_.y), bins = 40) val plot = ScatterPlot( data = data, pointRenderer = Some(PointRenderer.colorByCategory(data, { x: Point3d[Int] => x.z })) ).xAxis() .topPlot(xhist) .rightPlot(yhist) .rightLegend() .overlayLegend(x = 0.95, y = 0.8) .bottomLegend() val lines = Overlay( LinePlot.series(Seq(Point(2000, 0), Point(10000, 10000)), "one", HTMLNamedColors.red), LinePlot.series(Seq(Point(2000, 10000), Point(10000, 0)), "two", HTMLNamedColors.blue) ).xbounds(0, 10000).ybounds(0, 10000).overlayLegend(0.75, 0.5) val plot2 = Histogram(Seq(1, 1, 1.5, 1, 5, 3, 2, 5, 7, 8, 9, 10), bins = 8) .ybounds(0, 10) .xbounds(0, 10) .yAxis() val plot3 = BarChart.custom( bars = Seq( Bar(Seq(0.5, 0.8), 0, Color.stream), Bar(Seq(0.2, 0.7), 0, Color.stream), Bar(Seq(0.4, 0.9), 1, Color.stream.drop(5)), Bar(Seq(0.1, 0.3), 1, Color.stream.drop(5)) ), barRenderer = Some(BarRenderer.stacked()) ) val plot4 = plot3.overlay(plot2) val plot5 = plot3.xAxis(Seq("one", "two", "four", "six"), Seq(1.0, 2.0, 4.0, 6.0)).yAxis() val 
facets = Facets( Seq( Seq( plot .vline(3.2) .hline(.6) .function( d => math.pow(d, 3), color = HTMLNamedColors.dodgerBlue, lineStyle = LineStyle.Dotted), plot5), Seq(lines, plot4) ) ).title("Actual vs. Expected") .xLabel("Actual") .yLabel("Expected") .rightLabels(Seq("row one", "row two")) .rightLegend() .bottomLegend() .rightLabel((e: Extent) => Rect(e) filled HTMLNamedColors.blue, 10) .leftLabel((e: Extent) => Rect(e) filled HTMLNamedColors.green, 10) .topLabel((e: Extent) => Rect(e) filled HTMLNamedColors.red, 10) .bottomLabel((e: Extent) => Rect(e) filled HTMLNamedColors.yellow, 10) .standard() facets.render(plotAreaSize) } lazy val markerPlot: Drawable = { val Seq(one, two, three) = theme.colors.stream.take(3) FunctionPlot .series(x => x, "y = x", one, xbounds = Some(Bounds(-1, 1))) .xLabel("x") .yLabel("y") .overlayLegend() .standard() .component(Marker(Position.Overlay, _ => Style(Rect(25), three), Extent(25, 25), 0, 0)) .component( Marker( Position.Overlay, _ => Style(Text(" Square marker at the center", 20), three), Extent(25, 25), 0, -0.1)) .component( Marker(Position.Top, _ => Style(Rotate(Wedge(40, 25), 250), two), Extent(25, 25), 0.7)) .component( Marker(Position.Top, _ => Style(Text(" Up here is a wedge", 20), two), Extent(25, 25), 0.7)) .title("A line graph with markers") .render(plotAreaSize) } def gaussianKernel(u: Double): Double = { 1 / math.sqrt(2 * math.Pi) * math.exp(-0.5d * u * u) } def densityEstimate(data: Seq[Double], bandwidth: Double)(x: Double): Double = { val totalProbDensity = data.map { x_i => gaussianKernel((x - x_i) / bandwidth) }.sum totalProbDensity / (data.length * bandwidth) } lazy val densityPlot: Drawable = { val data = Seq.fill(150)(Random.nextDouble() * 30) val colors = theme.colors.stream.slice(1, 4) val bandwidths = Seq(5d, 2d, 0.5d) val hist = Histogram(data).xbounds(0, 30) val densities = Overlay(colors.zip(bandwidths).map { case (c, b) => FunctionPlot( densityEstimate(data, b), Some(Bounds(0, 30)), Some(500), 
Some(PathRenderer.default(color = Some(c))) ) }: _*) // Can provide bounds directly //MixedBoundsOverlay(hist.xbounds, hist.ybounds, hist, densities) // Or use the bounds from the first plot MixedBoundsOverlay(hist, densities) .standard() .continuousAxis(_ => densities.ybounds, Position.Right, updatePlotBounds = false) .render(plotAreaSize) } }
"use strict";

const AppConstants = require('../constants/AppConstants');
const AppDispatcher = require('../dispatcher/AppDispatcher');

const ActionTypes = AppConstants.ActionTypes;

/**
 * Dispatch one Flux "view action" payload through the app dispatcher.
 * Centralizes the boilerplate shared by every action creator below.
 * @param {{type: string}} payload - action payload; `type` is one of
 *   AppConstants.ActionTypes.
 */
function dispatchViewAction(payload) {
  AppDispatcher.handleViewAction(payload);
}

/**
 * Flux action creators for the partially-built SQL query: tokens can be
 * added/removed, and an insertion cursor ("insert index") can be moved.
 */
class PartialQueryActions {
  /** Insert `token` into the partial query. */
  static addToken(token) {
    dispatchViewAction({ type: ActionTypes.ADD_TOKEN, token: token });
  }

  /** Remove `token` from the partial query. */
  static deleteToken(token) {
    dispatchViewAction({ type: ActionTypes.DELETE_TOKEN, token: token });
  }

  /** Move the insertion cursor to `newIndex`. */
  static updateInsertIndex(newIndex) {
    dispatchViewAction({ type: ActionTypes.UPDATE_INSERT_INDEX, newIndex: newIndex });
  }

  /** Move the insertion cursor one position to the right. */
  static incrementInsertIndex() {
    dispatchViewAction({ type: ActionTypes.INCREMENT_INSERT_INDEX });
  }

  /** Move the insertion cursor one position to the left. */
  static decrementInsertIndex() {
    dispatchViewAction({ type: ActionTypes.DECREMENT_INSERT_INDEX });
  }

  /** Remove every token from the partial query. */
  static clearTokens() {
    dispatchViewAction({ type: ActionTypes.CLEAR_TOKENS });
  }
}

module.exports = PartialQueryActions;
<reponame>mdsd-team-1/photos-metamodeling /** */ package PhotosMetaModel.impl; import PhotosMetaModel.Component; import PhotosMetaModel.Configuration; import PhotosMetaModel.Entity; import PhotosMetaModel.PhotosMetaModelPackage; import PhotosMetaModel.Repository; import PhotosMetaModel.RestController; import PhotosMetaModel.SpringBootApplication; import java.util.Collection; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.MinimalEObjectImpl; import org.eclipse.emf.ecore.util.EObjectContainmentEList; import org.eclipse.emf.ecore.util.InternalEList; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Spring Boot Application</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link PhotosMetaModel.impl.SpringBootApplicationImpl#getRepository <em>Repository</em>}</li> * <li>{@link PhotosMetaModel.impl.SpringBootApplicationImpl#getRestcontroller <em>Restcontroller</em>}</li> * <li>{@link PhotosMetaModel.impl.SpringBootApplicationImpl#getEntity <em>Entity</em>}</li> * <li>{@link PhotosMetaModel.impl.SpringBootApplicationImpl#getComponent <em>Component</em>}</li> * <li>{@link PhotosMetaModel.impl.SpringBootApplicationImpl#getConfiguration <em>Configuration</em>}</li> * </ul> * * @generated */ public class SpringBootApplicationImpl extends MinimalEObjectImpl.Container implements SpringBootApplication { /** * The cached value of the '{@link #getRepository() <em>Repository</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getRepository() * @generated * @ordered */ protected EList<Repository> repository; /** * The cached value of the '{@link #getRestcontroller() <em>Restcontroller</em>}' containment reference list. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getRestcontroller() * @generated * @ordered */ protected EList<RestController> restcontroller; /** * The cached value of the '{@link #getEntity() <em>Entity</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getEntity() * @generated * @ordered */ protected EList<Entity> entity; /** * The cached value of the '{@link #getComponent() <em>Component</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getComponent() * @generated * @ordered */ protected EList<Component> component; /** * The cached value of the '{@link #getConfiguration() <em>Configuration</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getConfiguration() * @generated * @ordered */ protected EList<Configuration> configuration; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected SpringBootApplicationImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return PhotosMetaModelPackage.Literals.SPRING_BOOT_APPLICATION; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public EList<Repository> getRepository() { if (repository == null) { repository = new EObjectContainmentEList<Repository>(Repository.class, this, PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__REPOSITORY); } return repository; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public EList<RestController> getRestcontroller() { if (restcontroller == null) { restcontroller = new EObjectContainmentEList<RestController>(RestController.class, this, PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__RESTCONTROLLER); } return restcontroller; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public EList<Entity> getEntity() { if (entity == null) { entity = new 
EObjectContainmentEList<Entity>(Entity.class, this, PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__ENTITY); } return entity; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public EList<Component> getComponent() { if (component == null) { component = new EObjectContainmentEList<Component>(Component.class, this, PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__COMPONENT); } return component; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public EList<Configuration> getConfiguration() { if (configuration == null) { configuration = new EObjectContainmentEList<Configuration>(Configuration.class, this, PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__CONFIGURATION); } return configuration; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__REPOSITORY: return ((InternalEList<?>)getRepository()).basicRemove(otherEnd, msgs); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__RESTCONTROLLER: return ((InternalEList<?>)getRestcontroller()).basicRemove(otherEnd, msgs); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__ENTITY: return ((InternalEList<?>)getEntity()).basicRemove(otherEnd, msgs); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__COMPONENT: return ((InternalEList<?>)getComponent()).basicRemove(otherEnd, msgs); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__CONFIGURATION: return ((InternalEList<?>)getConfiguration()).basicRemove(otherEnd, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__REPOSITORY: return getRepository(); case 
PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__RESTCONTROLLER: return getRestcontroller(); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__ENTITY: return getEntity(); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__COMPONENT: return getComponent(); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__CONFIGURATION: return getConfiguration(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @SuppressWarnings("unchecked") @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__REPOSITORY: getRepository().clear(); getRepository().addAll((Collection<? extends Repository>)newValue); return; case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__RESTCONTROLLER: getRestcontroller().clear(); getRestcontroller().addAll((Collection<? extends RestController>)newValue); return; case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__ENTITY: getEntity().clear(); getEntity().addAll((Collection<? extends Entity>)newValue); return; case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__COMPONENT: getComponent().clear(); getComponent().addAll((Collection<? extends Component>)newValue); return; case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__CONFIGURATION: getConfiguration().clear(); getConfiguration().addAll((Collection<? 
extends Configuration>)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__REPOSITORY: getRepository().clear(); return; case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__RESTCONTROLLER: getRestcontroller().clear(); return; case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__ENTITY: getEntity().clear(); return; case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__COMPONENT: getComponent().clear(); return; case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__CONFIGURATION: getConfiguration().clear(); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__REPOSITORY: return repository != null && !repository.isEmpty(); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__RESTCONTROLLER: return restcontroller != null && !restcontroller.isEmpty(); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__ENTITY: return entity != null && !entity.isEmpty(); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__COMPONENT: return component != null && !component.isEmpty(); case PhotosMetaModelPackage.SPRING_BOOT_APPLICATION__CONFIGURATION: return configuration != null && !configuration.isEmpty(); } return super.eIsSet(featureID); } } //SpringBootApplicationImpl
// Integration tests for the metric-template CRUD API
// (/api/v1.0/template/metric).  `session()` resolves to an agent logged
// in with write access; `session(true)` resolves to a read-only
// "Viewer" agent that must be rejected (403) from mutating routes.
//
// FIX: the original ended every promise chain with
// `.catch(err => { console.error(err); done(); })`, which logs the error
// and then reports the test as PASSED.  Chains now end in `.catch(done)`
// so session failures and assertion errors actually fail the test.
const session = require('./utils/session');

// Require the dev-dependencies
const chai = require("chai");
chai.should();

const MetricTemplate = require("../models/template/metric");

describe("Metric Templates", () => {
  // Canonical payload used by the create/update requests below.
  const defaultObject = {
    name: "test",
    description: "something"
  };

  // Start every test from an empty collection; forward any DB error to
  // mocha instead of discarding it.
  beforeEach(done => {
    MetricTemplate.remove({}, err => done(err));
  });

  /*
   * Test the /GET route
   */
  describe("/GET metrics", () => {
    it("it should GET all the metrics", done => {
      session()
        .then(sess => {
          sess.get("/api/v1.0/template/metric").end((err, res) => {
            res.should.have.status(200);
            res.body.should.be.a("array");
            res.body.length.should.be.eql(0);
            done();
          });
        })
        .catch(done);
    });

    it("it should fail GET non existing metric", done => {
      session()
        .then(sess => {
          sess.get("/api/v1.0/template/metric/5b48a8951c1e49438352cc35").end((err, res) => {
            res.should.have.status(404);
            done();
          });
        })
        .catch(done);
    });
  });

  /*
   * Test the /POST route
   */
  describe("/POST metric", () => {
    it("it should POST one metric", done => {
      session()
        .then(sess => {
          sess
            .post("/api/v1.0/template/metric")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(200);
              res.body.should.be.a("object");
              res.body.should.have.property("name");
              res.body.name.should.be.eql("test");
              res.body.should.have.property("description");
              res.body.description.should.be.eql("something");
              res.body.should.have.property("_id");
              done();
            });
        })
        .catch(done);
    });

    it("it should fail POST empty metric", done => {
      session()
        .then(sess => {
          sess
            .post("/api/v1.0/template/metric")
            .send({})
            .end((err, res) => {
              res.should.have.status(400);
              done();
            });
        })
        .catch(done);
    });

    it("it should POST one metric and find this with /", done => {
      session()
        .then(sess => {
          sess
            .post("/api/v1.0/template/metric")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(200);
              sess.get("/api/v1.0/template/metric").end((err, res) => {
                res.should.have.status(200);
                res.body.should.be.a("array");
                res.body.length.should.be.eql(1);
                res.body[0].should.have.property("name");
                res.body[0].should.have.property("description");
                res.body[0].name.should.be.eql("test");
                res.body[0].description.should.be.eql("something");
                done();
              });
            });
        })
        .catch(done);
    });

    it("it should POST one metric and find this with /:id", done => {
      session()
        .then(sess => {
          sess
            .post("/api/v1.0/template/metric")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(200);
              sess.get(`/api/v1.0/template/metric/${res.body._id}`).end((err, res) => {
                res.should.have.status(200);
                res.body.should.be.a("object");
                res.body.should.have.property("name");
                res.body.should.have.property("description");
                res.body.name.should.be.eql("test");
                res.body.description.should.be.eql("something");
                done();
              });
            });
        })
        .catch(done);
    });
  });

  /*
   * Test the /PUT route
   */
  describe("/PUT metric", () => {
    it("it should fail PUT non existing metric", done => {
      session()
        .then(sess => {
          sess
            .put("/api/v1.0/template/metric/5b48a8951c1e49438352cc35")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(404);
              done();
            });
        })
        .catch(done);
    });

    it("it should POST and PUT one the metric", done => {
      session()
        .then(sess => {
          sess
            .post("/api/v1.0/template/metric")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(200);
              sess
                .put(`/api/v1.0/template/metric/${res.body._id}`)
                .send({ name: "test2", description: "something2" })
                .end((err, res) => {
                  res.should.have.status(200);
                  sess.get(`/api/v1.0/template/metric/${res.body._id}`).end((err, res) => {
                    res.body.should.be.a("object");
                    res.body.should.have.property("name");
                    res.body.should.have.property("description");
                    res.body.should.have.property("_id");
                    res.body.name.should.be.eql("test2");
                    res.body.description.should.be.eql("something2");
                    done();
                  });
                });
            });
        })
        .catch(done);
    });

    it("it should fail POST and PUT with empty metric", done => {
      session()
        .then(sess => {
          sess
            .post("/api/v1.0/template/metric")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(200);
              sess
                .put(`/api/v1.0/template/metric/${res.body._id}`)
                .send({})
                .end((err, res) => {
                  res.should.have.status(400);
                  done();
                });
            });
        })
        .catch(done);
    });
  });

  /*
   * Test the /DELETE route
   */
  describe("/DELETE metric", () => {
    it("it should fail DELETE non existing metric", done => {
      session()
        .then(sess => {
          sess
            .delete("/api/v1.0/template/metric/5b48a8951c1e49438352cc35")
            .end((err, res) => {
              res.should.have.status(404);
              done();
            });
        })
        .catch(done);
    });

    it("it should POST and DELETE one metric", done => {
      session()
        .then(sess => {
          sess
            .post("/api/v1.0/template/metric")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(200);
              sess
                .delete(`/api/v1.0/template/metric/${res.body._id}`)
                .end((err, res) => {
                  res.should.have.status(200);
                  sess.get(`/api/v1.0/template/metric/${res.body._id}`).end((err, res) => {
                    res.should.have.status(404);
                    done();
                  });
                });
            });
        })
        .catch(done);
    });
  });

  /*
   * Viewer role: mutating routes must be rejected, read routes allowed.
   */
  describe("/DELETE, /PUT and /POST should not allowed for Viewer", () => {
    it("it should fail DELETE", done => {
      session(true)
        .then(sess => {
          sess
            .delete("/api/v1.0/template/metric/5b48a8951c1e49438352cc35")
            .end((err, res) => {
              res.should.have.status(403);
              done();
            });
        })
        .catch(done);
    });

    it("it should fail PUT", done => {
      session(true)
        .then(sess => {
          sess
            .put("/api/v1.0/template/metric/5b48a8951c1e49438352cc35")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(403);
              done();
            });
        })
        .catch(done);
    });

    it("it should fail POST", done => {
      session(true)
        .then(sess => {
          sess
            .post("/api/v1.0/template/metric")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(403);
              done();
            });
        })
        .catch(done);
    });

    it("it should GET list", done => {
      session()
        .then(sess => {
          sess
            .post("/api/v1.0/template/metric")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(200);
              session(true)
                .then(sess => {
                  sess.get("/api/v1.0/template/metric/").end((err, res) => {
                    res.should.have.status(200);
                    res.body.should.be.a("array");
                    res.body.length.should.be.eql(1);
                    res.body[0].should.have.property("name");
                    res.body[0].should.have.property("description");
                    res.body[0].should.have.property("_id");
                    res.body[0].name.should.be.eql(defaultObject.name);
                    res.body[0].description.should.be.eql(defaultObject.description);
                    done();
                  });
                })
                .catch(done);
            });
        })
        .catch(done);
    });

    it("it should GET entry", done => {
      session()
        .then(sess => {
          sess
            .post("/api/v1.0/template/metric")
            .send(defaultObject)
            .end((err, res) => {
              res.should.have.status(200);
              session(true)
                .then(sess => {
                  sess.get("/api/v1.0/template/metric/" + res.body._id).end((err, res) => {
                    res.should.have.status(200);
                    res.body.should.be.a("object");
                    res.body.should.have.property("name");
                    res.body.should.have.property("description");
                    res.body.should.have.property("_id");
                    res.body.name.should.be.eql(defaultObject.name);
                    res.body.description.should.be.eql(defaultObject.description);
                    done();
                  });
                })
                .catch(done);
            });
        })
        .catch(done);
    });
  });
});
//====-- UserSettingsController.h --------------------------------*- C++-*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#ifndef liblldb_UserSettingsController_h_
#define liblldb_UserSettingsController_h_

// C Includes
// C++ Includes
#include <string>
#include <vector>

// Other libraries and framework includes
// Project includes
#include "lldb/Core/ConstString.h"
#include "lldb/Core/Stream.h"
#include "lldb/Core/StreamString.h"
#include "lldb/Core/StringList.h"
#include "lldb/Interpreter/OptionValue.h"
#include "lldb/lldb-private.h"

namespace lldb_private {

/// Base class for objects that expose a tree of user-visible settings.
/// Storage lives in the shared OptionValueProperties collection
/// (m_collection_sp); this class provides get/set/dump entry points over it.
class Properties {
public:
  /// Default-construct with an empty (null) properties collection.
  Properties() : m_collection_sp() {}

  /// Wrap an existing properties collection.
  Properties(const lldb::OptionValuePropertiesSP &collection_sp)
      : m_collection_sp(collection_sp) {}

  virtual ~Properties() {}

  virtual lldb::OptionValuePropertiesSP GetValueProperties() const {
    // This function is virtual in case subclasses want to lazily
    // implement creating the properties.
    return m_collection_sp;
  }

  /// Look up a single property value by path. `will_modify` signals intent to
  /// mutate the returned value; failures are reported through `error`.
  virtual lldb::OptionValueSP GetPropertyValue(const ExecutionContext *exe_ctx,
                                               llvm::StringRef property_path,
                                               bool will_modify,
                                               Error &error) const;

  /// Apply operation `op` with the string `value` to the property at
  /// `property_path`.
  virtual Error SetPropertyValue(const ExecutionContext *exe_ctx,
                                 VarSetOperationType op,
                                 llvm::StringRef property_path,
                                 llvm::StringRef value);

  /// Write a single property's current value to `strm`, formatted per
  /// `dump_mask`.
  virtual Error DumpPropertyValue(const ExecutionContext *exe_ctx, Stream &strm,
                                  llvm::StringRef property_path,
                                  uint32_t dump_mask);

  /// Write every property's current value to `strm`.
  virtual void DumpAllPropertyValues(const ExecutionContext *exe_ctx,
                                     Stream &strm, uint32_t dump_mask);

  /// Write the descriptions (help text) of all properties to `strm`.
  virtual void DumpAllDescriptions(CommandInterpreter &interpreter,
                                   Stream &strm) const;

  /// Collect properties matching `keyword` into `matching_properties`;
  /// returns the number of matches.
  size_t Apropos(llvm::StringRef keyword,
                 std::vector<const Property *> &matching_properties) const;

  /// Fetch the nested property collection named `name`, if any.
  lldb::OptionValuePropertiesSP GetSubProperty(const ExecutionContext *exe_ctx,
                                               const ConstString &name);

  // We sometimes need to introduce a setting to enable experimental features,
  // but then we don't want the setting for these to cause errors when the
  // setting goes away. Add a sub-topic of the settings using this
  // experimental name, and two things will happen. One is that settings that
  // don't find the name will not be treated as errors. Also, if you decide to
  // keep the settings, just move them into the containing properties and the
  // experimental settings will be auto-forwarded to the real one.
  static const char *GetExperimentalSettingsName();

  /// True if `setting` names an experimental setting (see note above).
  static bool IsSettingExperimental(llvm::StringRef setting);

protected:
  // Shared storage for the property tree; may be null until initialized.
  lldb::OptionValuePropertiesSP m_collection_sp;
};

} // namespace lldb_private

#endif // liblldb_UserSettingsController_h_
<gh_stars>100-1000 // https://cses.fi/problemset/task/1071/ #include <iostream> using namespace std; typedef long long ll; int main() { int t, y, x; cin >> t; while (t--) { cin >> y >> x; int a = max(y, x); int b = min(y, x); ll s = ll(a - 1) * (a - 1); if (a == b) s += a; else if (a % 2) { if (x == a) s+= a * 2 - y; else s += x; } else { if (y == a) s += a * 2 - x; else s += y; } cout << s << endl; } }
#!/bin/bash # Determine the directory this script resides in SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" # shellcheck disable=SC1090 source "$SCRIPT_DIR/lib/header.sh" # shellcheck disable=SC1090 source "$LIBRARY_DIRECTORY/docker/container/container.sh" docker_container_list
<filename>MuduoServer/DataStruct/header.h
#ifndef DATASTRUCT_HEADER_H
#define DATASTRUCT_HEADER_H

// Umbrella header for the DataStruct module: pulls in the project-wide
// library header.
#include "Global/lib.h"

#endif // DATASTRUCT_HEADER_H
#!/usr/bin/env bash
# netflix-proxy setup, part 1: option parsing, environment checks and
# address discovery. Later sections configure iptables, dnsmasq and Docker.

# bomb on any error
set -e

# globals
CWD=$(cd -P -- "$(dirname -- "$0")" && pwd -P)
[ -e "${CWD}/scripts/globals" ] && . ${CWD}/scripts/globals

# import functions
[ -e "/lib/lsb/init-functions" ] && . /lib/lsb/init-functions
[ -e "${CWD}/scripts/functions" ] && . ${CWD}/scripts/functions

# display usage
usage() {
    echo "Usage: $0 [-b 0|1] [-c <ip>]" 1>&2
    printf "\t-b\tgrab docker images from repository (0) or build locally (1) (default: 0)\n"
    printf "\t-c\tspecify client-ip instead of being taken from ssh_connection\n"
    exit 1
}

# process options
while getopts "b:c:" o; do
    case "${o}" in
        b)
            b=${OPTARG}
            ((b == 0|| b == 1)) || usage
            ;;
        c)
            c=${OPTARG}
            ;;
        *)
            usage
            ;;
    esac
done
shift $((OPTIND-1))

# command-line flags override environment defaults
if [ ${b} ]; then DOCKER_BUILD=${b}; fi
if [ ${c} ]; then CLIENTIP=${c}; fi

# fix terminfo
# http://ashberlin.co.uk/blog/2010/08/24/color-in-ubuntu-init-scripts/
# If the hpa capability is missing, rebuild terminfo and re-exec this script.
if [[ $(infocmp | grep 'hpa=') == "" ]]; then
    (infocmp; printf '\thpa=\\E[%sG,\n' %i%p1%d) > tmp-${$}.tic && \
      tic -s tmp-$$.tic -o /etc/terminfo && \
      rm tmp-$$.tic && \
      exec ${0} $@
fi

log_action_begin_msg "checking OS compatibility"
if [[ $(cat /etc/os-release | grep '^ID=') =~ ubuntu ]]\
  || [[ $(cat /etc/os-release | grep '^ID=') =~ debian ]]; then
    true
    log_action_end_msg $?
else
    # NOTE(review): under `set -e` the bare `false` terminates the script
    # before log_action_end_msg runs; net effect is still an early exit.
    false
    log_action_end_msg $?
    exit 1
fi

log_action_begin_msg "checking if cURL is installed"
which curl > /dev/null
log_action_end_msg $?

log_action_begin_msg "checking if Docker is installed"
which docker > /dev/null
log_action_end_msg $?

log_action_begin_msg "checking if sudo is installed"
which sudo > /dev/null
log_action_end_msg $?

log_action_begin_msg "checking if dig is installed"
which dig > /dev/null
log_action_end_msg $?

log_action_begin_msg "installing net-tools"
sudo apt-get -y update &>> ${CWD}/netflix-proxy.log\
  && sudo apt-get -y install net-tools &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

log_action_begin_msg "testing available ports"
# FIX: the original pattern was single-quoted, so ${port} was never expanded
# and the doubled colon could not match netstat output -- the check was a
# no-op. Additionally, `exit 1` ran inside a subshell and never stopped the
# script. Match ":<port>" followed by whitespace on LISTEN lines and abort
# the whole script on conflict.
for port in 80 443 53; do
    if netstat -a -n -p | grep LISTEN | grep -E "[:.]${port}[[:space:]]" > /dev/null; then
        printf "required port ${port} already in use\n"
        exit 1
    fi
done
log_action_end_msg $?

log_action_begin_msg "disabling ufw"
if which ufw > /dev/null; then ufw disable &>> ${CWD}/netflix-proxy.log; fi
log_action_end_msg $?

# create a 2G swapfile when the host has no swap configured
if [[ $(cat /proc/swaps | wc -l) -le 1 ]]; then
    log_action_begin_msg "setting up swapfile"
    fallocate -l 2G /swapfile && \
      chmod 600 /swapfile && \
      mkswap /swapfile && \
      swapon /swapfile && \
      printf "/swapfile none swap sw 0 0\n" >> /etc/fstab
    log_action_end_msg $?
fi

# obtain the interface with the default gateway
IFACE=$(get_iface 4)

# obtain IP address of the Internet facing interface
IPADDR=$(get_ipaddr)
EXTIP=$(get_ext_ipaddr 4)

# IPv6 is enabled only when a global address exists AND outbound IPv6 works
IPV6=0
if cat /proc/net/if_inet6 | grep -v lo | grep -v fe80 > /dev/null\
  && $(which curl) mgmt.unzoner.com --fail --silent -6 > /dev/null; then
    IPV6=1
    IPADDR6=$(get_ipaddr6)
    EXTIP6=$(get_ext_ipaddr 6)
fi

# obtain client (home) ip address and address family
if ! [ ${CLIENTIP} ]; then
    CLIENTIP=$(get_client_ipaddr)
fi

IS_CLIENT_IPV4=0
if ! is_ipv4 ${CLIENTIP}; then IS_CLIENT_IPV4=1; fi

IS_CLIENT_IPV6=1
if [[ "${IPV6}" == '1' ]]; then
    if is_ipv6 ${CLIENTIP}; then
        IS_CLIENT_IPV6=0
    fi
fi

# diagnostics info
debug="$0: build=${DOCKER_BUILD} client=${CLIENTIP} is_client_ipv4=${IS_CLIENT_IPV4} ipaddr=${IPADDR} extip=${EXTIP}"
if [[ "${IPV6}" == '1' ]]; then
    debug_v6="$0: is_client_ipv6=${IS_CLIENT_IPV6} ipaddr6=${IPADDR6} extip6=${EXTIP6}"
fi

sudo touch ${CWD}/netflix-proxy.log

log_action_begin_msg "log start command line parameters"
printf "${0}: ${@}\n"
printf "${0}: ${@}\n" &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

log_action_begin_msg "log diagnostics info"
printf "build=${DOCKER_BUILD} client=${CLIENTIP} local=${IPADDR} public=${EXTIP}\n"
printf "${debug}\n" &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?
# Extra diagnostics only when IPv6 was detected earlier (debug_v6 set).
if [[ ${debug_v6} ]]; then
    log_action_begin_msg "log diagnostics info (IPv6)"
    printf "local6=${IPADDR6} public6=${EXTIP6}\n"
    printf "${debug_v6}\n" &>> ${CWD}/netflix-proxy.log
    log_action_end_msg $?
fi

# switch to working directory
pushd ${CWD} &>> ${CWD}/netflix-proxy.log

# configure iptables
# Allow the detected client (home) address to bypass the PREROUTING
# redirects below, for whichever address families it matched.
if [[ -n "${CLIENTIP}" ]]; then
    log_action_begin_msg "authorising clientip=${CLIENTIP} on iface=${IFACE}"
    if [[ "${IS_CLIENT_IPV4}" == '0' ]]; then
        sudo iptables -t nat -A PREROUTING -s ${CLIENTIP}/32 -i ${IFACE} -j ACCEPT
    fi
    if [[ "${IS_CLIENT_IPV6}" == '0' ]]; then
        sudo ip6tables -t nat -A PREROUTING -s ${CLIENTIP}/128 -i ${IFACE} -j ACCEPT
    fi
    log_action_end_msg $?
else
    log_action_cont_msg "unable to resolve and authorise client ip"
fi

# IPv4 firewall: redirect 80/443 to the sniproxy port (8080) and DNS (53)
# to dnsmasq (5353); masquerade outbound traffic; then open the service
# ports and reject everything else. One &&-chain so a single failure is
# reported by the trailing log_action_end_msg.
log_action_begin_msg "adding IPv4 iptables rules"
sudo iptables -t nat -A PREROUTING -i ${IFACE} -p tcp --dport 80 -j REDIRECT --to-port 8080\
  && sudo iptables -t nat -A PREROUTING -i ${IFACE} -p tcp --dport 443 -j REDIRECT --to-port 8080\
  && sudo iptables -t nat -A PREROUTING -i ${IFACE} -p udp --dport 53 -j REDIRECT --to-port 5353\
  && sudo iptables -t nat -A POSTROUTING -o ${IFACE} -j MASQUERADE\
  && sudo iptables -A INPUT -p icmp -j ACCEPT\
  && sudo iptables -A INPUT -i lo -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m state --state NEW -m tcp --dport 22 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m state --state NEW -m tcp --dport 5201 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m state --state NEW -m tcp --dport 11000 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m state --state NEW -m udp --dport 11000 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m state --state NEW -m tcp --dport 12222 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m state --state NEW -m udp --dport 12222 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m state --state NEW -m tcp --dport 30000 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m state --state NEW -m udp --dport 30000 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m state --state NEW -m tcp --dport 42350 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m state --state NEW -m udp --dport 42350 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m state --state NEW -m tcp --dport 50123 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m state --state NEW -m udp --dport 50123 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m state --state NEW -m tcp --dport 8443 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m state --state NEW -m udp --dport 8443 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m state --state NEW -m tcp --dport 3000 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m state --state NEW -m udp --dport 3000 -j ACCEPT\
  && sudo iptables -A INPUT -m state --state RELATED,ESTABLISHED -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 80 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 443 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 3000 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 5201 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 8080 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 8443 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 11000 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 12222 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 30000 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 42350 -j ACCEPT\
  && sudo iptables -A INPUT -p tcp -m tcp --dport 50123 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m udp --dport 53 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m udp --dport 3000 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m udp --dport 5353 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m udp --dport 8443 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m udp --dport 11000 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m udp --dport 30000 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m udp --dport 42350 -j ACCEPT\
  && sudo iptables -A INPUT -p udp -m udp --dport 50123 -j ACCEPT\
  && sudo iptables -A INPUT -j REJECT --reject-with icmp-host-prohibited
log_action_end_msg $?
# IPv6 firewall: mirror of the IPv4 redirects plus a minimal allow list.
# FIX: the POSTROUTING MASQUERADE rule below used `iptables` (IPv4) instead
# of `ip6tables`, so IPv6 traffic was never masqueraded and the IPv4
# MASQUERADE rule was added twice.
log_action_begin_msg "adding IPv6 iptables rules"
sudo ip6tables -t nat -A PREROUTING -i ${IFACE} -p tcp --dport 80 -j REDIRECT --to-port 8080\
  && sudo ip6tables -t nat -A PREROUTING -i ${IFACE} -p tcp --dport 443 -j REDIRECT --to-port 8080\
  && sudo ip6tables -t nat -A PREROUTING -i ${IFACE} -p udp --dport 53 -j REDIRECT --to-port 5353\
  && sudo ip6tables -t nat -A POSTROUTING -o ${IFACE} -j MASQUERADE\
  && sudo ip6tables -A INPUT -p ipv6-icmp -j ACCEPT\
  && sudo ip6tables -A INPUT -i lo -j ACCEPT\
  && sudo ip6tables -A INPUT -p tcp -m state --state NEW -m tcp --dport 22 -j ACCEPT\
  && sudo ip6tables -A INPUT -m state --state RELATED,ESTABLISHED -j ACCEPT\
  && sudo ip6tables -A INPUT -p udp -m udp --dport 53 -j ACCEPT\
  && sudo ip6tables -A INPUT -p udp -m udp --dport 5353 -j ACCEPT\
  && sudo ip6tables -A INPUT -p tcp -m tcp --dport 80 -j ACCEPT\
  && sudo ip6tables -A INPUT -p tcp -m tcp --dport 8080 -j ACCEPT\
  && sudo ip6tables -A INPUT -p tcp -m tcp --dport 443 -j ACCEPT\
  && sudo ip6tables -A INPUT -j REJECT --reject-with icmp6-adm-prohibited
log_action_end_msg $?

# check if public IPv6 access is available
log_action_begin_msg "creating Docker and sniproxy configuration templates"
sudo cp ${CWD}/docker-sniproxy/sniproxy.conf.template ${CWD}/docker-sniproxy/sniproxy.conf &>> ${CWD}/netflix-proxy.log\
  && sudo cp ${CWD}/docker-compose.yml.template ${CWD}/docker-compose.yml &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

log_action_begin_msg "disabling Docker iptables control"
cp ${CWD}/daemon.json /etc/docker/
log_action_end_msg $?

# Append a resolver stanza to sniproxy.conf: prefer IPv6 upstreams when
# public IPv6 connectivity was detected, otherwise force IPv4.
if [[ "${IPV6}" == '1' ]]; then
    log_action_begin_msg "enabling sniproxy IPv6 priority"
    printf "\nresolver {\n nameserver ${RESOLVER_PRI}\n nameserver ${RESOLVER_SEC}\n mode ipv6_first\n}\n"\
      | sudo tee -a ${CWD}/docker-sniproxy/sniproxy.conf &>> ${CWD}/netflix-proxy.log
    log_action_end_msg $?
else
    log_action_begin_msg "configuring sniproxy and Docker"
    printf "\nresolver {\n nameserver ${RESOLVER_PRI}\n nameserver ${RESOLVER_SEC}\n mode ipv4_only\n}\n"\
      | sudo tee -a ${CWD}/docker-sniproxy/sniproxy.conf &>> ${CWD}/netflix-proxy.log
    log_action_end_msg $?
fi

# Pre-seed debconf so the install is non-interactive and rules auto-save.
log_action_begin_msg "installing iptables|netfilter-persistent service"
echo iptables-persistent iptables-persistent/autosave_v4 boolean true\
  | sudo debconf-set-selections &>> ${CWD}/netflix-proxy.log\
  && echo iptables-persistent iptables-persistent/autosave_v6 boolean true\
  | sudo debconf-set-selections &>> ${CWD}/netflix-proxy.log\
  && sudo apt-get -y install iptables-persistent &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

# Ubuntu and Debian have different service names for iptables-persistent service
if [ -f "/etc/init.d/iptables-persistent" ]; then
    SERVICE=iptables
elif [ -f "/etc/init.d/netfilter-persistent" ]; then
    SERVICE=netfilter
fi

# socialise Docker with iptables-persistent
# https://groups.google.com/forum/#!topic/docker-dev/4SfOwCOmw-E
if [ ! -f "/etc/init/docker.conf.bak" ] && [ -f "/etc/init/docker.conf" ]; then
    log_action_begin_msg "socialising Docker with iptables-persistent service"
    sudo $(which sed) -i.bak "s/ and net-device-up IFACE!=lo)/ and net-device-up IFACE!=lo and started ${SERVICE}-persistent)/" /etc/init/docker.conf || true &>> ${CWD}/netflix-proxy.log
    log_action_end_msg $?
fi

# Upstart only (Ubuntu): emit job events so Docker can depend on rule load.
if [[ ${SERVICE} == "iptables" ]]; then
    if [ ! -f "/etc/init.d/iptables-persistent.bak" ] && [ -f "/etc/init.d/iptables-persistent" ]; then
        log_action_begin_msg "updating iptables-persistent init script"
        sudo $(which sed) -i.bak '/load_rules$/{N;s/load_rules\n\t;;/load_rules\n\tinitctl emit -n started JOB=iptables-persistent\n\t;;/}' /etc/init.d/iptables-persistent || true &>> ${CWD}/netflix-proxy.log\
          && sudo $(which sed) -i'' 's/stop)/stop)\n\tinitctl emit stopping JOB=iptables-persistent/' /etc/init.d/iptables-persistent &>> ${CWD}/netflix-proxy.log
        log_action_end_msg $?
    fi
fi

log_action_begin_msg "saving iptables rules"
sudo service ${SERVICE}-persistent save &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

log_action_begin_msg "creating dnsmasq.conf from template"
sudo cp ${CWD}/dnsmasq.conf.template ${CWD}/dnsmasq.conf &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

if [[ "${IPV6}" == '1' ]] && [[ ${EXTIP6} ]]; then
    log_action_begin_msg "updating dnsmasq.conf extip=${EXTIP} extip6=${EXTIP6}"
else
    log_action_begin_msg "updating dnsmasq.conf extip=${EXTIP}"
fi

# Proxied domains resolve to this host; bypass domains go straight to the
# upstream resolvers.
if [[ -n "${EXTIP}" ]]; then
    for domain in $(cat ${CWD}/proxy-domains.txt); do
        printf "address=/${domain}/${EXTIP}\n"\
          | sudo tee -a ${CWD}/dnsmasq.conf &>> ${CWD}/netflix-proxy.log
    done
fi

for domain in $(cat ${CWD}/bypass-domains.txt); do
    printf "server=/${domain}/${RESOLVER_PRI}\n"\
      | sudo tee -a ${CWD}/dnsmasq.conf &>> ${CWD}/netflix-proxy.log
    printf "server=/${domain}/${RESOLVER_SEC}\n"\
      | sudo tee -a ${CWD}/dnsmasq.conf &>> ${CWD}/netflix-proxy.log
done

if [[ "${IPV6}" == '1' ]] && [[ -n "${EXTIP6}" ]]; then
    for domain in $(cat ${CWD}/proxy-domains.txt); do
        printf "address=/${domain}/${EXTIP6}\n"\
          | sudo tee -a ${CWD}/dnsmasq.conf &>> ${CWD}/netflix-proxy.log
    done
fi
log_action_end_msg $?

log_action_begin_msg "installing python-pip and docker-compose"
sudo apt-get -y update &>> ${CWD}/netflix-proxy.log\
  && sudo apt-get -y install python-pip sqlite3 &>> ${CWD}/netflix-proxy.log\
  && $(which pip) install --upgrade pip setuptools &>> ${CWD}/netflix-proxy.log\
  && $(which pip) install virtualenv &>> ${CWD}/netflix-proxy.log\
  && $(which virtualenv) venv &>> ${CWD}/netflix-proxy.log\
  && source venv/bin/activate &>> ${CWD}/netflix-proxy.log\
  && $(which pip) install docker-compose &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?
# Generate a random admin password, hash it and store it in the auth DB.
# PLAINTEXT is shown to the operator at the end of the run.
log_action_begin_msg "configuring admin backend"
sudo $(which pip) install -r ${CWD}/auth/requirements.txt &>> ${CWD}/netflix-proxy.log\
  && PLAINTEXT=$(${CWD}/auth/pbkdf2_sha256_hash.py | awk '{print $1}')\
  && HASH=$(${CWD}/auth/pbkdf2_sha256_hash.py ${PLAINTEXT} | awk '{print $2}')\
  && sudo cp ${CWD}/auth/db/auth.default.db ${CWD}/auth/db/auth.db &>> ${CWD}/netflix-proxy.log\
  && sudo $(which sqlite3) ${CWD}/auth/db/auth.db "UPDATE users SET password = '${HASH}' WHERE ID = 1;" &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

# Caddy fronts the admin backend on SDNS_ADMIN_PORT, forwarding client info.
log_action_begin_msg "configuring admin frontend"
sudo cp ${CWD}/Caddyfile.template ${CWD}/Caddyfile &>> ${CWD}/netflix-proxy.log\
  && printf "proxy / localhost:${SDNS_ADMIN_PORT} {\n except /static\n header_upstream Host {host}\n header_upstream X-Forwarded-For {remote}\n header_upstream X-Real-IP {remote}\n header_upstream X-Forwarded-Proto {scheme}\n}\n"\
  | sudo tee -a ${CWD}/Caddyfile &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

# Install the cron jobs, substituting this checkout's path for {{CWD}}.
log_action_begin_msg "creating cron scripts"
sudo cp ${CWD}/crond.template /etc/cron.d/netflix-proxy &>> ${CWD}/netflix-proxy.log\
  && sudo $(which sed) -i'' "s#{{CWD}}#${CWD}#g" /etc/cron.d/netflix-proxy &>> ${CWD}/netflix-proxy.log\
  && sudo service cron restart &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

# -b 1: build images locally (still pulling the prebuilt helper services);
# otherwise pull everything from the registry.
if [[ "${DOCKER_BUILD}" == '1' ]]; then
    log_action_begin_msg "pulling and building docker containers from source"
    sudo $(which docker-compose) build &>> ${CWD}/netflix-proxy.log
    for service in dnsmasq-service dnsmasq-bogus-service caddy-service; do
        sudo $(which docker-compose) pull ${service} &>> ${CWD}/netflix-proxy.log
    done
    log_action_end_msg $?
else
    log_action_begin_msg "pulling Docker containers"
    sudo $(which docker-compose) pull &>> ${CWD}/netflix-proxy.log
    log_action_end_msg $?
fi

# EXTIP/EXTIP6 are exported into the compose environment for interpolation.
log_action_begin_msg "creating and starting Docker containers"
EXTIP=${EXTIP} EXTIP6=${EXTIP6}\
  $(which docker-compose) up -d &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?
# configure appropriate init system
# Detect upstart vs systemd by probing the binaries, then install the
# matching service definition for netflix-proxy-admin.
log_action_begin_msg "configuring init system"
if [[ `/lib/systemd/systemd --version` =~ upstart ]]; then
    sudo cp ${CWD}/init/*.conf /etc/init/ &>> ${CWD}/netflix-proxy.log\
      && sudo $(which sed) -i'' "s#{{CWD}}#${CWD}#g" /etc/init/netflix-proxy-admin.conf &>> ${CWD}/netflix-proxy.log\
      && sudo service netflix-proxy-admin restart &>> ${CWD}/netflix-proxy.log
fi

if [[ `systemctl` =~ -\.mount ]]; then
    sudo cp ${CWD}/init/*.service /lib/systemd/system/ &>> ${CWD}/netflix-proxy.log\
      && sudo $(which sed) -i'' "s#{{CWD}}#${CWD}#g" /lib/systemd/system/netflix-proxy-admin.service &>> ${CWD}/netflix-proxy.log\
      && sudo systemctl daemon-reload &>> ${CWD}/netflix-proxy.log\
      && sudo systemctl enable netflix-proxy-admin &>> ${CWD}/netflix-proxy.log\
      && sudo systemctl enable systemd-networkd &>> ${CWD}/netflix-proxy.log\
      && sudo systemctl enable systemd-networkd-wait-online &>> ${CWD}/netflix-proxy.log\
      && sudo systemctl restart netflix-proxy-admin &>> ${CWD}/netflix-proxy.log
fi
log_action_end_msg $?

# NOTE(review): "ipables" below is a typo for "iptables" in the log message
# (runtime string, left untouched here).
log_action_begin_msg "reloading ipables rules"
sudo service ${SERVICE}-persistent reload &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

# Smoke tests: query the local dnsmasq via external then internal address.
log_action_begin_msg "testing DNS"
with_backoff $(which dig) -4\
  +time=${TIMEOUT} ${NETFLIX_HOST} @${EXTIP} &>> ${CWD}/netflix-proxy.log\
  || with_backoff $(which dig) -4\
  +time=${TIMEOUT} ${NETFLIX_HOST} @${IPADDR} &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

if [[ -n "${EXTIP6}" ]] && [[ -n "${IPADDR6}" ]]; then
    log_action_begin_msg "testing DNS ipv6"
    with_backoff $(which dig) -6\
      +time=${TIMEOUT} ${NETFLIX_HOST} @${EXTIP6} &>> ${CWD}/netflix-proxy.log\
      || with_backoff $(which dig) -6\
      +time=${TIMEOUT} ${NETFLIX_HOST} @${IPADDR6} &>> ${CWD}/netflix-proxy.log
    log_action_end_msg $?
fi

# Verify sniproxy passes HTTPS through, pinning DNS with --resolve.
log_action_begin_msg "testing proxy (cURL)"
with_backoff $(which curl) -v -4 -L --fail -o /dev/null https://${NETFLIX_HOST}\
  --resolve ${NETFLIX_HOST}:443:${EXTIP} &>> ${CWD}/netflix-proxy.log\
  || with_backoff $(which curl) -v -4 -L --fail -o /dev/null https://${NETFLIX_HOST}\
  --resolve ${NETFLIX_HOST}:443:${IPADDR} &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

if [[ -n "${EXTIP6}" ]] || [[ -n "${IPADDR6}" ]]; then
    log_action_begin_msg "testing proxy (cURL) ipv6"
    with_backoff $(which curl) -v -6 -L --fail -o /dev/null https://${NETFLIX_HOST}\
      --resolve ${NETFLIX_HOST}:443:::1 &>> ${CWD}/netflix-proxy.log
    log_action_end_msg $?
fi

printf "\nnetflix-proxy-admin site=http://${EXTIP}:8080/ credentials=\e[1madmin:${PLAINTEXT}\033[0m\n"
log_action_begin_msg "testing netflix-proxy admin site"
(with_backoff $(which curl) --silent -4\
  --fail http://${EXTIP}:8080/ &>> ${CWD}/netflix-proxy.log\
  || with_backoff $(which curl) --silent -4\
  --fail http://${IPADDR}:8080/) &>> ${CWD}/netflix-proxy.log\
  && with_backoff $(which curl) --silent -4\
  --fail http://localhost:${SDNS_ADMIN_PORT}/ &>> ${CWD}/netflix-proxy.log
log_action_end_msg $?

if [[ -n "${EXTIP6}" ]] && [[ -n "${IPADDR6}" ]]; then
    printf "\nnetflix-proxy-admin site=http://${EXTIP6}:8080/ credentials=\e[1madmin:${PLAINTEXT}\033[0m\n"
    log_action_begin_msg "testing netflix-proxy admin site ipv6"
    with_backoff $(which curl) --silent -6\
      --fail http://ip6-localhost:8080/ &>> ${CWD}/netflix-proxy.log
    log_action_end_msg $?
fi

# change back to original directory
popd &>> ${CWD}/netflix-proxy.log

if [[ "${IPV6}" == '1' ]]; then
    printf "IPv6=\e[32mEnabled\033[0m\n"
else
    printf "\e[1mWARNING:\033[0m IPv6=\e[31mDisabled\033[0m\n"
fi

# DO NOT change the text between these lines
printf "Change your DNS to ${EXTIP} and start watching Netflix out of region.\n"
# DO NOT change the text between these lines

printf "\e[33mNote\033[0m: get \e[1mhttp://unzoner.com\033[0m if your app/service no longer works with DNS based solutions.\n"

printf "\e[32mDone.\033[0m\n"
/// Prunes unwanted keys out of a date-component dictionary.
class DateComponentFilter {
    /// Returns a copy of `components` with every key in `ignoredKeys` removed.
    func filterKeys(_ components: [String: Any], _ ignoredKeys: [String]) -> [String: Any] {
        var pruned = components
        for ignored in ignoredKeys {
            pruned.removeValue(forKey: ignored)
        }
        return pruned
    }

    /// Constant component values that never vary.
    func fixedValues() -> [String: Any] {
        return ["year": 365.0]
    }
}

// Test the DateComponentFilter class with a representative component set.
let dateComponents: [String: Any] = [
    "era": 1,
    "year": 2022,
    "month": 10,
    "day": 15,
    "weekday": 6,
    "hour": 15,
    "minute": 30,
    "second": 45
]

let filter = DateComponentFilter()
let ignoredKeys = ["era", "weekday"]

let filteredComponents = filter.filterKeys(dateComponents, ignoredKeys)
print(filteredComponents)
// Output: ["year": 2022, "month": 10, "day": 15, "hour": 15, "minute": 30, "second": 45]

let fixedValues = filter.fixedValues()
print(fixedValues)
// Output: ["year": 365.0]
#!/bin/bash

# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script is entrypoint to run e2e tests.
#
# It uses kubetest to setup/test/teardown kubernetes cluster.
#
# Examples:
#
# 1) To run against existing local cluster started by k8s.io/kubernetes/hack/local-up-cluster.sh
#
#   KUBERNETES_SRC=$GOPATH/src/k8s.io/kubernetes DEPLOYMENT=none ./hack/e2e.sh
#
# Optionally, you can add extra test args, e.g.
#
#   KUBERNETES_SRC=$GOPATH/src/k8s.io/kubernetes DEPLOYMENT=none ./hack/e2e.sh --test-cmd-args=-ginkgo.focus='.*discovery.*'
#
# 2) To run against new local cluster started by k8s.io/kubernetes/hack/local-up-cluster.sh
#
#   KUBERNETES_SRC=$GOPATH/src/k8s.io/kubernetes sudo -E env "PATH=$PATH" ./hack/e2e.sh
#
# Note that current kubetest needs root permission to cleanup.
#
# 3) To run against cluster with GCE provider locally, specify following environments:
#
#   export GOOGLE_APPLICATION_CREDENTIALS=<path-to-your-google-application-credentials>
#   export GCP_ZONE=<gcp-zone>
#   export GCP_PROJECT=<gcp-project>
#
# and create ssh keypair at ~/.ssh/google_compute_engine or specify ssh keypair
# with following environments:
#
#   export JENKINS_GCE_SSH_PRIVATE_KEY_FILE=<path-to-your-ssh-private-key>
#   export JENKINS_GCE_SSH_PUBLIC_KEY_FILE=<path-to-your-ssh-public-key>
#
# 4) To run against cluster with GCE provider in test-infra/prow job, add
# `preset-service-account: "true"` and `preset-k8s-ssh: "true"` labels in your
# prow job.
#
# The first label will set `GOOGLE_APPLICATION_CREDENTIALS` environment for
# you, and `kubetest` will acquire GCP project and zone from boskos
# automatically. The latter will prepare SSH key pair.
#

set -o errexit
set -o nounset
set -o pipefail

ROOT=$(unset CDPATH && cd $(dirname "${BASH_SOURCE[0]}")/.. && pwd)
cd $ROOT

source "$ROOT/hack/lib.sh"

PROVIDER=${PROVIDER:-}
GCP_ZONE=${GCP_ZONE:-}
GCP_PROJECT=${GCP_PROJECT:-}
EXTRACT_STRATEGY=${EXTRACT_STRATEGY:-ci/latest}
DEPLOYMENT=${DEPLOYMENT:-}

if [ -z "${KUBECTL:-}" ]; then
    KUBECTL=$(which kubectl 2>/dev/null || true)
fi

if [ -z "${KUBECTL:-}" ]; then
    echo "error: kubectl not found" >&2
    exit 1
fi

KUBERNETES_SRC=${KUBERNETES_SRC:-} # If set, skip extracting kubernetes, use it as kubernetes src.

# Auto-detect the provider: presence of GCP credentials implies gce.
if [ -z "$PROVIDER" ]; then
    echo "PROVIDER not specified, detecting provider automatically" >&2
    if [ -n "${GOOGLE_APPLICATION_CREDENTIALS:-}" ]; then
        echo "Found google application credentials at $GOOGLE_APPLICATION_CREDENTIALS, provider is set to gce" >&2
        PROVIDER=gce
    else
        PROVIDER=local
    fi
fi

echo "PROVIDER: $PROVIDER" >&2
echo "KUBECTL: $KUBECTL" >&2
echo "GCP_PROJECT: $GCP_PROJECT" >&2
echo "GCP_ZONE: $GCP_ZONE" >&2

kubetest_args=(
    --provider "$PROVIDER"
)

# With a local kubernetes checkout there is nothing to extract; otherwise
# let kubetest extract the requested release.
if [ -n "$KUBERNETES_SRC" ]; then
    echo "KUBERNETES_SRC is set to $KUBERNETES_SRC" >&2
    if [ ! -d "$KUBERNETES_SRC" ]; then
        echo "$KUBERNETES_SRC is not a directory" >&2
        exit 1
    fi
else
    kubetest_args+=(
        --extract "$EXTRACT_STRATEGY"
    )
fi

if [ -n "$KUBERNETES_SRC" ]; then
    echo "KUBERNETES_SRC is set, entering into $KUBERNETES_SRC" >&2
    cd $KUBERNETES_SRC
fi

if [ "$PROVIDER" == "gce" ]; then
    if [ -n "$GCP_PROJECT" ]; then
        kubetest_args+=(
            --gcp-project "$GCP_PROJECT"
        )
    fi
    if [ -n "$GCP_ZONE" ]; then
        kubetest_args+=(
            --gcp-zone "$GCP_ZONE"
        )
    fi
    # kubetest needs ssh keypair to ssh into nodes
    if [ ! -d ~/.ssh ]; then
        mkdir ~/.ssh
    fi
    # FIX: the original conditions used `[ -e <dest> -o -n "$VAR" ]`, which
    # (a) aborts under `set -o nounset` when the JENKINS_* variables are not
    # exported, and (b) attempted `cp "" <dest>` whenever the destination key
    # already existed but the variable was empty. Copy only when a source
    # file was explicitly provided.
    if [ -n "${JENKINS_GCE_SSH_PRIVATE_KEY_FILE:-}" ]; then
        echo "Copying $JENKINS_GCE_SSH_PRIVATE_KEY_FILE to ~/.ssh/google_compute_engine" >&2
        cp $JENKINS_GCE_SSH_PRIVATE_KEY_FILE ~/.ssh/google_compute_engine
        chmod 0600 ~/.ssh/google_compute_engine
    fi
    if [ -n "${JENKINS_GCE_SSH_PUBLIC_KEY_FILE:-}" ]; then
        echo "Copying $JENKINS_GCE_SSH_PUBLIC_KEY_FILE to ~/.ssh/google_compute_engine.pub" >&2
        cp $JENKINS_GCE_SSH_PUBLIC_KEY_FILE ~/.ssh/google_compute_engine.pub
        chmod 0600 ~/.ssh/google_compute_engine.pub
    fi
    if [ -z "$DEPLOYMENT" ]; then
        DEPLOYMENT=bash
    fi
elif [ "$PROVIDER" == "local" ]; then
    if [ -z "$DEPLOYMENT" ]; then
        DEPLOYMENT=local
    fi
else
    # FIX: message referenced undefined $KUBERNETES_PROVIDER (nounset abort);
    # the variable being validated is $PROVIDER.
    echo "error: unsupported provider '$PROVIDER'" >&2
    exit 1
fi

go run $ROOT/hack/e2e.go -- "${kubetest_args[@]}" \
    --deployment "$DEPLOYMENT" \
    --up \
    --down \
    --test-cmd bash \
    --test-cmd-args="$ROOT/hack/run-e2e.sh" \
    "$@"
/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <ctime>
#include "utils/hash.h"
#include "gtest/gtest.h"
#include "utils/logger.h"
#include "mem/mem.h"
#include "os/mem.h"
#include "utils/asan_interface.h"
#include <sys/mman.h>

namespace panda {

// Fixture for hash-function tests: checks determinism of the 32-bit hash
// over raw keys and MUTF-8 strings, and safety at a page boundary.
class HashTest : public testing::Test {
public:
    HashTest()
    {
        // Nightly runs randomize the seed; regular runs use a fixed seed so
        // failures are reproducible.
#ifdef PANDA_NIGHTLY_TEST_ON
        seed_ = std::time(NULL);
#else
        seed_ = 0xDEADBEEF;
#endif
    }

    ~HashTest() {}

protected:
    // Hash the same raw key twice (8/32/40-bit widths) and expect equality.
    template <class T>
    void OneObject32bitsHashTest() const;
    // Hash the same C string twice via GetHash32String and expect equality.
    template <class T>
    void OneStringHashTest() const;
    // GetHash32String must agree with GetHash32 over the same bytes.
    template <class T>
    void StringMemHashTest() const;
    // Hash a string placed flush against a PROT_NONE page to catch
    // out-of-bounds reads past the terminator.
    template <class T>
    void EndOfPageStringHashTest() const;

    // Key widths in bytes used by OneObject32bitsHashTest.
    static constexpr size_t KEY40INBYTES = 5;
    static constexpr size_t KEY32INBYTES = 4;
    static constexpr size_t KEY8INBYTES = 1;

#ifndef PAGE_SIZE
    static constexpr size_t PAGE_SIZE = SIZE_1K * 4;
#endif

    // Seed fed to srand(); printed on failure for reproduction.
    unsigned seed_;
};

template <class T>
void HashTest::OneObject32bitsHashTest() const
{
    srand(seed_);

    uint32_t object32 = rand();
    uint32_t first_hash = T::GetHash32(reinterpret_cast<uint8_t *>(&object32), KEY32INBYTES);
    uint32_t second_hash = T::GetHash32(reinterpret_cast<uint8_t *>(&object32), KEY32INBYTES);
    if (first_hash != second_hash) {
        // Report the seed so the failing input can be reproduced.
        std::cout << "Failed 32bit key hash on seed = 0x" << std::hex << seed_ << std::endl;
    }
    ASSERT_EQ(first_hash, second_hash);

    uint8_t object8 = rand();
    first_hash = T::GetHash32(reinterpret_cast<uint8_t *>(&object8), KEY8INBYTES);
    second_hash = T::GetHash32(reinterpret_cast<uint8_t *>(&object8), KEY8INBYTES);
    if (first_hash != second_hash) {
        std::cout << "Failed 32bit key hash on seed = 0x" << std::hex << seed_ << std::endl;
    }
    ASSERT_EQ(first_hash, second_hash);

    // Set 64 bits value and use only 40 bits from it
    uint64_t object40 = rand();
    first_hash = T::GetHash32(reinterpret_cast<uint8_t *>(&object40), KEY40INBYTES);
    second_hash = T::GetHash32(reinterpret_cast<uint8_t *>(&object40), KEY40INBYTES);
    if (first_hash != second_hash) {
        std::cout << "Failed 32bit key hash on seed = 0x" << std::hex << seed_ << std::endl;
    }
    ASSERT_EQ(first_hash, second_hash);
}

template <class T>
void HashTest::OneStringHashTest() const
{
    char string[] = "Over 1000!\0";
    // Dummy check
    if (sizeof(char) != sizeof(uint8_t)) {
        return;
    }
    uint8_t *mutf8_string = reinterpret_cast<uint8_t *>(string);
    uint32_t first_hash = T::GetHash32String(mutf8_string);
    uint32_t second_hash = T::GetHash32String(mutf8_string);
    ASSERT_EQ(first_hash, second_hash);
}

template <class T>
void HashTest::StringMemHashTest() const
{
    char string[] = "COULD YOU CREATE MORE COMPLEX TESTS,OK?\0";
    size_t string_size = strlen(string);
    uint8_t *mutf8_string = reinterpret_cast<uint8_t *>(string);
    // Length-based and NUL-terminated hashing must agree on the same bytes.
    uint32_t second_hash = T::GetHash32(mutf8_string, string_size);
    uint32_t first_hash = T::GetHash32String(mutf8_string);
    ASSERT_EQ(first_hash, second_hash);
}

template <class T>
void HashTest::EndOfPageStringHashTest() const
{
    size_t string_size = 3;
    constexpr size_t ALLOC_SIZE = PAGE_SIZE * 2;
    // Map two pages, protect the second, and place "OK\0" so it ends exactly
    // at the boundary: any read past the string faults instead of passing.
    void *mem = panda::os::mem::MapRWAnonymousRaw(ALLOC_SIZE);
    ASAN_UNPOISON_MEMORY_REGION(mem, ALLOC_SIZE);
    mprotect(reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(mem) + PAGE_SIZE), PAGE_SIZE, PROT_NONE);
    char *string = reinterpret_cast<char *>((reinterpret_cast<uintptr_t>(mem) + PAGE_SIZE) - sizeof(char) * string_size);
    string[0] = 'O';
    string[1] = 'K';
    string[2U] = '\0';
    uint8_t *mutf8_string = reinterpret_cast<uint8_t *>(string);
    // string_size - 1 excludes the terminator, matching GetHash32String.
    uint32_t second_hash = T::GetHash32(mutf8_string, string_size - 1);
    uint32_t first_hash = T::GetHash32String(mutf8_string);
    ASSERT_EQ(first_hash, second_hash);
    auto res = panda::os::mem::UnmapRaw(mem, ALLOC_SIZE);
    ASSERT_FALSE(res);
}

// If we hash an object twice, it must return the same value
// Do it for 8 bits, 32 bits and 40 bits key.
TEST_F(HashTest, OneObjectHashTest)
{
    HashTest::OneObject32bitsHashTest<MurmurHash32<DEFAULT_SEED>>();
}

// If we hash a string twice, it must return the same value
TEST_F(HashTest, OneStringHashTest)
{
    HashTest::OneStringHashTest<MurmurHash32<DEFAULT_SEED>>();
}

// If we hash a string without string method,
// we should get the same result as we use a pointer to string as a raw memory.
TEST_F(HashTest, StringMemHashTest)
{
    HashTest::StringMemHashTest<MurmurHash32<DEFAULT_SEED>>();
}

// Try to hash the string which is located at the end of allocated page.
// Check that we will not have SEGERROR here.
TEST_F(HashTest, EndOfPageStringHashTest)
{
    HashTest::EndOfPageStringHashTest<MurmurHash32<DEFAULT_SEED>>();
}

}  // namespace panda
# Run FlowNet2 optical-flow inference over TVQA HQ frames on GPU 7 only.
# --skip_training + --inference: load the checkpoint and run inference-only;
# --save_flow writes the computed flow fields under ./tvqa-flow.
CUDA_VISIBLE_DEVICES=7 python main.py \
    --number_gpus 1 \
    --model FlowNet2 \
    --resume saved_models/FlowNet2_checkpoint.pth.tar \
    --name tvqa-flow \
    --save ./tvqa-flow \
    --skip_training \
    --inference \
    --inference_batch_size 1 \
    --inference_dataset TVQA \
    --inference_dataset_root /proj/vondrick/datasets/TVQA/videos/frames_hq \
    --save_flow
def switch_position(list, pos1, pos2): list[pos1], list[pos2] = list[pos2], list[pos1] return list
git checkout k8s/* git checkout grafana/* git checkout k8s/kube-state-metrics/*
<reponame>effie-ms/eeflows import React, { useState } from 'react'; import PropTypes from 'prop-types'; import classnames from 'classnames'; import { Card, Elevation, Divider, Button } from '@blueprintjs/core'; import { TabContent, TabPane, Nav, NavItem, NavLink } from 'reactstrap'; import { gettext } from 'utils/text'; import { LowFlowTabPane } from 'components/stations/configuration/LowFlowTabPane'; import { MeasurementsConfiguration } from 'components/stations/configuration/MeasurementsConfiguration'; export const Configuration = ({ startDate, endDate, onSetDateRange, secondAxisTimeSeriesType, onSetSecondAxisTimeSeriesType, secondAxisThreshold, onSetSecondAxisThreshold, meanLowFlowMethod, onSetMeanLowFlowMethod, meanLowFlowMethodFrequency, onSetMeanLowFlowMethodFrequency, onRunEstimation, }) => { const [activeTab, setActiveTab] = useState('1'); const toggle = (tab) => { if (activeTab !== tab) { setActiveTab(tab); } }; return ( <div className="configuration"> <Card interactive="true" elevation={Elevation.THREE} style={{ backgroundColor: '#ebf1f5' }} > <Card elevation={Elevation.TWO}> <Nav tabs> <NavItem> <NavLink className={classnames( { active: activeTab === '1' }, 'pl-0', 'ml-3', )} onClick={() => { toggle('1'); }} > {gettext('Environmental flow estimation')} </NavLink> </NavItem> <Divider /> <NavItem> <NavLink className={classnames({ active: activeTab === '2', })} onClick={() => { toggle('2'); }} > {gettext('Compound event')} </NavLink> </NavItem> </Nav> <TabContent activeTab={activeTab}> <TabPane tabId="1"> <LowFlowTabPane startDate={startDate} endDate={endDate} onSetDateRange={onSetDateRange} meanLowFlowMethod={meanLowFlowMethod} onSetMeanLowFlowMethod={onSetMeanLowFlowMethod} meanLowFlowMethodFrequency={ meanLowFlowMethodFrequency } onSetMeanLowFlowMethodFrequency={ onSetMeanLowFlowMethodFrequency } /> </TabPane> <TabPane tabId="2"> <MeasurementsConfiguration secondAxisTimeSeriesType={ secondAxisTimeSeriesType } onSetSecondAxisTimeSeriesType={ 
onSetSecondAxisTimeSeriesType } secondAxisThreshold={secondAxisThreshold} onSetSecondAxisThreshold={ onSetSecondAxisThreshold } /> </TabPane> </TabContent> <div className="d-flex justify-content-end"> <Button intent="success" text="Run estimation" style={{ height: 40 }} className="align-self-end mr-3" onClick={() => onRunEstimation()} large /> </div> </Card> </Card> </div> ); }; Configuration.propTypes = { startDate: PropTypes.instanceOf(Date), endDate: PropTypes.instanceOf(Date), onSetDateRange: PropTypes.func.isRequired, secondAxisTimeSeriesType: PropTypes.oneOf(['TW', 'WL']).isRequired, onSetSecondAxisTimeSeriesType: PropTypes.func.isRequired, secondAxisThreshold: PropTypes.number.isRequired, onSetSecondAxisThreshold: PropTypes.func.isRequired, meanLowFlowMethod: PropTypes.oneOf([ 'TNT30', 'TNT20', 'EXCEED95', 'EXCEED75', ]).isRequired, onSetMeanLowFlowMethod: PropTypes.func.isRequired, meanLowFlowMethodFrequency: PropTypes.oneOf([ 'LONG-TERM', 'SEASONAL', 'BIOPERIOD', 'MONTHLY', ]).isRequired, onSetMeanLowFlowMethodFrequency: PropTypes.func.isRequired, onRunEstimation: PropTypes.func.isRequired, }; Configuration.defaultProps = { startDate: null, endDate: null, };
import re inputString = "This string contains a number, 23, and some other text" outputString = re.sub(r'\D', "", inputString) #outputString = '23'
<gh_stars>0 package advent2016; import org.junit.Assert; import org.junit.Test; import advent2016.Direction; public class DirectionTest { @Test public void testPreviousNorth() throws Exception { Direction d = Direction.EAST; Assert.assertEquals(Direction.NORTH, d.previous()); } @Test public void testPreviousWest() throws Exception { Direction d = Direction.NORTH; Assert.assertEquals(Direction.WEST, d.previous()); } @Test public void testNextNorth() throws Exception { Direction d = Direction.WEST; Assert.assertEquals(Direction.NORTH, d.next()); } @Test public void testNextWest() throws Exception { Direction d = Direction.SOUTH; Assert.assertEquals(Direction.WEST, d.next()); } }
<filename>src/reducers/byFollowersDesc.js const byFollowersDesc = (state = true, action) => { switch (action.type) { case "BY_FOLLOWERS_DESC": return true; case "BY_FOLLOWERS_ASC": return false; case "BY_NAME_ASC": return false; case "BY_NAME_DESC": return false; default: return state; } }; export default byFollowersDesc;