text stringlengths 1 1.05M |
|---|
// Rollup build configuration for jExtract.
// Bundles ./jextract.js into a single minified, self-executing (IIFE) file.
import { terser } from "rollup-plugin-terser";
export default {
input: "./jextract.js",
output: {
file: "./jextract.min.js",
format: "iife",
},
plugins: [
// Minify and mangle names; `preamble` re-attaches the license/version
// banner that terser would otherwise strip from the minified output.
terser({
mangle: true,
output: {
preamble: `/**
jExtract: a function for extracting data from DOM.
Version: 1.1.1
Author: <NAME> (@kormanowsky)
Date: 18.07.2020
*/`,
},
}),
],
};
|
#!/bin/bash
# SLURM batch job: train Double DDPG (hard target copy, epsilon-greedy
# exploration) on RoboschoolHopper-v1, random seed 1, run 1.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=23:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolHopper-v1_doule_ddpg_hardcopy_epsilon_greedy_seed1_run1_%N-%j.out # %N for node name, %j for jobID
# Toolchain modules required by roboschool + TensorFlow on this cluster.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
# Activate the pre-built CPU TensorFlow virtualenv.
source ~/tf_cpu/bin/activate
# NOTE(review): "doule" in the run/output names looks like a typo for
# "double"; kept as-is so output paths stay consistent with prior runs.
python ./ddpg_discrete_action.py --env RoboschoolHopper-v1 --random-seed 1 --exploration-strategy epsilon_greedy --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolHopper-v1/doule_ddpg_hardcopy_epsilon_greedy_seed1_run1 --continuous-act-space-flag --target-hard-copy-flag
|
<reponame>KASTKING/KASTKING-CLOUD1
package com.kastking.productCategory.mapper;

import com.kastking.productCategory.domain.ProductCategory;
import java.util.List;

/**
 * Product category mapper interface (MyBatis-style DAO).
 *
 * @author Michael
 * @date 2020-01-16
 */
public interface ProductCategoryMapper {

    /**
     * Checks whether the given category has child products.
     *
     * @param categoryId ID of the category to check
     * @return presumably a count of children (non-zero means children exist)
     *         — TODO confirm against the XML mapper definition
     */
    public Integer selectIsProductCategory(Long categoryId);

    /**
     * Loads a single product category.
     *
     * @param categoryId product category ID
     * @return the matching product category
     */
    public ProductCategory selectProductCategoryById(Long categoryId);

    /**
     * Lists product categories matching the populated fields of the example object.
     *
     * @param productCategory example object whose set fields act as filters
     * @return list of matching product categories
     */
    public List<ProductCategory> selectProductCategoryList(ProductCategory productCategory);

    /**
     * Inserts a new product category.
     *
     * @param productCategory product category to insert
     * @return number of rows affected
     */
    public int insertProductCategory(ProductCategory productCategory);

    /**
     * Updates an existing product category.
     *
     * @param productCategory product category with updated fields
     * @return number of rows affected
     */
    public int updateProductCategory(ProductCategory productCategory);

    /**
     * Deletes a single product category.
     *
     * @param categoryId product category ID
     * @return number of rows affected
     */
    public int deleteProductCategoryById(Long categoryId);

    /**
     * Deletes multiple product categories in one statement.
     *
     * @param categoryIds IDs of the categories to delete
     * @return number of rows affected
     */
    public int deleteProductCategoryByIds(String[] categoryIds);
}
|
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <hr>
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* This file has been modified by the OpenOLAT community. Changes are licensed
* under the Apache 2.0 license as the original file.
* <p>
*/
package org.olat.core.gui.control.generic.choice;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.form.flexible.FormItemContainer;
import org.olat.core.gui.components.form.flexible.elements.MultipleSelectionElement;
import org.olat.core.gui.components.form.flexible.elements.Reset;
import org.olat.core.gui.components.form.flexible.elements.SelectionElement;
import org.olat.core.gui.components.form.flexible.elements.SingleSelection;
import org.olat.core.gui.components.form.flexible.elements.Submit;
import org.olat.core.gui.components.form.flexible.impl.FormBasicController;
import org.olat.core.gui.components.form.flexible.impl.elements.FormReset;
import org.olat.core.gui.components.form.flexible.impl.elements.FormSubmit;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
/**
* Description:<br>
* Multiple/single choice controller with at least a <code>Submit</code> <code>FormItem</code>. <p>
* One could add a <code>Reset</code> <code>FormItem</code>, if neccessary. (see: addReset() method)
*
* <P>
* Initial Date: 06.08.2007 <br>
*
* @author <NAME>
*/
public class ChoiceController extends FormBasicController {

    /** Keys identifying the selectable entries, in display order. */
    private String[] keysIn;
    /** Translated labels shown for the corresponding keys. */
    private String[] translatedKeys;
    private boolean singleSelection = true;
    private boolean layoutVertical = true;
    /** Radio group or checkbox group, depending on {@code singleSelection}. */
    private SelectionElement entrySelector;
    private String selectionName = "choiceSelection";
    private String submitI18nKey = "apply";

    /**
     * Creates a single or multiple choice form with a submit button and
     * pre-selects the given keys.
     *
     * @param ureq user request
     * @param wControl window control
     * @param keys entry keys, in display order
     * @param translatedKeys labels matching {@code keys} by index
     * @param selectedKeys keys to pre-select
     * @param singleSelection true for radio buttons, false for checkboxes
     * @param layoutVertical true for vertical layout, false for horizontal
     * @param submitI18nKey i18n key of the submit button; null keeps the default "apply"
     */
    public ChoiceController(UserRequest ureq, WindowControl wControl, String[] keys, String[] translatedKeys, String[] selectedKeys,
            boolean singleSelection, boolean layoutVertical, String submitI18nKey) {
        super(ureq, wControl);
        this.keysIn = keys;
        this.translatedKeys = translatedKeys;
        this.singleSelection = singleSelection;
        this.layoutVertical = layoutVertical;
        if (submitI18nKey != null) {
            this.submitI18nKey = submitI18nKey;
        }
        // init form element(s)
        initForm(this.flc, this, ureq);
        // after initialising the element, select the requested entries
        for (int i = 0; i < selectedKeys.length; i++) {
            entrySelector.select(selectedKeys[i], true);
        }
    }

    @Override
    protected void formOK(UserRequest ureq) {
        fireEvent(ureq, Event.DONE_EVENT);
    }

    @Override
    protected void initForm(FormItemContainer formLayout, Controller listener, UserRequest ureq) {
        // Pick the widget matching the selection mode and layout orientation.
        // The four combinations are exhaustive, so plain else branches suffice.
        if (singleSelection && layoutVertical) {
            entrySelector = uifactory.addRadiosVertical(selectionName, null, formLayout, keysIn, translatedKeys);
        } else if (singleSelection) {
            entrySelector = uifactory.addRadiosHorizontal(selectionName, null, formLayout, keysIn, translatedKeys);
        } else if (layoutVertical) {
            entrySelector = uifactory.addCheckboxesVertical(selectionName, null, formLayout, keysIn, translatedKeys, 1);
        } else {
            entrySelector = uifactory.addCheckboxesHorizontal(selectionName, null, formLayout, keysIn, translatedKeys);
        }
        // add Submit
        Submit subm = new FormSubmit("subm", submitI18nKey);
        formLayout.add(subm);
    }

    /**
     * Adds a <code>Reset</code> <code>FormItem</code> to the current <code>FormLayoutContainer</code>.
     * @param i18nKey i18n key for the reset button label
     */
    public void addReset(String i18nKey) {
        Reset reset = new FormReset("reset", i18nKey);
        this.flc.add(reset);
    }

    /**
     * Gets the list of the selected entry's keys.<p>
     * Do call this at event reception!
     * @return a not null selected keys List.
     */
    public List<String> getSelectedEntries() {
        List<String> selected = new ArrayList<>();
        if (entrySelector instanceof MultipleSelectionElement) {
            // sort the selected keys according to the keysIn order
            Collection<String> selectedKeys = ((MultipleSelectionElement) entrySelector).getSelectedKeys();
            for (String key : keysIn) {
                if (selectedKeys.contains(key)) {
                    selected.add(key);
                }
            }
        } else if (entrySelector instanceof SingleSelection) {
            // FIX: dropped the redundant early `return selected;` that
            // duplicated the method's final return.
            selected.add(((SingleSelection) entrySelector).getSelectedKey());
        }
        return selected;
    }
}
|
#!/bin/bash
# Cross-compile a C source file for ARM (Cortex-A9, ARMv7-A) with
# arm-linux-gnueabi-gcc.
#
# Usage: ./script <input_c_file> <output_executable_name>

# Check if the correct number of arguments is provided
if [ "$#" -ne 2 ]; then
    echo "Usage: $0 <input_c_file> <output_executable_name>"
    exit 1
fi

input_file=$1
output_file=$2

# Check if the input C file exists
if [ ! -f "$input_file" ]; then
    echo "Error: Input C file not found"
    exit 1
fi

# Cross-compile; test the command's exit status directly instead of $?
# so no intervening command can clobber it.
if arm-linux-gnueabi-gcc -mtune=cortex-a9 -march=armv7-a "$input_file" -o "$output_file"; then
    echo "Cross-compilation successful. Executable file '$output_file' created."
else
    echo "Error: Cross-compilation failed"
    # FIX: previously the script exited 0 even on compiler failure, which
    # breaks any caller checking this script's exit status.
    exit 1
fi
<gh_stars>0
package ru.job4j.accidents.repository;

import org.springframework.data.repository.CrudRepository;
import ru.job4j.accidents.model.Authority;

/**
 * Spring Data repository for {@link Authority} entities.
 *
 * @author Sir-Hedgehog (mailto:<EMAIL>)
 * @version 1.0
 * @since 07.07.2020
 */
public interface AuthorityRepository extends CrudRepository<Authority, Integer> {

    /**
     * Finds an authority by its name (derived query on the
     * {@code authority} property).
     *
     * @param authority authority name to look up
     * @return the matching authority, or null when none exists
     */
    Authority findByAuthority(String authority);
}
|
package pulse.ui.components.panels;
|
<filename>src/util.ts
import { DateTimeLike, DateTime } from "./types";
import { TimeoutLimit, BigMonths, ReversedProps, Props } from "./consts";
/**
 * Upper-cases the first character of a string.
 * FIX: guards the empty string — previously str[0] was undefined and
 * calling toUpperCase() on it threw a TypeError.
 */
export function ucfirst(str: string): string {
    if (!str) return str;
    return str[0].toUpperCase() + str.substring(1);
}
export function isWildcard(data: any): boolean {
return typeof data == "string" && data[0] == "*";
}
/**
 * Computes the field values of the next tick described by dateInfo,
 * relative to the reference date (defaults to now). Numeric fields are
 * taken literally; wildcard fields ("*" or "* /step") advance from the
 * current value, carrying overflow into the next coarser field.
 */
export function getNextTick(dateInfo: DateTimeLike, date?: Date): DateTime {
    date = date || new Date();
    let tickInfo: DateTime = {},
        shouldForward = true,
        prevWildcarProp = "",
        resetPrevProp = (prevProp: string, curProp: string): string => {
            // When a coarser wildcard field forwards, the previous (finer)
            // wildcard field restarts at its minimum: 1 for date/month, 0 for
            // the time fields.
            if (prevProp) {
                if (prevProp == "date" || prevProp == "month")
                    tickInfo[prevProp] = 1;
                else
                    tickInfo[prevProp] = 0;
            }
            return curProp;
        };
    // Walk fields from finest (seconds) to coarsest (year).
    for (let x of ReversedProps) {
        if (!(x in dateInfo)) {
            continue;
        } else if (typeof dateInfo[x] == "number") {
            let method: string;
            // FIX: indexOf(x) was used as a boolean, so "seconds" (index 0)
            // was falsy and fell through to getFullYear; compare >= 0 instead.
            if (["seconds", "minutes", "hours", "date", "day", "month"].indexOf(x) >= 0) {
                method = "get" + ucfirst(x);
            } else {
                method = "getFullYear";
            }
            let currentValue = date[method]();
            if (x == "month") currentValue += 1; // Date#getMonth is 0-based
            tickInfo[x] = dateInfo[x];
            shouldForward = dateInfo[x] < currentValue;
        } else {
            if (shouldForward)
                prevWildcarProp = resetPrevProp(prevWildcarProp, x);
            // "*/N" advances by N; a bare "*" advances by 1.
            let step = shouldForward ? parseInt(dateInfo[x].split("/")[1] || 1) : 0,
                num: number;
            switch (x) {
                case "seconds":
                    num = date.getSeconds() + step;
                    shouldForward = num >= 60;
                    tickInfo[x] = shouldForward ? num - 60 : num;
                    break;
                case "minutes":
                    num = date.getMinutes() + step;
                    shouldForward = num >= 60;
                    tickInfo[x] = shouldForward ? num - 60 : num;
                    break;
                case "hours":
                    num = date.getHours() + step;
                    shouldForward = num >= 24;
                    tickInfo[x] = shouldForward ? num - 24 : num;
                    break;
                case "date":
                    num = date.getDate() + step;
                    let currentYear = date.getFullYear(),
                        isLeapYear = currentYear % 4 === 0,
                        currentMonth = date.getMonth() + 1,
                        isBigMonth = BigMonths.indexOf(currentMonth) >= 0;
                    if (currentMonth == 2) { // Feb.
                        if (isLeapYear && num > 29 || num > 28) {
                            shouldForward = true;
                            tickInfo[x] = num - (isLeapYear ? 29 : 28);
                        } else {
                            shouldForward = false;
                            tickInfo[x] = num;
                        }
                    } else if (isBigMonth && num > 31 || num > 30) {
                        shouldForward = true;
                        tickInfo[x] = num - (isBigMonth ? 31 : 30);
                    } else {
                        shouldForward = false;
                        tickInfo[x] = num;
                    }
                    break;
                case "day":
                    // Weekday steps beyond 1 jump whole weeks.
                    num = date.getDay() + (step > 0 ? (step - 1) * 7 : 0);
                    shouldForward = num > 6;
                    tickInfo[x] = shouldForward ? num - 6 : num;
                    break;
                case "month":
                    num = date.getMonth() + 1 + step;
                    shouldForward = num > 12;
                    tickInfo[x] = shouldForward ? num - 12 : num;
                    break;
                case "year":
                    tickInfo[x] = date.getFullYear() + step;
                    break;
            }
        }
    }
    // Normalize any remaining overflow (e.g. minutes >= 60) before returning.
    return correctDates(tickInfo, date);
}
/**
 * Normalizes overflowed fields in tickInfo by carrying into the next coarser
 * field (e.g. seconds 65 -> seconds 5, minutes + 1). When the coarser field
 * is absent the overflowed field is cleared to undefined.
 */
function correctDates(tickInfo: DateTime, date?: Date): DateTime {
    date = date || new Date();
    for (let x of ReversedProps) {
        if (tickInfo[x] === undefined) {
            continue;
        } else if (x == "seconds" && tickInfo[x] >= 60) {
            if (tickInfo.minutes !== undefined) {
                tickInfo[x] -= 60;
                tickInfo.minutes += 1;
            } else {
                tickInfo[x] = undefined;
            }
        } else if (x == "minutes" && tickInfo[x] >= 60) {
            if (tickInfo.hours !== undefined) {
                tickInfo[x] -= 60;
                tickInfo.hours += 1;
            } else {
                tickInfo[x] = undefined;
            }
        } else if (x == "hours" && tickInfo[x] >= 24) {
            if (tickInfo.date !== undefined) {
                tickInfo[x] -= 24;
                tickInfo.date += 1;
            } else {
                tickInfo[x] = undefined;
            }
        } else if (x == "date") {
            // Month length is judged against the reference date's month/year.
            let currentYear = date.getFullYear(),
                isLeapYear = currentYear % 4 === 0,
                currentMonth = date.getMonth() + 1,
                isBigMonth = BigMonths.indexOf(currentMonth) >= 0;
            if (currentMonth == 2) { // Feb.
                if (isLeapYear && tickInfo[x] > 29 || tickInfo[x] > 28) {
                    if (tickInfo.month !== undefined) {
                        tickInfo[x] -= isLeapYear ? 29 : 28;
                        tickInfo.month += 1;
                    } else {
                        tickInfo[x] = undefined;
                    }
                }
            } else if (isBigMonth && tickInfo[x] > 31 || tickInfo[x] > 30) {
                if (tickInfo.month !== undefined) {
                    // FIX: subtracted (isLeapYear ? 31 : 30) — a copy-paste of
                    // the February branch; the month length here depends on
                    // isBigMonth, not on leap years.
                    tickInfo[x] -= isBigMonth ? 31 : 30;
                    tickInfo.month += 1;
                } else {
                    tickInfo[x] = undefined;
                }
            }
        } else if (x == "day" && tickInfo[x] > 6) {
            if (tickInfo.month !== undefined) {
                tickInfo[x] %= 7;
                tickInfo.month += Math.ceil(tickInfo[x] / 7 / 4.1) + 1;
            } else {
                tickInfo[x] = undefined;
            }
        } else if (x == "month" && tickInfo[x] > 12) {
            if (tickInfo.year !== undefined) {
                tickInfo[x] -= 12;
                tickInfo.year += 1;
            } else {
                tickInfo[x] = undefined;
            }
        }
    }
    return tickInfo;
}
/**
 * Computes the epoch milliseconds of the tick described by tickInfo,
 * filling any unset field from the reference date (defaults to now).
 */
export function getNextTickTime(tickInfo: DateTime, date?: Date): number {
    date = date || new Date();
    // Fall back to the reference date for any field the tick does not set.
    // (tick months are 1-based; Date#getMonth is 0-based, hence the +1)
    var seconds = tickInfo.seconds !== undefined ? tickInfo.seconds : date.getSeconds(),
        minutes = tickInfo.minutes !== undefined ? tickInfo.minutes : date.getMinutes(),
        hours = tickInfo.hours !== undefined ? tickInfo.hours : date.getHours(),
        month = tickInfo.month !== undefined ? tickInfo.month : date.getMonth() + 1,
        year = tickInfo.year !== undefined ? tickInfo.year : date.getFullYear(),
        _date: number;
    if (tickInfo.date !== undefined) { // use monthly date as the first choice
        _date = tickInfo.date;
    } else if (tickInfo.day !== undefined) { // if missing date, use weekday instead
        if (tickInfo.day >= date.getDay()) {
            _date = tickInfo.day - date.getDay() + date.getDate();
        } else {
            // Target weekday already passed this week: jump to next week.
            _date = tickInfo.day - date.getDay() + 7 + date.getDate();
        }
    } else {
        _date = date.getDate();
    }
    // Normalize overflowed fields. Re-declaring the same names with `var` is
    // legal because `var` bindings are function-scoped and hoisted.
    var { seconds, minutes, hours, date: _date, month, year } = correctDates({
        seconds, minutes, hours, date: _date, month, year
    }, date);
    return new Date(year, month - 1, _date, hours, minutes, seconds).getTime();
}
/** Snapshots the current wall-clock time as a DateTime (month is 1-based). */
function getCurrentTick(): DateTime {
    const now = new Date();
    return {
        year: now.getFullYear(),
        day: now.getDay(),
        month: now.getMonth() + 1,
        date: now.getDate(),
        hours: now.getHours(),
        minutes: now.getMinutes(),
        seconds: now.getSeconds()
    };
}
/**
* The possible returning values is:
* -1: tick is expired;
* 0: tick is just on time;
* 1: tick is still awaiting.
*/
export function getTicKState(dateInfo: DateTimeLike, tickInfo: DateTime): number {
let currentTick = getCurrentTick(),
waitNextTick = false,
state = 1;
for (let x of Props) {
if (!waitNextTick && isWildcard(dateInfo[x]))
waitNextTick = true;
if (tickInfo[x] === undefined) {
continue;
} else if (tickInfo[x] === currentTick[x]) {
state = 0;
} else if (tickInfo[x] > currentTick[x]) {
state = 1;
break;
} else { // tickInfo[x] < currentTick[x]
state = waitNextTick ? 1 : -1;
break;
}
}
return state;
}
/** Milliseconds until the tick fires, clamped to the runtime's timeout limit. */
export function getBestTimeout(tickInfo: DateTime, date?: Date): number {
    const now = date ? date.getTime() : Date.now();
    const timeout = getNextTickTime(tickInfo, date) - now;
    return Math.min(timeout, TimeoutLimit);
}
/**
 * Picks a polling interval matching the finest field the tick specifies:
 * 1s for seconds, 1min for minutes, 1h for hours, otherwise daily.
 */
export function getBestInterval(tickInfo: DateTime): number {
    const intervals: { [unit: string]: number } = {
        seconds: 1000,
        minutes: 1000 * 60,
        hours: 1000 * 60 * 60,
        date: 1000 * 60 * 60 * 24,
    };
    for (const unit of ["seconds", "minutes", "hours", "date"]) {
        if (tickInfo[unit] !== undefined) {
            return intervals[unit];
        }
    }
    return intervals.date;
}
export function shouldRunOnce(dateInfo: DateTimeLike): boolean {
let should = true;
for (let x in dateInfo) {
if (isWildcard(dateInfo[x])) {
should = false
break;
}
}
return should;
} |
<html>
<head>
<title>Tic-Tac-Toe Solution</title>
</head>
<body>
<h1>Tic-Tac-Toe Solution</h1>
<div>
<!-- Render the 3x3 board; cell markup reads $board[col][row].
     FIX: every reference was `board[...]` without the `$` sigil, which PHP
     treats as an undefined constant (a fatal error in PHP 8). -->
<table style="border: 1px solid black; border-collapse: collapse;">
<tr>
<td style="width:40px;height:40px;text-align:center;border: 1px solid black;">
<?php echo $board[0][0]; ?></td>
<td style="width:40px;height:40px;text-align:center;border: 1px solid black;">
<?php echo $board[1][0]; ?></td>
<td style="width:40px;height:40px;text-align:center;border: 1px solid black;">
<?php echo $board[2][0]; ?></td>
</tr>
<tr>
<td style="width:40px;height:40px;text-align:center;border: 1px solid black;">
<?php echo $board[0][1]; ?></td>
<td style="width:40px;height:40px;text-align:center;border: 1px solid black;">
<?php echo $board[1][1]; ?></td>
<td style="width:40px;height:40px;text-align:center;border: 1px solid black;">
<?php echo $board[2][1]; ?></td>
</tr>
<tr>
<td style="width:40px;height:40px;text-align:center;border: 1px solid black;">
<?php echo $board[0][2]; ?></td>
<td style="width:40px;height:40px;text-align:center;border: 1px solid black;">
<?php echo $board[1][2]; ?></td>
<td style="width:40px;height:40px;text-align:center;border: 1px solid black;">
<?php echo $board[2][2]; ?></td>
</tr>
</table>
</div>
<h2>Solution</h2>
<p>X is the winner!</p>
</body>
</html>
<reponame>lizij/Leetcode
package Pyramid_Transition_Matrix;

import java.util.*;

/**
 * LeetCode 756 "Pyramid Transition Matrix": given the bottom row of a pyramid
 * and a list of allowed (left, right, top) letter triples over 'A'..'G',
 * decide whether the whole pyramid can be built.
 */
public class Solution {
    // map[a][b] is a 7-bit mask: bit c is set when triple (a, b, c) is
    // allowed, i.e. letter ('A'+c) may sit on top of ('A'+a) and ('A'+b).
    int[][] map;
    // Base-8 encodings of partial rows already proven unsolvable, so the
    // search does not retry them.
    Set<Long> solvedRow;

    public boolean pyramidTransition(String bottom, List<String> allowed) {
        /**
         * Copied from AC Solution
         * Build a map from the list of allowed triples
         * Because letters can only be chosen from 'A' to 'G', we can use 7 bits to represent them
         */
        map = new int[7][7];
        for (String s: allowed) {
            // use 7 bits to represent letters in ['A', 'G']
            map[s.charAt(0) - 'A'][s.charAt(1) - 'A'] |= 1 << (s.charAt(2) - 'A');
        }
        solvedRow = new HashSet<>();
        // set the bottom of pyramid (row N-1 holds the N bottom letters)
        int N = bottom.length();
        int[][] pyramid = new int[N][N]; // representing the pyramid
        int t = 0;
        for (char c: bottom.toCharArray()) {
            pyramid[N - 1][t++] = c - 'A';
        }
        return solve(pyramid, 0, N - 1, 0);
    }

    /**
     * Recursively fills row (length-1) on top of row `length`.
     * @param pyramid pyramid cells built so far (letters as 0..6)
     * @param curRow base-8 encoding of the parent row built so far (memo key)
     * @param length index of the row whose parents are being placed
     * @param index next position to fill in the parent row
     */
    public boolean solve(int[][] pyramid, long curRow, int length, int index) {
        if (length == 1 && index == 1) { // If successfully placed entire pyramid
            return true;
        } else if (index == length) {
            if (solvedRow.contains(curRow)) {
                return false; // If we've already tried this row, give up
            }
            solvedRow.add(curRow); // Add row to cache
            return solve(pyramid, 0, length - 1, 0); // Calculate next row
        } else {
            // w's jth bit is true if block #j could be a parent of pyramid[N][i] and pyramid[N][i+1]
            int w = map[pyramid[length][index]][pyramid[length][index+1]];
            // for each set bit in w...
            for (int b = 0; b < 7; ++b) {
                if (((w >> b) & 1) != 0) {
                    pyramid[length-1][index] = b; //set parent to be equal to block #b
                    //If rest of pyramid can be built, return true
                    // current row's ith bit set to b+1 in base 8.
                    if (solve(pyramid, curRow * 8 + (b + 1), length, index + 1)) return true;
                }
            }
            return false;
        }
    }

    public static void main(String[] args) {
        Solution s = new Solution();
        System.out.println(s.pyramidTransition("ABC", Arrays.asList("ABD", "BCE", "DEA", "FFF"))); // true
        System.out.println(s.pyramidTransition("AABA", Arrays.asList("AAA", "AAB", "ABA", "ABB", "BAC"))); // false
    }
}
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.model.volatility.smile.fitting.interpolation;
import java.util.ArrayList;
import org.apache.commons.lang.ObjectUtils;
import com.opengamma.analytics.math.differentiation.ScalarFirstOrderDifferentiator;
import com.opengamma.analytics.math.function.Function1D;
import com.opengamma.analytics.math.interpolation.DoubleQuadraticInterpolator1D;
import com.opengamma.analytics.math.interpolation.Interpolator1D;
import com.opengamma.analytics.math.interpolation.data.Interpolator1DDataBundle;
import com.opengamma.util.ArgumentChecker;
/**
* Fits a set of implied volatilities at given strikes by interpolating log-moneyness (ln(strike/forward)) against implied volatility using the supplied interpolator (the default
* is double quadratic). While this will fit any input data, there is no guarantee that the smile is arbitrage free, or indeed always positive, and should therefore be used with
* care, and only when other smile interpolators fail. The smile is extrapolated in both directions using shifted log-normals set to match the level and slope of the smile at
* the end point.
*/
public class SmileInterpolatorSpline implements GeneralSmileInterpolator {

  /** Interpolator used on (log-moneyness, vol) pairs; double quadratic by default. */
  private static final Interpolator1D DEFAULT_INTERPOLATOR = new DoubleQuadraticInterpolator1D();
  private static final ScalarFirstOrderDifferentiator DIFFERENTIATOR = new ScalarFirstOrderDifferentiator();
  private static final ShiftedLogNormalTailExtrapolationFitter TAIL_FITTER = new ShiftedLogNormalTailExtrapolationFitter();

  private final Interpolator1D _interpolator;
  /** Either "Exception" (fail fast) or "Quiet" (drop failing boundary points and retry inward). */
  private final String _extrapolatorFailureBehaviour;

  public SmileInterpolatorSpline() {
    this(DEFAULT_INTERPOLATOR);
  }

  public SmileInterpolatorSpline(final Interpolator1D interpolator) {
    ArgumentChecker.notNull(interpolator, "null interpolator");
    _interpolator = interpolator;
    _extrapolatorFailureBehaviour = "Exception"; // This follows pattern of OG-Financial's BlackVolatilitySurfacePropertyNamesAndValues.EXCEPTION_SPLINE_EXTRAPOLATOR_FAILURE
  }

  public SmileInterpolatorSpline(final Interpolator1D interpolator, String extrapolatorFailureBehaviour) {
    ArgumentChecker.notNull(interpolator, "null interpolator");
    _interpolator = interpolator;
    _extrapolatorFailureBehaviour = extrapolatorFailureBehaviour;
  }

  /**
   * Gets the extrapolatorFailureBehaviour. If a shiftedLognormal model (Black with additional free paramter, F' = F*exp(mu)) fails to fit the boundary vol and the vol smile at that point...<p>
   * "Exception": an exception will be thrown <p>
   * "Quiet": the failing vol/strike will be tossed away, and we try the closest interior point. This repeats until a solution is found.
   * @return the extrapolatorFailureBehaviour
   */
  public final String getExtrapolatorFailureBehaviour() {
    return _extrapolatorFailureBehaviour;
  }

  /**
   * Builds a strike -> implied-vol function: interpolated in log-moneyness between the
   * lowest and highest input strike, and extrapolated beyond them with shifted
   * log-normal tails matched to the smile's level and slope at each end point.
   *
   * @param forward the forward, must bracket the strike range
   * @param strikes the strikes, ascending
   * @param expiry the expiry
   * @param impliedVols implied vols matching {@code strikes} by index
   * @return the smile function
   */
  @Override
  public Function1D<Double, Double> getVolatilityFunction(final double forward, final double[] strikes, final double expiry, final double[] impliedVols) {
    ArgumentChecker.notNull(strikes, "strikes");
    ArgumentChecker.notNull(impliedVols, "implied vols");
    final int n = strikes.length;
    ArgumentChecker.isTrue(impliedVols.length == n, "#strikes {} does not match #vols {}", n, impliedVols.length);
    final double kL = strikes[0];
    final double kH = strikes[n - 1];
    ArgumentChecker.isTrue(kL <= forward, "Cannot do left tail extrapolation when the lowest strike ({}) is greater than the forward ({})", kL, forward);
    ArgumentChecker.isTrue(kH >= forward, "Cannot do right tail extrapolation when the highest strike ({}) is less than the forward ({})", kH, forward);

    // Interpolate in log-moneyness ln(K/F) rather than raw strike.
    final double[] x = new double[n];
    for (int i = 0; i < n; i++) {
      x[i] = Math.log(strikes[i] / forward);
    }

    // Interpolator
    final Interpolator1DDataBundle data = _interpolator.getDataBundle(x, impliedVols);
    final Function1D<Double, Double> interpFunc = new Function1D<Double, Double>() {
      @SuppressWarnings("synthetic-access")
      @Override
      public Double evaluate(final Double k) {
        final double m = Math.log(k / forward);
        return _interpolator.interpolate(data, m);
      }
    };

    // The smile gradient is only well-defined inside the input strike range.
    final Function1D<Double, Boolean> domain = new Function1D<Double, Boolean>() {
      @Override
      public Boolean evaluate(final Double k) {
        return k >= kL && k <= kH;
      }
    };

    // Extrapolation of High and Low Strikes by ShiftedLogNormalTailExtrapolationFitter.
    // Each params list holds [mu, theta, boundary strike].
    final Function1D<Double, Double> dSigmaDx = DIFFERENTIATOR.differentiate(interpFunc, domain);
    final ArrayList<Double> highParamsShiftVolStrike = new ArrayList<Double>();
    final ArrayList<Double> lowParamsShiftVolStrike = new ArrayList<Double>();
    if (_extrapolatorFailureBehaviour.equalsIgnoreCase("Quiet")) {
      // Quiet mode: recursively discard boundary points that cannot be fitted.
      final ArrayList<Double> tempHighExtrapParams = TAIL_FITTER.fitVolatilityAndGradRecursively(forward, strikes, impliedVols, dSigmaDx, expiry, false);
      final ArrayList<Double> tempLowExtrapParams = TAIL_FITTER.fitVolatilityAndGradRecursively(forward, strikes, impliedVols, dSigmaDx, expiry, true);
      highParamsShiftVolStrike.addAll(tempHighExtrapParams);
      lowParamsShiftVolStrike.addAll(tempLowExtrapParams);
    } else {
      final double[] shiftLnVolLow = TAIL_FITTER.fitVolatilityAndGrad(forward, kL, impliedVols[0], dSigmaDx.evaluate(kL), expiry);
      lowParamsShiftVolStrike.add(0, shiftLnVolLow[0]); // mu = ln(shiftedForward / originalForward)
      lowParamsShiftVolStrike.add(1, shiftLnVolLow[1]); // theta = new ln volatility to use
      lowParamsShiftVolStrike.add(2, strikes[0]); // new extapolation boundary
      final double[] shiftLnVolHigh = TAIL_FITTER.fitVolatilityAndGrad(forward, kH, impliedVols[n - 1], dSigmaDx.evaluate(kH), expiry);
      highParamsShiftVolStrike.add(0, shiftLnVolHigh[0]); // mu = ln(shiftedForward / originalForward)
      highParamsShiftVolStrike.add(1, shiftLnVolHigh[1]); // theta = new ln volatility to use
      highParamsShiftVolStrike.add(2, strikes[n - 1]); // new extapolation boundary
    }

    // Resulting Functional Vol Surface: tails outside the boundaries, spline inside.
    Function1D<Double, Double> volSmileFunction = new Function1D<Double, Double>() {
      @Override
      public Double evaluate(final Double k) {
        if (k < lowParamsShiftVolStrike.get(2)) {
          return ShiftedLogNormalTailExtrapolation.impliedVolatility(forward, k, expiry, lowParamsShiftVolStrike.get(0), lowParamsShiftVolStrike.get(1));
        } else if (k > highParamsShiftVolStrike.get(2)) {
          return ShiftedLogNormalTailExtrapolation.impliedVolatility(forward, k, expiry, highParamsShiftVolStrike.get(0), highParamsShiftVolStrike.get(1));
        } else {
          return interpFunc.evaluate(k);
        }
      }
    };
    return volSmileFunction;
  }

  public Interpolator1D getInterpolator() {
    return _interpolator;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + _interpolator.hashCode();
    // FIX: previously omitted, making hashCode inconsistent with state.
    result = prime * result + ObjectUtils.hashCode(_extrapolatorFailureBehaviour);
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final SmileInterpolatorSpline other = (SmileInterpolatorSpline) obj;
    // FIX: include the failure behaviour; previously two instances differing
    // only in extrapolator failure handling compared equal.
    return ObjectUtils.equals(_interpolator, other._interpolator)
        && ObjectUtils.equals(_extrapolatorFailureBehaviour, other._extrapolatorFailureBehaviour);
  }
}
|
<reponame>albinsony/foam2
/**
* @license
* Copyright 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Demo model: one String property per foam.u2.Visibility mode
// (read-write, final, disabled, read-only, hidden).
foam.CLASS({
  name: 'VisibilityTest',
  properties: [
    {
      class: 'String',
      name: 'readWrite',
      value: 'testing...',
      visibility: foam.u2.Visibility.RW
    },
    {
      class: 'String',
      name: 'final',
      value: 'testing...',
      visibility: foam.u2.Visibility.FINAL
    },
    {
      class: 'String',
      name: 'disabled',
      value: 'testing...',
      visibility: foam.u2.Visibility.DISABLED
    },
    {
      class: 'String',
      name: 'readOnly',
      value: 'testing...',
      visibility: foam.u2.Visibility.RO
    },
    {
      class: 'String',
      name: 'hidden',
      value: 'testing...',
      visibility: foam.u2.Visibility.HIDDEN
    }
  ]
});

// NOTE(review): ctx is never used below — confirm before removing.
var ctx = foam.__context__;

// Render the model in a DetailView under each controller mode so the
// visibility behaviours can be compared side by side.
document.write('Default');
foam.u2.DetailView.create(
  {
    data: VisibilityTest.create()
  }
).write();

document.write('<br>Create');
foam.u2.DetailView.create(
  {
    data: VisibilityTest.create(),
    controllerMode: foam.u2.ControllerMode.CREATE
  }
).write();

document.write('<br>View');
foam.u2.DetailView.create(
  {
    data: VisibilityTest.create(),
    controllerMode: foam.u2.ControllerMode.VIEW
  }
).write();

document.write('<br>Edit');
foam.u2.DetailView.create(
  {
    data: VisibilityTest.create(),
    controllerMode: foam.u2.ControllerMode.EDIT
  }
).write();
|
#!/bin/bash
# Compile the 2_* C99 sources with icc twice — without and with extra
# optimisation — printing parallelisation reports (-par-report2).
echo "Compile without opt"
icc 2_* -S -parallel -par-report2 -std=c99
echo "======================================"
# cat 2_1.s
echo "======================================"
echo "Compile with opt"
# NOTE(review): "-ipo-S" looks like a typo for "-ipo -S" (interprocedural
# optimisation + emit assembly) — confirm before changing, since it alters
# what icc produces.
icc 2_* -ipo-S -parallel -par-report2 -std=c99 -o optimized.out
echo "======================================"
# cat optimized.out
echo "======================================"
|
#! /bin/sh
# Print an Indonesian time-of-day greeting with the current time.
DATE=`date '+%I:%M:%S %p'`
# Buat tulisan selamat siang,sore,malam,pagi sesuai jam
JAM=`date '+%H'`   # hour of day, 00-23

# FIX: the original used bash-only [[ ]] under #!/bin/sh, which fails on
# POSIX shells such as dash; [[ ]] also evaluates zero-padded hours like
# "09" arithmetically as invalid octal. POSIX [ ] compares "09" as decimal
# and works in every sh. The four disjoint ranges become one elif chain.
if [ "$JAM" -le 9 ]; then      # 00-09
	echo "Selamat pagi, $DATE "
elif [ "$JAM" -le 14 ]; then   # 10-14
	echo "Selamat siang, $DATE "
elif [ "$JAM" -le 18 ]; then   # 15-18
	echo "Selamat sore, $DATE "
else                           # 19-23
	echo "Selamat malam, $DATE "
fi
|
# Package formula for libgit2: metadata plus a cmake build recipe.
package set summary "C library of Git core methods that is re-entrant and linkable"
package set webpage "https://libgit2.github.com"
package set git.url "https://github.com/libgit2/libgit2.git"
package set src.url "https://github.com/libgit2/libgit2/archive/v1.3.0.tar.gz"
package set src.sum "192eeff84596ff09efb6b01835a066f2df7cd7985e0991c79595688e6b36444e"
package set license "GPL-2.0-only"
package set dep.pkg "libssh2 pcre2"
package set dep.cmd "pkg-config"
package set bsystem "cmake"

# Configure and build with cmake (via the package manager's cmakew wrapper).
# Tests (clar), fuzzers and examples are disabled; SSH and OpenSSL-backed
# HTTPS are enabled; system zlib and static pcre2 replace the bundled copies.
# The *_INCLUDE_DIR / *_LIBRARY_DIR variables are provided by the package
# manager for the declared dependencies.
build() {
    cmakew \
        -DBUILD_CLAR=OFF \
        -DBUILD_FUZZERS=OFF \
        -DBUILD_EXAMPLES=OFF \
        -DBUILD_SHARED_LIBS=ON \
        -DENABLE_WERROR=OFF \
        -DENABLE_TRACE=OFF \
        -DDEPRECATE_HARD=OFF \
        -DPROFILE=OFF \
        -DTHREADSAFE=ON \
        -DUSE_SSH=ON \
        -DUSE_HTTPS=OpenSSL \
        -DUSE_BUNDLED_ZLIB=OFF \
        -DREGEX_BACKEND=pcre2 \
        -DPCRE2_INCLUDE_DIR="$pcre2_INCLUDE_DIR" \
        -DPCRE2_LIBRARY="$pcre2_LIBRARY_DIR/libpcre2-8.a" \
        -DZLIB_INCLUDE_DIR="$zlib_INCLUDE_DIR" \
        -DZLIB_LIBRARY_RELEASE="$zlib_LIBRARY_DIR/libz.a"
}
|
<filename>console/src/boost_1_78_0/libs/asio/test/readable_pipe.cpp<gh_stars>1-10
//
// readable_pipe.cpp
// ~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2021 <NAME> (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
// Disable autolinking for unit tests.
#if !defined(BOOST_ALL_NO_LIB)
#define BOOST_ALL_NO_LIB 1
#endif // !defined(BOOST_ALL_NO_LIB)
// Test that header pipe is self-contained.
#include <boost/asio/readable_pipe.hpp>
#include "archetypes/async_result.hpp"
#include <boost/asio/io_context.hpp>
#include "unit_test.hpp"
// readable_pipe_compile test
// ~~~~~~~~~~~~~~~~~~~~~~~~~~
// The following test checks that all public member functions on the class
// readable_pipe compile and link correctly. Runtime failures are ignored.
// Compile-only coverage of basic_readable_pipe's public interface; runtime
// results and exceptions are deliberately ignored.
namespace readable_pipe_compile {
// Completion-handler archetype for write operations: default-constructible
// and movable, deliberately non-copyable when rvalue refs are available.
struct write_some_handler
{
	write_some_handler() {}
	void operator()(const boost::system::error_code&, std::size_t) {}
#if defined(BOOST_ASIO_HAS_MOVE)
	write_some_handler(write_some_handler&&) {}
private:
	write_some_handler(const write_some_handler&);
#endif // defined(BOOST_ASIO_HAS_MOVE)
};
// Completion-handler archetype for read operations (same shape as above).
struct read_some_handler
{
	read_some_handler() {}
	void operator()(const boost::system::error_code&, std::size_t) {}
#if defined(BOOST_ASIO_HAS_MOVE)
	read_some_handler(read_some_handler&&) {}
private:
	read_some_handler(const read_some_handler&);
#endif // defined(BOOST_ASIO_HAS_MOVE)
};
// Instantiates every public readable_pipe member so the compiler/linker
// checks them; the whole body is skipped when pipes are unsupported.
void test()
{
#if defined(BOOST_ASIO_HAS_PIPE)
	using namespace boost::asio;
	try
	{
		io_context ioc;
		const io_context::executor_type ioc_ex = ioc.get_executor();
		char mutable_char_buffer[128] = "";
		archetypes::lazy_handler lazy;
		boost::system::error_code ec;
		const std::string path;
		// basic_readable_pipe constructors.
		readable_pipe pipe1(ioc);
		readable_pipe::native_handle_type native_pipe1 = pipe1.native_handle();
		readable_pipe pipe2(ioc, native_pipe1);
		readable_pipe pipe3(ioc_ex);
		readable_pipe::native_handle_type native_pipe2 = pipe1.native_handle();
		readable_pipe pipe4(ioc_ex, native_pipe2);
#if defined(BOOST_ASIO_HAS_MOVE)
		readable_pipe pipe5(std::move(pipe4));
#endif // defined(BOOST_ASIO_HAS_MOVE)
		// basic_readable_pipe operators.
#if defined(BOOST_ASIO_HAS_MOVE)
		pipe1 = readable_pipe(ioc);
		pipe1 = std::move(pipe2);
#endif // defined(BOOST_ASIO_HAS_MOVE)
		// basic_io_object functions.
		readable_pipe::executor_type ex = pipe1.get_executor();
		(void)ex;
		// basic_readable_pipe functions.
		readable_pipe::native_handle_type native_pipe3 = pipe1.native_handle();
		pipe1.assign(native_pipe3);
		readable_pipe::native_handle_type native_pipe4 = pipe1.native_handle();
		pipe1.assign(native_pipe4, ec);
		bool is_open = pipe1.is_open();
		(void)is_open;
		pipe1.close();
		pipe1.close(ec);
		readable_pipe::native_handle_type native_pipe5 = pipe1.native_handle();
		(void)native_pipe5;
		pipe1.cancel();
		pipe1.cancel(ec);
		pipe1.read_some(buffer(mutable_char_buffer));
		pipe1.read_some(buffer(mutable_char_buffer), ec);
		pipe1.async_read_some(buffer(mutable_char_buffer), read_some_handler());
		int i3 = pipe1.async_read_some(buffer(mutable_char_buffer), lazy);
		(void)i3;
	}
	catch (std::exception&)
	{
	}
#endif // defined(BOOST_ASIO_HAS_PIPE)
}
} // namespace readable_pipe_compile
// Register the compile test with asio's unit-test harness.
BOOST_ASIO_TEST_SUITE
(
	"readable_pipe",
	BOOST_ASIO_TEST_CASE(readable_pipe_compile::test)
)
|
import { flatPackageName } from '@ssen/flat-package-name';
import fs from 'fs-extra';
import getPackageJson, { Options } from 'package-json';
import path from 'path';
import { PackageJson } from 'type-fest';
import { PackageInfo, PublishOption } from './types';
export type GetRemotePackageJson = (
params: { name: string } & Options,
) => Promise<PackageJson | undefined>;
/**
 * Default remote lookup: fetches a package.json from the npm registry.
 * Resolves to `undefined` both when the registry lookup fails and when the
 * returned document has no string `version` field.
 */
const getNpmRemotePackageJson: GetRemotePackageJson = async ({
  name,
  ...options
}) => {
  try {
    const value = await getPackageJson(name, options);
    const hasVersion = value && typeof value.version === 'string';
    return hasVersion ? (value as PackageJson) : undefined;
  } catch {
    return undefined;
  }
};
interface Params {
packages: Map<string, PackageInfo>;
outDir: string;
tag: string | undefined;
registry: string | undefined;
getRemotePackageJson?: GetRemotePackageJson;
}
/**
 * Builds a publish plan for every package built into `outDir`.
 *
 * For each package whose `outDir/<flat-name>/package.json` exists, the
 * matching remote package.json is fetched at the package's dist-tag
 * (or `forceTag`) so callers can compare local vs. published versions.
 *
 * @throws if `outDir` is missing or a built package.json lacks a name.
 */
export async function getPublishOptions({
  packages,
  outDir,
  tag: forceTag,
  registry: forceRegistry,
  getRemotePackageJson = getNpmRemotePackageJson,
}: Params): Promise<Map<string, PublishOption>> {
  if (!fs.existsSync(outDir) || !fs.statSync(outDir).isDirectory()) {
    // fixed message: was the ungrammatical `"..." directory is not exists`
    throw new Error(`"${outDir}" directory does not exist`);
  }
  // dist-tag per package name, defaulting to "latest".
  const tags: Map<string, string> = new Map();
  for (const [name, { tag }] of packages) {
    tags.set(name, tag ?? 'latest');
  }
  const currentPackageJsons: PackageJson[] = Array.from(packages.values())
    // PackageInfo => /path/to/dist/{name}/package.json
    .map(({ name: packageName }) =>
      path.join(outDir, flatPackageName(packageName), 'package.json'),
    )
    // keep only packages that were actually built
    .filter((packageJsonFile) => fs.existsSync(packageJsonFile))
    // /path/to/dist/{name}/package.json => PackageJson
    .map((packageJsonFile) => fs.readJsonSync(packageJsonFile))
    // a package.json without a string name cannot be published
    .filter(({ name }) => typeof name === 'string');
  const remotePackageJsons: (PackageJson | undefined)[] = await Promise.all<
    PackageJson | undefined
  >(
    currentPackageJsons.map(({ name }) => {
      // unreachable after the filter above, but fail loudly just in case
      // (was: throw new Error(``) with an empty, undebuggable message)
      if (!name) {
        throw new Error(`built package.json is missing a "name" field`);
      }
      return getRemotePackageJson({
        name,
        version: forceTag || tags.get(name),
        registryUrl: forceRegistry,
        fullMetadata: true,
      });
    }),
  );
  // Zip the local and remote package.json lists into a name-keyed map.
  return currentPackageJsons.reduce((map, current, i) => {
    if (!current || !current.name) {
      throw new Error(`unexpected package.json without a "name" field`);
    }
    map.set(current.name, {
      name: current.name,
      tag: tags.get(current.name)!,
      current,
      remote: remotePackageJsons[i],
    });
    return map;
  }, new Map<string, PublishOption>());
}
|
#!/bin/bash
## Play folder with vlc
# play [folder] -h: open a folder with vlc, detached from the terminal.
function play() {
    # Check vlc installation.
    # BUG FIX: the original tested `-n` (non-empty), which made the function
    # error out precisely when vlc WAS installed; `command -v vlc` prints a
    # path only when vlc exists, so the error branch needs `-z` (empty).
    if [ -z "$(command -v vlc)" ]; then
        echo_error 'vlc required, enter: "sudo apt-get install -y vlc" to install'
        return 1
    fi
    local ARGUMENTS=()
    local OPTARG
    local OPTION
    # Parse options and positional arguments in any order.
    while [ "$#" -gt 0 ]; do
        OPTIND=0
        while getopts :h OPTION; do
            case "${OPTION}" in
                h) echo_warning 'play';
                    echo_label 14 ' description:'; echo_primary 'Play folder with vlc'
                    echo_label 14 ' usage:'; echo_primary 'play [folder] -h (help)'
                    return 0;;
                :) echo_error "\"${OPTARG}\" requires value"
                    return 1;;
                \?) echo_error "invalid option \"${OPTARG}\""
                    return 1;;
            esac
        done
        if [ "${OPTIND}" -gt 1 ]; then
            shift $(( OPTIND-1 ))
        fi
        if [ "${OPTIND}" -eq 1 ]; then
            ARGUMENTS+=("$1")
            shift
        fi
    done
    # Check argument count
    if [ "${#ARGUMENTS[@]}" -gt 1 ]; then
        echo_error "too many arguments (${#ARGUMENTS[@]})"
        echo_label 8 'usage:'; echo_primary 'play [folder] -h (help)'
        return 1
    fi
    # excluding last forward slash if any
    local FOLDER
    FOLDER="$(realpath "${ARGUMENTS[${LBOUND}]}")"
    # Check folder validity
    if [ ! -d "${FOLDER}" ]; then
        echo_error 'source must be a folder'
        echo_label 8 'usage:'; echo_primary 'play [folder] -h (help)'
        return 1
    fi
    # Detach vlc from this shell so closing the terminal doesn't kill it.
    echo_info "nohup vlc \"${FOLDER}\" &>/dev/null &"
    nohup vlc "${FOLDER}" &>/dev/null &
}
<gh_stars>0
#pragma once
#include "mc-types.h"
// Factory exposing curl-backed performers behind the mcIPerformer
// interface declared in mc-types.h.
class mcCurlPerformerFactory {
public:
	// Builds a new curl performer. NOTE(review): the caller presumably owns
	// the returned pointer -- confirm against the implementation.
	static mcIPerformer* create(void);
};
|
<gh_stars>0
import {FirewallConfig} from './firewall.commons';
import {CommonBackendConfigType, CommonKeywordMapperConfigType} from '../backend-commons/modules/backend.commons';
import {CacheConfig} from './datacache.module';
/**
 * Server-wide configuration shared by all backends: URL prefixes for the
 * API surface, the error-document location, and the nested backend and
 * firewall configurations.
 */
export interface CommonServerConfigType<B extends CommonBackendConfigType<CommonKeywordMapperConfigType, CacheConfig>,
    F extends FirewallConfig> {
    /** URL prefix for data API routes. */
    apiDataPrefix: string;
    /** URL prefix for asset API routes. */
    apiAssetsPrefix: string;
    /** URL prefix for publicly reachable API routes. */
    apiPublicPrefix: string;
    /** Filesystem path to the static error documents. */
    filePathErrorDocs: string;
    /** Backend-specific configuration. */
    backendConfig: B;
    /** Firewall-specific configuration. */
    firewallConfig: F;
}
|
#!/bin/bash -x
set -e
# Create (once) and activate a per-job virtualenv providing ghtools, whose
# gh-status command is used by notify() below.
# NOTE(review): this function is never called in this script -- gh-status
# must be on PATH some other way; confirm before relying on it.
function ensure_venv {
    VENV_PATH="${HOME}/venv/${JOB_NAME}"
    [ -x ${VENV_PATH}/bin/pip ] || virtualenv ${VENV_PATH}
    . ${VENV_PATH}/bin/activate
    pip install -q ghtools
}
# notify STATUS MESSAGE: push a GitHub commit status for the current build,
# using Jenkins-provided GIT_COMMIT / BUILD_NUMBER / BUILD_URL.
function notify {
    local STATUS="$1"
    local MESSAGE="$2"
    echo $STATUS
    gh-status "$REPO" "$GIT_COMMIT" "${STATUS}" -d "\"Build #${BUILD_NUMBER} ${MESSAGE} on Jenkins\"" -u "$BUILD_URL" >/dev/null
}
REPO="alphagov/call-rota"
# Force a fresh dependency resolution on every build.
rm -f Gemfile.lock
bundle install --path "${HOME}/bundles/${JOB_NAME}"
notify pending "is running"
# Run the test suite; publish the gem only on success.
if bundle exec rake; then
    notify success "succeeded"
    bundle exec rake publish_gem --trace
else
    notify failure "failed"
    exit 1
fi
|
#include <bits/stdc++.h>
using namespace std;
#define R 3
#define C 6
void spiralPrint(int m, int n, int arr[R][C])
{
int i, k = 0, l = 0;
/* k - starting row index
m - ending row index
l - starting column index
n - ending column index
i - iterator
*/
while (k < m && l < n) {
/* Print the first row from
the remaining rows */
for (i = l; i < n; ++i) {
cout << arr[k][i] << " ";
}
k++;
/* Print the last column
from the remaining columns */
for (i = k; i < m; ++i) {
cout << arr[i][n - 1] << " ";
}
n--;
/* Print the last row from
the remaining rows */
if (k < m) {
for (i = n - 1; i >= l; --i) {
cout << arr[m - 1][i] << " ";
}
m--;
}
/* Print the first column from
the remaining columns */
if (l < n) {
for (i = m - 1; i >= k; --i) {
cout << arr[i][l] << " ";
}
l++;
}
}
}
// Demo driver: spiral-prints a fixed 3x6 matrix
// (expected output: 1 2 3 4 5 6 12 18 17 16 15 14 13 7 8 9 10 11).
int main()
{
	int arr[R][C] = { { 1, 2, 3, 4, 5, 6 },
					  { 7, 8, 9, 10, 11, 12 },
					  { 13, 14, 15, 16, 17, 18 } };
	spiralPrint(R, C, arr);
	return 0;
}
#include "Etterna/Globals/global.h"
#include "Etterna/Models/Misc/CommonMetrics.h"
#include "Etterna/Singletons/GameState.h"
#include "RageUtil/Utils/RageUtil.h"
#include "SongOptions.h"
// String names backing AutosyncType; the X-macros below generate the
// string/localized-string/Lua conversions from this table.
static const char* AutosyncTypeNames[] = {
	"Off",
	"Song",
	"Machine",
};
XToString(AutosyncType);
XToLocalizedString(AutosyncType);
LuaXType(AutosyncType);
// Same pattern for SoundEffectType.
static const char* SoundEffectTypeNames[] = {
	"Off",
	"Speed",
	"Pitch",
};
XToString(SoundEffectType);
XToLocalizedString(SoundEffectType);
LuaXType(SoundEffectType);
// Reset every option to its gameplay default.
void
SongOptions::Init()
{
	m_bAssistClap = false;
	m_bAssistMetronome = false;
	m_fMusicRate = 1.0f;
	m_SpeedfMusicRate = 1.0f;
	m_AutosyncType = AutosyncType_Off;
	m_SoundEffectType = SoundEffectType_Off;
	m_bStaticBackground = false;
	m_bRandomBGOnly = false;
	m_bSaveScore = true;
}
// Move this option set toward `other`: the music rate is interpolated via
// fapproach at its per-option approach speed; every other field snaps
// directly to the target value.
void
SongOptions::Approach(const SongOptions& other, float fDeltaSeconds)
{
#define APPROACH(opt) \
	fapproach(m_##opt, other.m_##opt, fDeltaSeconds* other.m_Speed##opt);
#define DO_COPY(x) x = other.x;
	APPROACH(fMusicRate);
	DO_COPY(m_bAssistClap);
	DO_COPY(m_bAssistMetronome);
	DO_COPY(m_AutosyncType);
	DO_COPY(m_SoundEffectType);
	DO_COPY(m_bStaticBackground);
	DO_COPY(m_bRandomBGOnly);
	DO_COPY(m_bSaveScore);
#undef APPROACH
#undef DO_COPY
}
// Append `name` to AddTo, prefixed with a percentage when level != 1.
// A level of exactly 0 means "off" and adds nothing; the exact float
// comparisons are intentional (levels are assigned, not computed).
static void
AddPart(vector<RString>& AddTo, float level, RString name)
{
	if (level == 0)
		return;
	const RString LevelStr =
	  (level == 1) ? RString("") : ssprintf("%ld%% ", lround(level * 100));
	AddTo.push_back(LevelStr + name);
}
// Collect the short mod strings for every non-default option, e.g.
// "1.5xMusic", "AutosyncSong", "Clap".
void
SongOptions::GetMods(vector<RString>& AddTo) const
{
	// Music rate formatted as e.g. "1.5xMusic"; a single trailing '0' of
	// the two-decimal format is trimmed ("1.50" -> "1.5").
	if (m_fMusicRate != 1) {
		RString s = ssprintf("%2.2f", m_fMusicRate);
		if (s[s.size() - 1] == '0')
			s.erase(s.size() - 1);
		AddTo.push_back(s + "xMusic");
	}
	switch (m_AutosyncType) {
		case AutosyncType_Off:
			break;
		case AutosyncType_Song:
			AddTo.push_back("AutosyncSong");
			break;
		case AutosyncType_Machine:
			AddTo.push_back("AutosyncMachine");
			break;
		default:
			FAIL_M(ssprintf("Invalid autosync type: %i", m_AutosyncType));
	}
	switch (m_SoundEffectType) {
		case SoundEffectType_Off:
			break;
		case SoundEffectType_Speed:
			AddTo.push_back("EffectSpeed");
			break;
		case SoundEffectType_Pitch:
			AddTo.push_back("EffectPitch");
			break;
		default:
			FAIL_M(
			  ssprintf("Invalid sound effect type: %i", m_SoundEffectType));
	}
	if (m_bAssistClap)
		AddTo.push_back("Clap");
	if (m_bAssistMetronome)
		AddTo.push_back("Metronome");
	if (m_bStaticBackground)
		AddTo.push_back("StaticBG");
	if (m_bRandomBGOnly)
		AddTo.push_back("RandomBG");
}
// Same as GetMods, but each mod string is run through the theme's
// localization table.
void
SongOptions::GetLocalizedMods(vector<RString>& v) const
{
	GetMods(v);
	FOREACH(RString, v, s) { *s = CommonMetrics::LocalizeOptionItem(*s, true); }
}
// Comma-joined, non-localized mod list.
RString
SongOptions::GetString() const
{
	vector<RString> v;
	GetMods(v);
	return join(", ", v);
}
// Comma-joined, localized mod list.
RString
SongOptions::GetLocalizedString() const
{
	vector<RString> v;
	GetLocalizedMods(v);
	return join(", ", v);
}
/* Options are added to the current settings; call Init() beforehand if
 * you don't want this. */
void
SongOptions::FromString(const RString& sMultipleMods)
{
	RString sTemp = sMultipleMods;
	vector<RString> vs;
	split(sTemp, ",", vs, true);
	RString sThrowAway;
	FOREACH(RString, vs, s) { FromOneModString(*s, sThrowAway); }
}
// Parse a single mod token (e.g. "1.5xmusic", "no clap", "autosyncsong")
// and apply it. Returns false for unrecognized tokens.
// NOTE(review): sErrorOut is never written by this function -- callers
// receive it unchanged; confirm whether that is intended.
bool
SongOptions::FromOneModString(const RString& sOneMod, RString& sErrorOut)
{
	RString sBit = sOneMod;
	sBit.MakeLower();
	Trim(sBit);
	// "<decimal>xmusic" sets the music rate directly.
	Regex mult("^([0-9]+(\\.[0-9]+)?)xmusic$");
	vector<RString> matches;
	if (mult.Compare(sBit, matches)) {
		m_fMusicRate = StringToFloat(matches[0]);
		MESSAGEMAN->Broadcast("RateChanged");
		return true;
	}
	matches.clear();
	// An optional leading "no " turns the named option off instead of on.
	vector<RString> asParts;
	split(sBit, " ", asParts, true);
	bool on = true;
	if (asParts.size() > 1) {
		sBit = asParts[1];
		if (asParts[0] == "no")
			on = false;
	}
	if (sBit == "clap")
		m_bAssistClap = on;
	else if (sBit == "metronome")
		m_bAssistMetronome = on;
	else if (sBit == "autosync" || sBit == "autosyncsong")
		m_AutosyncType = on ? AutosyncType_Song : AutosyncType_Off;
	else if (sBit == "autosyncmachine")
		m_AutosyncType = on ? AutosyncType_Machine : AutosyncType_Off;
	else if (sBit == "effect" && !on)
		m_SoundEffectType = SoundEffectType_Off;
	else if (sBit == "effectspeed")
		m_SoundEffectType = on ? SoundEffectType_Speed : SoundEffectType_Off;
	else if (sBit == "effectpitch")
		m_SoundEffectType = on ? SoundEffectType_Pitch : SoundEffectType_Off;
	else if (sBit == "staticbg")
		m_bStaticBackground = on;
	else if (sBit == "randombg")
		m_bRandomBGOnly = on;
	else if (sBit == "savescore")
		m_bSaveScore = on;
	else
		return false;
	return true;
}
// Field-by-field equality over every option.
bool
SongOptions::operator==(const SongOptions& other) const
{
#define COMPARE(x) \
	{ \
		if ((x) != other.x) \
			return false; \
	}
	COMPARE(m_fMusicRate);
	COMPARE(m_bAssistClap);
	COMPARE(m_bAssistMetronome);
	COMPARE(m_AutosyncType);
	COMPARE(m_SoundEffectType);
	COMPARE(m_bStaticBackground);
	COMPARE(m_bRandomBGOnly);
	COMPARE(m_bSaveScore);
#undef COMPARE
	return true;
}
// lua start
#include "Etterna/Models/Lua/LuaBinding.h"
#include "Etterna/Globals/OptionsBinding.h"
/** @brief Allow Lua to have access to SongOptions. */
class LunaSongOptions : public Luna<SongOptions>
{
  public:
	ENUM_INTERFACE(AutosyncSetting, AutosyncType, AutosyncType);
	// ENUM_INTERFACE(SoundEffectSetting, SoundEffectType, SoundEffectType);
	// Broken, SoundEffectType_Speed disables rate mod, other settings have no
	// effect. -Kyz
	BOOL_INTERFACE(AssistClap, AssistClap);
	BOOL_INTERFACE(AssistMetronome, AssistMetronome);
	BOOL_INTERFACE(StaticBackground, StaticBackground);
	BOOL_INTERFACE(RandomBGOnly, RandomBGOnly);
	BOOL_INTERFACE(SaveScore, SaveScore);
	// Combined getter/setter: always returns (rate, approach speed); when
	// called with arguments it also sets the rate (validated to (0, 3]) and
	// optionally the approach speed.
	static int MusicRate(T* p, lua_State* L)
	{
		int original_top = lua_gettop(L);
		lua_pushnumber(L, p->m_fMusicRate);
		lua_pushnumber(L, p->m_SpeedfMusicRate);
		if (lua_isnumber(L, 1) && original_top >= 1) {
			float v = FArg(1);
			// Reject non-positive and absurd rates instead of applying them.
			if (!(v > 0.0f && v <= 3.0f)) {
				luaL_error(L, "Invalid value %f", v);
			} else {
				p->m_fMusicRate = v;
				MESSAGEMAN->Broadcast("RateChanged");
			}
		}
		if (original_top >= 2 && lua_isnumber(L, 2)) {
			p->m_SpeedfMusicRate = FArgGTEZero(L, 2);
		}
		OPTIONAL_RETURN_SELF(original_top);
		return 2;
	}
	LunaSongOptions()
	{
		ADD_METHOD(AutosyncSetting);
		// ADD_METHOD(SoundEffectSetting);
		ADD_METHOD(AssistClap);
		ADD_METHOD(AssistMetronome);
		ADD_METHOD(StaticBackground);
		ADD_METHOD(RandomBGOnly);
		ADD_METHOD(SaveScore);
		ADD_METHOD(MusicRate);
	}
};
LUA_REGISTER_CLASS(SongOptions)
// lua end
|
# Install the auto-shutdown extension from the local tarball.
pip install sagemaker_studio_autoshutdown-0.1.1.tar.gz
# Keep the yarn cache in /tmp so the build does not fill the home volume.
jlpm config set cache-folder /tmp/yarncache
# Rebuild JupyterLab so the freshly installed extension is bundled in.
jupyter lab build --debug --minimize=False
echo "#######################################################"
echo "The installation was successful. This terminal window will close in 10 seconds. Refresh your IDE to see the extension on the sidebar."
sleep 10
# Restart the JupyterLab server so the new build is served.
nohup supervisorctl -c /etc/supervisor/conf.d/supervisord.conf restart jupyterlabserver
|
import string
import random
def generate_password(length=10):
    """Generate a random alphanumeric password guaranteed to contain a digit.

    The original implementation returned ``False`` whenever the randomly
    drawn password happened to contain no digit (a real, if small, chance);
    this version retries until the digit requirement is met, so the return
    value is always a ``str``.

    Args:
        length: Number of characters in the password (default 10, kept for
            backward compatibility with the original fixed-length behavior).

    Returns:
        A password of ``length`` letters/digits containing at least one digit.
    """
    if length < 1:
        raise ValueError("length must be at least 1")
    characters = string.ascii_letters + string.digits
    while True:
        password = "".join(random.choice(characters) for _ in range(length))
        # Retry until the password contains a digit instead of giving up.
        if any(char.isdigit() for char in password):
            return password

# Generate a password
generated_password = generate_password()
print(generated_password)  # e.g. 0isg2D23Lh
// import IndexComponent from './components/Dashboard.vue';
//
// export const routes = [
// {
// name: 'Dashboard',
// path: '/admin/dashboard',
// component: IndexComponent,
// }
// ]
|
#!/bin/bash
# Recursively download all .hdf files from a URL (Earthdata-style cookie auth).
# Usage: <script> <url> <download-dir>
dl_url=$1
dl_dir=$2
# Fail early on missing arguments instead of handing wget empty strings.
if [ -z "$dl_url" ] || [ -z "$dl_dir" ]; then
    echo "Usage: $0 <url> <download-dir>" >&2
    exit 1
fi
echo "Downloading..."
echo "$dl_url"
# Quoted expansions prevent word splitting on URLs/paths containing spaces.
wget --load-cookies ~/.urs_cookies --save-cookies ~/.urs_cookies --keep-session-cookies --no-check-certificate --auth-no-challenge=on -r --reject "index.html*" --accept "*.hdf" -l1 -np -e robots=off --no-directories --waitretry=300 -t 100 --directory-prefix="$dl_dir" --secure-protocol=TLSv1 "$dl_url"
echo "Done!"
#!/usr/bin/env bats
load 'test_helper'
fixtures 'exist'
# Create the block special fixture each test needs (mknod requires sudo).
setup () {
	sudo mknod ${TEST_FIXTURE_ROOT}/dir/blockfile b 89 1
}
# Remove the fixture so tests stay independent.
teardown () {
	rm -f ${TEST_FIXTURE_ROOT}/dir/blockfile
}
# Correctness
@test 'assert_block_not_exist() <file>: returns 0 if <file> block special does not exist' {
	local -r file="${TEST_FIXTURE_ROOT}/dir/file"
	run assert_block_not_exist "$file"
	[ "$status" -eq 0 ]
	[ "${#lines[@]}" -eq 0 ]
}
@test 'assert_block_not_exist() <file>: returns 1 and displays path if <file> block special file exists, but it was expected to be absent' {
	local -r file="${TEST_FIXTURE_ROOT}/dir/blockfile"
	run assert_block_not_exist "$file"
	[ "$status" -eq 1 ]
	[ "${#lines[@]}" -eq 3 ]
	[ "${lines[0]}" == '-- block special file exists, but it was expected to be absent --' ]
	[ "${lines[1]}" == "path : $file" ]
	[ "${lines[2]}" == '--' ]
}
# Transforming path: BATSLIB_FILE_PATH_REM is a shell-parameter-expansion
# pattern removed from the displayed path, BATSLIB_FILE_PATH_ADD the
# replacement ('#' anchors a prefix, '%' a suffix, bare pattern an infix).
@test 'assert_block_not_exist() <file>: replace prefix of displayed path' {
	local -r BATSLIB_FILE_PATH_REM="#${TEST_FIXTURE_ROOT}"
	local -r BATSLIB_FILE_PATH_ADD='..'
	run assert_block_not_exist "${TEST_FIXTURE_ROOT}/dir/blockfile"
	[ "$status" -eq 1 ]
	[ "${#lines[@]}" -eq 3 ]
	[ "${lines[0]}" == '-- block special file exists, but it was expected to be absent --' ]
	[ "${lines[1]}" == "path : ../dir/blockfile" ]
	[ "${lines[2]}" == '--' ]
}
@test 'assert_block_not_exist() <file>: replace suffix of displayed path' {
	local -r BATSLIB_FILE_PATH_REM='%blockfile'
	local -r BATSLIB_FILE_PATH_ADD='..'
	run assert_block_not_exist "${TEST_FIXTURE_ROOT}/dir/blockfile"
	[ "$status" -eq 1 ]
	[ "${#lines[@]}" -eq 3 ]
	[ "${lines[0]}" == '-- block special file exists, but it was expected to be absent --' ]
	[ "${lines[1]}" == "path : ${TEST_FIXTURE_ROOT}/dir/.." ]
	[ "${lines[2]}" == '--' ]
}
@test 'assert_block_not_exist() <file>: replace infix of displayed path' {
	local -r BATSLIB_FILE_PATH_REM='dir'
	local -r BATSLIB_FILE_PATH_ADD='..'
	run assert_block_not_exist "${TEST_FIXTURE_ROOT}/dir/blockfile"
	[ "$status" -eq 1 ]
	[ "${#lines[@]}" -eq 3 ]
	[ "${lines[0]}" == '-- block special file exists, but it was expected to be absent --' ]
	[ "${lines[1]}" == "path : ${TEST_FIXTURE_ROOT}/../blockfile" ]
	[ "${lines[2]}" == '--' ]
}
|
#!/usr/bin/env zsh
# Make tmux read its config from the XDG config directory instead of ~/.tmux.conf.
alias tmux="tmux -f $XDG_CONFIG_HOME/tmux/tmux.conf"
|
-- Total number of rows in the employees table, returned as count_of_employees.
SELECT COUNT(*) as count_of_employees FROM employees;
<gh_stars>1-10
/*
TITLE File I/O & Statistics Chapter10Exercise4.cpp
Bjarne Stroustrup "Programming: Principles and Practice Using C++"
COMMENT
Objective: 1. Populate a file with temperature readings in
the format (hour, temperature, unit), where
hour: [0, 23], temperature: [min, max],
unit: C - Celcius, min = -273.15, max = 1.4e32,
F - Fahrenheit, min = −459.67, max = 2.5e32.
2. Read the file containing the temperatures, convert
them all into Fahrenheit and calcualte their mean
and median.
Input: -
Output: -
Author: <NAME>
Date: 08.04.2015
*/
#include <iostream>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>
#include <algorithm> // sort
#include <time.h> // time
#include "Chapter10Exercise4.h"
//===========================================================================================================
// Generates a file of (hour, temperature, unit) readings, reads it back,
// and prints the mean and median converted to Fahrenheit.
int main()
{
	std::string file_name("Chapter10Exercise4raw_temps.txt");
	populate_file(file_name);
	std::ifstream ifs(file_name.c_str());
	if (!ifs)
	{
		std::cerr <<"Can't open input file: "<< file_name <<"\n";
		// BUG FIX: previously execution fell through and computed statistics
		// from a failed stream (an empty data set); bail out instead.
		return 1;
	}
	std::vector<Reading> file_data;
	fill_vector(ifs, file_data);
	std::cout <<"Temperature Mean: "<< vector_mean(file_data) <<" [F]\n";
	std::cout <<"Temperature Median: "<< vector_median(file_data) <<" [F]\n";
}
|
// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package etcdserver
import (
"io"
"os"
"path"
pb "github.com/coreos/etcd/etcdserver/etcdserverpb"
"github.com/coreos/etcd/pkg/pbutil"
"github.com/coreos/etcd/pkg/types"
"github.com/coreos/etcd/raft/raftpb"
"github.com/coreos/etcd/snap"
"github.com/coreos/etcd/version"
"github.com/coreos/etcd/wal"
"github.com/coreos/etcd/wal/walpb"
)
// Storage is the stable-storage abstraction the etcd server writes raft
// state through; implementations must not return before data is durable.
type Storage interface {
	// Save function saves ents and state to the underlying stable storage.
	// Save MUST block until st and ents are on stable storage.
	Save(st raftpb.HardState, ents []raftpb.Entry) error
	// SaveSnap function saves snapshot to the underlying stable storage.
	SaveSnap(snap raftpb.Snapshot) error
	// DBFilePath returns the file path of database snapshot saved with given
	// id.
	DBFilePath(id uint64) (string, error)
	// Close closes the Storage and performs finalization.
	Close() error
}

// storage satisfies Storage by embedding a write-ahead log and a
// snapshotter; WAL provides Save/Close, Snapshotter provides DBFilePath.
type storage struct {
	*wal.WAL
	*snap.Snapshotter
}

// NewStorage combines an open WAL and Snapshotter into a Storage.
func NewStorage(w *wal.WAL, s *snap.Snapshotter) Storage {
	return &storage{w, s}
}
// SaveSnap persists the snapshot: it first records the snapshot marker in
// the WAL, then writes the snapshot itself, and finally releases WAL file
// locks up to the snapshot index since those segments are no longer needed.
func (st *storage) SaveSnap(snap raftpb.Snapshot) error {
	walsnap := walpb.Snapshot{
		Index: snap.Metadata.Index,
		Term:  snap.Metadata.Term,
	}
	if err := st.WAL.SaveSnapshot(walsnap); err != nil {
		return err
	}
	if err := st.Snapshotter.SaveSnap(snap); err != nil {
		return err
	}
	return st.WAL.ReleaseLockTo(snap.Metadata.Index)
}
// readWAL opens the WAL at waldir starting from the given snapshot and
// reads everything after it, returning the open WAL plus the node/cluster
// IDs, hard state and entries recovered from it. An io.ErrUnexpectedEOF
// (torn tail write) is repaired once and the read retried; any other
// failure, or a second failure after repair, is fatal.
func readWAL(waldir string, snap walpb.Snapshot) (w *wal.WAL, id, cid types.ID, st raftpb.HardState, ents []raftpb.Entry) {
	var (
		err       error
		wmetadata []byte
	)
	repaired := false
	for {
		if w, err = wal.Open(waldir, snap); err != nil {
			plog.Fatalf("open wal error: %v", err)
		}
		if wmetadata, st, ents, err = w.ReadAll(); err != nil {
			w.Close()
			// we can only repair ErrUnexpectedEOF and we never repair twice.
			if repaired || err != io.ErrUnexpectedEOF {
				plog.Fatalf("read wal error (%v) and cannot be repaired", err)
			}
			if !wal.Repair(waldir) {
				plog.Fatalf("WAL error (%v) cannot be repaired", err)
			} else {
				plog.Infof("repaired WAL error (%v)", err)
				repaired = true
			}
			continue
		}
		break
	}
	// The WAL metadata record carries the node and cluster identity.
	var metadata pb.Metadata
	pbutil.MustUnmarshal(&metadata, wmetadata)
	id = types.ID(metadata.NodeID)
	cid = types.ID(metadata.ClusterID)
	return
}
// upgradeDataDir converts an older version of the etcdServer data to the newest version.
// It must ensure that, after upgrading, the most recent version is present.
// The switch falls through so an upgrade from any old version applies every
// later migration step in order.
func upgradeDataDir(baseDataDir string, name string, ver version.DataDirVersion) error {
	switch ver {
	case version.DataDir2_0:
		// 2.0 -> 2.0.1: move snap/wal under a member/ subdirectory.
		err := makeMemberDir(baseDataDir)
		if err != nil {
			return err
		}
		fallthrough
	case version.DataDir2_0_1:
		// 2.0.1 is already the current layout; nothing further to do.
		fallthrough
	default:
	}
	return nil
}
// makeMemberDir migrates a pre-2.0.1 layout by moving the "snap" and "wal"
// directories under dir/member. It is a no-op when dir/member already exists.
func makeMemberDir(dir string) error {
	membdir := path.Join(dir, "member")
	if _, err := os.Stat(membdir); err == nil {
		return nil // already migrated
	} else if !os.IsNotExist(err) {
		return err
	}
	if err := os.MkdirAll(membdir, 0700); err != nil {
		return err
	}
	for _, name := range []string{"snap", "wal"} {
		if err := os.Rename(path.Join(dir, name), path.Join(membdir, name)); err != nil {
			return err
		}
	}
	return nil
}
|
<gh_stars>0
import React, { ReactElement } from "react"
import { Card, List, Typography } from "antd"
import { Link } from "gatsby"
import Base from "../layouts/Base"
import { SiteDataItem } from "../types"
// Landing page: a breadcrumbed card grid linking to the almanac sections.
function Index(): ReactElement {
  // Top-level sections shown as cards; extend this list to add pages.
  const sections: SiteDataItem[] = [
    {
      link: `/seasons`,
      title: "Seasons",
    },
  ]

  // Renders one section entry as a clickable card.
  const renderSection = (item: SiteDataItem): ReactElement => (
    <List.Item>
      <Link to={item.link}>
        <Card hoverable>
          <Typography.Text strong>{item.title}</Typography.Text>
        </Card>
      </Link>
    </List.Item>
  )

  return (
    <Base
      title={`Formula 1 Almanac`}
      breadcrumbs={[
        {
          name: "Home",
          path: "/",
        },
      ]}
    >
      <List
        grid={{ gutter: 32 }}
        dataSource={sections}
        renderItem={renderSection}
      />
    </Base>
  )
}
export default React.memo(Index)
|
#!/bin/bash
#2020.12.25
# Download PHP, libuuid and swoole source tarballs into /tmp/packages, then
# move the whole directory into the current working directory. Note the
# commented-out imagemagick line sits inside the "&& \" continuation chain;
# re-enabling it needs care.
mkdir -p /tmp/packages && \
wget -O /tmp/packages/php.tar.xz "https://secure.php.net/get/php-7.4.13.tar.xz/from/this/mirror" && \
wget -O /tmp/packages/libuuid.tgz "http://nchc.dl.sourceforge.net/project/libuuid/libuuid-1.0.3.tar.gz" && \
# wget -O /tmp/packages/imagemagick.tgz "https://www.imagemagick.org/download/ImageMagick.tar.gz" && \
wget -O /tmp/packages/swoole.tar.gz "https://codeload.github.com/swoole/swoole-src/tar.gz/master" && \
mv /tmp/packages .
|
// Interactive mean calculator: read one number per line until EOF (Ctrl-d),
// then print the arithmetic mean of everything entered.
const readline = require('readline');
const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout
});
// Valid numeric inputs collected so far.
let numbers = [];
console.log('Enter a number or Ctrl-d to exit.');
rl.on('line', (num) => {
  const value = Number(num);
  // BUG FIX: blank lines coerce to 0 and garbage coerces to NaN via
  // Number(); both used to be pushed and silently corrupt the mean.
  if (num.trim() === '' || Number.isNaN(value)) {
    console.log('Invalid number, ignored.');
  } else {
    numbers.push(value);
  }
  console.log('Enter a number or Ctrl-d to exit.');
});
rl.on('close', () => {
  // BUG FIX: guard against division by zero (printed "Mean: NaN" before).
  if (numbers.length === 0) {
    console.log('No numbers entered.');
  } else {
    console.log('Mean:', numbers.reduce((total, n) => total + n, 0) / numbers.length);
  }
});
#!/bin/bash
# Import pseudonymised genetic-test extracts into the data management system,
# one rake invocation per file, per provider code.
# Usage: <script> DIRPATH FILEPATH
BRAKE='bundle exec rake'
#db_cycle
OIFS="$IFS"
IFS=$'\n'
DIRPATH=$1
echo $DIRPATH
#DIRPATH=~/work/data_management_system
#FILEPATH="private/pseudonymised_data/updated_files/"
FILEPATH=$2
echo $FILEPATH

# import_prov PROV [extra find predicates...]
# Find every file for provider PROV under $DIRPATH/$FILEPATH matching the
# extra predicates and feed it to the import:brca rake task. IFS is set to
# newline around the unquoted $(find ...) so file names containing spaces
# survive word splitting, and restored inside the loop (original behaviour).
# This replaces fifteen near-identical copy-pasted loops; it also makes
# R0A/RJ7 use $BRAKE like every other provider, and drops the unused
# MBIS=$1 locals the originals carried.
import_prov () {
    local PROV="$1"
    shift
    IFS=$'\n'
    for x in $(find $DIRPATH/$FILEPATH -type f -path "*/$PROV/*" "$@")
    do
        IFS="$OIFS"
        $BRAKE import:brca fname="$(echo "$x" | sed -e 's:.*pseudonymised_data/\(.*\):\1:')" prov_code=$PROV
    done
}

# Each provider function lists only its own date-folder and file exclusions.
RVJ () {
    echo $DIRPATH/$FILEPATH
    import_prov RVJ -name "*.pseudo" \
        -not -path "*/2017-05-09/*" \
        -not -path "*/2017-06-15/*"
}
RTD () {
    echo $DIRPATH/$FILEPATH
    import_prov RTD -name "*.pseudo" \
        -not -path "*/2017-05-11/*" \
        -not -path "*/2017-08-02/*" \
        -not -path "*/2017-12-04/*" \
        ! -name "c687591ef56698ec8dde11e2a7420caea0c6173c_09.2018_CAPP2 Pilot data upload.xlsx.pseudo" \
        ! -name "3268d1e4e28913926cc534d3b8658eca36907050_09.2018_CAPP2 Full Data Upload.xlsx.pseudo" \
        ! -name "a34f62c8499c20a48d5c5c228f5084f8f994e84c_10.2018_CAPP2 Full Data Upload.xlsx.pseudo" \
        ! -name "dda53265da69898d1e9725919a3a706c5003cd79_01.11.2006 to 30.06.2019_NUTH Colorectal Data - November 2006 to June 2019.xlsx.pseudo" \
        ! -name "1b2d27101d5a8ba9471d3e31ee60ad26964affb7_12.2019_NDR - Colorectal.xls.pseudo" \
        ! -name "0731b0a6b8087fa77e4488ea7cb0f4254864a4bb_12.2019_NDR - Other Cancers.xls.pseudo" \
        ! -name "b725d664af1bcce326a9433794e00243f36a5227_2019_NDR - Colorectal Cancer.xlsx.pseudo" \
        ! -name "ebe61e790a57c3d6d082754b0a1bf3d02323a0f8_01.10.2019 to 30.11.2019_NDR - Colorectal.xls.pseudo" \
        ! -name "fba361cdb4aaa08ee00ea7670956ca81b6242941_10.2020_Colorectal Gene Data.xlsx.pseudo" \
        ! -name "245c2c5c8f10bebb655228a94b756b94e07f8aa4_08.2020_Colorectal Cancer 01.08.2020 - 31.08.2020.xlsx.pseudo" \
        ! -name "2fa4bc9f31a6e0c6276156f935dc35065e4c2bd1_07.2019_NDR - Colorectal.xls.pseudo" \
        ! -name "a8e43ecadd553e06d8dce4631de725d50f6f85a0_08.2019_NDR - Colorectal.xls.pseudo" \
        ! -name "5590b86d6e705f08d93ceef477bb5c6b46e3d358_09.2019_NDR - Colorectal.xls.pseudo" \
        ! -name "bcc0938c18088b6b483e664c14bb1bb9b5248781_01.2020_NDR - Colorectal Cancer.xlsx.pseudo" \
        ! -name "fd047a2a4cf34f6e5aa4bbecdc8e08b012da42c5_02.2020_NDR - Colorectal.xls.pseudo" \
        ! -name "41ae35e88b2e9f2b41f1e1b22ea6a9a010ca7885_03.2020_NDR - Colorectal.xls.pseudo" \
        ! -name "8d6891c2dc52226ca77b301180d58ec22f10922d_05.2020_NDR - Colorectal.xlsx.pseudo" \
        ! -name "ab107f8ba21a6c823de18b62aab2b0f459f74d63_06.2020_Colorectal Gene Data.xlsx.pseudo" \
        ! -name "e869c9f6c1b96b2b1c4bad564978b827e44fa944_01.07.2019 to 31.07.2020_Colorectal Cancer 01.07.2019 - 31.07.2020.xlsx.pseudo" \
        ! -name "4bae180b69902c9618d214fb8867ca7be3afcf57_09.2020_Colorectal Data 01.09.2020 - 30.09.2020.xlsx.pseudo" \
        ! -name "3f879c487642bc77e25868e6caa7686e7c86770e_11.2020_Colorectal Gene Data.xlsx.pseudo"
}
RR8 () {
    import_prov RR8 -name "*.pseudo" \
        -not -path "*/2017-03-17/*" \
        ! -name "3a4d3dc703789864fa6d2b8f5d9fe60749205979_01.01.2013 to 30.09.2018_010113_300918.xlsx.pseudo" \
        ! -name "c658a0516d5e91acefc59ece77126c50b6a774cc_01.01.2006 to 31.03.2018_MMR gene 2006_31032018.xlsx.pseudo" \
        ! -name "cadc0b639036cbbce5a1bc51e630bde90e8d1ee0_01.10.2018 to 27.12.2019_other cancers 011018_271219.xlsx.pseudo" \
        ! -name "abb7505fa1c13e0d675e969d52f357002b560dab_01.04.2018 to 27.12.2019_MMR 010418_271219.xlsx.pseudo" \
        ! -name "41ae35e88b2e9f2b41f1e1b22ea6a9a010ca7885_03.2020_NDR - Colorectal.xls.pseudo" \
        ! -name "a8e43ecadd553e06d8dce4631de725d50f6f85a0_08.2019_NDR - Colorectal.xls.pseudo" \
        ! -name "0ca547381e5644e9bfafc86ed56ba317a5a00b84_28.12.2019 to 30.11.2020_MMR 281219_301120.xlsx.pseudo"
}
RNZ () {
    import_prov RNZ -name "*.pseudo" \
        -not -path "*/2018-02-06/*" \
        ! -name "32b125df2fd306c7b2b6b7a6ec1362d368a02536_2017_Lynch full and predictives 2017.xlsx.pseudo" \
        ! -name "655e2321cd97be403ad7cf120b4132c52a26d79b_2018_Lynch full and predictives 2018.xlsx.pseudo" \
        ! -name "d47bfb9be436f0132fedb88be4a1685a02709fcf_2016_Lynch full and predictives 2016.xlsx.pseudo" \
        ! -name "ef6964b8789476f4e302b8ec199bd7718b1d101d_2019_Lynch full and predictives 2019.xlsx.pseudo" \
        ! -name "89d0d99aaccbcb34797c16015267c4cadbee61de_2015_Lynch full and predictives 2015.xlsx.pseudo" \
        ! -name "3a89d5a61f61b343adca31471ff39f8254226777_2019_PTEN full and predictives 2019.xlsx.pseudo" \
        ! -name "e10489ceaf13fb0c6bc31ec2195ed6511752571b_01.01.2020 to 31.07.2020_Lynch full and predictives 2020 Jan to July inclusive.xlsx.pseudo"
}
RX1 () {
    import_prov RX1 -name "*.pseudo" \
        -not -path "*/2017-06-14/*" \
        ! -name "89201abfdac7685739944b5a6ea314065ec95d41_01.07.2018 to 01.12.2018_Lynch Challenge July2018_Dec2018v9.xlsx.pseudo" \
        ! -name "a67fbd953bb8a2df2b9b2ed793a74c6f4ab2efe5_01.01.2010 to 30.06.2018_Lynch Challenge Jan2010_June2018v19.xlsx.pseudo" \
        ! -name "c7005f8821f316a795e007bff866eaab5e1b0b0d_01.01.2019 to 30.11.2019_Lynch Challenge Jan2019_Nov2019v3.xlsx.pseudo" \
        ! -name "dc7887680a728a55cb0ae04d1209f963f3d9f429_12.2019_Lynch Challenge Dec2019v7.xlsx.pseudo" \
        ! -name "4101dd36f57f03ebc1711ec74dba30e15760612d_01.01.2018 to 30.06.2018_Lynch Challenge Jan2018_June2018 additional genes v10.xlsx.pseudo" \
        ! -name "5108ccb441cb1bf07ab87a580c747aeb644151f6_01.01.2020 to 31.03.2020_Bowel Jan2020_March2020v6.xlsx.pseudo" \
        ! -name "04401b8fe2742e875d0ce7ebd53ffad7b73954c8_01.04.2020 to 30.06.2020_Bowel April2020_June2020v6.xlsx.pseudo" \
        ! -name "2781c1ff8d9be78a711016b5348cd1f78a8365cc_01.07.2020 to 30.09.2020_Bowel Julyl2020_Sept2020v5.xlsx.pseudo" \
        ! -name "6143beeed638c81f50dc60118086e5f4ad5ebfeb_01.10.2020 to 31.12.2020_Bowel Oct2020_Dec2020v3.xlsx.pseudo"
}
RCU () {
    import_prov RCU -name "*.pseudo" \
        ! -name "1acb5f31aa1f9d057b2105b9ac814c51f6f8bf44_01.04.2014 to 31.12.2018_Colorectal Cancer_09570820181212.csv.pseudo" \
        ! -name "723f1a253c120dfafe1439bf9150870cd0deda78_01.04.2014 to 01.12.2018_Historical_NonBRCA_NonColorectal_01042014-01122018.csv.pseudo" \
        ! -name "576f0670b0490bc788f673a5653c28cc1f7e7f7a_01.12.2019 to 31.12.2020_clean_Hereditary_Cancer_BRCA_CRC_31122019_31122020.csv.pseudo"
}
# RQ3 imports only two specific named files.
RQ3 () {
    import_prov RQ3 \( -name "a5a6c2470626d7c3b8e8e62380e30a02288a37f8_09.2020_BRCA Challenge AZOVCA 2015_2017.xlsx.pseudo" -o -name "9159d17e34ae13c12e8515f5ac930b49a3eb11a9_11.2020_BRCA Challenge BRCA to upload.xlsx.pseudo" \)
}
RJ1 () {
    import_prov RJ1 -name "*.pseudo"
}
RGT () {
    import_prov RGT -name "*.pseudo" \
        ! -name "45eab0f71c0fe071e320deea24d3ef89da0a4fe2_07.2016_mlpa_test.csv.pseudo" \
        ! -name "6c97b564b29711c02dbfb7d139cc6d4cbd6441e0_07.2016_mlpa_test.csv.pseudo" \
        ! -name "a6cb7307428d9895c2703300904e688eaa04e0e7_01.2013_brca_chal_dummy_171013.csv.pseudo" \
        ! -name "b7781fafea5cd1564e1270d69949643ff3d53323_01.2013_brca_chal_dummy.csv.pseudo" \
        ! -name "e9e4360b7b1cbd9e6d43fb15b9492b032af27b77_01.01.2009 to 31.10.2019_Lynch data 2009 to 2019 for checking no errors.csv.pseudo" \
        ! -name "fea859a7be837b797e84999e67f8fbe5397dfcff_12.2019_Lynch data 2009 to 2019 for checking.csv.pseudo" \
        ! -name "ea90909e3b33dc27f5e265bee8a583a56773cd29_01.08.2019 to 27.04.2020_Lynch_190801_200427_UPLOAD.csv.pseudo"
}
R0A () {
    import_prov R0A -name "*.pseudo" \
        ! -name "41cef5485574dddbb9704c4bc84973dde2fe2ca5_01.01.2007 to 31.12.2018_PHE HNPCC 2007-18.xls.pseudo" \
        ! -name "09cf085a0adc0df9a7c8c8ca4a894c0c242a2de6_12.2019_PHE HNPCC Extract_Dec19-Dec19.xls.pseudo" \
        ! -name "eb53bf3ca058b8bba047c4859985d78eb2fe99a1_01.01.2019 to 30.11.2019_PHE HNPCC Extract_Jan19-Nov19-mod.xls.pseudo"
}
RPY () {
    import_prov RPY -name "*.pseudo" \
        ! -name "8cd7b9ccf005413e8af99be1df07764b596751d3_12.2019_REST and TRIP13.xlsx.pseudo" \
        ! -name "440ae0dda3d4bdf26044aba95ee7ae4ea68241f7_12.2019_ready to submit TGL data only mainland uk with NHS numbers without BRCA_CORRECTED_wo_REST_TRIP13.xlsx.pseudo" \
        ! -name "8f98012da7c87b12ca1221dd3dc9d34a10952720_05.2019_BRCA only TGL data_cleaned2.xlsx.pseudo"
}
RJ7 () {
    import_prov RJ7 -name "*.pseudo" \
        -not -path "*/2021/*" \
        ! -name "c466c80823235315f4df98bb4a14c4937ee5cbc4_08.2020_STG HBOC PHE reported till 28082020.xlsx.pseudo"
}
RTH () {
    import_prov RTH -name "*.pseudo"
}
# R1K and RP4 only import BRCA extracts.
R1K () {
    import_prov R1K -name "*BRCA*.pseudo"
}
RP4 () {
    import_prov RP4 -name "*BRCA*.pseudo"
}
RTD; RQ3; RR8; RNZ; RVJ; RX1; RCU; RJ1; RGT; RPY; R0A; RJ7; RTH; R1K; RP4
|
#!/bin/bash
# Prepare the Python tooling for this workspace: upgrade pip/boto3/awscli
# system-wide, then install the project's user-level packages.
T=$(date)
PYTHON_PACKAGES="aws-encryption-sdk pathlib flask pyopenssl requests"
# Outside the test environment, additionally install the ikp3db debugger.
if [ -z ${TEST_ENVIRONMENT+x} ] || [ "${TEST_ENVIRONMENT}" != "1" ]; then
    # BUG FIX: previously expanded the undefined PACKAGE_NAMES, which wiped
    # the whole package list and installed only ikp3db in non-test setups.
    PYTHON_PACKAGES="${PYTHON_PACKAGES} ikp3db"
fi
echo configuring python environment, please wait...
echo $T > setup.log
sudo rm -rf /root/.cache/pip 2>&1 >> setup.log
sudo python -m pip install --upgrade pip 2>&1 >> setup.log
#sudo python -m pip uninstall -y aws-sam-cli 2>&1 >> setup.log
sudo python -m pip install --upgrade boto3 awscli 2>&1 >> setup.log
python -m pip install --user ${PYTHON_PACKAGES} 2>&1 >> setup.log
echo python setup complete
T=$(date)
echo $T >> setup.log
echo environment setup complete
/*
* An XML attribute type.
* Localname: offset
* Namespace: http://schemas.xmlsoap.org/soap/encoding/
* Java type: org.xmlsoap.schemas.soap.encoding.OffsetAttribute
*
* Automatically generated - do not modify.
*/
package org.xmlsoap.schemas.soap.encoding.impl;
/**
* A document containing one offset(@http://schemas.xmlsoap.org/soap/encoding/) attribute.
*
* This is a complex type.
*/
public class OffsetAttributeImpl extends org.apache.xmlbeans.impl.values.XmlComplexContentImpl implements org.xmlsoap.schemas.soap.encoding.OffsetAttribute
{
    // NOTE(review): XMLBeans-generated accessor class (see file header:
    // "Automatically generated - do not modify"). Regenerate from the schema
    // rather than hand-editing; comments below are review notes only.
    private static final long serialVersionUID = 1L;

    public OffsetAttributeImpl(org.apache.xmlbeans.SchemaType sType)
    {
        super(sType);
    }

    // Qualified name of the wrapped "offset" attribute.
    private static final javax.xml.namespace.QName OFFSET$0 =
        new javax.xml.namespace.QName("http://schemas.xmlsoap.org/soap/encoding/", "offset");

    /**
     * Gets the "offset" attribute, or null when the attribute is absent.
     */
    public java.lang.String getOffset()
    {
        synchronized (monitor())
        {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(OFFSET$0);
            if (target == null)
            {
                return null;
            }
            return target.getStringValue();
        }
    }

    /**
     * Gets (as xml) the "offset" attribute; may return null when absent.
     */
    public org.xmlsoap.schemas.soap.encoding.ArrayCoordinate xgetOffset()
    {
        synchronized (monitor())
        {
            check_orphaned();
            org.xmlsoap.schemas.soap.encoding.ArrayCoordinate target = null;
            target = (org.xmlsoap.schemas.soap.encoding.ArrayCoordinate)get_store().find_attribute_user(OFFSET$0);
            return target;
        }
    }

    /**
     * True if has "offset" attribute
     */
    public boolean isSetOffset()
    {
        synchronized (monitor())
        {
            check_orphaned();
            return get_store().find_attribute_user(OFFSET$0) != null;
        }
    }

    /**
     * Sets the "offset" attribute, creating it when it does not yet exist.
     */
    public void setOffset(java.lang.String offset)
    {
        synchronized (monitor())
        {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(OFFSET$0);
            if (target == null)
            {
                target = (org.apache.xmlbeans.SimpleValue)get_store().add_attribute_user(OFFSET$0);
            }
            target.setStringValue(offset);
        }
    }

    /**
     * Sets (as xml) the "offset" attribute, creating it when it does not yet exist.
     */
    public void xsetOffset(org.xmlsoap.schemas.soap.encoding.ArrayCoordinate offset)
    {
        synchronized (monitor())
        {
            check_orphaned();
            org.xmlsoap.schemas.soap.encoding.ArrayCoordinate target = null;
            target = (org.xmlsoap.schemas.soap.encoding.ArrayCoordinate)get_store().find_attribute_user(OFFSET$0);
            if (target == null)
            {
                target = (org.xmlsoap.schemas.soap.encoding.ArrayCoordinate)get_store().add_attribute_user(OFFSET$0);
            }
            target.set(offset);
        }
    }

    /**
     * Unsets the "offset" attribute
     */
    public void unsetOffset()
    {
        synchronized (monitor())
        {
            check_orphaned();
            get_store().remove_attribute(OFFSET$0);
        }
    }
}
|
package main
import (
"fmt"
"os"
"strconv"
"time"
"github.com/ipfs/go-log"
logrus "github.com/sirupsen/logrus"
)
// BlockFactory produces blocks when this node is the elected leader for the
// current slot and applies blocks received from peers.
type BlockFactory struct {
	Address         string                       // this node's identity (numeric string, compared against BP lists)
	PeerBFChan      chan *Block                  // blocks arriving from peers
	BFPeerChan      chan *Block                  // locally produced blocks, handed off for broadcast
	BFMemChan       chan bool                    // signal to the mempool: send your transactions
	MemBFChan       chan map[string]*Transaction // transactions supplied by the mempool
	ReturnBFMemChan chan map[string]*Transaction // processed transactions returned to the mempool
}
var bfLogger = log.Logger("bf")

// lastBlock is the most recently applied block (nil until the first one).
// NOTE(review): read and written from both the ticker and ServeInternal
// goroutines without synchronization — TODO confirm this race is acceptable.
var lastBlock *Block = nil
var currentSlot int64 // current producer slot within the epoch
var blockNo int64     // running block height derived from wall-clock time
var index *Index      // chain index / persistence layer

//only store tx when isLeader = true
var isLeader bool = false
// ticker is the slot clock. It aligns itself to the genesis timestamp, then
// on every blockTime tick recomputes the current slot and block number,
// updates the isLeader flag, and — when this node owns the current slot —
// signals the mempool (via BFMemChan) to hand over its transactions.
// Runs forever; start it in its own goroutine.
func (b *BlockFactory) ticker() {
	_, g := GetGenesis()
	// Sleep until the next blockTime boundary relative to genesis, so all
	// nodes tick in phase. (Original note said "sleep 5 block before start",
	// but the computed delay is at most one blockTime — TODO confirm intent.)
	sinceGenesis := blockTime - ((time.Now().UnixNano() - g.Timestamp) % blockTime)
	bfLogger.Infof("Sleep %d", sinceGenesis)
	time.Sleep(time.Duration(sinceGenesis))
	ticker := time.NewTicker(time.Duration(blockTime))
	// NOTE(review): the outer for is redundant — range over ticker.C never
	// terminates because the ticker is never stopped.
	for {
		for now := range ticker.C {
			// Slot within the epoch of TopK slots; the /1000000000 divisor
			// implies blockTime is expressed in nanoseconds with one-second
			// slots — TODO confirm.
			currentSlot = (now.UnixNano() - g.Timestamp) % (TopK * blockTime) / 1000000000
			blockNo = (time.Now().UnixNano() - g.Timestamp) / blockTime
			// Debug visibility for low-numbered node addresses only.
			if i, _ := strconv.Atoi(b.Address); i < 21 {
				bfLogger.Infof("Slot now: %d", int(currentSlot))
			}
			// Am I among the current block producers? (Falls back to the
			// genesis producer set until the first block exists.)
			if lastBlock != nil {
				if SliceExists(lastBlock.BPs, b.Address) {
					lock.Lock()
					isLeader = true
					lock.Unlock()
				} else {
					lock.Lock()
					isLeader = false
					lock.Unlock()
				}
				// Owner of the current slot asks the mempool for txs.
				if b.Address == lastBlock.BPs[currentSlot] {
					b.BFMemChan <- true
					bfLogger.Infof("I am %s", b.Address)
				}
			} else {
				// No block yet: use the initial (genesis) producer set.
				if SliceExists(g.BPs, b.Address) {
					lock.Lock()
					isLeader = true
					lock.Unlock()
				} else {
					lock.Lock()
					isLeader = false
					lock.Unlock()
				}
				if b.Address == g.BPs[currentSlot] {
					b.BFMemChan <- true
				}
			}
		}
	}
}
// ServeInternal is the block factory's main event loop. It multiplexes two
// inputs:
//   - MemBFChan: transactions from the mempool, from which a new block is
//     produced, indexed, and handed to BFPeerChan for broadcast;
//   - PeerBFChan: blocks produced by other nodes, which are indexed and
//     whose transactions are returned to the mempool for pruning.
// Runs forever; start it in its own goroutine.
func (b *BlockFactory) ServeInternal() {
	for {
		select {
		// Gather txs to produce a block (this node is the slot leader).
		case txs := <-b.MemBFChan:
			var tnx []Transaction
			for _, tx := range txs {
				tnx = append(tnx, *tx)
			}
			var bps []string
			fmt.Printf("for: %d\n", currentSlot)
			// At the start of an epoch (slot 0) recalculate the producer
			// set from the vote tally; otherwise carry the previous set.
			if currentSlot == 0 {
				topk := index.GetTopKVote(int(TopK))
				bfLogger.Infof("top k: %s", topk)
				bps = topk
			} else {
				if lastBlock == nil {
					bps = GetInitialBPs()
				} else {
					bps = lastBlock.BPs
				}
			}
			// The very first block chains off the literal "genesis" marker.
			var prevHash []byte
			if lastBlock == nil {
				prevHash = []byte("genesis")
			} else {
				prevHash = lastBlock.Hash
			}
			block := Block{
				Hash:      nil,
				PrevHash:  prevHash,
				Index:     int(blockNo),
				Timestamp: time.Now().UnixNano(),
				Creator:   b.Address,
				Txs:       tnx,
				BPs:       bps,
			}
			block.SetHash()
			index.Update(&block)
			lastBlock = &block
			b.ReturnBFMemChan <- txs
			b.BFPeerChan <- &block
			bfLogger.Info("done pr")

		// A peer produced a block: index it, adopt it, and return its txs
		// to the mempool so they can be removed from the pending pool.
		case block := <-b.PeerBFChan:
			txs := make(map[string]*Transaction)
			logrus.Infof("Got %d transaction, timestamp: %d", len(block.Txs), block.Timestamp)
			for _, tx := range block.Txs {
				// BUG FIX: copy the range variable before taking its address.
				// Previously txs[...] = &tx stored the address of the single
				// loop variable, so every map entry aliased the last
				// transaction (pre-Go 1.22 range semantics).
				tx := tx
				txs[string(tx.ID)] = &tx
			}
			// Only advance the index for blocks newer than our latest.
			if lastBlock != nil {
				if block.Index > lastBlock.Index {
					index.Update(block)
				}
			} else {
				index.Update(block)
			}
			// NOTE(review): lastBlock is replaced even when the incoming
			// block is older than the current one — TODO confirm intended.
			lastBlock = block
			b.ReturnBFMemChan <- txs
		}
	}
}
// init loads the genesis block, fixes TopK (producers per epoch) to the
// size of the genesis producer set, and opens or creates the chain index.
func (b *BlockFactory) init() {
	_, g := GetGenesis()
	TopK = int64(len(g.BPs))
	index = GetOrInitIndex()
}
// Start configures logging, initializes factory state, and launches the
// slot-clock and event-loop goroutines. Returns immediately.
func (b *BlockFactory) Start() {
	log.SetLogLevel("bf", "info")
	// The log file handle is intentionally kept open for the process
	// lifetime; logrus writes to it from here on.
	f, err := os.OpenFile(logFileName, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0644)
	if err != nil {
		// Cannot open log file. Logging to stderr
		fmt.Println(err)
	} else {
		logrus.SetOutput(f)
	}
	b.init()
	go b.ticker()
	go b.ServeInternal()
	bfLogger.Infof("BF started")
}
//func main() {
// b := BlockFactory{}
// b.Start()
//}
|
# Termux build recipe for GNU Chess.
TERMUX_PKG_HOMEPAGE=https://www.gnu.org/software/chess/
TERMUX_PKG_DESCRIPTION="Chess-playing program"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=6.2.8
TERMUX_PKG_REVISION=1
TERMUX_PKG_SRCURL=https://mirrors.kernel.org/gnu/chess/gnuchess-${TERMUX_PKG_VERSION}.tar.gz
# SHA-256 of the tarball above; must be updated together with the version.
TERMUX_PKG_SHA256=d50446cda8012240321da39cddbb4df4d08458a8d538a4738882814139583847
TERMUX_PKG_DEPENDS="libc++, ncurses, readline"
# Drop the alternate wrapper binaries from the packaged output.
TERMUX_PKG_RM_AFTER_INSTALL="bin/gnuchessu bin/gnuchessx"
|
<gh_stars>1-10
module SpreadsheetGoodies::GoogleDrive
  # Accesses Google Drive and returns a SpreadsheetGoodies::GoogleDrive::Worksheet.
  #
  # spreadsheet_key:  key of the spreadsheet (as found in its URL)
  # worksheet_title:  title of the worksheet tab; nil selects the default tab
  # num_header_rows:  number of leading header rows (default 1)
  def self.read_worksheet(spreadsheet_key:, worksheet_title:nil, num_header_rows:1)
    Worksheet.new(spreadsheet_key, worksheet_title, num_header_rows)
  end
end

# Eagerly loads every Ruby file under google_drive/ (e.g. the Worksheet class).
Dir[File.join(File.dirname(__FILE__), "google_drive/**/*.rb")].each { |f| require f }
|
import { createFactory, createToPlain, property, required } from '../../../src';
/**
 * Child payload object built from/serialized to plain objects via the
 * library's decorator-driven transformers (imported from '../../../src').
 */
export class ChildObject {
  /** Required identifier; `@required()` makes its absence a validation error. */
  @property()
  @required()
  id!: string;

  /** Display name; defaults to the empty string. */
  @property()
  name: string = '';

  /** Optional free-form options bag. */
  @property()
  options?: Record<string, unknown>;

  /** Builds a ChildObject instance from a plain object. */
  static factory = createFactory(ChildObject);
  /** Serializes a ChildObject instance back to a plain object. */
  static toPlain = createToPlain(ChildObject);
}
|
<filename>app/controllers/errors_controller.rb
# Renders the application's static error pages. Each action renders its
# default template with the matching HTTP status code.
#
# Statuses are given as Rails symbolic names (the convention recommended by
# the Rails guides); they map to the same numeric codes as before
# (:not_found => 404, :internal_server_error => 500, :forbidden => 403,
# :unauthorized => 401, :unprocessable_entity => 422).
class ErrorsController < ApplicationController
  def not_found
    render status: :not_found
  end

  def internal_server_error
    render status: :internal_server_error
  end

  def forbidden
    render status: :forbidden
  end

  def unauthorized
    render status: :unauthorized
  end

  def unprocessable_entity
    render status: :unprocessable_entity
  end
end
|
<reponame>kassovix/sonarqube-pullrequest-decorator-plugin
package com.github.goober.sonarqube.plugin.decorator.sonarqube.model;
import lombok.Builder;
import lombok.Value;
/**
 * Immutable value object describing a SonarQube metric definition.
 * Lombok's {@code @Value} generates the constructor, getters,
 * {@code equals}/{@code hashCode} and {@code toString}; {@code @Builder}
 * adds a fluent builder.
 */
@Builder
@Value
public class Metric {
    String key;         // metric key, e.g. as returned by the SonarQube web API
    String name;
    String description;
    String type;        // metric type string from SonarQube — TODO confirm value set
}
|
#!/bin/sh
# Launches DBpedia Spotlight with the English model and a 10 GB heap,
# serving its REST API on port 80 of all interfaces.
java -Dfile.encoding=UTF-8 -Xmx10G -jar /opt/spotlight/dbpedia-spotlight-nightly-build.jar /opt/spotlight/en http://0.0.0.0:80/rest
|
import pandas as pd
def add_abbreviated_position(df, formation_dict, index_col='Position Index',
                             out_col='Position'):
    """Add a column of abbreviated position names to a players dataframe.

    Generalized so the source/target column names are configurable; the
    defaults preserve the original behaviour exactly.

    Args:
        df: pandas DataFrame containing ``index_col``.
        formation_dict: mapping from position index to abbreviated name.
        index_col: column holding the position indices
            (default ``'Position Index'``).
        out_col: name of the column to create (default ``'Position'``).

    Returns:
        The same DataFrame (mutated in place) with ``out_col`` added.
        Indices missing from ``formation_dict`` map to NaN.
    """
    df[out_col] = df[index_col].map(formation_dict)
    return df
# Example usage
players_df = pd.DataFrame({'Player': ['John', 'Mike', 'Sarah', 'Emma'],
                           'Position Index': [1, 4, 6, 11]})
# Position-index -> abbreviation lookup (1 = GK ... 14 = ST).
formation_dict = {1: 'GK', 2: 'RB', 3: 'RCB', 4: 'CB', 5: 'LCB', 6: 'LB', 7: 'RWB',
                  8: 'CDM', 9: 'CM', 10: 'CAM', 11: 'LW', 12: 'RW', 13: 'CF', 14: 'ST'}
# Adds a 'Position' column to players_df mapped from 'Position Index'.
add_abbreviated_position(players_df, formation_dict)
<reponame>AWIXOR/nodejs-graphql
import React, { Component } from 'react';
import {Link} from "react-router-dom";
import LinearProgress from '@material-ui/core/LinearProgress';
import CircularProgress from '@material-ui/core/CircularProgress';
// Country: fetches the GitHub-contributions dataset for one country
// (route param :name) and renders a paginated, ranked contributor table.
// Route params used: name (country), from/to (slice bounds for paging).
class Country extends Component {
    constructor(props) {
        super(props);
        // data: contributor records; modified: dataset timestamp;
        // render: true once the fetch has populated state.
        this.state = {
            data : [],
            modified : null,
            render: false
        };
    }

    // Comparator: descending by total (public + private) contributions.
    compare( a, b ) {
        if ( a.public_contributions + a.private_contributions < b.public_contributions + b.private_contributions ){
            return 1;
        }
        if ( a.public_contributions + a.private_contributions > b.public_contributions + b.private_contributions ){
            return -1;
        }
        return 0;
    }

    // Fallback display string for users without a profile name.
    check_username(value) {
        if(value === null) {
            return 'Null'
        } else {
            return value
        }
    }

    // Slices the ranked list to the current page's [from, to) window.
    limit(value) {
        return value.slice((this.props.match.params.from), (this.props.match.params.to))
    }

    // Renders a spinner until data arrives, then the UTC-formatted date.
    mDate(date){
        if(!this.state.render){return <CircularProgress color="secondary" />}
        else return new Date(date).toUTCString();
    }

    // Fetches /contributions/<country> and stores dataset + timestamp.
    // NOTE(review): assumes the response is an array whose first element
    // has .dataset and .modified — TODO confirm against the API.
    renderCountryData(param){
        console.log(param);
        fetch('/contributions/'+ param)
            .then((response) => response.json())
            .then((responseJson) => {
                console.log(responseJson);
                try {
                    this.setState({data: responseJson[0].dataset, modified: responseJson[0].modified, render: true});
                } catch (e) {
                    console.log(e);
                }
            }).catch((error) => {
            console.error(error);
        });
    }

    componentDidMount(){
        var param = this.props.match.params.name;
        this.renderCountryData(param);
    }

    // Re-fetch when the route's country name changes.
    componentDidUpdate(prevProps) {
        var param = this.props.match.params.name;
        if (param !== prevProps.match.params.name)
            this.renderCountryData(param);
    }

    // "south_korea" -> "South Korea".
    titleCase(str) {
        str = str.replace(/_/g,' ');
        var splitStr = str.toLowerCase().split(' ');
        for (var i = 0; i < splitStr.length; i++) {
            splitStr[i] = splitStr[i].charAt(0).toUpperCase() + splitStr[i].substring(1);
        }
        return splitStr.join(' ');
    }

    // NOTE(review): sort() below mutates this.state.data during render, and
    // the external profile links use rel="noopener" without "noreferrer" —
    // both worth revisiting.
    render() {
        var isRender = this.state.render;
        if (!isRender) {
            return (
                <div className="App">
                    <div className=" bg-white w-100">
                        <LinearProgress color="secondary" />
                    </div>
                </div>) // note you can also return null here to render nothing
        } else {
            return (
                <div className="App">
                    <div className="bg-white rounded w-100">
                        <h6 className="mb-4"><Link className="font-weight-bold" to="/home">← Choose a country</Link></h6>
                        <h2 className="mb-2">Active users from {this.titleCase(this.props.match.params.name)}</h2>
                        <div className="table-responsive">
                            <table className="table table-borderless">
                                <caption>Last modified {this.mDate(this.state.modified)}</caption>
                                <thead>
                                <tr className="border-bottom">
                                    <th scope="col">#</th>
                                    <th scope="col">Image</th>
                                    <th scope="col">Username</th>
                                    <th scope="col">Location</th>
                                    <th scope="col">Total Contributions</th>
                                </tr>
                                </thead>
                                <tbody>
                                {this.limit(this.state.data.sort(this.compare)).map((item, key) => {
                                    return <tr key={key} className="border-bottom">
                                        <td className="align-middle">{parseInt(this.props.match.params.from) + key + 1}</td>
                                        <td className={"align-middle"}>
                                            <img className="rounded shadow" src={item.avatar_url}
                                                 alt={item.login} width="48" height="48"/>
                                        </td>
                                        <td className="align-middle">
                                            <a className="text-gray-dark text-black-50" target={'_blank'} rel="noopener" href={'https://github.com/' + item.login}>
                                                <strong>{this.check_username(item.name)}</strong>
                                                <span className="d-block">
                                                    @{item.login} ({item.followers})
                                                </span>
                                            </a>
                                        </td>
                                        <td className="text-black-50 align-middle text-break">{item.location}</td>
                                        <td className="font-weight-bold align-middle">{item.public_contributions + item.private_contributions}</td>
                                    </tr>
                                })}
                                </tbody>
                            </table>
                        </div>
                        <div className="bg-white rounded">
                            <nav className="nav pt-4 pb-2 table-responsive">
                                <ul className="pagination m-auto">
                                    <li className={"page-item " + (this.props.match.params.to === '300' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/300"}>All</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '25' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/25"}>25</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '50' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/50"}>50</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '75' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/75"}>75</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '100' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/100"}>100</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '125' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/125"}>125</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '150' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/150"}>150</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '175' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/175"}>175</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '200' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/200"}>200</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '225' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/225"}>225</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '250' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/250"}>250</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '275' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/275"}>275</Link>
                                    </li>
                                    <li className={"page-item " + (this.props.match.params.to === '300' ? "active" : null)}>
                                        <Link className="page-link" to={"/country/" + this.props.match.params.name + "/0/300"}>300</Link>
                                    </li>
                                </ul>
                            </nav>
                        </div>
                    </div>
                </div>
            );
        }
    }
}
|
<reponame>minyong-jeong/hello-algorithm<gh_stars>0
/*
https://leetcode.com/problems/validate-binary-search-tree/
98. Validate Binary Search Tree (Medium)
*/
/**
* Definition for a binary tree node.
* public class TreeNode {
* int val;
* TreeNode left;
* TreeNode right;
* TreeNode() {}
* TreeNode(int val) { this.val = val; }
* TreeNode(int val, TreeNode left, TreeNode right) {
* this.val = val;
* this.left = left;
* this.right = right;
* }
* }
*/
class Solution {
    /**
     * Checks that every node in the subtree rooted at {@code root} lies
     * strictly inside the open interval (low, high); a {@code null} bound
     * means that side is unbounded.
     */
    public boolean validate(TreeNode root, Integer low, Integer high) {
        if (root == null) {
            return true;
        }
        boolean aboveLow = (low == null) || (root.val > low);
        boolean belowHigh = (high == null) || (root.val < high);
        if (!aboveLow || !belowHigh) {
            return false;
        }
        // Left subtree inherits the upper bound root.val; right subtree
        // inherits the lower bound root.val.
        return validate(root.left, low, root.val)
                && validate(root.right, root.val, high);
    }

    /** Returns true when the whole tree satisfies the BST ordering invariant. */
    public boolean isValidBST(TreeNode root) {
        return validate(root, null, null);
    }
}
|
#!/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
# Builds the pytorch3d conda package using helpers from pkg_helpers.bash.
set -ex

script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
. "$script_dir/pkg_helpers.bash"

# Read the package version straight out of pytorch3d/__init__.py.
VERSION=$(python -c "exec(open('${script_dir}/../pytorch3d/__init__.py').read()); print(__version__)")

# Prevent dev tag in the version string.
export BUILD_VERSION=$VERSION
export BUILD_TYPE=conda
setup_env "$VERSION"
export SOURCE_ROOT_DIR="$PWD"
# Pin pytorch / cudatoolkit / Visual Studio constraints (from pkg_helpers.bash).
setup_conda_pytorch_constraint
setup_conda_cudatoolkit_constraint
setup_visual_studio_constraint

if [[ "$JUST_TESTRUN" == "1" ]]
then
  # We are not building for other users, we
  # are only trying to see if the tests pass.
  # So save time by only building for our own GPU.
  unset NVCC_FLAGS
fi

# shellcheck disable=SC2086
conda build $CONDA_CHANNEL_FLAGS ${TEST_FLAG:-} -c bottler -c fvcore -c iopath -c conda-forge --no-anaconda-upload --python "$PYTHON_VERSION" packaging/pytorch3d
|
<gh_stars>1-10
package pl.allegro.tech.boot.leader.only.curator;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.utility.DockerImageName;
import pl.allegro.tech.boot.leader.only.fixtures.SampleApplication;
import pl.allegro.tech.boot.leader.only.fixtures.SampleLeaderOnlyExecutor;
import static java.util.concurrent.TimeUnit.SECONDS;
import static java.util.function.Predicate.isEqual;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.assertTrue;
@Testcontainers
@SpringBootTest(classes = SampleApplication.class)
class CuratorLeadershipTest {

    /** Zookeeper's client port inside the container. */
    private static final int PORT = 2181;

    /** Throwaway Zookeeper instance shared by all tests in this class. */
    @Container
    public static final GenericContainer<?> zookeeper = new GenericContainer<>(DockerImageName.parse("zookeeper:3.6.2"))
            .withExposedPorts(PORT);

    /**
     * Points the curator-leadership properties at the container's dynamically
     * mapped host/port before the Spring context starts.
     */
    @DynamicPropertySource
    static void zookeeperProperties(DynamicPropertyRegistry registry) {
        registry.add("curator-leadership.connection-string", () ->
                zookeeper.getContainerIpAddress() + ":" + zookeeper.getMappedPort(PORT));
        registry.add("curator-leadership.namespace", () -> "test/path");
    }

    @Autowired
    SampleLeaderOnlyExecutor underTest;

    /** The executor answers only once this instance has acquired leadership. */
    @Test
    void shouldRespondOnlyOnLeader() {
        assertTrue(zookeeper.isRunning());
        await().atMost(5, SECONDS).until(() -> underTest.calculateWhatIsTwoPlusTwo(), isEqual(4));
    }
}
|
#!/usr/bin/env bash
# Exit on any error; explicitly disable pipefail so individual pipeline
# stages may fail without aborting the script.
set -e +o pipefail

# ANSI colour codes used by the logging helpers below.
i="\033[0;36m" # info
g="\033[0;32m" # green
e="\033[0;31m" # error
l="\033[0;90m" # log text
r="\033[0m"    # reset
# Logger
# Prints a message prefixed by the given colour code, in the shared log style.
# Usage: log <LEVEL> <MESSAGE>
log() {
    printf ' %b--> %b%b%b\n' "$1" "$l" "$2" "$r"
}
# Fatal logger
# Logs an error message and terminates the script.
# Usage: fatal <MESSAGE> <EXIT_CODE>
# The exit code is honoured only when exactly two arguments are supplied;
# otherwise the script exits with status 1.
fatal() {
    log "$e" "$1"
    if [ $# -eq 2 ]; then
        exit "$2"
    fi
    exit 1
}
# Greetings
# This function prints the ASCII-art banner for this coverage reporter.
# Usage: greetings
greetings() {
# NOTE: the heredoc below is deliberately unindented — its content is
# emitted verbatim.
cat << EOF
    ______          __
   / ____/___  ____/ /___ ________  __
  / /   / __ \/ __  / __ \`/ ___/ / / /
 / /___/ /_/ / /_/ / /_/ / /__/ /_/ /
 \____/\____/\__,_/\__,_/\___/\__, /
                             /____/

  Codacy Coverage Reporter

EOF
}

greetings
# Error trap
# Reports "Succeeded!" or "Failed!" based on the exit status of the last
# command when the script terminates.
# Usage: exit_trap   (installed via `trap exit_trap EXIT`)
exit_trap() {
    local status=$?
    echo
    if [ "$status" -ne 0 ]; then
        fatal "Failed!"
    fi
    log "$g" "Succeeded!"
}
# Report success/failure on exit, and die cleanly on Ctrl-C.
trap exit_trap EXIT
trap 'fatal Interrupted' INT

# Temporary folder for downloaded files (overridable by the caller).
if [ -z "$CODACY_REPORTER_TMP_FOLDER" ]; then
    CODACY_REPORTER_TMP_FOLDER=".codacy-coverage"
fi
mkdir -p "$CODACY_REPORTER_TMP_FOLDER"

# Reporter version to download; "latest" resolves via the API at run time.
if [ -z "$CODACY_REPORTER_VERSION" ]; then
    CODACY_REPORTER_VERSION="latest"
fi
# download <URL> [OUTPUT]
# Fetches URL with curl or wget, whichever is installed; OUTPUT defaults to
# stdout ("-"). Fails fatally when neither tool is available.
download() {
    local url="$1"
    local output="${2:--}"

    if [ -x "$(which curl)" ]; then
        curl -# -LS "$url" -o "$output"
    elif [ -x "$(which wget)" ] ; then
        wget "$url" -O "$output"
    else
        fatal "Could not find curl or wget, please install one."
    fi
}
# get_version
# Echoes the reporter version to use: either $CODACY_REPORTER_VERSION, or —
# when that is "latest" — the newest version reported by the Bintray API.
# NOTE(review): Bintray was sunset in 2021, so the "latest" lookup may no
# longer resolve — TODO confirm and migrate if needed.
get_version() {
    if [ "$CODACY_REPORTER_VERSION" == "latest" ]; then
        bintray_latest_api_url="https://api.bintray.com/packages/codacy/Binaries/codacy-coverage-reporter/versions/_latest"
        # Extract the x.y.z version number from the JSON "name" field.
        download $bintray_latest_api_url | sed -e 's/.*name[^0-9]*\([0-9]\{1,\}[.][0-9]\{1,\}[.][0-9]\{1,\}\).*/\1/'
    else
        echo "$CODACY_REPORTER_VERSION"
    fi
}
# download_coverage_reporter <BINARY_NAME> <DEST_PATH>
# Downloads the named reporter artifact to DEST_PATH unless it is already
# cached there from a previous run.
download_coverage_reporter() {
    local binary_name=$1
    local codacy_reporter=$2

    if [ ! -f "$codacy_reporter" ]
    then
        log "$i" "Download the codacy reporter $1... ($CODACY_REPORTER_VERSION)"
        bintray_api_url="https://dl.bintray.com/codacy/Binaries/$(get_version)/$binary_name"
        download "$bintray_api_url" "$codacy_reporter"
    else
        log "$i" "Using codacy reporter $1 from cache"
    fi
}
# run <COMMAND STRING...>
# Executes its arguments through eval so that quoting embedded in
# $run_command is honoured.
# NOTE(review): eval re-parses the joined string, so shell metacharacters
# in caller-supplied arguments are interpreted too.
run() {
    eval "$@"
}
# codacy_reporter_native_start_cmd <SUFFIX>
# Downloads the native reporter binary for SUFFIX (linux/darwin), marks it
# executable, and stores the launch command in the global $run_command.
codacy_reporter_native_start_cmd() {
    local suffix=$1
    local codacy_reporter="$CODACY_REPORTER_TMP_FOLDER/codacy-coverage-reporter"
    download_coverage_reporter "codacy-coverage-reporter-$suffix" "$codacy_reporter"
    # BUG FIX: quote the path — the unquoted expansion word-split (and broke
    # chmod) when CODACY_REPORTER_TMP_FOLDER contains spaces; every other use
    # of this variable in the script is quoted.
    chmod +x "$codacy_reporter"
    run_command="$codacy_reporter"
}
# codacy_reporter_jar_start_cmd
# Fallback for platforms without a native binary: downloads the assembly jar
# and stores a "java -jar ..." launch command in the global $run_command.
codacy_reporter_jar_start_cmd() {
    local codacy_reporter="$CODACY_REPORTER_TMP_FOLDER/codacy-coverage-reporter-assembly.jar"
    download_coverage_reporter "codacy-coverage-reporter-assembly.jar" "$codacy_reporter"
    run_command="java -jar \"$codacy_reporter\""
}
# Pick the launch strategy by OS: native binaries for Linux/macOS, the
# assembly jar everywhere else.
run_command=""
unamestr=`uname`
if [ "$unamestr" = "Linux" ]; then
    codacy_reporter_native_start_cmd "linux"
elif [ "$unamestr" = "Darwin" ]; then
    codacy_reporter_native_start_cmd "darwin"
else
    codacy_reporter_jar_start_cmd
fi

if [ -z "$run_command" ]
then
    fatal "Codacy coverage reporter command could not be found."
fi

# Forward any CLI arguments to the reporter; default to "report".
if [ "$#" -gt 0 ];
then
    run "$run_command $@"
else
    run "$run_command \"report\""
fi
|
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.11.4
// source: user/userpb/user.proto
package userpb
import (
context "context"
proto "github.com/golang/protobuf/proto"
paymentpb "github.com/xidongc/mongo_ebenchmark/model/payment/paymentpb"
_ "google.golang.org/genproto/googleapis/api/annotations"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
// NOTE(review): protoc-generated version guards — regenerate with protoc
// rather than hand-editing.
const (
	// Verify that this generated code is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
	// Verify that runtime/protoimpl is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)

// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
// NewRequest: generated by protoc-gen-go from user/userpb/user.proto —
// do not hand-edit; regenerate with protoc (see file header).
type NewRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Name     string             `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	Nickname string             `protobuf:"bytes,2,opt,name=nickname,proto3" json:"nickname,omitempty"`
	Email    string             `protobuf:"bytes,3,opt,name=email,proto3" json:"email,omitempty"`
	Active   bool               `protobuf:"varint,4,opt,name=active,proto3" json:"active,omitempty"`
	Balance  int64              `protobuf:"varint,5,opt,name=balance,proto3" json:"balance,omitempty"`
	Currency paymentpb.Currency `protobuf:"varint,6,opt,name=currency,proto3,enum=paymentpb.Currency" json:"currency,omitempty"`
	Image    string             `protobuf:"bytes,7,opt,name=image,proto3" json:"image,omitempty"`
	Pwd      string             `protobuf:"bytes,8,opt,name=pwd,proto3" json:"pwd,omitempty"`
	Metadata map[string]string  `protobuf:"bytes,9,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}

func (x *NewRequest) Reset() {
	*x = NewRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_user_userpb_user_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *NewRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*NewRequest) ProtoMessage() {}

func (x *NewRequest) ProtoReflect() protoreflect.Message {
	mi := &file_user_userpb_user_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use NewRequest.ProtoReflect.Descriptor instead.
func (*NewRequest) Descriptor() ([]byte, []int) {
	return file_user_userpb_user_proto_rawDescGZIP(), []int{0}
}

func (x *NewRequest) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *NewRequest) GetNickname() string {
	if x != nil {
		return x.Nickname
	}
	return ""
}

func (x *NewRequest) GetEmail() string {
	if x != nil {
		return x.Email
	}
	return ""
}

func (x *NewRequest) GetActive() bool {
	if x != nil {
		return x.Active
	}
	return false
}

func (x *NewRequest) GetBalance() int64 {
	if x != nil {
		return x.Balance
	}
	return 0
}

func (x *NewRequest) GetCurrency() paymentpb.Currency {
	if x != nil {
		return x.Currency
	}
	return paymentpb.Currency_CUR_RESERVED
}

func (x *NewRequest) GetImage() string {
	if x != nil {
		return x.Image
	}
	return ""
}

func (x *NewRequest) GetPwd() string {
	if x != nil {
		return x.Pwd
	}
	return ""
}

func (x *NewRequest) GetMetadata() map[string]string {
	if x != nil {
		return x.Metadata
	}
	return nil
}
// Empty: generated by protoc-gen-go from user/userpb/user.proto —
// do not hand-edit; regenerate with protoc (see file header).
type Empty struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
}

func (x *Empty) Reset() {
	*x = Empty{}
	if protoimpl.UnsafeEnabled {
		mi := &file_user_userpb_user_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Empty) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Empty) ProtoMessage() {}

func (x *Empty) ProtoReflect() protoreflect.Message {
	mi := &file_user_userpb_user_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Empty.ProtoReflect.Descriptor instead.
func (*Empty) Descriptor() ([]byte, []int) {
	return file_user_userpb_user_proto_rawDescGZIP(), []int{1}
}
// GetRequest: generated by protoc-gen-go from user/userpb/user.proto —
// do not hand-edit; regenerate with protoc (see file header).
type GetRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Nickname string `protobuf:"bytes,1,opt,name=nickname,proto3" json:"nickname,omitempty"`
}

func (x *GetRequest) Reset() {
	*x = GetRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_user_userpb_user_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *GetRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*GetRequest) ProtoMessage() {}

func (x *GetRequest) ProtoReflect() protoreflect.Message {
	mi := &file_user_userpb_user_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GetRequest.ProtoReflect.Descriptor instead.
func (*GetRequest) Descriptor() ([]byte, []int) {
	return file_user_userpb_user_proto_rawDescGZIP(), []int{2}
}

func (x *GetRequest) GetNickname() string {
	if x != nil {
		return x.Nickname
	}
	return ""
}
// UpdateRequest: generated by protoc-gen-go from user/userpb/user.proto —
// do not hand-edit; regenerate with protoc (see file header).
type UpdateRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Nickname string             `protobuf:"bytes,1,opt,name=nickname,proto3" json:"nickname,omitempty"`
	Name     string             `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
	Pwd      string             `protobuf:"bytes,3,opt,name=pwd,proto3" json:"pwd,omitempty"`
	Email    string             `protobuf:"bytes,4,opt,name=email,proto3" json:"email,omitempty"`
	Active   bool               `protobuf:"varint,5,opt,name=active,proto3" json:"active,omitempty"`
	Balance  int64              `protobuf:"varint,6,opt,name=balance,proto3" json:"balance,omitempty"`
	Currency paymentpb.Currency `protobuf:"varint,7,opt,name=currency,proto3,enum=paymentpb.Currency" json:"currency,omitempty"`
	Image    string             `protobuf:"bytes,8,opt,name=image,proto3" json:"image,omitempty"`
	Metadata map[string]string  `protobuf:"bytes,9,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}

func (x *UpdateRequest) Reset() {
	*x = UpdateRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_user_userpb_user_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *UpdateRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*UpdateRequest) ProtoMessage() {}

func (x *UpdateRequest) ProtoReflect() protoreflect.Message {
	mi := &file_user_userpb_user_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use UpdateRequest.ProtoReflect.Descriptor instead.
func (*UpdateRequest) Descriptor() ([]byte, []int) {
	return file_user_userpb_user_proto_rawDescGZIP(), []int{3}
}

func (x *UpdateRequest) GetNickname() string {
	if x != nil {
		return x.Nickname
	}
	return ""
}

func (x *UpdateRequest) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *UpdateRequest) GetPwd() string {
	if x != nil {
		return x.Pwd
	}
	return ""
}

func (x *UpdateRequest) GetEmail() string {
	if x != nil {
		return x.Email
	}
	return ""
}

func (x *UpdateRequest) GetActive() bool {
	if x != nil {
		return x.Active
	}
	return false
}

func (x *UpdateRequest) GetBalance() int64 {
	if x != nil {
		return x.Balance
	}
	return 0
}

func (x *UpdateRequest) GetCurrency() paymentpb.Currency {
	if x != nil {
		return x.Currency
	}
	return paymentpb.Currency_CUR_RESERVED
}

func (x *UpdateRequest) GetImage() string {
	if x != nil {
		return x.Image
	}
	return ""
}

func (x *UpdateRequest) GetMetadata() map[string]string {
	if x != nil {
		return x.Metadata
	}
	return nil
}
// DeleteRequest: generated by protoc-gen-go from user/userpb/user.proto —
// do not hand-edit; regenerate with protoc (see file header).
type DeleteRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Nickname string `protobuf:"bytes,1,opt,name=nickname,proto3" json:"nickname,omitempty"`
}

func (x *DeleteRequest) Reset() {
	*x = DeleteRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_user_userpb_user_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *DeleteRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*DeleteRequest) ProtoMessage() {}

func (x *DeleteRequest) ProtoReflect() protoreflect.Message {
	mi := &file_user_userpb_user_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use DeleteRequest.ProtoReflect.Descriptor instead.
func (*DeleteRequest) Descriptor() ([]byte, []int) {
	return file_user_userpb_user_proto_rawDescGZIP(), []int{4}
}

func (x *DeleteRequest) GetNickname() string {
	if x != nil {
		return x.Nickname
	}
	return ""
}
// User is the service's user record as carried over the wire. Field numbers
// 998/999 hold the created/updated timestamps (int64, per the descriptor).
type User struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Id       string             `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
	Name     string             `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
	Nickname string             `protobuf:"bytes,3,opt,name=nickname,proto3" json:"nickname,omitempty"`
	Email    string             `protobuf:"bytes,4,opt,name=email,proto3" json:"email,omitempty"`
	Active   bool               `protobuf:"varint,5,opt,name=active,proto3" json:"active,omitempty"`
	Balance  int64              `protobuf:"varint,6,opt,name=balance,proto3" json:"balance,omitempty"`
	Currency paymentpb.Currency `protobuf:"varint,7,opt,name=currency,proto3,enum=paymentpb.Currency" json:"currency,omitempty"`
	Image    string             `protobuf:"bytes,8,opt,name=image,proto3" json:"image,omitempty"`
	Pwd      string             `protobuf:"bytes,9,opt,name=pwd,proto3" json:"pwd,omitempty"`
	Metadata map[string]string  `protobuf:"bytes,10,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	Created  int64              `protobuf:"varint,998,opt,name=created,proto3" json:"created,omitempty"`
	Updated  int64              `protobuf:"varint,999,opt,name=updated,proto3" json:"updated,omitempty"`
}

// Reset restores the message to its zero state.
func (x *User) Reset() {
	*x = User{}
	if protoimpl.UnsafeEnabled {
		mi := &file_user_userpb_user_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message in the prototext format.
func (x *User) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks User as a protobuf message.
func (*User) ProtoMessage() {}

// ProtoReflect exposes the message through the protoreflect API.
func (x *User) ProtoReflect() protoreflect.Message {
	mi := &file_user_userpb_user_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use User.ProtoReflect.Descriptor instead.
func (*User) Descriptor() ([]byte, []int) {
	return file_user_userpb_user_proto_rawDescGZIP(), []int{5}
}

// Nil-safe accessors for User: each getter returns the corresponding field,
// or the field's zero value when the receiver is nil.
func (x *User) GetId() string {
	if x != nil {
		return x.Id
	}
	return ""
}
func (x *User) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}
func (x *User) GetNickname() string {
	if x != nil {
		return x.Nickname
	}
	return ""
}
func (x *User) GetEmail() string {
	if x != nil {
		return x.Email
	}
	return ""
}
func (x *User) GetActive() bool {
	if x != nil {
		return x.Active
	}
	return false
}
func (x *User) GetBalance() int64 {
	if x != nil {
		return x.Balance
	}
	return 0
}
// Currency_CUR_RESERVED is the zero value of the paymentpb.Currency enum.
func (x *User) GetCurrency() paymentpb.Currency {
	if x != nil {
		return x.Currency
	}
	return paymentpb.Currency_CUR_RESERVED
}
func (x *User) GetImage() string {
	if x != nil {
		return x.Image
	}
	return ""
}
func (x *User) GetPwd() string {
	if x != nil {
		return x.Pwd
	}
	return ""
}
func (x *User) GetMetadata() map[string]string {
	if x != nil {
		return x.Metadata
	}
	return nil
}
func (x *User) GetCreated() int64 {
	if x != nil {
		return x.Created
	}
	return 0
}
func (x *User) GetUpdated() int64 {
	if x != nil {
		return x.Updated
	}
	return 0
}
var File_user_userpb_user_proto protoreflect.FileDescriptor
var file_user_userpb_user_proto_rawDesc = []byte{
0x0a, 0x16, 0x75, 0x73, 0x65, 0x72, 0x2f, 0x75, 0x73, 0x65, 0x72, 0x70, 0x62, 0x2f, 0x75, 0x73,
0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x75, 0x73, 0x65, 0x72, 0x70, 0x62,
0x1a, 0x1f, 0x70, 0x61, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x2f, 0x70, 0x61, 0x79, 0x6d, 0x65, 0x6e,
0x74, 0x70, 0x62, 0x2f, 0x70, 0x61, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e,
0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22,
0xd8, 0x02, 0x0a, 0x0a, 0x4e, 0x65, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12,
0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61,
0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x6e, 0x69, 0x63, 0x6b, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02,
0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6e, 0x69, 0x63, 0x6b, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14,
0x0a, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65,
0x6d, 0x61, 0x69, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x18, 0x04,
0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x12, 0x18, 0x0a, 0x07,
0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x62,
0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x08, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e,
0x63, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x70, 0x61, 0x79, 0x6d, 0x65,
0x6e, 0x74, 0x70, 0x62, 0x2e, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x52, 0x08, 0x63,
0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65,
0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x12, 0x10, 0x0a,
0x03, 0x70, 0x77, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x70, 0x77, 0x64, 0x12,
0x3c, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x09, 0x20, 0x03, 0x28,
0x0b, 0x32, 0x20, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x70, 0x62, 0x2e, 0x4e, 0x65, 0x77, 0x52, 0x65,
0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e,
0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x3b, 0x0a,
0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10,
0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79,
0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52,
0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x07, 0x0a, 0x05, 0x45, 0x6d,
0x70, 0x74, 0x79, 0x22, 0x28, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x12, 0x1a, 0x0a, 0x08, 0x6e, 0x69, 0x63, 0x6b, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x09, 0x52, 0x08, 0x6e, 0x69, 0x63, 0x6b, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xde, 0x02,
0x0a, 0x0d, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
0x1a, 0x0a, 0x08, 0x6e, 0x69, 0x63, 0x6b, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
0x09, 0x52, 0x08, 0x6e, 0x69, 0x63, 0x6b, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e,
0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12,
0x10, 0x0a, 0x03, 0x70, 0x77, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x70, 0x77,
0x64, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09,
0x52, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x76,
0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x12,
0x18, 0x0a, 0x07, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03,
0x52, 0x07, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x08, 0x63, 0x75, 0x72,
0x72, 0x65, 0x6e, 0x63, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x70, 0x61,
0x79, 0x6d, 0x65, 0x6e, 0x74, 0x70, 0x62, 0x2e, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79,
0x52, 0x08, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6d,
0x61, 0x67, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65,
0x12, 0x3f, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x09, 0x20, 0x03,
0x28, 0x0b, 0x32, 0x23, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x70, 0x62, 0x2e, 0x55, 0x70, 0x64, 0x61,
0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61,
0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
0x61, 0x1a, 0x3b, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74,
0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20,
0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x2b,
0x0a, 0x0d, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
0x1a, 0x0a, 0x08, 0x6e, 0x69, 0x63, 0x6b, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
0x09, 0x52, 0x08, 0x6e, 0x69, 0x63, 0x6b, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x92, 0x03, 0x0a, 0x04,
0x55, 0x73, 0x65, 0x72, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01,
0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x6e, 0x69, 0x63, 0x6b,
0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6e, 0x69, 0x63, 0x6b,
0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x04, 0x20,
0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63,
0x74, 0x69, 0x76, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69,
0x76, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x18, 0x06, 0x20,
0x01, 0x28, 0x03, 0x52, 0x07, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x08,
0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13,
0x2e, 0x70, 0x61, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x70, 0x62, 0x2e, 0x43, 0x75, 0x72, 0x72, 0x65,
0x6e, 0x63, 0x79, 0x52, 0x08, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x14, 0x0a,
0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6d,
0x61, 0x67, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x70, 0x77, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09,
0x52, 0x03, 0x70, 0x77, 0x64, 0x12, 0x36, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
0x61, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x70, 0x62,
0x2e, 0x55, 0x73, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e,
0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x19, 0x0a,
0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x18, 0xe6, 0x07, 0x20, 0x01, 0x28, 0x03, 0x52,
0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x12, 0x19, 0x0a, 0x07, 0x75, 0x70, 0x64, 0x61,
0x74, 0x65, 0x64, 0x18, 0xe7, 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x75, 0x70, 0x64, 0x61,
0x74, 0x65, 0x64, 0x1a, 0x3b, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45,
0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28,
0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18,
0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01,
0x32, 0xc8, 0x01, 0x0a, 0x0b, 0x55, 0x73, 0x65, 0x72, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65,
0x12, 0x39, 0x0a, 0x03, 0x4e, 0x65, 0x77, 0x12, 0x12, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x70, 0x62,
0x2e, 0x4e, 0x65, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x75, 0x73,
0x65, 0x72, 0x70, 0x62, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x22, 0x10, 0x82, 0xd3, 0xe4, 0x93, 0x02,
0x0a, 0x22, 0x05, 0x2f, 0x75, 0x73, 0x65, 0x72, 0x3a, 0x01, 0x2a, 0x12, 0x39, 0x0a, 0x03, 0x47,
0x65, 0x74, 0x12, 0x12, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x52,
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x70, 0x62, 0x2e,
0x55, 0x73, 0x65, 0x72, 0x22, 0x10, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x0a, 0x12, 0x05, 0x2f, 0x75,
0x73, 0x65, 0x72, 0x3a, 0x01, 0x2a, 0x12, 0x43, 0x0a, 0x0a, 0x44, 0x65, 0x61, 0x63, 0x74, 0x69,
0x76, 0x61, 0x74, 0x65, 0x12, 0x15, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x70, 0x62, 0x2e, 0x44, 0x65,
0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x75, 0x73,
0x65, 0x72, 0x70, 0x62, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x22, 0x10, 0x82, 0xd3, 0xe4, 0x93, 0x02,
0x0a, 0x2a, 0x05, 0x2f, 0x75, 0x73, 0x65, 0x72, 0x3a, 0x01, 0x2a, 0x42, 0x37, 0x5a, 0x35, 0x67,
0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x78, 0x69, 0x64, 0x6f, 0x6e, 0x67,
0x63, 0x2f, 0x6d, 0x6f, 0x6e, 0x67, 0x6f, 0x5f, 0x65, 0x62, 0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61,
0x72, 0x6b, 0x2f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2f, 0x75, 0x73, 0x65, 0x72, 0x2f, 0x75, 0x73,
0x65, 0x72, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
	file_user_userpb_user_proto_rawDescOnce sync.Once
	file_user_userpb_user_proto_rawDescData = file_user_userpb_user_proto_rawDesc
)

// file_user_userpb_user_proto_rawDescGZIP gzip-compresses the raw file
// descriptor exactly once (guarded by sync.Once) and returns the cached
// compressed bytes; it backs the deprecated Descriptor methods above.
func file_user_userpb_user_proto_rawDescGZIP() []byte {
	file_user_userpb_user_proto_rawDescOnce.Do(func() {
		file_user_userpb_user_proto_rawDescData = protoimpl.X.CompressGZIP(file_user_userpb_user_proto_rawDescData)
	})
	return file_user_userpb_user_proto_rawDescData
}
// One MessageInfo slot per message type in this file: 6 top-level messages
// plus 3 map-entry types (indexes 6-8 below).
var file_user_userpb_user_proto_msgTypes = make([]protoimpl.MessageInfo, 9)

// Maps descriptor type indexes to their Go types; map-entry messages have
// no dedicated Go type and are nil.
var file_user_userpb_user_proto_goTypes = []interface{}{
	(*NewRequest)(nil),       // 0: userpb.NewRequest
	(*Empty)(nil),            // 1: userpb.Empty
	(*GetRequest)(nil),       // 2: userpb.GetRequest
	(*UpdateRequest)(nil),    // 3: userpb.UpdateRequest
	(*DeleteRequest)(nil),    // 4: userpb.DeleteRequest
	(*User)(nil),             // 5: userpb.User
	nil,                      // 6: userpb.NewRequest.MetadataEntry
	nil,                      // 7: userpb.UpdateRequest.MetadataEntry
	nil,                      // 8: userpb.User.MetadataEntry
	(paymentpb.Currency)(0),  // 9: paymentpb.Currency
}

// Dependency edges into goTypes above: field type_names first, then the
// service method input/output types, with the trailing sub-list offsets.
var file_user_userpb_user_proto_depIdxs = []int32{
	9,  // 0: userpb.NewRequest.currency:type_name -> paymentpb.Currency
	6,  // 1: userpb.NewRequest.metadata:type_name -> userpb.NewRequest.MetadataEntry
	9,  // 2: userpb.UpdateRequest.currency:type_name -> paymentpb.Currency
	7,  // 3: userpb.UpdateRequest.metadata:type_name -> userpb.UpdateRequest.MetadataEntry
	9,  // 4: userpb.User.currency:type_name -> paymentpb.Currency
	8,  // 5: userpb.User.metadata:type_name -> userpb.User.MetadataEntry
	0,  // 6: userpb.UserService.New:input_type -> userpb.NewRequest
	2,  // 7: userpb.UserService.Get:input_type -> userpb.GetRequest
	4,  // 8: userpb.UserService.Deactivate:input_type -> userpb.DeleteRequest
	5,  // 9: userpb.UserService.New:output_type -> userpb.User
	5,  // 10: userpb.UserService.Get:output_type -> userpb.User
	5,  // 11: userpb.UserService.Deactivate:output_type -> userpb.User
	9,  // [9:12] is the sub-list for method output_type
	6,  // [6:9] is the sub-list for method input_type
	6,  // [6:6] is the sub-list for extension type_name
	6,  // [6:6] is the sub-list for extension extendee
	0,  // [0:6] is the sub-list for field type_name
}
func init() { file_user_userpb_user_proto_init() }

// file_user_userpb_user_proto_init builds this file's protobuf type
// descriptors. Idempotent: a second call returns immediately once
// File_user_userpb_user_proto is set.
func file_user_userpb_user_proto_init() {
	if File_user_userpb_user_proto != nil {
		return
	}
	// Without unsafe access, register exporter closures that let the runtime
	// reach each message's unexported bookkeeping fields by index.
	if !protoimpl.UnsafeEnabled {
		file_user_userpb_user_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*NewRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_user_userpb_user_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*Empty); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_user_userpb_user_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GetRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_user_userpb_user_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*UpdateRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_user_userpb_user_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*DeleteRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_user_userpb_user_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*User); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_user_userpb_user_proto_rawDesc,
			NumEnums:      0,
			NumMessages:   9,
			NumExtensions: 0,
			NumServices:   1,
		},
		GoTypes:           file_user_userpb_user_proto_goTypes,
		DependencyIndexes: file_user_userpb_user_proto_depIdxs,
		MessageInfos:      file_user_userpb_user_proto_msgTypes,
	}.Build()
	File_user_userpb_user_proto = out.File
	// Build-time tables are no longer needed; release them for GC.
	file_user_userpb_user_proto_rawDesc = nil
	file_user_userpb_user_proto_goTypes = nil
	file_user_userpb_user_proto_depIdxs = nil
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6

// UserServiceClient is the client API for UserService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type UserServiceClient interface {
	New(ctx context.Context, in *NewRequest, opts ...grpc.CallOption) (*User, error)
	Get(ctx context.Context, in *GetRequest, opts ...grpc.CallOption) (*User, error)
	Deactivate(ctx context.Context, in *DeleteRequest, opts ...grpc.CallOption) (*User, error)
}

// userServiceClient is the concrete client; it simply forwards unary calls
// over the supplied connection.
type userServiceClient struct {
	cc grpc.ClientConnInterface
}

// NewUserServiceClient wraps an existing client connection in the
// UserService client API.
func NewUserServiceClient(cc grpc.ClientConnInterface) UserServiceClient {
	return &userServiceClient{cc}
}
// New invokes /userpb.UserService/New and returns the created User.
func (c *userServiceClient) New(ctx context.Context, in *NewRequest, opts ...grpc.CallOption) (*User, error) {
	out := new(User)
	err := c.cc.Invoke(ctx, "/userpb.UserService/New", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Get invokes /userpb.UserService/Get and returns the matching User.
func (c *userServiceClient) Get(ctx context.Context, in *GetRequest, opts ...grpc.CallOption) (*User, error) {
	out := new(User)
	err := c.cc.Invoke(ctx, "/userpb.UserService/Get", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Deactivate invokes /userpb.UserService/Deactivate and returns the
// affected User.
func (c *userServiceClient) Deactivate(ctx context.Context, in *DeleteRequest, opts ...grpc.CallOption) (*User, error) {
	out := new(User)
	err := c.cc.Invoke(ctx, "/userpb.UserService/Deactivate", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}
// UserServiceServer is the server API for UserService service.
type UserServiceServer interface {
	New(context.Context, *NewRequest) (*User, error)
	Get(context.Context, *GetRequest) (*User, error)
	Deactivate(context.Context, *DeleteRequest) (*User, error)
}

// UnimplementedUserServiceServer can be embedded to have forward compatible implementations.
// Every method returns codes.Unimplemented.
type UnimplementedUserServiceServer struct {
}

func (*UnimplementedUserServiceServer) New(context.Context, *NewRequest) (*User, error) {
	return nil, status.Errorf(codes.Unimplemented, "method New not implemented")
}
func (*UnimplementedUserServiceServer) Get(context.Context, *GetRequest) (*User, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Get not implemented")
}
func (*UnimplementedUserServiceServer) Deactivate(context.Context, *DeleteRequest) (*User, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Deactivate not implemented")
}
// RegisterUserServiceServer registers the UserService implementation with a
// gRPC server.
func RegisterUserServiceServer(s *grpc.Server, srv UserServiceServer) {
	s.RegisterService(&_UserService_serviceDesc, srv)
}

// _UserService_New_Handler decodes the request, then dispatches to the
// server's New method, passing through the interceptor when one is set.
func _UserService_New_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(NewRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(UserServiceServer).New(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/userpb.UserService/New",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(UserServiceServer).New(ctx, req.(*NewRequest))
	}
	return interceptor(ctx, in, info, handler)
}

// _UserService_Get_Handler: same dispatch pattern for Get.
func _UserService_Get_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(UserServiceServer).Get(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/userpb.UserService/Get",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(UserServiceServer).Get(ctx, req.(*GetRequest))
	}
	return interceptor(ctx, in, info, handler)
}

// _UserService_Deactivate_Handler: same dispatch pattern for Deactivate.
func _UserService_Deactivate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(DeleteRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(UserServiceServer).Deactivate(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/userpb.UserService/Deactivate",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(UserServiceServer).Deactivate(ctx, req.(*DeleteRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// _UserService_serviceDesc wires the three unary methods to their handlers;
// the service defines no streaming methods.
var _UserService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "userpb.UserService",
	HandlerType: (*UserServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "New",
			Handler:    _UserService_New_Handler,
		},
		{
			MethodName: "Get",
			Handler:    _UserService_Get_Handler,
		},
		{
			MethodName: "Deactivate",
			Handler:    _UserService_Deactivate_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "user/userpb/user.proto",
}
|
#!/usr/bin/env bash
# Copyright 2020 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script runs the end-to-end tests against eventing built from source.
# If you already have the *_OVERRIDE environment variables set, call
# this script with the --run-tests arguments and it will use the cluster
# and run the tests.
# Calling this script without arguments will create a new cluster in
# project $PROJECT_ID, start Knative eventing system, run the tests and
# delete the cluster.
export GO111MODULE=on

source "$(dirname "$0")/e2e-common.sh"

# Script entry point.
# BUG FIX: "$@" must be quoted; an unquoted $@ word-splits any argument that
# contains whitespace before it reaches initialize().
initialize "$@" --skip-istio-addon

echo "Running Conformance tests for: Multi Tenant Channel Based Broker (v1beta1), Channel (v1beta1, v1), InMemoryChannel (v1beta1, v1) , ApiServerSource (v1beta1, v1), ContainerSource (v1alpha2, v1) and PingSource (v1beta1, v1beta2)"

# Run the conformance suite; fail_test reports and exits non-zero on failure.
go_test_e2e -timeout=30m -parallel=12 ./test/conformance \
-brokers=eventing.knative.dev/v1beta1:MTChannelBasedBroker \
-channels=messaging.knative.dev/v1beta1:Channel,messaging.knative.dev/v1beta1:InMemoryChannel,messaging.knative.dev/v1:Channel,messaging.knative.dev/v1:InMemoryChannel \
-sources=sources.knative.dev/v1beta1:ApiServerSource,sources.knative.dev/v1alpha2:ContainerSource,sources.knative.dev/v1beta1:PingSource,sources.knative.dev/v1beta2:PingSource,sources.knative.dev/v1:ApiServerSource,sources.knative.dev/v1:ContainerSource \
|| fail_test

success
|
import logging
class DHCP:
    """DHCP packet handler backed by a DHCP database.

    NOTE(review): ``handle_discover`` calls ``super().handle_discover`` even
    though this class derives only from ``object``, and ``__init__`` uses
    ``dhcp_db`` which is not imported in this chunk. The class therefore
    appears intended as a mixin used in a multiple-inheritance hierarchy
    whose enclosing module provides ``dhcp_db`` -- confirm at the usage site.
    """

    def __init__(self, *args, **kwargs):
        # ``dhcp_db`` is presumably imported by the surrounding module --
        # TODO confirm; as written this chunk only imports ``logging``.
        self.db = dhcp_db.DHCPDB()

    def handle_discover(self, pkt, datapath, in_port):
        """Handle discover
        This method provides hooks for extra processing, needs to be done
        for specific to requirement. If no extra information is needed then
        call super method.
        """
        logging.info("Handling dhcp 'discover' from %s datapath on %s port",
                     datapath.id, in_port)
        # Delegate to the cooperating base class (requires this class to be
        # mixed in ahead of a class that implements handle_discover).
        return super(DHCP, self).handle_discover(pkt, datapath, in_port)
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.druid
import java.util
import com.google.common.collect.ImmutableList
import io.druid.data.input.InputRow
import io.druid.hll.HyperLogLogCollector
import io.druid.query.filter.{Filter => _, _}
import org.apache.hadoop.io.NullWritable
import org.joda.time.{DateTime, Interval}
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext
import scala.util.{Failure, Success}
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.{NewHadoopRDD, RDD}
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.catalyst.expressions.{AttributeSet, GenericRow}
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.sources.{
And => SAnd,
IsNotNull => SIsNotNull,
Not => SNot,
Or => SOr,
_
}
import org.apache.spark.sql.types._
import org.apache.spark.sql.xsql.types._
/**
 * Spark data source provider for Druid, registered under the short name
 * "druid" (usable as `format("druid")`).
 */
private[sql] class DefaultSource
  extends DataSourceRegister
  with RelationProvider
  with SchemaRelationProvider {

  override def shortName(): String = "druid"

  /**
   * Returns a new base relation with the given parameters.
   *
   * @note The parameters' keywords are case insensitive and this insensitivity is enforced
   * by the Map that is passed to the function.
   *
   * NOTE(review): unlike the schema-providing overload below, this variant
   * does not call addPhysicalRules -- confirm whether that is intentional.
   */
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = {
    DruidRelation(sqlContext, parameters)
  }

  /** Registers the Druid planning strategy and optimizer rule (additive). */
  def addPhysicalRules(sqlContext: SQLContext): Unit = {
    sqlContext.sparkSession.experimental.extraStrategies ++=
      Seq(new DruidStrategy())
    sqlContext.sparkSession.experimental.extraOptimizations ++=
      Seq(DruidRule)
  }

  /**
   * Returns a new base relation with the given parameters and user defined schema.
   * @note The parameters' keywords are case insensitive and this insensitivity is enforced
   * by the Map that is passed to the function.
   */
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String],
      schema: StructType): BaseRelation = {
    val relation = DruidRelation(sqlContext, parameters, Some(schema))
    // add experimental rules
    addPhysicalRules(sqlContext)
    relation
  }
}
private[sql] case class DruidRelation(
sqlContext: SQLContext,
parameters: Map[String, String],
userSchema: Option[StructType] = None)
extends BaseRelation
with PushDownAggregateScan
with Logging {
// Query time bounds and granularity; mutated as filters are consumed by
// getPushDownFilters before each scan.
var startTime: String = null
var endTime: String = null
var granularity: Granularity = SimpleGranularity.All

// Connection/table options (keys are lowercased by the parameters map).
val url = parameters.getOrElse("url", null)
val coordinator = parameters.getOrElse("coordinator", null)
val datasource = parameters.getOrElse("datasource", null)
if (datasource == null) {
  throw new IllegalArgumentException("datasource must set when create table")
}
// Column holding the Druid event timestamp; required for interval pushdown.
val timestampcolumn = parameters.getOrElse("timestampcolumn", null)
if (timestampcolumn == null) {
  throw new IllegalArgumentException("timestampcolumn must set when create table")
}
// NOTE(review): userSchema.get throws when no schema was supplied (the
// schemaless createRelation path) -- confirm callers always provide one.
override def schema: StructType = userSchema.get
/** Scans with no required columns and no filters. */
def buildScan(): RDD[Row] = buildScan(Array.empty)
/** Scans the given columns with no filters. */
def buildScan(requiredColumns: Array[String]): RDD[Row] = buildScan(requiredColumns, Array.empty)
/**
 * Builds the scan RDD, choosing a Druid query type from the pushed-down
 * aggregation/grouping state and the remaining (non-consumed) filters.
 *
 * Side effect: getPushDownFilters mutates startTime/endTime/granularity
 * before the bounds are validated below.
 */
override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
  val otherfilters = getPushDownFilters(filters)
  if (startTime == null) {
    throw new IllegalArgumentException(s"the $timestampcolumn lowerbound must be set in query")
  }
  if (endTime == null) {
    throw new IllegalArgumentException(s"the $timestampcolumn uperbound must be set in query")
  }
  // execScanQuery(requiredColumns, otherfilters)
  // if (aggregateExpressions == null && groupingExpressions == null && !otherfilters.isEmpty) {
  // No aggregation and no residual filters -> plain select query.
  if (aggregateExpressions == null && groupingExpressions == null && otherfilters.isEmpty) {
    logInfo("execute SelectQueries")
    execSelectQuery(requiredColumns, otherfilters)
  } else if (aggregateExpressions == null
    && groupingExpressions == null && !otherfilters.isEmpty) {
    logInfo("execute ScanQueries")
    execScanQuery(requiredColumns, otherfilters)
  } else {
    // NOTE(review): groupingExpressions is dereferenced here; this branch
    // assumes it is non-null whenever aggregateExpressions is -- confirm.
    val groupFields =
      AttributeSet(groupingExpressions.flatMap(_.references)).map(_.name).toArray
    // Grouping solely by the timestamp column maps to a timeseries query.
    if (groupFields.size == 1 && groupFields(0) == timestampcolumn) {
      logInfo("execute TimeSeriesQuery")
      execTimeSeriesQuery(requiredColumns, otherfilters)
    } else {
      logInfo("execute GroupByQuery")
      execGroupByQuery(requiredColumns, otherfilters)
    }
  }
}
/**
 * Parses a granularity option string into a [[Granularity]].
 *
 * Accepts either a simple keyword (e.g. "day", "hour") or a comma-separated
 * list of `key:value` pairs containing a `type` of "period" or "duration".
 *
 * @throws IllegalArgumentException when the string cannot be parsed.
 */
def getGranularity(granularity: String): Option[Granularity] = {
  val sampleGranularityValue = Set(
    "all",
    "none",
    "second",
    "minute",
    "fifteen_minute",
    "thirty_minute",
    "hour",
    "day",
    "week",
    "month",
    "quarter",
    "year")
  val strValue = granularity.trim
  val v = strValue.split(",")
  if (v.size == 1) {
    if (sampleGranularityValue.contains(strValue)) {
      Some(SimpleGranularity(strValue))
    } else {
      throw new IllegalArgumentException(s"can not analysis $granularity")
    }
  } else {
    val vMap = v.map { x =>
      val a = x.split(":")
      if (a.size != 2) throw new IllegalArgumentException(s"can not analysis $granularity")
      (a(0).trim, a(1).trim)
    }.toMap
    if (vMap.contains("type")) {
      vMap.getOrElse("type", null) match {
        case "period" =>
          Some(
            PeriodGranularity(
              vMap.getOrElse("period", null),
              vMap.getOrElse("timeZone", null),
              vMap.getOrElse("origin", null)))
        case "duration" =>
          // BUG FIX: the third argument previously re-read the "duration"
          // key; like PeriodGranularity above, it should be "origin".
          Some(
            DurationGranularity(
              vMap.getOrElse("duration", null),
              vMap.getOrElse("timeZone", null),
              vMap.getOrElse("origin", null)))
        case _ => throw new IllegalArgumentException(s"can not analysis $granularity")
      }
    } else {
      throw new IllegalArgumentException(s"can not analysis $granularity")
    }
  }
}
/**
 * Consumes the predicates this relation handles itself -- the granularity
 * option and the timestamp-column bounds (stored into the mutable
 * granularity/startTime/endTime fields) -- and returns the remaining
 * filters for the Druid query itself.
 */
def getPushDownFilters(filters: Array[Filter]): Array[Filter] = {
  val (_, passThrough) = filters.partition {
    case EqualTo("granularity", value) =>
      getGranularity(value.toString).foreach(g => granularity = g)
      true
    case LessThan(`timestampcolumn`, value) =>
      endTime = value.toString
      true
    case LessThanOrEqual(`timestampcolumn`, value) =>
      endTime = value.toString
      true
    case GreaterThan(`timestampcolumn`, value) =>
      startTime = value.toString
      true
    case GreaterThanOrEqual(`timestampcolumn`, value) =>
      startTime = value.toString
      true
    case _ =>
      false
  }
  passThrough
}
/**
 * Runs a Druid select query over [startTime, endTime) and converts the
 * response rows to an RDD[Row].
 *
 * NOTE(review): completion is awaited by polling with 500ms sleeps rather
 * than Await.result, and on Failure `data` stays null, so the size log /
 * conversion below would NPE -- flagged for follow-up, behavior preserved.
 */
def execSelectQuery(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
  implicit val executionContext = ExecutionContext.Implicits.global
  val client = DruidClient(url)
  val fieldMap = Map.empty[String, String]
  val query = SelectQuery(
    source = datasource,
    interval = new Interval(new DateTime(startTime), new DateTime(endTime)),
    granularity = granularity,
    dimensions = requiredColumns,
    filter = if (filters.isEmpty) QueryFilter.All else createDruidFilters(filters))
  val future = client(query)
  var data: Seq[Map[String, Any]] = null
  future.onComplete {
    case Success(resp) => data = resp.data
    case Failure(ex) => ex.printStackTrace()
  }
  // Busy-wait until the client future resolves.
  while (!future.isCompleted) {
    logInfo("sleep 500ms")
    Thread.sleep(500)
  }
  logInfo("get selectByQuery result and the size is " + data.size)
  client.close()
  converedToRow(data, requiredColumns, fieldMap)
}
def createDruidFilters(filters: Array[Filter]): QueryFilter = {
val f = filters.filter(!_.isInstanceOf[SIsNotNull])
if (f.length > 0) {
And(f.map(filter => translateFilter(filter)))
} else {
QueryFilter.All
}
}
def createDruidDimFilters(filters: Array[Filter]): DimFilter = {
val list = new util.ArrayList[DimFilter](filters.size)
for (filter <- filters) {
list.add(translateToDruidFilter(filter))
}
new AndDimFilter(list)
}
def translateFilter(filter: Filter): QueryFilter = {
filter match {
case EqualTo(attribute, value) => SelectorQueryFilter(attribute, value.toString)
case SAnd(left, right) => And(Seq(translateFilter(left), translateFilter(right)))
case SOr(left, right) => Or(Seq(translateFilter(left), translateFilter(right)))
case SNot(filterToNeg) => Not(translateFilter(filterToNeg))
// case SIsNotNull(attributeNotNull) => IsNotNull(attributeNotNull)
case f: Product if isClass(f, "org.apache.spark.sql.sources.StringStartsWith") =>
val arg = f.productElement(1).toString()
RegexQueryFilter(f.productElement(0).toString(), "$arg*")
case f: Product if isClass(f, "org.apache.spark.sql.sources.StringEndsWith") =>
val arg = f.productElement(1).toString()
RegexQueryFilter(f.productElement(0).toString(), "*$arg")
case f: Product if isClass(f, "org.apache.spark.sql.sources.StringContains") =>
val arg = f.productElement(1).toString()
RegexQueryFilter(f.productElement(0).toString(), "*$arg*")
}
}
def scalaArrayToJavaCollection(values: Array[Any]): util.Collection[String] = {
val list = new util.ArrayList[String](values.size)
values.map { value =>
list.add(value.toString)
}
list
}
  /**
   * Translates a Spark data-source [[Filter]] into a native Druid [[DimFilter]]
   * (used by the scan path). IsNotNull is mapped to NOT(dim = "").
   * NOTE(review): unsupported filter types fall through to a MatchError —
   * callers are expected to pass only translatable filters; confirm.
   */
  def translateToDruidFilter(filter: Filter): DimFilter = {
    filter match {
      case EqualTo(attribute, value) => new SelectorDimFilter(attribute, value.toString, null)
      case SAnd(left, right) =>
        new AndDimFilter(
          ImmutableList
            .of[DimFilter](translateToDruidFilter(left), translateToDruidFilter(right)))
      case SOr(left, right) =>
        new OrDimFilter(
          ImmutableList
            .of[DimFilter](translateToDruidFilter(left), translateToDruidFilter(right)))
      case SNot(filterToNeg) => new NotDimFilter(translateToDruidFilter(filterToNeg))
      case In(attribute, values) =>
        new InDimFilter(attribute, scalaArrayToJavaCollection(values), null)
      case SIsNotNull(attribute) => new NotDimFilter(new SelectorDimFilter(attribute, "", null))
    }
  }
def isClass(obj: Any, className: String): Boolean = {
className.equals(obj.getClass().getName())
}
  /**
   * Reads raw rows via DruidInputFormat (a Hadoop input format) instead of the
   * HTTP query API, projecting `requiredColumns` into GenericRows.
   *
   * Column handling: "__time" is filled from the row timestamp; HyperLogLog
   * sketch columns are returned as null (they have no scalar value here);
   * Float raws are widened to Double via string round-trip.
   */
  def execScanQuery(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
    val intervals = new util.ArrayList[Interval](1)
    intervals.add(new Interval(new DateTime(startTime), new DateTime(endTime)))
    val columns = new util.ArrayList[String]()
    requiredColumns.map { column =>
      columns.add(column)
    }
    val df = createDruidDimFilters(filters)
    // The input format reads its query spec from the Hadoop configuration.
    DruidInputFormat.setInputs(
      sqlContext.sparkContext.hadoopConfiguration,
      coordinator,
      datasource,
      intervals,
      df,
      columns)
    val rdd = new NewHadoopRDD[NullWritable, InputRow](
      sqlContext.sparkContext,
      classOf[DruidInputFormat],
      classOf[NullWritable],
      classOf[InputRow],
      sqlContext.sparkContext.hadoopConfiguration)
    rdd.map(_._2).map { row =>
      val size = requiredColumns.size
      val r = new Array[Any](size)
      for (i <- 0 until size) {
        if ("__time".equalsIgnoreCase(requiredColumns(i))) {
          r(i) = row.getTimestamp.toString()
        } else {
          val hll = row.getRaw(requiredColumns(i))
          if (hll.isInstanceOf[HyperLogLogCollector]) {
            r(i) = null
          } else if (hll.isInstanceOf[Float]) {
            r(i) = java.lang.Double.parseDouble(hll + "")
          } else {
            r(i) = hll
          }
        }
      }
      new GenericRow(r)
    }
  }
def converedToRow(
data: Seq[Map[String, Any]],
requiredColumns: Array[String],
sparkfField2Druid: Map[String, String]): RDD[Row] = {
val numColumns = requiredColumns.length
sqlContext.sparkContext.parallelize(data, 2).mapPartitions { data =>
for (d <- data) yield {
val row = new Array[Any](numColumns)
for (id <- 0 until numColumns) {
val value = d.getOrElse(requiredColumns(id), null)
if (value == null) {
row(id) = ""
} else {
row(id) = value match {
case v: BigInt => v.toLong
case v: Double =>
if (!sparkfField2Druid
.get(requiredColumns(id))
.get
.isEmpty) { v.longValue() } else v
case _ => value
}
}
}
new GenericRow(row)
}
}
}
  /**
   * Runs a Druid timeseries query with the Druid aggregations derived from the
   * Spark aggregate expressions, then converts the bucketed results to Rows.
   * NOTE(review): like execSelectQuery, a failed future only prints the stack
   * trace and leaves `data` null, which would NPE at the log line — confirm.
   */
  def execTimeSeriesQuery(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
    implicit val executionContext = ExecutionContext.Implicits.global
    val client = DruidClient(url)
    val (aggDruid, fieldMap) = converedToDriudAggregate
    val query = TimeSeriesQuery(
      source = datasource,
      interval = new Interval(new DateTime(startTime), new DateTime(endTime)),
      granularity = granularity,
      aggregate = aggDruid,
      postAggregate = Seq(),
      filter = if (filters.isEmpty) QueryFilter.All else createDruidFilters(filters))
    val future = client(query)
    var data: Seq[(DateTime, Map[String, Any])] = null
    future.onComplete {
      case Success(resp) => data = resp.data
      case Failure(ex) => ex.printStackTrace()
    }
    while (!future.isCompleted) {
      logInfo("sleep 500ms")
      Thread.sleep(500)
    }
    logInfo("get TimeSeriesQuery result and the size is " + data.size)
    client.close()
    converedToTsRow(data, requiredColumns, fieldMap)
  }
def converedToDriudAggregate(): (Seq[Aggregation], Map[String, String]) = {
val aggExpr = aggregateExpressions.flatMap { expr =>
expr.collect {
case agg: AggregateExpression => (expr.toAttribute.name, agg)
}
}.distinct
val aggregateFunction = aggExpr.map {
case (name, agg) =>
var aggFunc = ""
var field = ""
var nameDruid = ""
var t = ""
val aggregateFunction = agg.aggregateFunction
aggregateFunction match {
case s: Sum =>
s.dataType match {
case LongType | IntegerType =>
aggFunc = "longSum"
field = s.references.map(_.name).mkString(" ")
case DoubleType | FloatType =>
aggFunc = "doubleSum"
field = s.references.map(_.name).mkString(" ")
case _ => None
}
case s: Max =>
s.dataType match {
case LongType | IntegerType =>
aggFunc = "longMax"
field = s.references.map(_.name).mkString(" ")
case DoubleType | FloatType =>
aggFunc = "doubleMax"
field = s.references.map(_.name).mkString(" ")
case _ => None
}
case s: Min =>
s.dataType match {
case LongType | IntegerType =>
aggFunc = "longMin"
field = s.references.map(_.name).mkString(" ")
case DoubleType | FloatType =>
aggFunc = "doubleMax"
field = s.references.map(_.name).mkString(" ")
case _ => None
}
case c: Count =>
aggFunc = "count"
if (agg.isDistinct) {
aggFunc = "countdistinct"
t = c.references.baseSet.head.a.metadata.getString(DRUID_TYPE_STRING)
}
field = c.references.map(_.name).mkString(" ")
case _ => None
}
(name, (aggFunc, field, t))
}
val fieldMap = aggregateFunction.map(x => (x._1, x._2._3)).toMap
logInfo("fieldMap is " + fieldMap)
import org.apache.spark.sql.execution.datasources.druid.DSL._
val aggDruid = aggregateFunction.map {
case (nameDruid, (f, n, t)) =>
f match {
case "longSum" => sum(n, nameDruid)
case "doubleSum" => doubleSum(n, nameDruid)
case "longMax" => max(n, nameDruid)
case "doubleMax" => doubleMax(n, nameDruid)
case "longMin" => min(n, nameDruid)
case "doubleMin" => doubleMin(n, nameDruid)
case "count" => count(nameDruid)
case "countdistinct" => countdistinct(n, t, nameDruid)
}
}.toSeq
(aggDruid, fieldMap)
}
def converedToTsRow(
data: Seq[(DateTime, Map[String, Any])],
requiredColumns: Array[String],
sparkfField2Druid: Map[String, String]): RDD[Row] = {
val numColumns = requiredColumns.length
sqlContext.sparkContext.parallelize(data, 1).mapPartitions { data =>
for ((t, d) <- data) yield {
val row = new Array[Any](numColumns)
for (id <- 0 until numColumns) {
if (requiredColumns(id) == timestampcolumn) {
row(id) = t.toString
} else {
val value = d.getOrElse(requiredColumns(id), null)
row(id) = value match {
case v: BigInt => v.toLong
case v: Double =>
if (!sparkfField2Druid.get(requiredColumns(id)).get.isEmpty) {
v.longValue
} else {
v
}
case _ => value
}
}
}
new GenericRow(row)
}
}
}
def converedToTsRow2(
data: Seq[(DateTime, Seq[Map[String, Any]])],
requiredColumns: Array[String],
sparkfField2Druid: Map[String, String]): RDD[Row] = {
val numColumns = requiredColumns.length
sqlContext.sparkContext.parallelize(data, 1).mapPartitions { data =>
val list = ArrayBuffer.empty[Row]
for ((t, d) <- data) {
for (dd <- d) {
val row = new Array[Any](numColumns)
for (id <- 0 until numColumns) {
if (requiredColumns(id) == timestampcolumn) {
row(id) = t.toString
} else {
val value = dd.getOrElse(requiredColumns(id), null)
row(id) = value match {
case v: BigInt => v.toLong
case v: Double =>
if (!sparkfField2Druid.get(requiredColumns(id)).get.isEmpty) {
v.longValue
} else {
v
}
case _ => value
}
}
}
list.append(new GenericRow(row))
}
}
list.iterator
}
}
  /**
   * Executes a grouped aggregation. A Druid topN query is used when there is
   * exactly one grouping field, one ORDER BY and a positive limit; otherwise a
   * groupBy query (with a default limit of 20 when none is given) is issued.
   * NOTE(review): this path uses `new DruidClient(url)` while the other exec*
   * methods use the `DruidClient(url)` factory — confirm both are equivalent.
   */
  def execGroupByQuery(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
    implicit val executionContext = ExecutionContext.Implicits.global
    val client = new DruidClient(url)
    var groupFields = AttributeSet(groupingExpressions.flatMap(_.references)).map(_.name).toSeq
    val (aggDruid, fieldMap) = converedToDriudAggregate
    var order = new ArrayBuffer[ColumnOrder]
    if (orders != null) {
      orders.map { o =>
        val t = o.child.dataType
        if (t.isInstanceOf[NumericType]) {
          order += ColumnOrder(o.child.references.head.name, o.direction.sql, "numeric")
        } else {
          order += ColumnOrder(o.child.references.head.name, o.direction.sql)
        }
      }
    }
    if (groupFields.size == 1 && orders != null && orders.size == 1 && limit > 0) {
      // topN
      var m: Metric = null
      if (orders(0).dataType.isInstanceOf[NumericType]) {
        if (orders(0).direction.sql.equalsIgnoreCase("descending")) {
          // NOTE(review): "numberic" looks like a typo for "numeric" — confirm
          // against Metric's accepted type strings.
          m = Metric("numberic", orders(0).child.references.head.name)
        } else {
          // Ascending order is expressed as an inverted topN metric.
          m = Metric("inverted", orders(0).child.references.head.name)
        }
      } else {
        m = Metric(metric = orders(0).child.references.head.name)
      }
      val query = TopNSelectQuery(
        source = datasource,
        dimension = groupFields(0),
        metric = m,
        interval = new Interval(new DateTime(startTime), new DateTime(endTime)),
        granularity = granularity,
        aggregate = aggDruid,
        filter = if (filters.isEmpty) QueryFilter.All else createDruidFilters(filters),
        limit = limit)
      if (granularity.toString.equalsIgnoreCase("all")) {
        // Single "all" bucket: flat rows, no timestamp column needed.
        var data: Seq[Map[String, Any]] = null
        val future = client.queryTopN(query)
        future.onComplete {
          case Success(resp) => data = resp.data
          case Failure(ex) => throw new RuntimeException(ex)
        }
        while (!future.isCompleted) {
          logInfo("sleep 100ms")
          Thread.sleep(100)
        }
        client.close()
        converedToRow(data, requiredColumns, fieldMap)
      } else {
        val future = client.queryTopN2(query)
        var data: Seq[(DateTime, Seq[Map[String, Any]])] = null
        future.onComplete {
          case Success(resp) => data = resp.data
          case Failure(ex) => throw new RuntimeException(ex)
        }
        while (!future.isCompleted) {
          logInfo("sleep 100ms")
          Thread.sleep(100)
        }
        logInfo("get TimeSeriesQuery result and the size is " + data.size)
        client.close()
        converedToTsRow2(data, requiredColumns, fieldMap)
      }
    } else {
      // groupBy path: Druid supplies __time per bucket, so drop it as a dimension.
      groupFields = groupFields.filter(!_.equalsIgnoreCase("__time"))
      val query = GroupByQuery(
        source = datasource,
        interval = new Interval(new DateTime(startTime), new DateTime(endTime)),
        granularity = granularity,
        dimensions = groupFields,
        aggregate = aggDruid,
        postAggregate = Seq(),
        filter = if (filters.isEmpty) QueryFilter.All else createDruidFilters(filters),
        orderBy = order,
        limit = Some(if (limit > 0) limit else 20))
      val future = client(query)
      var data: Seq[(DateTime, Map[String, Any])] = null
      future.onComplete {
        case Success(resp) => data = resp.data
        case Failure(ex) => ex.printStackTrace()
      }
      while (!future.isCompleted) {
        logInfo("sleep 500ms")
        Thread.sleep(500)
      }
      logInfo("get GroupByQuery result and the size is " + data.size)
      client.close()
      converedToTsRow(data, requiredColumns, fieldMap)
    }
  }
}
|
// Action creators for the memory-board reducer (originally: "manejadores de acción").
const fillMatrix = (cards, mtz_board) => ({ type: "FILL_MATRIX", cards, mtz_board });

const turnPlay = (mtz_board, row, col) => ({ type: "TURN_PLAY", mtz_board, row, col });

const success = () => ({ type: "SUCCESS" });

const turnOff = (row, col) => ({ type: "TURN_OFF", row, col });

export { fillMatrix, turnPlay, success, turnOff };
|
class MyList(list):
    """A list that announces append/insert/pop operations on stdout.

    The redundant ``__init__`` override (which only delegated to ``list``)
    was removed; construction behavior is unchanged.
    """

    def append(self, element):
        """Append `element`, then report the addition."""
        super().append(element)
        print('Element {} is added to the list'.format(element))

    def insert(self, index, element):
        """Insert `element` at `index`, then report the insertion."""
        super().insert(index, element)
        print('Element {} is inserted at index {}'.format(element, index))

    def pop(self, index=-1):
        """Remove and return the element at `index` (default: last), reporting it."""
        element = super().pop(index)
        print('Element {} is removed from the list'.format(element))
        return element
#!/bin/bash
# Language-for-vision ablation: runs volta's ablate_lang4vis.py for one
# model/task combination and dumps the results under ${OUTPUT_DIR}.
# NOTE(review): paths are cluster-specific; TASK=18 presumably selects the
# Flickr30k Entities entry in the tasks config — confirm against the yml.
TASK=18
MODEL=ctrl_visualbert
MODEL_CONFIG=ctrl_visualbert_base
TASKS_CONFIG=xm-influence_test_tasks
PRETRAINED=/science/image/nlp-datasets/emanuele/checkpoints/mpre-unmasked/conceptual_captions_s33/volta/ctrl_visualbert/ctrl_visualbert_base/pytorch_model_9.bin
OUTPUT_DIR=/science/image/nlp-datasets/emanuele/results/xm-influence/flickr30kentities_lang4vis/${MODEL}_s33
# Overlap threshold for phrase masking.
THR=0.5

# Activate the project conda env and run from the volta repo root.
source activate /science/image/nlp-datasets/emanuele/envs/xm-influence

cd ../../../../volta
python ablate_lang4vis.py \
        --bert_model bert-base-uncased --config_file config/${MODEL_CONFIG}.json --from_pretrained ${PRETRAINED} \
        --tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK --split val \
        --output_dir ${OUTPUT_DIR} --dump_results --masking phrase --overlap_threshold $THR

conda deactivate
#!/usr/bin/env bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# First copyright year per distro image — presumably stamped into the
# generated Dockerfiles when they are (re)created; TODO confirm against the
# generator that sources this file.
declare -A ORIGINAL_COPYRIGHT_YEAR=(
  [centos-7]=2018
  [centos-8]=2019
  [debian-buster]=2019
  [debian-stretch]=2018
  [fedora]=2018
  [opensuse-leap]=2019
  [opensuse-tumbleweed]=2018
  [ubuntu-xenial]=2018
  [ubuntu-bionic]=2018
)

# Files excluded from regeneration; currently none.
declare -a FROZEN_FILES=()

# Dockerfile fragment for building and testing the project. replace_fragments
# (defined by the caller) expands each @..._FROM_SOURCE@ placeholder into the
# install recipe for that dependency. Everything between the heredoc markers
# is emitted verbatim into the generated Dockerfile.
BUILD_AND_TEST_PROJECT_FRAGMENT=$(replace_fragments \
  "INSTALL_CRC32C_FROM_SOURCE" \
  "INSTALL_CPP_CMAKEFILES_FROM_SOURCE" \
  "INSTALL_GOOGLETEST_FROM_SOURCE" \
  "INSTALL_GOOGLE_CLOUD_CPP_COMMON_FROM_SOURCE" <<'_EOF_'

# #### crc32c
# The project depends on the Crc32c library, we need to compile this from
# source:
# ```bash
@INSTALL_CRC32C_FROM_SOURCE@
# ```

# #### googleapis
# We need a recent version of the Google Cloud Platform proto C++ libraries:
# ```bash
@INSTALL_CPP_CMAKEFILES_FROM_SOURCE@
# ```

# #### googletest
# We need a recent version of GoogleTest to compile the unit and integration
# tests.
# ```bash
@INSTALL_GOOGLETEST_FROM_SOURCE@
# ```

# #### google-cloud-cpp-common
# The project also depends on google-cloud-cpp-common, the libraries shared by
# all the Google Cloud C++ client libraries:
# ```bash
@INSTALL_GOOGLE_CLOUD_CPP_COMMON_FROM_SOURCE@
# ```

FROM devtools AS install
ARG NCPU=4

# #### Compile and install the main project
# We can now compile, test, and install `@GOOGLE_CLOUD_CPP_REPOSITORY@`.
# ```bash
WORKDIR /home/build/project
COPY . /home/build/project
RUN cmake -H. -Bcmake-out
RUN cmake --build cmake-out -- -j "${NCPU:-4}"
WORKDIR /home/build/project/cmake-out
RUN ctest -LE integration-tests --output-on-failure
RUN cmake --build . --target install
# ```
## [END INSTALL.md]
ENV PKG_CONFIG_PATH=/usr/local/lib64/pkgconfig:/usr/local/lib/pkgconfig

# Verify that the installed files are actually usable
WORKDIR /home/build/test-install-plain-make
COPY ci/test-install /home/build/test-install-plain-make
RUN make

WORKDIR /home/build/test-install-cmake-bigtable
COPY ci/test-install/bigtable /home/build/test-install-cmake-bigtable
RUN env -u PKG_CONFIG_PATH cmake -H. -B/i/bigtable
RUN cmake --build /i/bigtable -- -j ${NCPU:-4}

WORKDIR /home/build/test-install-cmake-storage
COPY ci/test-install/storage /home/build/test-install-cmake-storage
RUN env -u PKG_CONFIG_PATH cmake -H. -B/i/storage
RUN cmake --build /i/storage -- -j ${NCPU:-4}
_EOF_
)
|
import re
def parse_anchor_tag(tag):
    """Extract attribute name/value pairs from an HTML tag string.

    Fix: the original pattern ``(\\w+)=["\\'](.*?)["\\']`` allowed mismatched
    quotes (e.g. ``href="x'``); a backreference now requires the attribute to
    close with the same quote character it opened with.

    :param tag: raw tag text, e.g. ``'<a href="x" title=\\'t\\'>'``
    :return: dict mapping attribute names to their (unquoted) values
    """
    attributes = re.findall(r'(\w+)=(["\'])(.*?)\2', tag)
    return {name: value for name, _quote, value in attributes}
import { useEffect, useState } from "react";
import { _get, _put } from "../../../../common/httpClient";
import { Box, Text, Checkbox, Flex, Button, useToast } from "@chakra-ui/react";
import { Check } from "../../../../theme/components/icons";
/**
* @description Updates all widgetParameters to updates referrers.
* @returns {JSX.Element}
*/
const UpdateAllParameterReferrers = () => {
const [loading, setLoading] = useState(false);
const [submitLoading, setSubmitLoading] = useState(false);
const [isSubmitDisabled, setSubmitDisabled] = useState(true);
const [referrers, setReferrers] = useState();
const toast = useToast();
/**
* @description Get all parameters.
*/
useEffect(() => {
async function fetchData() {
try {
setLoading(true);
const referrersResponse = await getReferrers();
setReferrers(referrersResponse.map((referrer) => ({ ...referrer, isChecked: false })));
} catch (error) {
toast({
title: "Une erreur est survenue durant la récupération des informations.",
status: "error",
isClosable: true,
position: "bottom-right",
});
} finally {
setLoading(false);
}
}
fetchData();
}, [toast]);
/**
* @description Returns all referrers.
* @returns {Promise<{code: {number}, name: {string}, full_name: {string}, url: {string}[]}>}
*/
const getReferrers = async () => {
const { referrers } = await _get(`/api/constants`);
return referrers;
};
/**
* @description Toggles checkboxes.
* @param {Number} referrerCode
* @param {Boolean} isChecked
* @returns {void}
*/
const toggleReferrer = (referrerCode, isChecked) => {
const referrersUpdated = referrers.map((referrer) => {
if (referrer.code === referrerCode) {
referrer.isChecked = isChecked;
}
return referrer;
});
setReferrers(referrersUpdated);
toggleDisableButton();
};
/**
* @description Disable submit button if no one of checkbox is checked.
* @returns {void}
*/
const toggleDisableButton = () => {
const uncheckedReferrers = referrers.filter((referrer) => !referrer.isChecked);
setSubmitDisabled(uncheckedReferrers.length === referrers.length);
};
/**
* @description Submit.
* @returns {Promise<void>}
*/
const submit = async () => {
try {
setSubmitLoading(true);
await _put("/api/widget-parameters/referrers", {
referrers: referrers.filter((referrer) => referrer.isChecked).map((referrer) => referrer.code),
});
toast({
title: "Enregistrement effectué avec succès.",
status: "success",
isClosable: true,
position: "bottom-right",
});
} catch (error) {
toast({
title: "Une erreur est survenue.",
status: "error",
isClosable: true,
position: "bottom-right",
});
} finally {
setSubmitLoading(false);
}
};
return (
<Box
w={["100%", "100%", "40%", "40%"]}
boxShadow="0 1px 2px 0 rgb(0 0 0 / 5%)"
border="1px solid rgba(0,40,100,.12)"
border-radius="3px"
mt={10}
>
<Text fontSize="15px" p={5} borderBottom="1px solid rgba(0,40,100,.12)" border-radius="3px">
Modifier les sources de parution pour tous les paramètres actifs
</Text>
<Box active={loading} loader p={5}>
<Text>
Veuillez cocher l'ensemble des plateformes de diffusion sur lesquelles vous souhaitez que les formations
actuellement publiées soient accessibles.
<br />
<br />
{referrers &&
referrers.map((referrer) => (
<Flex>
<Checkbox
key={referrer.code}
checked={referrer.checked}
icon={<Check w="20px" h="18px" />}
onChange={() => toggleReferrer(referrer.code, !referrer.isChecked)}
>
<Text ml={2}>{referrer.full_name}</Text>
</Checkbox>
</Flex>
))}
</Text>
</Box>
<Flex justifyContent="flex-end" borderTop="1px solid rgba(0,40,100,.12)" border-radius="3px" p={5} mt="12.6rem">
<Button
bg={isSubmitDisabled === true ? "" : "#467fcf"}
disabled={isSubmitDisabled}
loading={submitLoading}
onClick={submit}
variant="primary"
mr="3rem"
_hover={{ bg: "#3057BE" }}
>
Enregistrer
</Button>
</Flex>
</Box>
);
};
export { UpdateAllParameterReferrers };
|
def edit_distance(s1, s2):
    """Levenshtein (edit) distance between two strings.

    Uses a rolling one-dimensional DP row: `previous[c]` holds the distance
    between the first `r` characters of the longer string and the first `c`
    characters of the shorter one. The shorter string is placed on the inner
    loop to keep the row small.
    """
    if len(s1) > len(s2):
        s1, s2 = s2, s1
    previous = list(range(len(s1) + 1))
    for row, ch2 in enumerate(s2, start=1):
        current = [row]
        for col, ch1 in enumerate(s1, start=1):
            if ch1 == ch2:
                current.append(previous[col - 1])
            else:
                # Cheapest of substitute / delete / insert, plus one edit.
                current.append(1 + min(previous[col - 1], previous[col], current[-1]))
        previous = current
    return previous[-1]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import click
from revolut import Revolut, __version__
import revolut_bot
import sys
# Usage : revolutbot.py --help
_BOT_PERCENT_MARGIN = 1 # at least 1% benefit to exchange
_VERBOSE_MODE = False # can be changed with --verbose parameter
_RETURN_CODE_BUY = 0
_RETURN_CODE_DO_NOT_BUY = 1
_RETURN_CODE_ERROR = 2
@click.command()
@click.option(
    '--device-id', '-d',
    envvar="REVOLUT_DEVICE_ID",
    type=str,
    help='your Revolut token (or set the env var REVOLUT_DEVICE_ID)',
    default='revolut_cli',
)
@click.option(
    '--token', '-t',
    envvar="REVOLUT_TOKEN",
    type=str,
    help='your Revolut token (or set the env var REVOLUT_TOKEN)',
)
@click.option(
    '--historyfile', '-f',
    type=str,
    help='csv file with the exchange history',
    required=True,
)
@click.option(
    '--forceexchange',
    is_flag=True,
    help='force the exchange, ignoring the bot decision (you may lose money)',
)
@click.option(
    '--simulate', '-s',
    is_flag=True,
    help='do not really exchange your money if set',
)
@click.option(
    '--verbose', '-v',
    is_flag=True,
    help='verbose mode',
)
@click.version_option(
    version=__version__,
    message='%(prog)s, based on [revolut] package version %(version)s'
)
def main(device_id, token, simulate, historyfile, verbose, forceexchange):
    """CLI entry point: requires a Revolut token, enables verbose logging via
    the module-level flag, then delegates the trading decision (and process
    exit code) to to_buy_or_not_to_buy()."""
    if token is None:
        print("You don't seem to have a Revolut token")
        print("Please execute revolut_cli.py first to get one")
        sys.exit(_RETURN_CODE_ERROR)
    global _VERBOSE_MODE
    _VERBOSE_MODE = verbose
    rev = Revolut(device_id=device_id, token=token)
    to_buy_or_not_to_buy(revolut=rev,
                         simulate=simulate,
                         filename=historyfile,
                         forceexchange=forceexchange)
def log(log_str=""):
if _VERBOSE_MODE:
print(log_str)
def to_buy_or_not_to_buy(revolut, simulate, filename, forceexchange):
    """Decide whether to buy back the previously sold currency.

    Reads the last transaction from the CSV history, quotes the current
    balance in the previously held currency, and buys back only if that quote
    exceeds the last sale amount plus _BOT_PERCENT_MARGIN percent (or if
    forceexchange is set). Appends the exchange to the history file unless
    simulating, then exits the process with _RETURN_CODE_BUY or
    _RETURN_CODE_DO_NOT_BUY.
    """
    percent_margin = _BOT_PERCENT_MARGIN
    last_transactions = revolut_bot.get_last_transactions_from_csv(
        filename=filename)
    last_tr = last_transactions[-1]  # The last transaction
    log()
    log("Last transaction : {}\n".format(last_tr))
    previous_currency = last_tr.from_amount.currency
    current_balance = last_tr.to_amount  # How much we currently have
    # Quote today's balance back in the currency we sold from.
    current_balance_in_other_currency = revolut.quote(
        from_amount=current_balance,
        to_currency=previous_currency)
    log("Today : {} in {} : {}\n".format(
        current_balance, previous_currency, current_balance_in_other_currency))
    last_sell = last_tr.from_amount  # How much did it cost before selling
    last_sell_plus_margin = revolut_bot.get_amount_with_margin(
        amount=last_sell,
        percent_margin=percent_margin)
    log("Min value to buy : {} + {}% (margin) = {}\n".format(
        last_sell,
        percent_margin,
        last_sell_plus_margin))
    buy_condition = current_balance_in_other_currency.real_amount > \
        last_sell_plus_margin.real_amount
    if buy_condition or forceexchange:
        if buy_condition:
            log("{} > {}".format(
                current_balance_in_other_currency,
                last_sell_plus_margin))
        elif forceexchange:
            log("/!\\ Force exchange option enabled")
        log("=> BUY")
        if simulate:
            log("(Simulation mode : do not really buy)")
        else:
            exchange_transaction = revolut.exchange(
                from_amount=current_balance,
                to_currency=previous_currency,
                simulate=simulate)
            log("{} bought".format(exchange_transaction.to_amount.real_amount))
            log("Update history file : {}".format(filename))
            revolut_bot.update_historyfile(
                filename=filename,
                exchange_transaction=exchange_transaction)
        sys.exit(_RETURN_CODE_BUY)
    else:
        log("{} < {}".format(
            current_balance_in_other_currency,
            last_sell_plus_margin))
        log("=> DO NOT BUY")
        sys.exit(_RETURN_CODE_DO_NOT_BUY)
if __name__ == "__main__":
main()
|
def reverse(str):
    """Return the characters of `str` in reverse order.

    Idiom/performance fix: slicing replaces the original character-by-character
    string concatenation, which built O(n) intermediate strings (O(n^2) work).
    The parameter name `str` (shadowing the builtin) is kept for backward
    compatibility with keyword callers.
    """
    return str[::-1]
#sh node.sh
#sh npm.sh
#sh etherpad-lite.sh
# Print the etherpad-lite API key so it can be copied into a client config.
# (The commented lines above are earlier provisioning steps, left disabled.)
echo "Copy this API KEY:"
echo ""
cat /usr/share/etherpad-lite/APIKEY.txt
echo ""
<filename>src/main/java-gen/io/dronefleet/mavlink/paparazzi/ScriptItem.java
package io.dronefleet.mavlink.paparazzi;
import io.dronefleet.mavlink.annotations.MavlinkFieldInfo;
import io.dronefleet.mavlink.annotations.MavlinkMessageBuilder;
import io.dronefleet.mavlink.annotations.MavlinkMessageInfo;
import java.lang.Object;
import java.lang.Override;
import java.lang.String;
import java.util.Objects;
/**
 * Message encoding a mission script item. This message is emitted upon a request for the next
 * script item.
 */
@MavlinkMessageInfo(
        id = 180,
        crc = 231,
        description = "Message encoding a mission script item. This message is emitted upon a request for the next script item."
)
public final class ScriptItem {
    private final int targetSystem;

    private final int targetComponent;

    private final int seq;

    private final String name;

    private ScriptItem(int targetSystem, int targetComponent, int seq, String name) {
        this.targetSystem = targetSystem;
        this.targetComponent = targetComponent;
        this.seq = seq;
        this.name = name;
    }

    /**
     * Returns a builder instance for this message.
     */
    @MavlinkMessageBuilder
    public static Builder builder() {
        return new Builder();
    }

    /**
     * System ID
     */
    @MavlinkFieldInfo(
            position = 1,
            unitSize = 1,
            description = "System ID"
    )
    public final int targetSystem() {
        return this.targetSystem;
    }

    /**
     * Component ID
     */
    @MavlinkFieldInfo(
            position = 2,
            unitSize = 1,
            description = "Component ID"
    )
    public final int targetComponent() {
        return this.targetComponent;
    }

    /**
     * Sequence
     */
    @MavlinkFieldInfo(
            position = 3,
            unitSize = 2,
            description = "Sequence"
    )
    public final int seq() {
        return this.seq;
    }

    /**
     * The name of the mission script, NULL terminated.
     */
    @MavlinkFieldInfo(
            position = 4,
            unitSize = 1,
            arraySize = 50,
            description = "The name of the mission script, NULL terminated."
    )
    public final String name() {
        return this.name;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || !getClass().equals(o.getClass())) return false;
        ScriptItem other = (ScriptItem)o;
        // Primitive fields compare with ==; Objects.deepEquals boxed them needlessly.
        // Only the (possibly null) name needs Objects.equals.
        if (targetSystem != other.targetSystem) return false;
        if (targetComponent != other.targetComponent) return false;
        if (seq != other.seq) return false;
        return Objects.equals(name, other.name);
    }

    @Override
    public int hashCode() {
        // Integer.hashCode(x) == x == Objects.hashCode(boxed x), so the hash
        // values are identical to the previous boxing implementation.
        int result = 0;
        result = 31 * result + Integer.hashCode(targetSystem);
        result = 31 * result + Integer.hashCode(targetComponent);
        result = 31 * result + Integer.hashCode(seq);
        result = 31 * result + Objects.hashCode(name);
        return result;
    }

    @Override
    public String toString() {
        return "ScriptItem{targetSystem=" + targetSystem
                 + ", targetComponent=" + targetComponent
                 + ", seq=" + seq
                 + ", name=" + name + "}";
    }

    public static final class Builder {
        private int targetSystem;

        private int targetComponent;

        private int seq;

        private String name;

        /**
         * System ID
         */
        @MavlinkFieldInfo(
                position = 1,
                unitSize = 1,
                description = "System ID"
        )
        public final Builder targetSystem(int targetSystem) {
            this.targetSystem = targetSystem;
            return this;
        }

        /**
         * Component ID
         */
        @MavlinkFieldInfo(
                position = 2,
                unitSize = 1,
                description = "Component ID"
        )
        public final Builder targetComponent(int targetComponent) {
            this.targetComponent = targetComponent;
            return this;
        }

        /**
         * Sequence
         */
        @MavlinkFieldInfo(
                position = 3,
                unitSize = 2,
                description = "Sequence"
        )
        public final Builder seq(int seq) {
            this.seq = seq;
            return this;
        }

        /**
         * The name of the mission script, NULL terminated.
         */
        @MavlinkFieldInfo(
                position = 4,
                unitSize = 1,
                arraySize = 50,
                description = "The name of the mission script, NULL terminated."
        )
        public final Builder name(String name) {
            this.name = name;
            return this;
        }

        public final ScriptItem build() {
            return new ScriptItem(targetSystem, targetComponent, seq, name);
        }
    }
}
|
<filename>javascript-practice-programs/game js/game.js
// The fixed secret number the player must guess.
var secretNumber = 4;
// Ask the user for a guess (prompt returns a string; convert to a Number).
var guess =Number(prompt("guess a number"));
// Exact match wins.
if(guess === secretNumber){
    alert("you got it right");
}
// Otherwise tell the player whether the guess was too high or too low.
else if(guess> secretNumber){
    alert("Too High Guess again");
}
else
{
    alert("Too low Guess again");
}
<reponame>tessel/ambient-attx4<filename>test/hardware/flashVerify.js
var tessel = require('tessel');
// Module port under test: first CLI arg, defaulting to 'A'.
var port = tessel.port[process.argv[2] || 'A'];
var reset = port.digital[1];
reset.output(true);

// TAP plan: 7 checks x 5 repetitions each.
console.log('1..35');

var spi = new port.SPI({
  clockSpeed: 50000,
  mode: 2,
  chipSelect: port.digital[0].output(true),
  chipSelectDelayUs: 500
});

// Expected firmware checksum bytes returned by command 0x07.
var crc1 = 0x1e;
var crc2 = 0xb5;
// TAP-ish reporter: prints "ok" when the predicate held, otherwise the
// failure message followed by the raw response buffer.
function showResult(test, fail, buf) {
  if (!test) {
    console.log(fail, buf);
    return;
  }
  console.log('ok');
}
// Runs `test` n times strictly in sequence: each invocation receives a "done"
// continuation, and the next run starts only after it is called. When the
// count is exhausted the optional `callback` fires once.
function repeat(n, test, callback) {
  test(function () {
    n -= 1;
    if (n) {
      repeat(n, test, callback);
    } else if (callback) {
      callback();
    }
  });
}
// Command 0x07: read the firmware checksum; expects the command byte echoed
// back followed by the two expected CRC bytes (crc1/crc2).
function testCrc(callback) {
  spi.transfer(new Buffer([0x07, 0x00, 0x00, 0x00]), function(err, res) {
    if (err) {
      console.log('not ok - SPI error', err);
    }
    showResult(
      res[1] === 0x07 && res[2] === crc1 && res[3] === crc2,
      'not ok - checksum not verified',
      res
    );
    if (callback) {
      callback();
    }
  });
}
// Command 0x01: read the firmware version; any of versions 1-3 is accepted.
function testVersion(callback) {
  spi.transfer(new Buffer([0x01, 0x00, 0x00]), function(err, res) {
    if (err) {
      console.log('not ok - SPI error', err);
    }
    showResult(
      res[1] === 0x01 && (res[2] === 0x03 || res[2] === 0x02 || res[2] === 0x01),
      'not ok - Version returned incorrectly:',
      res
    );
    if (callback) {
      callback();
    }
  });
}
// Command 0x02: request 10 light readings (20 zero-filled payload bytes plus
// a 0x16 stop byte); checks the echoed header and trailing stop byte.
function testLightBuffer(callback) {
  var header = new Buffer([0x02, 0x0a, 0x00]);
  var fill = new Buffer(20);
  fill.fill(0);
  var stop = new Buffer(1);
  stop.writeUInt8(0x16, 0);
  spi.transfer(Buffer.concat([header, fill, stop]), function(err, res) {
    if (err) {
      console.log('not ok - SPI error', err);
    }
    showResult(
      res[1] === 0x02 && res[2] === 0x0a && res[res.length - 1] === 0x16,
      'not ok - light buffer incorrectly formatted:',
      res
    );
    if (callback) {
      callback();
    }
  });
}
// Command 0x03: same framing as testLightBuffer, but for sound readings.
function testSoundBuffer(callback) {
  var header = new Buffer([0x03, 0x0a, 0x00]);
  var fill = new Buffer(20);
  fill.fill(0);
  var stop = new Buffer(1);
  stop.writeUInt8(0x16, 0);
  spi.transfer(Buffer.concat([header, fill, stop]), function(err, res) {
    if (err) {
      console.log('not ok - SPI error', err);
    }
    showResult(
      res[1] === 0x03 && res[2] === 0x0a && res[res.length - 1] === 0x16,
      'not ok - sound buffer incorrectly formatted:',
      res
    );
    if (callback) {
      callback();
    }
  });
}
// Command 0x04: set the light trigger level to 0x0ff0 (sent big-endian as two
// bytes) and verify the device echoes the command and level back.
function testSetLightTrigger(callback) {
  var dataBuffer = new Buffer(2);
  dataBuffer.writeUInt16BE(0x0ff0, 0);
  var packet = new Buffer([0x04, dataBuffer.readUInt8(0), dataBuffer.readUInt8(1), 0x00, 0x00, 0x00]);
  spi.transfer(packet, function(err, res) {
    if (err) {
      console.log('not ok - SPI error', err);
    }
    showResult(
      res[1] === packet[0] && res[2] === packet[1] && res[3] === packet[2],
      'not ok - light trigger set failed',
      res
    );
    if (callback) {
      callback();
    }
  });
}
// Command 0x05: set the sound trigger level to 0x0ff0 and verify the echo.
function testSetSoundTrigger(callback) {
  var dataBuffer = new Buffer(2);
  dataBuffer.writeUInt16BE(0x0ff0, 0);
  var packet = new Buffer([0x05, dataBuffer.readUInt8(0), dataBuffer.readUInt8(1), 0x00, 0x00, 0x00]);
  spi.transfer(packet, function(err, res) {
    if (err) {
      console.log('not ok - SPI error', err);
    }
    showResult(
      res[1] === packet[0] && res[2] === packet[1] && res[3] === packet[2],
      'not ok - sound trigger set failed',
      res
    );
    if (callback) {
      callback();
    }
  });
}
// Command 0x06: fetch the most recent trigger values; expects the 0x55 ack
// byte, the echoed command, and a 6-byte response frame.
function testFetchTrigger(callback) {
  spi.transfer(new Buffer([0x06, 0x00, 0x00, 0x00, 0x00, 0x00]), function(err, res) {
    if (err) {
      console.log('not ok - SPI error', err);
    }
    showResult(
      res[0] === 0x55 && res[1] === 0x06 && res.length === 6,
      'not ok - trigger fetch failed',
      res
    );
    if (callback) {
      callback();
    }
  });
}
// Runs every hardware check 5 times, strictly sequentially (each stage starts
// only after the previous stage's repetitions complete).
function test() {
  console.log('# Test checksum');
  repeat(5, testCrc, function() {
    console.log('# Test version number');
    repeat(5, testVersion, function() {
      console.log('# Test getting light');
      repeat(5, testLightBuffer, function() {
        console.log('# Test getting sound');
        repeat(5, testSoundBuffer, function() {
          console.log('#Test setting triggers');
          repeat(5, testSetLightTrigger, function() {
            repeat(5, testSetSoundTrigger, function() {
              console.log('#Test fetch trigger');
              repeat(5, testFetchTrigger);
            });
          });
        });
      });
    });
  });
}
test();
|
#pragma once
#include <Core/Serializer/Serializer.h>
#include <Core/IStream.h>
#include <Core/com_ptr.h>
#include "BinaryStreamWriter.h"
#include "BinaryStreamReader.h"
#include "StructuredBinaryStreamWriter2.h"
#include "StructuredBinaryStreamReader.h"
#include "StructuredBinaryStreamReader2.h"
#include <Core/FileIO/FileIO.h>
#include <Core/Resource/Xml/Xml.h>
#include <iostream>
namespace Lunia {
namespace Serializer {
/* Wraps a plain stream writer in the flat (non-structured) binary format. */
com_ptr<IRefCountedStreamWriter> CreateBinaryStreamWriter(Lunia::IStreamWriter& stream) {
return new BinaryStreamWriter(stream);
}
/* Wraps a plain stream reader for the flat (non-structured) binary format. */
com_ptr<IRefCountedStreamReader> CreateBinaryStreamReader(Lunia::IStreamReader& stream) {
return new BinaryStreamReader(stream);
}
/* Overload taking a ref-counted stream writer. */
com_ptr<IRefCountedStreamWriter> CreateBinaryStreamWriter(com_ptr<Lunia::IRefCountedStreamWriter> stream) {
return new BinaryStreamWriter(stream);
}
/* Overload taking a ref-counted stream reader. */
com_ptr<IRefCountedStreamReader> CreateBinaryStreamReader(com_ptr<Lunia::IRefCountedStreamReader> stream) {
return new BinaryStreamReader(stream);
}
/* Structured writers always emit the version-2 container format
   (readers below still accept both version 1 and version 2). */
com_ptr<IRefCountedStreamWriter> CreateStructuredBinaryStreamWriter(Lunia::IStreamWriter& stream) {
return new StructuredBinaryStreamWriter2(stream);
}
/* Overload taking a ref-counted stream writer; also emits version 2. */
com_ptr<IRefCountedStreamWriter> CreateStructuredBinaryStreamWriter(com_ptr<Lunia::IRefCountedStreamWriter> stream) {
return new StructuredBinaryStreamWriter2(stream);
}
/* Sniffs the stream header to pick the matching structured-format reader
   (version 1 or 2). The read cursor is restored to its starting position
   before the reader is constructed, so the reader sees the header again.
   Throws Exception when the header matches neither version. */
com_ptr<IRefCountedStreamReader> CreateStructuredBinaryStreamReader(com_ptr<Lunia::IRefCountedStreamReader> stream) {
int startPosition = stream->GetReadCursor();
StructuredBinaryStreamHeader header;
stream->Read(reinterpret_cast<unsigned char*>(&header), sizeof(header));
stream->SetReadCursor(startPosition, IStream::CursorPosition::Begin); // rewind so the reader re-parses the header
if (header.IsVersion1()) {
// ALLM_INFO((L"Loading Structured Binary Format 1, stream=%s", stream->GetName()));
return new StructuredBinaryStreamReader(stream);
}
if (header.IsVersion2()) {
// ALLM_INFO((L"Loading Structured Binary Format 2, stream=%s", stream->GetName()));
return new StructuredBinaryStreamReader2(stream);
}
throw Exception(L"Invalid stream header, unknown version");
}
/* Reference overload of the header-sniffing factory above: peeks the header,
   rewinds the cursor, and constructs the version-1 or version-2 reader.
   Throws Exception when the header matches neither version. */
com_ptr<IRefCountedStreamReader> CreateStructuredBinaryStreamReader(Lunia::IStreamReader& stream) {
int startPosition = stream.GetReadCursor();
StructuredBinaryStreamHeader header;
stream.Read(reinterpret_cast<unsigned char*>(&header), sizeof(header));
stream.SetReadCursor(startPosition, IStream::CursorPosition::Begin); // rewind so the reader re-parses the header
if (header.IsVersion1()) {
//ALLM_INFO((L"Loading Structured Binary Format 1, stream=%s", stream.GetName()));
return new StructuredBinaryStreamReader(stream);
}
if (header.IsVersion2()) {
//ALLM_INFO((L"Loading Structured Binary Format 2, stream=%s", stream.GetName()));
return new StructuredBinaryStreamReader2(stream);
}
throw Exception(L"Invalid stream header, unknown version");
}
/**
Exports Version 2 format
*/
/*
 Serializes one XML element (and its subtree) into the version-2 structured
 binary layout. As written below, the on-disk layout per element is:
   leaf:     [name hash:uint32][datasize:int32][raw value bytes]
   non-leaf: [name hash:uint32][datasize:int32][type-name hash:uint32]
             [major version:int32][minor version:int32][children...]
 For non-leaf elements the datasize field is reserved first and back-patched
 once all children have been written.
*/
void RecursiveWrite(Xml::Element* element, FileIO::File& writeTo)
{
unsigned int hash = StringUtil::Hash(element->Name.c_str());
writeTo.WriteStruct(hash); // write hash of the element name
const std::wstring& type = element->Attributes[L"type"];
assert(!type.empty());
int datasize = 0;
if (element->SubElements.empty()) // leaf
{
// Every branch below follows the same pattern: parse the "value"
// attribute into the native type, then write [size][payload bytes].
const std::wstring& value = element->Attributes[L"value"];
if (type == L"int16")
{
int16 v = (int16)StringUtil::ToInt(value);
datasize = sizeof(v);
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)&v, datasize);
}
else if (type == L"uint16")
{
uint16 v = (uint16)StringUtil::ToInt(value);
datasize = sizeof(v);
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)&v, datasize);
}
else if (type == L"int32")
{
int32 v = StringUtil::ToInt(value);
datasize = sizeof(v);
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)&v, datasize);
}
else if (type == L"uint32")
{
uint32 v = StringUtil::ToInt(value);
datasize = sizeof(v);
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)&v, datasize);
}
else if (type == L"double")
{
double v = StringUtil::ToDouble(value);
datasize = sizeof(v);
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)&v, datasize);
}
else if (type == L"float")
{
float v = StringUtil::ToFloat(value);
datasize = sizeof(v);
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)&v, datasize);
}
else if (type == L"float2")
{
float2 v = StringUtil::ToFloat2(value);
datasize = sizeof(v);
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)&v, datasize);
}
else if (type == L"float3")
{
float3 v = StringUtil::ToFloat3(value);
datasize = sizeof(v);
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)&v, datasize);
}
else if (type == L"float4")
{
float4 v = StringUtil::ToFloat4(value);
datasize = sizeof(v);
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)&v, datasize);
}
else if (type == L"string")
{
// Narrowed to ASCII; size includes the trailing NUL byte.
std::string v = StringUtil::ToASCII(value);
datasize = (int)v.size() + 1/*end of string */;
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)v.c_str(), (unsigned int)(datasize * sizeof(std::string::value_type)));
}
else if (type == L"wstring")
{
// Size is in bytes (wchar_t units * value_type size), incl. terminator.
datasize = (int)(value.size() + 1) * sizeof(std::wstring::value_type)/*end of string */;
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)value.c_str(), (unsigned int)(datasize));
}
else if (type == L"resource")
{
// Resources are stored exactly like wstrings (wide path, incl. terminator).
datasize = (int)(value.size() + 1) * sizeof(std::wstring::value_type)/*end of string */;
writeTo.Write((unsigned char*)&datasize, sizeof(datasize));
writeTo.Write((unsigned char*)value.c_str(), (unsigned int)(datasize/**sizeof(std::wstring::value_type)*/));
}
// NOTE(review): a leaf with an unrecognized type writes no size/payload
// after the name hash, corrupting the stream — confirm all inputs use
// only the types handled above.
return;
}
/* has child element - ISerializable derived */
unsigned int position = writeTo.Seek(0, FileIO::File::SeekFrom::Cur);
writeTo.WriteStruct(position); // reserve space for datasize; placeholder value is back-patched below
/* write Begin() */
hash = StringUtil::Hash(type.c_str());
writeTo.WriteStruct(hash); // type name hash
// "version" attribute is parsed as "major.minor" into a two-float vector.
float2 v = StringUtil::ToFloat2(element->Attributes[L"version"]);
/*
//int32 majorVersionFromFile=0;
//ALLM_INFO((L"reading majorVersion"));
//stream->Read(reinterpret_cast<uint8*>(&majorVersionFromFile), sizeof(int32));
//int32 minorVersionFromFile=0;
//ALLM_INFO((L"reading minorVersion"));
//stream->Read(reinterpret_cast<uint8*>(&minorVersionFromFile), sizeof(int32));
*/
int32 majorVersion = int32(v[0]);
int32 minorVersion = int32(v[1]);
writeTo.WriteStruct(majorVersion); // major version
writeTo.WriteStruct(minorVersion); // minor version
// Serialize children depth-first, then back-patch this element's datasize.
Xml::ElementCollection::iterator end = element->SubElements.end();
for (Xml::ElementCollection::iterator i = element->SubElements.begin(); i != end; ++i)
{
RecursiveWrite((*i), writeTo);
}
datasize = writeTo.Seek(0, FileIO::File::SeekFrom::Cur) - position - sizeof(datasize); // calculate data size
writeTo.Seek(position, FileIO::File::SeekFrom::Top); // move to the place that is reserved for datasize
writeTo.WriteStruct(datasize); // overwrite datasize
writeTo.Seek(0, FileIO::File::SeekFrom::End); // resume appending at the end of the file
}
// irreversible converting
void ExportStructuredBinaryFromXml(const wchar_t* filename, const wchar_t* output)
{
Xml::Parser parser;
parser.ParseFromFile(filename);
FileIO::File writeTo(output, FileIO::File::Mode::WriteMode);
// write header
StructuredBinaryStreamHeader header;
writeTo.WriteStruct(header);
Xml::Element* allm = parser.GetElements().front();
assert(allm->Name == L"allm");
for (Xml::ElementCollection::iterator i = allm->SubElements.begin(); i != allm->SubElements.end(); ++i)
RecursiveWrite((*i), writeTo);
writeTo.Close();
}
}
} |
<gh_stars>1-10
import * as ts from "typescript";
import * as Lint from "tslint";
/**
 * TSLint rule requiring explicit types: declarations must carry a type or an
 * initializer, functions/methods/getters must declare a return type, and
 * arrow functions are flagged when they carry a return type that
 * requiresReturnType() deems unnecessary.
 */
export class Rule extends Lint.Rules.AbstractRule {
    static MISSING_TYPE_OR_INITIALIZER = "Missing type or initializer.";
    static MISSING_RETURN_TYPE = "Missing return type.";
    static UNNECESSARY_RETURN_TYPE = "Unnecessary return type.";
    apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
        return this.applyWithWalker(new DiagnosticsWalker(sourceFile, this.getOptions()));
    }
}
/**
 * AST walker backing the rule above.
 *
 * Fix: the original only exempted `for...of` loop variables from the
 * type-or-initializer check; `for...in` variables are in the same situation
 * (their type comes from the loop construct and they cannot take an
 * initializer), so they are now exempted as well.
 */
class DiagnosticsWalker extends Lint.RuleWalker {
    visitVariableDeclaration(node: ts.VariableDeclaration) {
        // node.parent is the VariableDeclarationList; its parent is the
        // statement that owns the declaration.
        var list = node.parent;
        if (list) {
            let stmt = list.parent;
            // Skip loop variables of for...of / for...in: their type is
            // dictated by the iterated expression.
            if (stmt && stmt.kind != ts.SyntaxKind.ForOfStatement && stmt.kind != ts.SyntaxKind.ForInStatement) {
                this.checkTypeOrInitializer(node);
            }
        }
        super.visitVariableDeclaration(node);
    }
    visitPropertyDeclaration(node: ts.PropertyDeclaration) {
        this.checkTypeOrInitializer(node);
        super.visitPropertyDeclaration(node);
    }
    visitParameterDeclaration(node: ts.ParameterDeclaration) {
        this.checkTypeOrInitializer(node);
        super.visitParameterDeclaration(node);
    }
    // Flags declarations that carry neither a type annotation nor an
    // initializer from which the type could be inferred.
    private checkTypeOrInitializer(node: ts.NamedDeclaration & { type?: ts.TypeNode, initializer?: ts.Expression }) {
        if (!node.type && !node.initializer) {
            this.addFailureAtNode(node, Rule.MISSING_TYPE_OR_INITIALIZER);
        }
    }
    visitFunctionDeclaration(node: ts.FunctionDeclaration) {
        this.checkFunctionReturnType(node);
        super.visitFunctionDeclaration(node);
    }
    visitArrowFunction(node: ts.ArrowFunction) {
        // Arrow functions only need a return type outside inferring contexts;
        // a redundant annotation is reported instead.
        if (requiresReturnType(node)) {
            this.checkFunctionReturnType(node);
        } else if (node.type) {
            this.addFailureAtNode(node.type, Rule.UNNECESSARY_RETURN_TYPE);
        }
        super.visitArrowFunction(node);
    }
    visitMethodDeclaration(node: ts.MethodDeclaration) {
        this.checkFunctionReturnType(node);
        super.visitMethodDeclaration(node);
    }
    visitGetAccessor(node: ts.GetAccessorDeclaration) {
        this.checkFunctionReturnType(node);
        super.visitGetAccessor(node);
    }
    // Flags any function-like declaration without an explicit return type.
    private checkFunctionReturnType(node: ts.FunctionLikeDeclaration) {
        if (!node.type) {
            this.addFailureAtNode(node, Rule.MISSING_RETURN_TYPE);
        }
    }
}
// An arrow function needs an explicit return type in two situations: it is
// passed to a `lengthof`/`nameof` call, or it appears anywhere other than as
// part of a call-like expression (where the context would infer the type).
function requiresReturnType(node: ts.ArrowFunction): boolean {
    const parent = node.parent;
    if (ts.isCallExpression(parent) && ts.isIdentifier(parent.expression)) {
        const callee = parent.expression.text;
        if (callee === "lengthof" || callee === "nameof") {
            return true;
        }
    }
    return !ts.isCallLikeExpression(parent);
}
|
import { shallowMount } from "@vue/test-utils";
import SfBannerGrid from "@/components/organisms/SfBannerGrid/SfBannerGrid.vue";
describe("SfBannerGrid.vue", () => {
  it("renders a component", () => {
    const component = shallowMount(SfBannerGrid);
    // Wrapper.contains() is deprecated in Vue Test Utils; find(...).exists()
    // is the documented replacement and performs the same check.
    expect(component.find(".sf-banner-grid").exists()).toBe(true);
  });
});
|
# Start an interactive Ubuntu container (image is pulled on first use);
# -i keeps STDIN open, -t allocates a pseudo-TTY for the shell.
docker run -it ubuntu
'''
references from:
https://www.tensorflow.org/api_docs/python/tf/contrib/keras/preprocessing/image/random_shift
https://www.tensorflow.org/api_docs/python/tf/contrib/keras/preprocessing/image/random_zoom
'''
import tensorflow as tf
import numpy as np
def augment_data(dataset, dataset_labels, augmentation_factor=1, use_random_shift=True, use_random_zoom=True):
	"""Expand an image dataset with randomly shifted and/or zoomed copies.

	Every original image is kept; for each image, ``augmentation_factor``
	rounds of augmentation are run, and each round appends one shifted copy
	(if ``use_random_shift``) and one zoomed copy (if ``use_random_zoom``),
	each paired with the original image's label.

	Args:
		dataset: array of images indexed along axis 0; each image is indexed
			as (row, col, channel) per the ``row_axis``/``col_axis``/
			``channel_axis`` arguments below.
		dataset_labels: labels aligned with ``dataset`` along axis 0.
		augmentation_factor: number of augmentation rounds per image.
		use_random_shift: whether to append randomly shifted copies.
		use_random_zoom: whether to append randomly zoomed copies.

	Returns:
		Tuple ``(images, labels)`` as numpy arrays.
	"""
	augmented_image = []
	augmented_image_labels = []
	for num in range(dataset.shape[0]):
		# Always keep the untouched original alongside its label.
		augmented_image.append(dataset[num])
		augmented_image_labels.append(dataset_labels[num])
		for _ in range(augmentation_factor):
			if use_random_shift:
				# Shift by up to 15% of the image size along each axis.
				augmented_image.append(tf.contrib.keras.preprocessing.image.random_shift(dataset[num], 0.15, 0.15, row_axis=0, col_axis=1, channel_axis=2))
				augmented_image_labels.append(dataset_labels[num])
			if use_random_zoom:
				# NOTE(review): a zoom range of [0.15, 0.15] means a fixed
				# zoom factor of 0.15 (strong zoom-in), not "up to 15%" —
				# confirm this matches the intended augmentation strength.
				augmented_image.append(tf.contrib.keras.preprocessing.image.random_zoom(dataset[num], [0.15, 0.15], row_axis=0, col_axis=1, channel_axis=2))
				augmented_image_labels.append(dataset_labels[num])
	return np.array(augmented_image), np.array(augmented_image_labels)
|
<filename>test/integration/controller_instance_test.go<gh_stars>0
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package integration
import (
"errors"
"fmt"
"net/http"
"net/url"
"reflect"
"testing"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
scfeatures "github.com/kubernetes-incubator/service-catalog/pkg/features"
osb "github.com/pmorie/go-open-service-broker-client/v2"
fakeosb "github.com/pmorie/go-open-service-broker-client/v2/fake"
utilfeature "k8s.io/apiserver/pkg/util/feature"
// avoid error `servicecatalog/v1beta1 is not enabled`
_ "github.com/kubernetes-incubator/service-catalog/pkg/apis/servicecatalog/install"
"github.com/kubernetes-incubator/service-catalog/pkg/apis/servicecatalog/v1beta1"
"github.com/kubernetes-incubator/service-catalog/test/util"
)
// TestCreateServiceInstanceNonExistentClusterServiceClassOrPlan tests that a ServiceInstance gets
// a Failed condition when the service class or service plan it references does not exist.
func TestCreateServiceInstanceNonExistentClusterServiceClassOrPlan(t *testing.T) {
	// Each case references the class/plan through exactly one naming scheme:
	// external name, external ID, or Kubernetes object (k8s) name. An empty
	// expectedErrorReason means provisioning is expected to succeed.
	cases := []struct {
		name                string
		classExternalName   string
		classExternalID     string
		planExternalName    string
		planExternalID      string
		classK8sName        string
		planK8sName         string
		expectedErrorReason string
	}{
		{
			name:                "existent external class and plan name",
			classExternalName:   testClusterServiceClassName,
			planExternalName:    testClusterServicePlanName,
			expectedErrorReason: "",
		},
		{
			name:                "non-existent external class name",
			classExternalName:   "nothereclass",
			planExternalName:    testClusterServicePlanName,
			expectedErrorReason: "ReferencesNonexistentServiceClass",
		},
		{
			name:                "non-existent external plan name",
			classExternalName:   testClusterServiceClassName,
			planExternalName:    "nothereplan",
			expectedErrorReason: "ReferencesNonexistentServicePlan",
		},
		{
			name:                "non-existent external class and plan name",
			classExternalName:   "nothereclass",
			planExternalName:    "nothereplan",
			expectedErrorReason: "ReferencesNonexistentServiceClass",
		},
		{
			name:                "existent external class and plan id",
			classExternalID:     testClassExternalID,
			planExternalID:      testPlanExternalID,
			expectedErrorReason: "",
		},
		{
			name:                "non-existent external class id",
			classExternalID:     "nothereclass",
			planExternalID:      testPlanExternalID,
			expectedErrorReason: "ReferencesNonexistentServiceClass",
		},
		{
			name:                "non-existent external plan id",
			classExternalID:     testClassExternalID,
			planExternalID:      "nothereplan",
			expectedErrorReason: "ReferencesNonexistentServicePlan",
		},
		{
			name:                "non-existent external class and plan id",
			classExternalID:     "nothereclass",
			planExternalID:      "nothereplan",
			expectedErrorReason: "ReferencesNonexistentServiceClass",
		},
		{
			name:                "existent k8s class and plan name",
			classK8sName:        testClusterServiceClassGUID,
			planK8sName:         testPlanExternalID,
			expectedErrorReason: "",
		},
		{
			name:                "non-existent k8s class name",
			classK8sName:        "nothereclass",
			planK8sName:         testPlanExternalID,
			expectedErrorReason: "ReferencesNonexistentServiceClass",
		},
		{
			name:                "non-existent k8s plan name",
			classK8sName:        testClusterServiceClassGUID,
			planK8sName:         "nothereplan",
			expectedErrorReason: "ReferencesNonexistentServicePlan",
		},
		{
			name:                "non-existent k8s class and plan name",
			classK8sName:        "nothereclass",
			planK8sName:         "nothereplan",
			expectedErrorReason: "ReferencesNonexistentServiceClass",
		},
	}
	for _, tc := range cases {
		tc := tc // capture the range variable for the subtest closure
		t.Run(tc.name, func(t *testing.T) {
			ct := &controllerTest{
				t:      t,
				broker: getTestBroker(),
				// Copy the case's reference fields onto a fresh instance; at
				// most one naming scheme is populated per case.
				instance: func() *v1beta1.ServiceInstance {
					i := getTestInstance()
					i.Spec.PlanReference.ClusterServiceClassExternalName = tc.classExternalName
					i.Spec.PlanReference.ClusterServicePlanExternalName = tc.planExternalName
					i.Spec.PlanReference.ClusterServiceClassExternalID = tc.classExternalID
					i.Spec.PlanReference.ClusterServicePlanExternalID = tc.planExternalID
					i.Spec.PlanReference.ClusterServiceClassName = tc.classK8sName
					i.Spec.PlanReference.ClusterServicePlanName = tc.planK8sName
					return i
				}(),
				skipVerifyingInstanceSuccess: tc.expectedErrorReason != "",
			}
			ct.run(func(ct *controllerTest) {
				// Expect Ready=True on success; otherwise Ready=False with
				// the case's failure reason.
				status := v1beta1.ConditionTrue
				if tc.expectedErrorReason != "" {
					status = v1beta1.ConditionFalse
				}
				condition := v1beta1.ServiceInstanceCondition{
					Type:   v1beta1.ServiceInstanceConditionReady,
					Status: status,
					Reason: tc.expectedErrorReason,
				}
				if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, condition); err != nil {
					t.Fatalf("error waiting for instance condition: %v", err)
				}
			})
		})
	}
}
// TestCreateServiceInstanceNonExistentClusterServiceBroker tests creating a
// ServiceInstance whose broker does not exist.
func TestCreateServiceInstanceNonExistentClusterServiceBroker(t *testing.T) {
	ct := &controllerTest{
		t:                            t,
		instance:                     getTestInstance(),
		skipVerifyingInstanceSuccess: true,
		// No broker is registered; the class and plan are created directly so
		// that only the instance's broker reference is dangling.
		preCreateInstance: func(ct *controllerTest) {
			serviceClass := &v1beta1.ClusterServiceClass{
				ObjectMeta: metav1.ObjectMeta{Name: testClusterServiceClassGUID},
				Spec: v1beta1.ClusterServiceClassSpec{
					ClusterServiceBrokerName: testClusterServiceBrokerName,
					CommonServiceClassSpec: v1beta1.CommonServiceClassSpec{
						ExternalID:   testClusterServiceClassGUID,
						ExternalName: testClusterServiceClassName,
						Description:  "a test service",
						Bindable:     true,
					},
				},
			}
			if _, err := ct.client.ClusterServiceClasses().Create(serviceClass); err != nil {
				t.Fatalf("error creating ClusterServiceClass: %v", err)
			}
			if err := util.WaitForClusterServiceClassToExist(ct.client, testClusterServiceClassGUID); err != nil {
				t.Fatalf("error waiting for ClusterServiceClass to exist: %v", err)
			}
			servicePlan := &v1beta1.ClusterServicePlan{
				ObjectMeta: metav1.ObjectMeta{Name: testPlanExternalID},
				Spec: v1beta1.ClusterServicePlanSpec{
					ClusterServiceBrokerName: testClusterServiceBrokerName,
					CommonServicePlanSpec: v1beta1.CommonServicePlanSpec{
						ExternalID:   testPlanExternalID,
						ExternalName: testClusterServicePlanName,
						Description:  "a test plan",
					},
					ClusterServiceClassRef: v1beta1.ClusterObjectReference{
						Name: testClusterServiceClassGUID,
					},
				},
			}
			if _, err := ct.client.ClusterServicePlans().Create(servicePlan); err != nil {
				t.Fatalf("error creating ClusterServicePlan: %v", err)
			}
			if err := util.WaitForClusterServicePlanToExist(ct.client, testPlanExternalID); err != nil {
				t.Fatalf("error waiting for ClusterServicePlan to exist: %v", err)
			}
		},
	}
	ct.run(func(ct *controllerTest) {
		// The instance should stall with Ready=False because the referenced
		// broker cannot be resolved.
		if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, v1beta1.ServiceInstanceCondition{
			Type:   v1beta1.ServiceInstanceConditionReady,
			Status: v1beta1.ConditionFalse,
			Reason: "ReferencesNonexistentBroker",
		}); err != nil {
			t.Fatalf("error waiting for instance reconciliation to fail: %v", err)
		}
	})
}
// TestCreateServiceInstanceWithAuthError tests creating a ServiceInstance when
// the secret containing the broker authorization info cannot be found.
func TestCreateServiceInstanceWithAuthError(t *testing.T) {
	ct := &controllerTest{
		t: t,
		// The broker declares basic-auth credentials stored in a secret.
		broker: func() *v1beta1.ClusterServiceBroker {
			b := getTestBroker()
			b.Spec.AuthInfo = &v1beta1.ClusterServiceBrokerAuthInfo{
				Basic: &v1beta1.ClusterBasicAuthConfig{
					SecretRef: &v1beta1.ObjectReference{
						Namespace: testNamespace,
						Name:      "secret-name",
					},
				},
			}
			return b
		}(),
		instance:                     getTestInstance(),
		skipVerifyingInstanceSuccess: true,
		// The secret resolves while the broker is being created...
		preCreateBroker: func(ct *controllerTest) {
			prependGetSecretReaction(ct.kubeClient, "secret-name", map[string][]byte{
				"username": []byte("user"),
				"password": []byte("<PASSWORD>"),
			})
		},
		// ...but secret lookups return NotFound once the instance reconciles.
		preCreateInstance: func(ct *controllerTest) {
			prependGetSecretNotFoundReaction(ct.kubeClient)
		},
	}
	ct.run(func(ct *controllerTest) {
		if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, v1beta1.ServiceInstanceCondition{
			Type:   v1beta1.ServiceInstanceConditionReady,
			Status: v1beta1.ConditionFalse,
			Reason: "ErrorGettingAuthCredentials",
		}); err != nil {
			t.Fatalf("error waiting for instance reconciliation to fail: %v", err)
		}
	})
}
// TestCreateServiceInstanceWithParameters tests creating a ServiceInstance
// with parameters.
func TestCreateServiceInstanceWithParameters(t *testing.T) {
	// A secret registered with the fake kube client for a given case.
	type secretDef struct {
		name string
		data map[string][]byte
	}
	// Cases combine inline parameters and parameters drawn from secrets;
	// expectedParams is the merged map the broker should receive, and
	// expectedError marks cases where parameter resolution must fail.
	cases := []struct {
		name           string
		params         map[string]interface{}
		paramsFrom     []v1beta1.ParametersFromSource
		secrets        []secretDef
		expectedParams map[string]interface{}
		expectedError  bool
	}{
		{
			name:           "no params",
			expectedParams: nil,
		},
		{
			name: "plain params",
			params: map[string]interface{}{
				"Name": "test-param",
				"Args": map[string]interface{}{
					"first":  "first-arg",
					"second": "second-arg",
				},
			},
			expectedParams: map[string]interface{}{
				"Name": "test-param",
				"Args": map[string]interface{}{
					"first":  "first-arg",
					"second": "second-arg",
				},
			},
		},
		{
			name: "secret params",
			paramsFrom: []v1beta1.ParametersFromSource{
				{
					SecretKeyRef: &v1beta1.SecretKeyReference{
						Name: "secret-name",
						Key:  "secret-key",
					},
				},
			},
			secrets: []secretDef{
				{
					name: "secret-name",
					data: map[string][]byte{
						"secret-key": []byte(`{"A":"B","<KEY>"}}`),
					},
				},
			},
			expectedParams: map[string]interface{}{
				"A": "B",
				"C": map[string]interface{}{
					"D": "E",
					"F": "G",
				},
			},
		},
		{
			name: "plain and secret params",
			params: map[string]interface{}{
				"Name": "test-param",
				"Args": map[string]interface{}{
					"first":  "first-arg",
					"second": "second-arg",
				},
			},
			paramsFrom: []v1beta1.ParametersFromSource{
				{
					SecretKeyRef: &v1beta1.SecretKeyReference{
						Name: "secret-name",
						Key:  "secret-key",
					},
				},
			},
			secrets: []secretDef{
				{
					name: "secret-name",
					data: map[string][]byte{
						"secret-key": []byte(`{"A":"B","C":{"D":"E","F":"G"}}`),
					},
				},
			},
			expectedParams: map[string]interface{}{
				"Name": "test-param",
				"Args": map[string]interface{}{
					"first":  "first-arg",
					"second": "second-arg",
				},
				"A": "B",
				"C": map[string]interface{}{
					"D": "E",
					"F": "G",
				},
			},
		},
		{
			name: "missing secret",
			paramsFrom: []v1beta1.ParametersFromSource{
				{
					SecretKeyRef: &v1beta1.SecretKeyReference{
						Name: "secret-name",
						Key:  "secret-key",
					},
				},
			},
			expectedError: true,
		},
		{
			name: "missing secret key",
			paramsFrom: []v1beta1.ParametersFromSource{
				{
					SecretKeyRef: &v1beta1.SecretKeyReference{
						Name: "secret-name",
						Key:  "other-secret-key",
					},
				},
			},
			secrets: []secretDef{
				{
					name: "secret-name",
					data: map[string][]byte{
						"secret-key": []byte(`bad`),
					},
				},
			},
			expectedError: true,
		},
		{
			name: "empty secret data",
			paramsFrom: []v1beta1.ParametersFromSource{
				{
					SecretKeyRef: &v1beta1.SecretKeyReference{
						Name: "secret-name",
						Key:  "secret-key",
					},
				},
			},
			secrets: []secretDef{
				{
					name: "secret-name",
					data: map[string][]byte{},
				},
			},
			expectedError: true,
		},
		{
			name: "bad secret data",
			paramsFrom: []v1beta1.ParametersFromSource{
				{
					SecretKeyRef: &v1beta1.SecretKeyReference{
						Name: "secret-name",
						Key:  "secret-key",
					},
				},
			},
			secrets: []secretDef{
				{
					name: "secret-name",
					data: map[string][]byte{
						"secret-key": []byte(`bad`),
					},
				},
			},
			expectedError: true,
		},
		{
			name: "no params in secret data",
			paramsFrom: []v1beta1.ParametersFromSource{
				{
					SecretKeyRef: &v1beta1.SecretKeyReference{
						Name: "secret-name",
						Key:  "secret-key",
					},
				},
			},
			secrets: []secretDef{
				{
					name: "secret-name",
					data: map[string][]byte{
						"secret-key": []byte(`{}`),
					},
				},
			},
			expectedParams: nil,
		},
	}
	for _, tc := range cases {
		tc := tc // capture the range variable for the subtest closure
		t.Run(tc.name, func(t *testing.T) {
			//t.Parallel()
			ct := &controllerTest{
				t:      t,
				broker: getTestBroker(),
				instance: func() *v1beta1.ServiceInstance {
					i := getTestInstance()
					if tc.params != nil {
						i.Spec.Parameters = convertParametersIntoRawExtension(t, tc.params)
					}
					i.Spec.ParametersFrom = tc.paramsFrom
					return i
				}(),
				skipVerifyingInstanceSuccess: tc.expectedError,
				// Register this case's secrets with the fake kube client.
				setup: func(ct *controllerTest) {
					for _, secret := range tc.secrets {
						prependGetSecretReaction(ct.kubeClient, secret.name, secret.data)
					}
				},
			}
			ct.run(func(ct *controllerTest) {
				if tc.expectedError {
					// Parameter-resolution failures surface as Ready=False
					// with reason ErrorWithParameters.
					if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, v1beta1.ServiceInstanceCondition{
						Type:   v1beta1.ServiceInstanceConditionReady,
						Status: v1beta1.ConditionFalse,
						Reason: "ErrorWithParameters",
					}); err != nil {
						t.Fatalf("error waiting for instance reconciliation to fail: %v", err)
					}
				} else {
					// On success, check the exact parameter map the broker
					// received in the provision request.
					brokerAction := getLastBrokerAction(t, ct.osbClient, fakeosb.ProvisionInstance)
					if e, a := tc.expectedParams, brokerAction.Request.(*osb.ProvisionRequest).Parameters; !reflect.DeepEqual(e, a) {
						t.Fatalf("unexpected diff in provision parameters: expected %v, got %v", e, a)
					}
				}
			})
		})
	}
}
// TestUpdateServiceInstanceChangePlans tests changing plans for an existing
// ServiceInstance.
func TestUpdateServiceInstanceChangePlans(t *testing.T) {
	otherPlanName := "otherplanname"
	otherPlanID := "other-plan-id"
	// Cases vary the reference style (external vs k8s names) and the broker's
	// update behavior: transient call errors, HTTP-level failures, and an
	// asynchronous update response.
	cases := []struct {
		name                          string
		useExternalNames              bool
		dynamicUpdateInstanceReaction fakeosb.DynamicUpdateInstanceReaction
		asyncUpdateInstanceReaction   *fakeosb.UpdateInstanceReaction
	}{
		{
			name:             "external",
			useExternalNames: true,
		},
		{
			name:             "k8s",
			useExternalNames: false,
		},
		{
			name:             "external name with two update call failures",
			useExternalNames: true,
			// Fail the first two update calls, then succeed.
			dynamicUpdateInstanceReaction: fakeosb.DynamicUpdateInstanceReaction(
				getUpdateInstanceResponseByPollCountReactions(2, []fakeosb.UpdateInstanceReaction{
					fakeosb.UpdateInstanceReaction{
						Error: errors.New("fake update error"),
					},
					fakeosb.UpdateInstanceReaction{
						Response: &osb.UpdateInstanceResponse{},
					},
				})),
		},
		{
			name:             "external name with two update failures",
			useExternalNames: true,
			// Fail the first two updates with HTTP 409, then succeed.
			dynamicUpdateInstanceReaction: fakeosb.DynamicUpdateInstanceReaction(
				getUpdateInstanceResponseByPollCountReactions(2, []fakeosb.UpdateInstanceReaction{
					fakeosb.UpdateInstanceReaction{
						Error: osb.HTTPStatusCodeError{
							StatusCode:   http.StatusConflict,
							ErrorMessage: strPtr("OutOfQuota"),
							Description:  strPtr("You're out of quota!"),
						},
					},
					fakeosb.UpdateInstanceReaction{
						Response: &osb.UpdateInstanceResponse{},
					},
				})),
		},
		{
			name:             "external name update response async",
			useExternalNames: true,
			asyncUpdateInstanceReaction: &fakeosb.UpdateInstanceReaction{
				Response: &osb.UpdateInstanceResponse{
					Async: true,
				},
			},
		},
	}
	for _, tc := range cases {
		tc := tc // capture the range variable for the subtest closure
		t.Run(tc.name, func(t *testing.T) {
			//t.Parallel()
			ct := &controllerTest{
				t:      t,
				broker: getTestBroker(),
				instance: func() *v1beta1.ServiceInstance {
					i := getTestInstance()
					if !tc.useExternalNames {
						// Swap the default external-name references for
						// direct k8s object names.
						i.Spec.ClusterServiceClassExternalName = ""
						i.Spec.ClusterServicePlanExternalName = ""
						i.Spec.ClusterServiceClassName = testClusterServiceClassGUID
						i.Spec.ClusterServicePlanName = testPlanExternalID
					}
					return i
				}(),
				setup: func(ct *controllerTest) {
					if tc.dynamicUpdateInstanceReaction != nil {
						ct.osbClient.UpdateInstanceReaction = tc.dynamicUpdateInstanceReaction
					} else if tc.asyncUpdateInstanceReaction != nil {
						ct.osbClient.UpdateInstanceReaction = tc.asyncUpdateInstanceReaction
					}
					// Advertise a second, updatable plan in the catalog so
					// the instance has something to switch to.
					catalogResponse := ct.osbClient.CatalogReaction.(*fakeosb.CatalogReaction).Response
					catalogResponse.Services[0].PlanUpdatable = truePtr()
					catalogResponse.Services[0].Plans = append(catalogResponse.Services[0].Plans, osb.Plan{
						Name:        otherPlanName,
						Free:        truePtr(),
						ID:          otherPlanID,
						Description: "another test plan",
					})
				},
			}
			ct.run(func(ct *controllerTest) {
				// Switch the instance to the second plan using whichever
				// reference style the case exercises.
				if tc.useExternalNames {
					ct.instance.Spec.ClusterServicePlanExternalName = otherPlanName
				} else {
					ct.instance.Spec.ClusterServicePlanName = otherPlanID
				}
				updatedInstance, err := ct.client.ServiceInstances(testNamespace).Update(ct.instance)
				if err != nil {
					t.Fatalf("error updating Instance: %v", err)
				}
				ct.instance = updatedInstance
				if err := util.WaitForInstanceProcessedGeneration(ct.client, testNamespace, testInstanceName, ct.instance.Generation); err != nil {
					t.Fatalf("error waiting for instance to reconcile: %v", err)
				}
				if tc.asyncUpdateInstanceReaction != nil {
					// action sequence: GetCatalog, ProvisionInstance, UpdateInstance, PollLastOperation
					brokerAction := getLastBrokerAction(t, ct.osbClient, fakeosb.PollLastOperation)
					request := brokerAction.Request.(*osb.LastOperationRequest)
					if request.PlanID == nil {
						t.Fatalf("plan ID not sent in update instance request: request = %+v", request)
					}
					if e, a := otherPlanID, *request.PlanID; e != a {
						t.Fatalf("unexpected plan ID: expected %s, got %s", e, a)
					}
				} else {
					// Synchronous path: the UpdateInstance request itself must
					// carry the new plan ID.
					brokerAction := getLastBrokerAction(t, ct.osbClient, fakeosb.UpdateInstance)
					request := brokerAction.Request.(*osb.UpdateInstanceRequest)
					if request.PlanID == nil {
						t.Fatalf("plan ID not sent in update instance request: request = %+v", request)
					}
					if e, a := otherPlanID, *request.PlanID; e != a {
						t.Fatalf("unexpected plan ID: expected %s, got %s", e, a)
					}
				}
			})
		})
	}
}
// TestUpdateServiceInstanceChangePlansToNonexistentPlan tests changing plans
// to a non-existent plan.
func TestUpdateServiceInstanceChangePlansToNonexistentPlan(t *testing.T) {
	// Two reference styles are exercised: external names and k8s object names.
	cases := []struct {
		name             string
		useExternalNames bool
	}{
		{
			name:             "external",
			useExternalNames: true,
		},
		{
			name:             "k8s",
			useExternalNames: false,
		},
	}
	for _, tc := range cases {
		// Fix: capture the range variable before handing it to the subtest
		// closure, matching every other table-driven test in this file.
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			ct := &controllerTest{
				t:      t,
				broker: getTestBroker(),
				instance: func() *v1beta1.ServiceInstance {
					i := getTestInstance()
					if !tc.useExternalNames {
						// Swap the default external-name references for
						// direct k8s object names.
						i.Spec.ClusterServiceClassExternalName = ""
						i.Spec.ClusterServicePlanExternalName = ""
						i.Spec.ClusterServiceClassName = testClusterServiceClassGUID
						i.Spec.ClusterServicePlanName = testPlanExternalID
					}
					return i
				}(),
				setup: func(ct *controllerTest) {
					// Plans must be updatable for a plan change to be attempted.
					ct.osbClient.CatalogReaction.(*fakeosb.CatalogReaction).Response.Services[0].PlanUpdatable = truePtr()
				},
			}
			ct.run(func(ct *controllerTest) {
				// Point the instance at a plan the catalog does not contain.
				if tc.useExternalNames {
					ct.instance.Spec.ClusterServicePlanExternalName = "other-plan-name"
				} else {
					ct.instance.Spec.ClusterServicePlanName = "other-plan-id"
				}
				if _, err := ct.client.ServiceInstances(testNamespace).Update(ct.instance); err != nil {
					t.Fatalf("error updating Instance: %v", err)
				}
				if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, v1beta1.ServiceInstanceCondition{
					Type:   v1beta1.ServiceInstanceConditionReady,
					Status: v1beta1.ConditionFalse,
					Reason: "ReferencesNonexistentServicePlan",
				}); err != nil {
					t.Fatalf("error waiting for instance reconciliation to fail: %v", err)
				}
			})
		})
	}
}
// TestUpdateServiceInstanceUpdateParameters tests updating the parameters
// of an existing ServiceInstance.
func TestUpdateServiceInstanceUpdateParameters(t *testing.T) {
cases := []struct {
name string
createdWithParams bool
createdWithParamsFromSecret bool
updateParams bool
updateParamsFromSecret bool
updateSecret bool
deleteParams bool
deleteParamsFromSecret bool
}{
{
name: "add param",
createdWithParams: false,
updateParams: true,
},
{
name: "update param",
createdWithParams: true,
updateParams: true,
},
{
name: "delete param",
createdWithParams: true,
deleteParams: true,
},
{
name: "add param with secret",
createdWithParams: false,
createdWithParamsFromSecret: true,
updateParams: true,
},
{
name: "update param with secret",
createdWithParams: true,
createdWithParamsFromSecret: true,
updateParams: true,
},
{
name: "delete param with secret",
createdWithParams: true,
createdWithParamsFromSecret: true,
deleteParams: true,
},
{
name: "add secret param",
createdWithParamsFromSecret: false,
updateParamsFromSecret: true,
},
{
name: "update secret param",
createdWithParamsFromSecret: true,
updateParamsFromSecret: true,
},
{
name: "delete secret param",
createdWithParamsFromSecret: true,
deleteParamsFromSecret: true,
},
{
name: "add secret param with plain param",
createdWithParams: true,
createdWithParamsFromSecret: false,
updateParamsFromSecret: true,
},
{
name: "update secret param with plain param",
createdWithParams: true,
createdWithParamsFromSecret: true,
updateParamsFromSecret: true,
},
{
name: "delete secret param with plain param",
createdWithParams: true,
createdWithParamsFromSecret: true,
deleteParamsFromSecret: true,
},
{
name: "update secret",
createdWithParamsFromSecret: true,
updateSecret: true,
},
{
name: "update secret with plain param",
createdWithParams: true,
createdWithParamsFromSecret: true,
updateSecret: true,
},
{
name: "add plain and secret param",
createdWithParams: false,
createdWithParamsFromSecret: false,
updateParams: true,
updateParamsFromSecret: true,
},
{
name: "update plain and secret param",
createdWithParams: true,
createdWithParamsFromSecret: true,
updateParams: true,
updateParamsFromSecret: true,
},
{
name: "delete plain and secret param",
createdWithParams: true,
createdWithParamsFromSecret: true,
deleteParams: true,
deleteParamsFromSecret: true,
},
}
for _, tc := range cases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
//t.Parallel()
ct := &controllerTest{
t: t,
broker: getTestBroker(),
instance: func() *v1beta1.ServiceInstance {
i := getTestInstance()
if tc.createdWithParams {
i.Spec.Parameters = convertParametersIntoRawExtension(t,
map[string]interface{}{
"param-key": "param-value",
})
}
if tc.createdWithParamsFromSecret {
i.Spec.ParametersFrom = []v1beta1.ParametersFromSource{
{
SecretKeyRef: &v1beta1.SecretKeyReference{
Name: "secret-name",
Key: "secret-key",
},
},
}
}
return i
}(),
setup: func(ct *controllerTest) {
prependGetSecretReaction(ct.kubeClient, "secret-name", map[string][]byte{
"secret-key": []byte(`{"secret-param-key":"secret-param-value"}`),
})
prependGetSecretReaction(ct.kubeClient, "other-secret-name", map[string][]byte{
"other-secret-key": []byte(`{"other-secret-param-key":"other-secret-param-value"}`),
})
},
}
ct.run(func(ct *controllerTest) {
if tc.updateParams {
ct.instance.Spec.Parameters = convertParametersIntoRawExtension(t,
map[string]interface{}{
"param-key": "new-param-value",
})
} else if tc.deleteParams {
ct.instance.Spec.Parameters = nil
}
if tc.updateParamsFromSecret {
ct.instance.Spec.ParametersFrom = []v1beta1.ParametersFromSource{
{
SecretKeyRef: &v1beta1.SecretKeyReference{
Name: "other-secret-name",
Key: "other-secret-key",
},
},
}
} else if tc.deleteParamsFromSecret {
ct.instance.Spec.ParametersFrom = nil
}
if tc.updateSecret {
ct.kubeClient.Lock()
prependGetSecretReaction(ct.kubeClient, "secret-name", map[string][]byte{
"secret-key": []byte(`{"new-secret-param-key":"new-secret-param-value"}`),
})
ct.kubeClient.Unlock()
ct.instance.Spec.UpdateRequests++
}
updatedInstance, err := ct.client.ServiceInstances(testNamespace).Update(ct.instance)
if err != nil {
t.Fatalf("error updating Instance: %v", err)
}
ct.instance = updatedInstance
if err := util.WaitForInstanceProcessedGeneration(ct.client, testNamespace, testInstanceName, ct.instance.Generation); err != nil {
t.Fatalf("error waiting for instance to reconcile: %v", err)
}
expectedParameters := make(map[string]interface{})
if tc.updateParams {
expectedParameters["param-key"] = "new-param-value"
} else if tc.createdWithParams && !tc.deleteParams {
expectedParameters["param-key"] = "param-value"
}
if tc.updateParamsFromSecret {
expectedParameters["other-secret-param-key"] = "other-secret-param-value"
} else if tc.updateSecret {
expectedParameters["new-secret-param-key"] = "new-secret-param-value"
} else if tc.createdWithParamsFromSecret && !tc.deleteParamsFromSecret {
expectedParameters["secret-param-key"] = "secret-param-value"
}
brokerAction := getLastBrokerAction(t, ct.osbClient, fakeosb.UpdateInstance)
request := brokerAction.Request.(*osb.UpdateInstanceRequest)
if e, a := expectedParameters, request.Parameters; !reflect.DeepEqual(e, a) {
t.Fatalf("unexpected parameters: expected %v, got %v", e, a)
}
})
})
}
}
// TestCreateServiceInstanceWithInvalidParameters tests creating a ServiceInstance
// with invalid parameters.
//
// The parameters are first serialized as valid JSON and then deliberately
// corrupted (the first byte of the raw payload is overwritten with '!'), so
// the API server must reject the create request outright.
func TestCreateServiceInstanceWithInvalidParameters(t *testing.T) {
	ct := &controllerTest{
		t:      t,
		broker: getTestBroker(),
	}
	ct.run(func(ct *controllerTest) {
		instance := getTestInstance()
		instance.Spec.Parameters = convertParametersIntoRawExtension(t,
			map[string]interface{}{
				"Name": "test-param",
				"Args": map[string]interface{}{
					"first":  "first-arg",
					"second": "second-arg",
				},
			})
		// Corrupt the serialized parameters: 0x21 ('!') at offset 0 makes the
		// raw bytes invalid JSON.
		instance.Spec.Parameters.Raw[0] = 0x21
		if _, err := ct.client.ServiceInstances(instance.Namespace).Create(instance); err == nil {
			t.Fatalf("expected instance to fail to be created due to invalid parameters")
		}
	})
}
// TimeoutError is an error sent back in a url.Error from the broker when
// the request has timed out at the network layer.
type TimeoutError string

// Timeout returns true since TimeoutError indicates that there was a timeout.
// This method is so that TimeoutError implements the (unexported) net/url
// timeout interface, which url.Error.Timeout() consults on its wrapped error.
func (e TimeoutError) Timeout() bool {
	return true
}

// Error returns the TimeoutError as a string, satisfying the error interface.
func (e TimeoutError) Error() string {
	return string(e)
}
// TestCreateServiceInstanceWithProvisionFailure tests creating a ServiceInstance
// with various failure results in response to the provision request.
//
// Each case injects a failure into the fake broker's first provision attempt:
// either an HTTP status-code error (statusCode) or a transport-level error
// (nonHTTPResponseError). The second provision attempt always succeeds, so
// unless the failure is terminal (expectFailCondition) every instance should
// eventually provision. Cases flagged triggersOrphanMitigation additionally
// expect the controller to send a deprovision request to clean up a
// possibly-created instance before retrying.
func TestCreateServiceInstanceWithProvisionFailure(t *testing.T) {
	cases := []struct {
		name                     string
		statusCode               int    // HTTP status of the failed provision response
		nonHTTPResponseError     error  // transport error returned instead of an HTTP response
		conditionReason          string // reason of the instance condition to wait for
		expectFailCondition      bool   // instance ends in a terminal Failed condition
		triggersOrphanMitigation bool   // a deprovision (orphan mitigation) request must occur
	}{
		{
			name:                "Status OK",
			statusCode:          http.StatusOK,
			conditionReason:     "ProvisionedSuccessfully",
			expectFailCondition: false,
		},
		{
			name:                     "Status Created",
			statusCode:               http.StatusCreated,
			conditionReason:          "ProvisionedSuccessfully",
			expectFailCondition:      false,
			triggersOrphanMitigation: true,
		},
		{
			name:                     "other 2xx",
			statusCode:               http.StatusNoContent,
			conditionReason:          "ProvisionedSuccessfully",
			expectFailCondition:      false,
			triggersOrphanMitigation: true,
		},
		{
			name:                "3XX",
			statusCode:          300,
			conditionReason:     "ProvisionedSuccessfully",
			expectFailCondition: false,
		},
		{
			name:                     "Status Request Timeout",
			statusCode:               http.StatusRequestTimeout,
			conditionReason:          "ProvisionedSuccessfully",
			expectFailCondition:      false,
			triggersOrphanMitigation: false,
		},
		{
			name:                "400",
			statusCode:          400,
			conditionReason:     "ClusterServiceBrokerReturnedFailure",
			expectFailCondition: true,
		},
		{
			name:                "other 4XX",
			statusCode:          403,
			conditionReason:     "ProvisionedSuccessfully",
			expectFailCondition: false,
		},
		{
			name:                     "5XX",
			statusCode:               500,
			conditionReason:          "ProvisionedSuccessfully",
			expectFailCondition:      false,
			triggersOrphanMitigation: true,
		},
		{
			name:                 "non-url transport error",
			nonHTTPResponseError: fmt.Errorf("non-url error"),
			conditionReason:      "ProvisionedSuccessfully",
		},
		{
			name: "non-timeout url error",
			nonHTTPResponseError: &url.Error{
				Op:  "Put",
				URL: "https://fakebroker.com/v2/service_instances/instance_id",
				Err: fmt.Errorf("non-timeout error"),
			},
			conditionReason: "ProvisionedSuccessfully",
		},
		{
			name: "network timeout",
			nonHTTPResponseError: &url.Error{
				Op:  "Put",
				URL: "https://fakebroker.com/v2/service_instances/instance_id",
				Err: TimeoutError("timeout error"),
			},
			conditionReason:          "ProvisionedSuccessfully",
			expectFailCondition:      false,
			triggersOrphanMitigation: true,
		},
	}
	for _, tc := range cases {
		// Capture the range variable for the subtest closure.
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			//t.Parallel()
			ct := &controllerTest{
				t:                            t,
				broker:                       getTestBroker(),
				instance:                     getTestInstance(),
				skipVerifyingInstanceSuccess: true,
				setup: func(ct *controllerTest) {
					// Default to an HTTP status-code error unless the case
					// supplies a transport-level error.
					reactionError := tc.nonHTTPResponseError
					if reactionError == nil {
						reactionError = osb.HTTPStatusCodeError{
							StatusCode:   tc.statusCode,
							ErrorMessage: strPtr("error message"),
							Description:  strPtr("response description"),
						}
					}
					// First provision attempt fails with reactionError; the
					// second one succeeds.
					ct.osbClient.ProvisionReaction = fakeosb.DynamicProvisionReaction(
						getProvisionResponseByPollCountReactions(2, []fakeosb.ProvisionReaction{
							fakeosb.ProvisionReaction{
								Error: reactionError,
							},
							fakeosb.ProvisionReaction{
								Response: &osb.ProvisionResponse{},
							},
						}))
					// Deprovision (orphan mitigation) also fails once with a
					// 500 before succeeding, exercising its retry path.
					ct.osbClient.DeprovisionReaction = fakeosb.DynamicDeprovisionReaction(
						getDeprovisionResponseByPollCountReactions(2, []fakeosb.DeprovisionReaction{
							fakeosb.DeprovisionReaction{
								Error: osb.HTTPStatusCodeError{
									StatusCode:   500,
									ErrorMessage: strPtr("temporary deprovision error"),
								},
							},
							fakeosb.DeprovisionReaction{
								Response: &osb.DeprovisionResponse{},
							},
						}))
				},
			}
			ct.run(func(ct *controllerTest) {
				var condition v1beta1.ServiceInstanceCondition
				if tc.expectFailCondition {
					// Instance should get stuck in a Failed condition
					condition = v1beta1.ServiceInstanceCondition{
						Type:   v1beta1.ServiceInstanceConditionFailed,
						Status: v1beta1.ConditionTrue,
						Reason: tc.conditionReason,
					}
				} else {
					// Instance provisioning should be retried and succeed
					condition = v1beta1.ServiceInstanceCondition{
						Type:   v1beta1.ServiceInstanceConditionReady,
						Status: v1beta1.ConditionTrue,
						Reason: tc.conditionReason,
					}
				}
				if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, condition); err != nil {
					t.Fatalf("error waiting for instance condition: %v", err)
				}
				if tc.expectFailCondition {
					// Even a failed instance should finish reconciling its
					// first generation.
					if err := util.WaitForInstanceProcessedGeneration(ct.client, ct.instance.Namespace, ct.instance.Name, 1); err != nil {
						t.Fatalf("error waiting for instance reconciliation to complete: %v", err)
					}
				}
				brokerActions := ct.osbClient.Actions()
				// NOTE(review): debug output to stdout; consider t.Logf instead.
				fmt.Printf("%#v", brokerActions)
				// Ensure that we meet expectations on deprovision requests for orphan mitigation
				deprovisionActions := findBrokerActions(t, ct.osbClient, fakeosb.DeprovisionInstance)
				if tc.triggersOrphanMitigation {
					if len(deprovisionActions) == 0 {
						t.Fatal("expected orphan mitigation deprovision request to occur")
					}
				} else {
					if len(deprovisionActions) != 0 {
						t.Fatal("unexpected deprovision requests")
					}
				}
				// All instances should eventually succeed
				getLastBrokerAction(t, ct.osbClient, fakeosb.ProvisionInstance)
			})
		})
	}
}
// TestCreateServiceInstanceFailsWithNonexistentPlan verifies that creating a
// ServiceInstance referencing a plan that is no longer offered by the broker
// leaves the instance not-Ready with reason ReferencesNonexistentServicePlan.
//
// Before the instance is created, the broker catalog is swapped to a single
// different plan and relisted, so the original test plan no longer exists.
func TestCreateServiceInstanceFailsWithNonexistentPlan(t *testing.T) {
	ct := &controllerTest{
		t:                            t,
		broker:                       getTestBroker(),
		instance:                     getTestInstance(),
		skipVerifyingInstanceSuccess: true,
		preCreateInstance: func(ct *controllerTest) {
			otherPlanName := "otherplanname"
			otherPlanID := "other-plan-id"
			// Replace the catalog's plan list with a single new plan and
			// force the broker to relist so the catalog change is picked up.
			catalogResponse := ct.osbClient.CatalogReaction.(*fakeosb.CatalogReaction).Response
			catalogResponse.Services[0].PlanUpdatable = truePtr()
			catalogResponse.Services[0].Plans = []osb.Plan{
				{
					Name:        otherPlanName,
					Free:        truePtr(),
					ID:          otherPlanID,
					Description: "another test plan",
				},
			}
			ct.broker.Spec.RelistRequests++
			if _, err := ct.client.ClusterServiceBrokers().Update(ct.broker); err != nil {
				t.Fatalf("error updating Broker: %v", err)
			}
			// Fixed: these waits are on ClusterServicePlans, so the failure
			// messages now name the right resource.
			if err := util.WaitForClusterServicePlanToExist(ct.client, otherPlanID); err != nil {
				t.Fatalf("error waiting for ClusterServicePlan to exist: %v", err)
			}
			if err := util.WaitForClusterServicePlanToNotExist(ct.client, testPlanExternalID); err != nil {
				t.Fatalf("error waiting for ClusterServicePlan to not exist: %v", err)
			}
		},
	}
	ct.run(func(ct *controllerTest) {
		condition := v1beta1.ServiceInstanceCondition{
			Type:   v1beta1.ServiceInstanceConditionReady,
			Status: v1beta1.ConditionFalse,
			Reason: "ReferencesNonexistentServicePlan",
		}
		if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, condition); err != nil {
			t.Fatalf("error waiting for instance condition: %v", err)
		}
	})
}
// TestCreateServiceInstanceAsynchronous tests provisioning a ServiceInstance
// when the broker responds asynchronously, with and without an operation key.
// The last broker action is expected to be a PollLastOperation request whose
// OperationKey matches (or is absent from) the provision response.
func TestCreateServiceInstanceAsynchronous(t *testing.T) {
	dashURL := testDashboardURL
	key := osb.OperationKey(testOperation)
	cases := []struct {
		name        string
		osbResponse *osb.ProvisionResponse // async provision response served by the fake broker
	}{
		{
			name: "asynchronous provision with operation key",
			osbResponse: &osb.ProvisionResponse{
				Async:        true,
				DashboardURL: &dashURL,
				OperationKey: &key,
			},
		},
		{
			name: "asynchronous provision without operation key",
			osbResponse: &osb.ProvisionResponse{
				Async:        true,
				DashboardURL: &dashURL,
			},
		},
	}
	for _, tc := range cases {
		// Capture the range variable for the subtest closure (consistent with
		// the other table-driven tests in this file).
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			ct := &controllerTest{
				t:        t,
				broker:   getTestBroker(),
				instance: getTestInstance(),
				setup: func(ct *controllerTest) {
					ct.osbClient.ProvisionReaction = &fakeosb.ProvisionReaction{
						Response: tc.osbResponse,
					}
				},
			}
			ct.run(func(ct *controllerTest) {
				// the action sequence is GetCatalog, ProvisionInstance, PollLastOperation
				osbActions := ct.osbClient.Actions()
				lastOpRequest := osbActions[len(osbActions)-1].Request.(*osb.LastOperationRequest)
				if tc.osbResponse.OperationKey != nil {
					if lastOpRequest.OperationKey == nil {
						t.Fatal("OperationKey should not be nil")
					} else if e, a := key, *lastOpRequest.OperationKey; e != a {
						t.Fatalf("unexpected OperationKey: expected %v, got %v", e, a)
					}
				} else if a := lastOpRequest.OperationKey; a != nil {
					t.Fatalf("unexpected OperationKey: expected nil, got %v", a)
				}
				condition := v1beta1.ServiceInstanceCondition{
					Type:   v1beta1.ServiceInstanceConditionReady,
					Status: v1beta1.ConditionTrue,
					Reason: "ProvisionedSuccessfully",
				}
				if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, condition); err != nil {
					t.Fatalf("error waiting for instance condition: %v", err)
				}
			})
		})
	}
}
// TestDeleteServiceInstance tests deleting/deprovisioning a ServiceInstance
// under several broker response modes (synchronous, asynchronous with and
// without an operation key), with and without an existing ServiceBinding.
//
// When a case supplies its own testFunction, that replaces the default
// delete-and-verify flow performed by controllerTest.
func TestDeleteServiceInstance(t *testing.T) {
	key := osb.OperationKey(testOperation)
	cases := []struct {
		name                         string
		skipVerifyingInstanceSuccess bool                     // skip waiting for a successful provision first
		binding                      *v1beta1.ServiceBinding  // optional binding created alongside the instance
		setup                        func(*controllerTest)    // configures fake OSB client reactions
		testFunction                 func(t *controllerTest)  // optional custom test body
	}{
		{
			name:    "synchronous deprovision",
			binding: getTestBinding(),
			setup: func(ct *controllerTest) {
				ct.osbClient.DeprovisionReaction = &fakeosb.DeprovisionReaction{
					Response: &osb.DeprovisionResponse{},
				}
			},
		},
		{
			name: "synchronous deprovision, no binding",
			setup: func(ct *controllerTest) {
				ct.osbClient.DeprovisionReaction = &fakeosb.DeprovisionReaction{
					Response: &osb.DeprovisionResponse{},
				}
			},
		},
		{
			name:    "asynchronous deprovision with operation key",
			binding: getTestBinding(),
			setup: func(ct *controllerTest) {
				ct.osbClient.DeprovisionReaction = &fakeosb.DeprovisionReaction{
					Response: &osb.DeprovisionResponse{
						Async:        true,
						OperationKey: &key,
					},
				}
			},
		},
		{
			name: "asynchronous deprovision with operation key, no binding",
			setup: func(ct *controllerTest) {
				ct.osbClient.DeprovisionReaction = &fakeosb.DeprovisionReaction{
					Response: &osb.DeprovisionResponse{
						Async:        true,
						OperationKey: &key,
					},
				}
			},
		},
		{
			name:    "asynchronous deprovision without operation key",
			binding: getTestBinding(),
			setup: func(ct *controllerTest) {
				ct.osbClient.DeprovisionReaction = &fakeosb.DeprovisionReaction{
					Response: &osb.DeprovisionResponse{
						Async: true,
					},
				}
			},
		},
		{
			name: "asynchronous deprovision without operation key, no binding",
			setup: func(ct *controllerTest) {
				ct.osbClient.DeprovisionReaction = &fakeosb.DeprovisionReaction{
					Response: &osb.DeprovisionResponse{
						Async: true,
					},
				}
			},
		},
		{
			// Deleting an instance that still has a binding must be blocked
			// until the binding is gone.
			name:    "deprovision instance with binding not deleted",
			binding: getTestBinding(),
			setup: func(ct *controllerTest) {
				ct.osbClient.DeprovisionReaction = &fakeosb.DeprovisionReaction{
					Response: &osb.DeprovisionResponse{},
				}
			},
			testFunction: func(ct *controllerTest) {
				if err := ct.client.ServiceInstances(ct.instance.Namespace).Delete(ct.instance.Name, &metav1.DeleteOptions{}); err != nil {
					ct.t.Fatalf("instance delete should have been accepted: %v", err)
				}
				condition := v1beta1.ServiceInstanceCondition{
					Type:   v1beta1.ServiceInstanceConditionReady,
					Status: v1beta1.ConditionFalse,
					Reason: "DeprovisionBlockedByExistingCredentials",
				}
				if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, condition); err != nil {
					ct.t.Fatalf("error waiting for instance condition: %v", err)
				}
			},
		},
		{
			// Deletion requested while an async provision is still in flight;
			// the provision completes first, then the delete proceeds.
			name:                         "deprovision instance after in progress provision",
			skipVerifyingInstanceSuccess: true,
			setup: func(ct *controllerTest) {
				ct.osbClient.PollLastOperationReaction = fakeosb.DynamicPollLastOperationReaction(
					getLastOperationResponseByPollCountReactions(2, []fakeosb.PollLastOperationReaction{
						fakeosb.PollLastOperationReaction{
							Response: &osb.LastOperationResponse{
								State: osb.StateInProgress,
							},
						},
						fakeosb.PollLastOperationReaction{
							Response: &osb.LastOperationResponse{
								State: osb.StateSucceeded,
							},
						},
					}))
				ct.osbClient.ProvisionReaction = &fakeosb.ProvisionReaction{
					Response: &osb.ProvisionResponse{
						Async: true,
					},
				}
				ct.osbClient.DeprovisionReaction = &fakeosb.DeprovisionReaction{
					Response: &osb.DeprovisionResponse{},
				}
			},
			testFunction: func(ct *controllerTest) {
				verifyCondition := v1beta1.ServiceInstanceCondition{
					Type:   v1beta1.ServiceInstanceConditionReady,
					Status: v1beta1.ConditionTrue,
					Reason: "ProvisionedSuccessfully",
				}
				// Fixed: report the failure on the subtest's ct.t (the closure
				// previously captured the parent test's t), consistent with the
				// other testFunction cases above.
				if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, verifyCondition); err != nil {
					ct.t.Fatalf("error waiting for instance condition: %v", err)
				}
			},
		},
	}
	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			//t.Parallel()
			ct := &controllerTest{
				t:                            t,
				broker:                       getTestBroker(),
				binding:                      tc.binding,
				instance:                     getTestInstance(),
				skipVerifyingInstanceSuccess: tc.skipVerifyingInstanceSuccess,
				setup:                        tc.setup,
			}
			ct.run(tc.testFunction)
		})
	}
}
// TestPollServiceInstanceLastOperationSuccess tests asynchronous provisioning
// flows that ultimately succeed: the fake broker's PollLastOperation endpoint
// is scripted to return in-progress states, transient errors, or immediate
// success, and each case waits for the instance to reach Ready=True with
// reason ProvisionedSuccessfully.
//
// NOTE(review): closures inside the cases table call t.Fatalf with the outer
// test's t captured at table-definition time, while the cases run inside
// t.Run subtests (with ct.t set to the subtest's t) — consider ct.t to
// attribute failures to the subtest; confirm before changing.
func TestPollServiceInstanceLastOperationSuccess(t *testing.T) {
	cases := []struct {
		name                         string
		setup                        func(t *controllerTest) // configures fake OSB client reactions
		skipVerifyingInstanceSuccess bool                    // skip default provision-success verification
		verifyCondition              *v1beta1.ServiceInstanceCondition // condition to wait for, if non-nil
		preDeleteBroker              func(t *controllerTest)
		preCreateInstance            func(t *controllerTest)
		postCreateInstance           func(t *controllerTest)
	}{
		{
			name: "async provisioning with last operation response state in progress",
			setup: func(ct *controllerTest) {
				ct.osbClient.ProvisionReaction = &fakeosb.ProvisionReaction{
					Response: &osb.ProvisionResponse{
						Async: true,
					},
				}
				// First poll reports in-progress, second reports success.
				ct.osbClient.PollLastOperationReaction = fakeosb.DynamicPollLastOperationReaction(
					getLastOperationResponseByPollCountStates(2, []osb.LastOperationState{osb.StateInProgress, osb.StateSucceeded}))
			},
			skipVerifyingInstanceSuccess: true,
			verifyCondition: &v1beta1.ServiceInstanceCondition{
				Type:   v1beta1.ServiceInstanceConditionReady,
				Status: v1beta1.ConditionTrue,
				Reason: "ProvisionedSuccessfully",
			},
		},
		{
			name: "async provisioning with last operation response state succeeded",
			setup: func(ct *controllerTest) {
				ct.osbClient.ProvisionReaction = &fakeosb.ProvisionReaction{
					Response: &osb.ProvisionResponse{
						Async: true,
					},
				}
				ct.osbClient.PollLastOperationReaction = &fakeosb.PollLastOperationReaction{
					Response: &osb.LastOperationResponse{
						State:       osb.StateSucceeded,
						Description: strPtr("testDescription"),
					},
				}
			},
			verifyCondition: &v1beta1.ServiceInstanceCondition{
				Type:   v1beta1.ServiceInstanceConditionReady,
				Status: v1beta1.ConditionTrue,
				Reason: "ProvisionedSuccessfully",
			},
		},
		// response errors
		{
			name: "async provisioning with error on first poll",
			setup: func(ct *controllerTest) {
				ct.osbClient.ProvisionReaction = &fakeosb.ProvisionReaction{
					Response: &osb.ProvisionResponse{
						Async: true,
					},
				}
				// A transient poll error must be retried, not treated as fatal.
				ct.osbClient.PollLastOperationReaction = fakeosb.DynamicPollLastOperationReaction(
					getLastOperationResponseByPollCountReactions(2, []fakeosb.PollLastOperationReaction{
						fakeosb.PollLastOperationReaction{
							Error: errors.New("some error"),
						},
						fakeosb.PollLastOperationReaction{
							Response: &osb.LastOperationResponse{
								State: osb.StateSucceeded,
							},
						},
					}))
			},
			skipVerifyingInstanceSuccess: true,
			verifyCondition: &v1beta1.ServiceInstanceCondition{
				Type:   v1beta1.ServiceInstanceConditionReady,
				Status: v1beta1.ConditionTrue,
				Reason: "ProvisionedSuccessfully",
			},
		},
		{
			name: "async provisioning with error on second poll",
			setup: func(ct *controllerTest) {
				ct.osbClient.ProvisionReaction = &fakeosb.ProvisionReaction{
					Response: &osb.ProvisionResponse{
						Async: true,
					},
				}
				ct.osbClient.PollLastOperationReaction = fakeosb.DynamicPollLastOperationReaction(
					getLastOperationResponseByPollCountReactions(1, []fakeosb.PollLastOperationReaction{
						fakeosb.PollLastOperationReaction{
							Response: &osb.LastOperationResponse{
								State: osb.StateInProgress,
							},
						},
						fakeosb.PollLastOperationReaction{
							Error: errors.New("some error"),
						},
						fakeosb.PollLastOperationReaction{
							Response: &osb.LastOperationResponse{
								State: osb.StateSucceeded,
							},
						},
					}))
			},
			skipVerifyingInstanceSuccess: true,
			verifyCondition: &v1beta1.ServiceInstanceCondition{
				Type:   v1beta1.ServiceInstanceConditionReady,
				Status: v1beta1.ConditionTrue,
				Reason: "ProvisionedSuccessfully",
			},
		},
		{
			name: "async last operation response successful with originating identity",
			setup: func(ct *controllerTest) {
				// Enable the OriginatingIdentity feature gate so the poll
				// request carries the acting user's identity.
				if err := utilfeature.DefaultFeatureGate.Set(fmt.Sprintf("%v=true", scfeatures.OriginatingIdentity)); err != nil {
					t.Fatalf("Failed to enable originating identity feature: %v", err)
				}
				ct.osbClient.ProvisionReaction = &fakeosb.ProvisionReaction{
					Response: &osb.ProvisionResponse{
						Async: true,
					},
				}
				ct.osbClient.PollLastOperationReaction = &fakeosb.PollLastOperationReaction{
					Response: &osb.LastOperationResponse{
						State:       osb.StateSucceeded,
						Description: strPtr("testDescription"),
					},
				}
			},
			verifyCondition: &v1beta1.ServiceInstanceCondition{
				Type:   v1beta1.ServiceInstanceConditionReady,
				Status: v1beta1.ConditionTrue,
				Reason: "ProvisionedSuccessfully",
			},
			preCreateInstance: func(ct *controllerTest) {
				// Create the instance as a distinct user so the originating
				// identity can be verified on the poll request afterwards.
				catalogClient, err := changeUsernameForCatalogClient(ct.catalogClient, ct.catalogClientConfig, "instance-creator")
				if err != nil {
					t.Fatalf("could not change the username for the catalog client: %v", err)
				}
				ct.catalogClient = catalogClient
				ct.client = catalogClient.ServicecatalogV1beta1()
			},
			postCreateInstance: func(ct *controllerTest) {
				verifyUsernameInLastBrokerAction(ct.t, ct.osbClient, fakeosb.PollLastOperation, "instance-creator")
			},
		},
	}
	for _, tc := range cases {
		// Capture the range variable for the subtest closure.
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			//t.Parallel()
			ct := &controllerTest{
				t:                            t,
				broker:                       getTestBroker(),
				instance:                     getTestInstance(),
				skipVerifyingInstanceSuccess: tc.skipVerifyingInstanceSuccess,
				setup:                        tc.setup,
				preDeleteBroker:              tc.preDeleteBroker,
				preCreateInstance:            tc.preCreateInstance,
				postCreateInstance:           tc.postCreateInstance,
			}
			ct.run(func(ct *controllerTest) {
				if tc.verifyCondition != nil {
					if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, *tc.verifyCondition); err != nil {
						t.Fatalf("error waiting for instance condition: %v", err)
					}
				}
			})
		})
	}
}
// TestPollServiceInstanceLastOperationFailure checks that async operation is correctly
// retried after the initial operation fails
//
// NOTE(review): failureCondition is populated in every case but never
// asserted — only successCondition is waited on below. Either assert the
// intermediate failure condition too, or drop the field; confirm intent
// before changing.
func TestPollServiceInstanceLastOperationFailure(t *testing.T) {
	cases := []struct {
		name                         string
		setup                        func(t *controllerTest) // configures fake OSB client reactions
		skipVerifyingInstanceSuccess bool
		failureCondition             *v1beta1.ServiceInstanceCondition // transient condition after the failed poll (currently unused)
		successCondition             *v1beta1.ServiceInstanceCondition // final condition the test waits for
	}{
		{
			name: "async provisioning with last operation response state failed",
			setup: func(ct *controllerTest) {
				ct.osbClient.ProvisionReaction = &fakeosb.ProvisionReaction{
					Response: &osb.ProvisionResponse{
						Async: true,
					},
				}
				// First poll reports outright failure; the retried provision's
				// poll then succeeds.
				ct.osbClient.PollLastOperationReaction = fakeosb.DynamicPollLastOperationReaction(
					getLastOperationResponseByPollCountStates(2,
						[]osb.LastOperationState{
							osb.StateFailed,
							osb.StateSucceeded,
						}))
			},
			skipVerifyingInstanceSuccess: false,
			failureCondition: &v1beta1.ServiceInstanceCondition{
				Type:   v1beta1.ServiceInstanceConditionReady,
				Status: v1beta1.ConditionFalse,
				Reason: "ProvisionCallFailed",
			},
			successCondition: &v1beta1.ServiceInstanceCondition{
				Type:   v1beta1.ServiceInstanceConditionReady,
				Status: v1beta1.ConditionTrue,
				Reason: "ProvisionedSuccessfully",
			},
		},
		// response errors
		{
			name: "async provisioning with last operation response state failed eventually",
			setup: func(ct *controllerTest) {
				ct.osbClient.ProvisionReaction = &fakeosb.ProvisionReaction{
					Response: &osb.ProvisionResponse{
						Async: true,
					},
				}
				// Failure only appears after an in-progress poll, and the
				// retry again goes through in-progress before succeeding.
				ct.osbClient.PollLastOperationReaction = fakeosb.DynamicPollLastOperationReaction(
					getLastOperationResponseByPollCountStates(1,
						[]osb.LastOperationState{
							osb.StateInProgress,
							osb.StateFailed,
							osb.StateInProgress,
							osb.StateSucceeded,
						}))
			},
			skipVerifyingInstanceSuccess: false,
			failureCondition: &v1beta1.ServiceInstanceCondition{
				Type:   v1beta1.ServiceInstanceConditionReady,
				Status: v1beta1.ConditionFalse,
				Reason: "ProvisionCallFailed",
			},
			successCondition: &v1beta1.ServiceInstanceCondition{
				Type:   v1beta1.ServiceInstanceConditionReady,
				Status: v1beta1.ConditionTrue,
				Reason: "ProvisionedSuccessfully",
			},
		},
	}
	for _, tc := range cases {
		// Capture the range variable for the subtest closure.
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			//t.Parallel()
			ct := &controllerTest{
				t:                            t,
				broker:                       getTestBroker(),
				instance:                     getTestInstance(),
				skipVerifyingInstanceSuccess: tc.skipVerifyingInstanceSuccess,
				setup:                        tc.setup,
			}
			ct.run(func(ct *controllerTest) {
				if err := util.WaitForInstanceCondition(ct.client, testNamespace, testInstanceName, *tc.successCondition); err != nil {
					t.Fatalf("error waiting for instance condition: %v", err)
				}
			})
		})
	}
}
// TestRetryAsyncDeprovision tests whether asynchronous deprovisioning retries
// by attempting a new deprovision after failing.
//
// The fake broker serves the first deprovision asynchronously and reports it
// as failed via PollLastOperation; the retried deprovision is then served
// synchronously, letting the controller finish cleanly.
func TestRetryAsyncDeprovision(t *testing.T) {
	pollHasFailed := false
	ct := &controllerTest{
		t:        t,
		broker:   getTestBroker(),
		instance: getTestInstance(),
		setup: func(ct *controllerTest) {
			// Asynchronous until a poll has reported failure; synchronous on
			// the retry.
			ct.osbClient.DeprovisionReaction = fakeosb.DynamicDeprovisionReaction(
				func(_ *osb.DeprovisionRequest) (*osb.DeprovisionResponse, error) {
					return &osb.DeprovisionResponse{Async: !pollHasFailed}, nil
				})
			// Every poll reports the async operation as failed and records
			// that a failure has been observed.
			ct.osbClient.PollLastOperationReaction = fakeosb.DynamicPollLastOperationReaction(
				func(_ *osb.LastOperationRequest) (*osb.LastOperationResponse, error) {
					pollHasFailed = true
					return &osb.LastOperationResponse{
						State: osb.StateFailed,
					}, nil
				})
		},
	}
	ct.run(func(_ *controllerTest) {})
}
|
#!/usr/bin/env bash
##
## Package uploader for Bintray.
##
## Copyright 2019 Leonid Plyushch <leonid.plyushch@gmail.com>
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
set -o errexit
set -o nounset

# Repository root: the parent directory of the directory this script lives in.
TERMUX_PACKAGES_BASEDIR=$(realpath "$(dirname "$0")/../")

# Verify that script is correctly installed to Termux repository.
if [ ! -d "$TERMUX_PACKAGES_BASEDIR/packages" ]; then
    echo "[!] Cannot find directory 'packages'."
    exit 1
fi

# Check dependencies. Each entry is 'binary:package', where 'binary' is the
# executable looked up in PATH and 'package' is the name of the providing
# package reported to the user.
for dependency in curl:curl find:findutils grep:grep jq:jq; do
    if [ -z "$(command -v "${dependency%%:*}")" ]; then
        echo "[!] Package '${dependency##*:}' is not installed."
        exit 1
    fi
done
unset dependency
###################################################################

# Associative array in which metadata of the package currently being
# processed is stored (name, description, version, licenses, ...).
declare -gA PACKAGE_METADATA

# Initialize default configuration.
DEBFILES_DIR_PATH="$TERMUX_PACKAGES_BASEDIR/debs"
METADATA_GEN_MODE=false
PACKAGE_CLEANUP_MODE=false
PACKAGE_DELETION_MODE=false
# Set to 'true' by request_emerg_exit() (installed as a signal trap —
# presumably for SIGINT; confirm against the trap installation) and checked
# at safe points so the script can abort cleanly.
SCRIPT_EMERG_EXIT=false

# Special variable to force script to exit with error status
# when everything finished. Should be set only when non-script
# errors occur, e.g. curl request failure.
#
# Useful in case if there was an error when uploading packages
# via CI/CD so packages are still uploaded where possible but
# maintainers will be notified about error because pipeline
# will be marked as "failed".
SCRIPT_ERROR_EXIT=false

# Bintray-specific configuration.
BINTRAY_REPO_NAME="unstable-packages"
BINTRAY_REPO_GITHUB="termux/unstable-packages"
BINTRAY_REPO_DISTRIBUTION="unstable"
BINTRAY_REPO_COMPONENT="main"

# Bintray credentials that should be set as external environment
# variables by user. Default to empty strings so 'set -o nounset'
# does not abort when they are not exported.
: "${BINTRAY_USERNAME:=""}"
: "${BINTRAY_API_KEY:=""}"
: "${BINTRAY_GPG_SUBJECT:=""}"
: "${BINTRAY_GPG_PASSPHRASE:=""}"

# If BINTRAY_GPG_SUBJECT is not specified, then signing will be
# done with gpg key of subject '$BINTRAY_USERNAME'.
if [ -z "$BINTRAY_GPG_SUBJECT" ]; then
    BINTRAY_GPG_SUBJECT="$BINTRAY_USERNAME"
fi

# Xeffyr is the maintainer of Termux Unstable packages repository.
BINTRAY_SUBJECT="xeffyr"

###################################################################
## Print message to stderr.
## Takes same arguments as command 'echo'.
## Note: echo options such as '-n' and '-e' are passed through; callers in
## this script rely on that for progress lines and escape sequences.
msg() {
    echo "$@" >&2
}
## Blocks terminal to prevent any user input.
## Takes no arguments.
block_terminal() {
    # Disable echo and canonical mode; make reads non-blocking.
    stty -echo -icanon time 0 min 0 2>/dev/null || true
    # Disable the quit and suspend control characters.
    # '|| true' keeps 'set -o errexit' happy when no tty is attached (CI).
    stty quit undef susp undef 2>/dev/null || true
}
## Unblocks terminal blocked with block_terminal() function.
## Takes no arguments.
unblock_terminal() {
    # Drain any input buffered while the terminal was blocked (reads are
    # non-blocking here, so the loop ends once the buffer is empty).
    while read -r; do
        true;
    done
    # Restore sane terminal settings; tolerate missing tty.
    stty sane 2>/dev/null || true
}
## Process request for aborting script execution.
## Used by signal trap.
## Takes no arguments.
## Only sets a flag; the flag is checked at safe points (between network
## requests) so an in-flight operation is never interrupted mid-way.
request_emerg_exit() {
    SCRIPT_EMERG_EXIT=true
}
## Handle emergency exit requested by ctrl-c.
## Takes no arguments.
## Requests remote metadata recalculation first so the repository index stays
## consistent with whatever was uploaded/deleted before the abort, then
## restores the terminal and exits with status 1.
emergency_exit() {
    msg
    recalculate_metadata
    msg "[!] Aborted by user."
    unblock_terminal
    exit 1
}
## Dump everything from $PACKAGE_METADATA to json structure.
## Takes no arguments.
## Output is written to stdout in the format expected by Bintray's
## package-creation API.
json_metadata_dump() {
    local old_ifs=$IFS
    local license
    local pkg_licenses=""
    # Build the JSON array body from the comma-separated LICENSES field,
    # trimming surrounding whitespace from each license name.
    IFS=","
    for license in ${PACKAGE_METADATA['LICENSES']}; do
        pkg_licenses+="\"$(echo "$license" | sed -r 's/^\s*(\S+(\s+\S+)*)\s*$/\1/')\","
    done
    # Drop the trailing comma left by the loop.
    pkg_licenses=${pkg_licenses%%,}
    IFS=$old_ifs
    # '<<-' strips leading tab characters from the here-document.
    cat <<- EOF
{
"name": "${PACKAGE_METADATA['NAME']}",
"desc": "${PACKAGE_METADATA['DESCRIPTION']}",
"version": "${PACKAGE_METADATA['VERSION_FULL']}",
"licenses": [${pkg_licenses}],
"vcs_url": "https://github.com/${BINTRAY_REPO_GITHUB}",
"website_url": "${PACKAGE_METADATA['WEBSITE_URL']}",
"issue_tracker_url": "https://github.com/${BINTRAY_REPO_GITHUB}/issues",
"github_repo": "${BINTRAY_REPO_GITHUB}",
"public_download_numbers": "true",
"public_stats": "false"
}
EOF
}
## Request metadata recalculation and signing.
## Takes no arguments.
## On failure only reports the error and sets SCRIPT_ERROR_EXIT; it does not
## abort the script.
recalculate_metadata() {
    local curl_response
    local http_status_code
    local api_response_message
    msg -n "[@] Requesting metadata recalculation... "
    # '--write-out "|%{http_code}"' appends the HTTP status code to the
    # response body so both can be split apart with 'cut' below.
    curl_response=$(
        curl \
            --silent \
            --user "${BINTRAY_USERNAME}:${BINTRAY_API_KEY}" \
            --request POST \
            --header "Content-Type: application/json" \
            --data "{\"subject\":\"${BINTRAY_GPG_SUBJECT}\",\"passphrase\":\"$BINTRAY_GPG_PASSPHRASE\"}" \
            --write-out "|%{http_code}" \
            "https://api.bintray.com/calc_metadata/${BINTRAY_SUBJECT}/${BINTRAY_REPO_NAME}/"
    )
    http_status_code=$(echo "$curl_response" | cut -d'|' -f2)
    api_response_message=$(echo "$curl_response" | cut -d'|' -f1 | jq -r .message)
    # Bintray answers 202 (Accepted) when recalculation has been scheduled.
    if [ "$http_status_code" = "202" ]; then
        msg "done"
    else
        msg "failure"
        msg "[!] $api_response_message"
        SCRIPT_ERROR_EXIT=true
    fi
}
## Request deletion of the specified package.
## Takes only one argument - package name.
## Checks SCRIPT_EMERG_EXIT before and after the request so a pending
## ctrl-c abort is honored at a safe point.
delete_package() {
    msg -n " * ${1}: "
    if $SCRIPT_EMERG_EXIT; then
        emergency_exit
    fi
    local curl_response
    # '--write-out "|%{http_code}"' appends the HTTP status code to the
    # response body for splitting with 'cut' below.
    curl_response=$(
        curl \
            --silent \
            --user "${BINTRAY_USERNAME}:${BINTRAY_API_KEY}" \
            --request DELETE \
            --write-out "|%{http_code}" \
            "https://api.bintray.com/packages/${BINTRAY_SUBJECT}/${BINTRAY_REPO_NAME}/${1}"
    )
    local http_status_code
    http_status_code=$(
        echo "$curl_response" | cut -d'|' -f2
    )
    local api_response_message
    api_response_message=$(
        echo "$curl_response" | cut -d'|' -f1 | jq -r .message
    )
    # 404 (already absent) is treated as success so deletion is idempotent.
    if [ "$http_status_code" = "200" ] || [ "$http_status_code" = "404" ]; then
        msg "success"
    else
        msg "$api_response_message"
        SCRIPT_ERROR_EXIT=true
    fi
    if $SCRIPT_EMERG_EXIT; then
        emergency_exit
    fi
}
## Leave only the latest version of specified package and remove old ones.
## Takes only one argument - package name.
## Returns 1 (and sets SCRIPT_ERROR_EXIT) if any API request fails.
delete_old_versions_from_package() {
    local package_versions
    local package_latest_version
    local curl_response
    local http_status_code
    local api_response_message
    if $SCRIPT_EMERG_EXIT; then
        emergency_exit
    fi
    msg -n " * ${1}: checking latest version... "
    # Fetch package info; '--write-out "|%{http_code}"' appends the HTTP
    # status code to the body for splitting with 'cut' below.
    curl_response=$(
        curl \
            --silent \
            --user "${BINTRAY_USERNAME}:${BINTRAY_API_KEY}" \
            --request GET \
            --write-out "|%{http_code}" \
            "https://api.bintray.com/packages/${BINTRAY_SUBJECT}/${BINTRAY_REPO_NAME}/${1}"
    )
    http_status_code=$(echo "$curl_response" | cut -d'|' -f2)
    api_response_message=$(echo "$curl_response" | cut -d'|' -f1 | jq -r .message)
    if [ "$http_status_code" = "200" ]; then
        # Escape dots so the version string can be used as a literal
        # anchored regex in the 'grep -v' filter below.
        package_latest_version=$(
            echo "$curl_response" | cut -d'|' -f1 | jq -r .latest_version | \
            sed 's/\./\\./g'
        )
        # All versions except the latest; '|| true' keeps errexit happy
        # when grep finds nothing (i.e. only the latest version exists).
        package_versions=$(
            echo "$curl_response" | cut -d'|' -f1 | jq -r '.versions[]' | \
            grep -v "^$package_latest_version$" || true
        )
    else
        msg "$api_response_message."
        SCRIPT_ERROR_EXIT=true
        return 1
    fi
    if $SCRIPT_EMERG_EXIT; then
        emergency_exit
    fi
    if [ -n "$package_versions" ]; then
        local old_version
        for old_version in $package_versions; do
            if $SCRIPT_EMERG_EXIT; then
                emergency_exit
            fi
            # '\r\e[2K' rewinds and clears the progress line before reprinting.
            msg -ne "\\r\\e[2K * ${1}: deleting '$old_version'... "
            curl_response=$(
                curl \
                    --silent \
                    --user "${BINTRAY_USERNAME}:${BINTRAY_API_KEY}" \
                    --request DELETE \
                    --write-out "|%{http_code}" \
                    "https://api.bintray.com/packages/${BINTRAY_SUBJECT}/${BINTRAY_REPO_NAME}/${1}/versions/${old_version}"
            )
            http_status_code=$(echo "$curl_response" | cut -d'|' -f2)
            api_response_message=$(
                echo "$curl_response" | cut -d'|' -f1 | jq -r .message
            )
            # 404 (version already gone) is tolerated, like in delete_package.
            if [ "$http_status_code" != "200" ] && [ "$http_status_code" != "404" ]; then
                msg "$api_response_message"
                SCRIPT_ERROR_EXIT=true
                return 1
            fi
            if $SCRIPT_EMERG_EXIT; then
                emergency_exit
            fi
        done
    fi
    msg -e "\\r\\e[2K * ${1}: success"
}
## Upload the specified package. Will also create a new version entry
## if required. When upload is done within the same version, already existing
## *.deb files will not be replaced.
##
## Note that upload_package() detects right *.deb files by using naming scheme
## defined in the build script. It does not care about actual content stored in
## the package so the good advice is to never rename *.deb files once they built.
##
## Function takes only one argument - package name.
upload_package() {
	local curl_response
	local http_status_code
	local api_response_message
	# Maps deb file name -> architecture it was built for.
	declare -A debfiles_catalog
	local arch
	for arch in all aarch64 arm i686 x86_64; do
		# Regular package.
		if [ -f "$DEBFILES_DIR_PATH/${1}_${PACKAGE_METADATA['VERSION_FULL']}_${arch}.deb" ]; then
			debfiles_catalog["${1}_${PACKAGE_METADATA['VERSION_FULL']}_${arch}.deb"]=${arch}
		fi
		# Static library package.
		if [ -f "$DEBFILES_DIR_PATH/${1}-static_${PACKAGE_METADATA['VERSION_FULL']}_${arch}.deb" ]; then
			debfiles_catalog["${1}-static_${PACKAGE_METADATA['VERSION_FULL']}_${arch}.deb"]=${arch}
		fi
		# Discover subpackages.
		local file
		for file in $(find "$TERMUX_PACKAGES_BASEDIR/packages/${1}/" -maxdepth 1 -type f -iname \*.subpackage.sh | sort); do
			file=$(basename "$file")
			if [ -f "$DEBFILES_DIR_PATH/${file%%.subpackage.sh}_${PACKAGE_METADATA['VERSION_FULL']}_${arch}.deb" ]; then
				debfiles_catalog["${file%%.subpackage.sh}_${PACKAGE_METADATA['VERSION_FULL']}_${arch}.deb"]=${arch}
			fi
		done
	done
	# Verify that our catalog is not empty.
	# nounset is temporarily disabled around the length check - presumably
	# because expanding an empty associative array trips 'unbound variable'
	# on some bash versions; confirm before simplifying.
	set +o nounset
	if [ ${#debfiles_catalog[@]} -eq 0 ]; then
		set -o nounset
		msg " * ${1}: skipping because no files to upload."
		SCRIPT_ERROR_EXIT=true
		return 1
	fi
	set -o nounset
	if $SCRIPT_EMERG_EXIT; then
		emergency_exit
	fi
	# Create new entry for package.
	msg -n " * ${1}: creating entry for version '${PACKAGE_METADATA['VERSION_FULL']}'... "
	curl_response=$(
		curl \
			--silent \
			--user "${BINTRAY_USERNAME}:${BINTRAY_API_KEY}" \
			--request POST \
			--header "Content-Type: application/json" \
			--data "$(json_metadata_dump)" \
			--write-out "|%{http_code}" \
			"https://api.bintray.com/packages/${BINTRAY_SUBJECT}/${BINTRAY_REPO_NAME}"
	)
	http_status_code=$(echo "$curl_response" | cut -d'|' -f2)
	api_response_message=$(echo "$curl_response" | cut -d'|' -f1 | jq -r .message)
	# 409 (conflict) is accepted: the package entry already exists.
	if [ "$http_status_code" != "201" ] && [ "$http_status_code" != "409" ]; then
		msg "$api_response_message"
		SCRIPT_ERROR_EXIT=true
		return 1
	fi
	if $SCRIPT_EMERG_EXIT; then
		emergency_exit
	fi
	for item in "${!debfiles_catalog[@]}"; do
		local package_arch=${debfiles_catalog[$item]}
		if $SCRIPT_EMERG_EXIT; then
			emergency_exit
		fi
		msg -ne "\\r\\e[2K * ${1}: uploading '$item'... "
		# Debian-specific X-Bintray headers place the file into the right
		# distribution/component/architecture index.
		curl_response=$(
			curl \
				--silent \
				--user "${BINTRAY_USERNAME}:${BINTRAY_API_KEY}" \
				--request PUT \
				--header "X-Bintray-Debian-Distribution: $BINTRAY_REPO_DISTRIBUTION" \
				--header "X-Bintray-Debian-Component: $BINTRAY_REPO_COMPONENT" \
				--header "X-Bintray-Debian-Architecture: $package_arch" \
				--header "X-Bintray-Package: ${1}" \
				--header "X-Bintray-Version: ${PACKAGE_METADATA['VERSION_FULL']}" \
				--upload-file "$DEBFILES_DIR_PATH/$item" \
				--write-out "|%{http_code}" \
				"https://api.bintray.com/content/${BINTRAY_SUBJECT}/${BINTRAY_REPO_NAME}/${package_arch}/${item}"
		)
		http_status_code=$(echo "$curl_response" | cut -d'|' -f2)
		api_response_message=$(echo "$curl_response" | cut -d'|' -f1 | jq -r .message)
		# 409 is accepted: an already-existing remote file is kept as-is.
		if [ "$http_status_code" != "201" ] && [ "$http_status_code" != "409" ]; then
			msg "$api_response_message"
			SCRIPT_ERROR_EXIT=true
			return 1
		fi
		if $SCRIPT_EMERG_EXIT; then
			emergency_exit
		fi
	done
	# Publishing package only after uploading all it's files. This will prevent
	# spawning multiple metadata-generation jobs and will allow to sign metadata
	# with maintainer's key.
	msg -ne "\\r\\e[2K * ${1}: publishing... "
	curl_response=$(
		curl \
			--silent \
			--user "${BINTRAY_USERNAME}:${BINTRAY_API_KEY}" \
			--request POST \
			--header "Content-Type: application/json" \
			--data "{\"subject\":\"${BINTRAY_GPG_SUBJECT}\",\"passphrase\":\"$BINTRAY_GPG_PASSPHRASE\"}" \
			--write-out "|%{http_code}" \
			"https://api.bintray.com/content/${BINTRAY_SUBJECT}/${BINTRAY_REPO_NAME}/${1}/${PACKAGE_METADATA['VERSION_FULL']}/publish"
	)
	http_status_code=$(echo "$curl_response" | cut -d'|' -f2)
	api_response_message=$(echo "$curl_response" | cut -d'|' -f1 | jq -r .message)
	if [ "$http_status_code" = "200" ]; then
		msg -e "\\r\\e[2K * ${1}: success"
	else
		msg "$api_response_message"
		SCRIPT_ERROR_EXIT=true
		return 1
	fi
}
## Extract value of specified variable from build.sh script.
## Takes 2 arguments: package name, variable name.
get_package_property() {
	local buildsh_path="$TERMUX_PACKAGES_BASEDIR/packages/$1/build.sh"
	local extracted_value
	# Evaluate in a command-substitution subshell so the sourced scripts
	# cannot alter this script's state or shell options.
	extracted_value=$(
		set +o nounset
		set -o noglob
		# When sourcing external code, do not expose variables
		# with sensitive information.
		unset BINTRAY_API_KEY
		unset BINTRAY_GPG_PASSPHRASE
		unset BINTRAY_GPG_SUBJECT
		unset BINTRAY_SUBJECT
		unset BINTRAY_USERNAME
		if [ -e "$TERMUX_PACKAGES_BASEDIR/scripts/properties.sh" ]; then
			. "$TERMUX_PACKAGES_BASEDIR/scripts/properties.sh" 2>/dev/null
		fi
		. "$buildsh_path" 2>/dev/null
		# Indirect expansion: print the value of the variable named by $2.
		echo "${!2}"
		set +o noglob
		set -o nounset
	)
	echo "$extracted_value"
}
## Execute desired action on specified packages.
## Takes arbitrary amount of arguments - package names.
process_packages() {
	local package_name
	local package_name_list
	local buildsh_path
	msg
	msg "Repository: $BINTRAY_REPO_NAME"
	msg
	# Announce the selected mode; metadata regeneration needs no
	# per-package processing and returns early.
	if $PACKAGE_CLEANUP_MODE; then
		msg "[@] Removing old versions:"
	elif $PACKAGE_DELETION_MODE; then
		msg "[@] Deleting packages from remote:"
	elif $METADATA_GEN_MODE; then
		recalculate_metadata
		msg "[@] Finished."
		return 0
	else
		msg "[@] Uploading packages:"
	fi
	msg
	block_terminal
	# Remove duplicates from the list of the package names.
	readarray -t package_name_list < <(printf '%s\n' "${@}" | sort -u)
	for package_name in "${package_name_list[@]}"; do
		if $SCRIPT_EMERG_EXIT; then
			emergency_exit
		fi
		if $PACKAGE_DELETION_MODE; then
			delete_package "$package_name" || continue
		else
			if [ ! -f "$TERMUX_PACKAGES_BASEDIR/packages/$package_name/build.sh" ]; then
				msg " * ${package_name}: skipping because such package does not exist."
				SCRIPT_ERROR_EXIT=true
				continue
			fi
			# Collect metadata fields from build.sh; every mandatory field
			# that turns out empty aborts processing of this package.
			PACKAGE_METADATA["NAME"]="$package_name"
			PACKAGE_METADATA["LICENSES"]=$(get_package_property "$package_name" "TERMUX_PKG_LICENSE")
			if [ -z "${PACKAGE_METADATA['LICENSES']}" ]; then
				msg " * ${package_name}: skipping because field 'TERMUX_PKG_LICENSE' is empty."
				SCRIPT_ERROR_EXIT=true
				continue
			elif grep -qP '.*(custom|non-free).*' <(echo "${PACKAGE_METADATA['LICENSES']}"); then
				msg " * ${package_name}: skipping because it has custom license."
				SCRIPT_ERROR_EXIT=true
				continue
			fi
			PACKAGE_METADATA["DESCRIPTION"]=$(get_package_property "$package_name" "TERMUX_PKG_DESCRIPTION")
			if [ -z "${PACKAGE_METADATA['DESCRIPTION']}" ]; then
				msg " * ${package_name}: skipping because field 'TERMUX_PKG_DESCRIPTION' is empty."
				SCRIPT_ERROR_EXIT=true
				continue
			fi
			PACKAGE_METADATA["WEBSITE_URL"]=$(get_package_property "$package_name" "TERMUX_PKG_HOMEPAGE")
			if [ -z "${PACKAGE_METADATA['WEBSITE_URL']}" ]; then
				msg " * ${package_name}: skipping because field 'TERMUX_PKG_HOMEPAGE' is empty."
				SCRIPT_ERROR_EXIT=true
				continue
			fi
			PACKAGE_METADATA["VERSION"]=$(get_package_property "$package_name" "TERMUX_PKG_VERSION")
			if [ -z "${PACKAGE_METADATA['VERSION']}" ]; then
				msg " * ${package_name}: skipping because field 'TERMUX_PKG_VERSION' is empty."
				SCRIPT_ERROR_EXIT=true
				continue
			fi
			# Full version is 'version-revision'. When no revision is set but
			# the version itself contains '-', an explicit '-0' is appended.
			PACKAGE_METADATA["REVISION"]=$(get_package_property "$package_name" "TERMUX_PKG_REVISION")
			if [ -n "${PACKAGE_METADATA['REVISION']}" ]; then
				PACKAGE_METADATA["VERSION_FULL"]="${PACKAGE_METADATA['VERSION']}-${PACKAGE_METADATA['REVISION']}"
			else
				if [ "${PACKAGE_METADATA['VERSION']}" != "${PACKAGE_METADATA['VERSION']/-/}" ]; then
					PACKAGE_METADATA["VERSION_FULL"]="${PACKAGE_METADATA['VERSION']}-0"
				else
					PACKAGE_METADATA["VERSION_FULL"]="${PACKAGE_METADATA['VERSION']}"
				fi
			fi
			if $PACKAGE_CLEANUP_MODE; then
				delete_old_versions_from_package "$package_name" || continue
			else
				upload_package "$package_name" || continue
			fi
		fi
	done
	if $SCRIPT_EMERG_EXIT; then
		emergency_exit
	fi
	unblock_terminal
	msg
	# Cleanup/deletion changed repository content - refresh remote metadata.
	if $PACKAGE_CLEANUP_MODE || $PACKAGE_DELETION_MODE; then
		recalculate_metadata
	fi
	msg "[@] Finished."
}
## Just print information about usage.
## Takes no arguments.
show_usage() {
	msg
	msg "Usage: package_uploader.sh [OPTIONS] [package name] ..."
	msg
	msg "A command line client for Bintray designed for managing"
	msg "Termux *.deb packages."
	msg
	msg "=========================================================="
	msg
	msg "Primarily intended to be used by CI systems for automatic"
	msg "package uploads but it can be used for manual uploads too."
	msg
	msg "Before using this script, check that you have all"
	msg "necessary credentials for accessing repository."
	msg
	msg "Credentials are specified via environment variables:"
	msg
	msg " BINTRAY_USERNAME - User name."
	msg " BINTRAY_API_KEY - User's API key."
	msg " BINTRAY_GPG_SUBJECT - Owner of GPG key."
	msg " BINTRAY_GPG_PASSPHRASE - GPG key passphrase."
	msg
	msg "=========================================================="
	msg
	msg "Options:"
	msg
	msg " -h, --help Print this help."
	msg
	msg " -c, --cleanup Action. Clean selected packages by"
	msg " removing older versions from the remote."
	msg
	msg " -d, --delete Action. Remove selected packages from"
	msg " remote."
	msg
	msg " -r, --regenerate Action. Request metadata recalculation"
	msg " and signing on the remote."
	msg
	msg
	msg " -p, --path [path] Specify a directory containing *.deb"
	msg " files ready for uploading."
	msg " Default is './debs'."
	msg
	msg "=========================================================="
}
###################################################################
# Script entry point: parse command line options.
trap request_emerg_exit INT
# Optstring notes: the leading ':' selects silent error reporting and the
# '-' pseudo-option is used below to emulate GNU-style '--long' options.
while getopts ":-:hcdrp:" opt; do
	case "$opt" in
		-)
			# Long options: OPTARG holds the text after '--'.
			case "$OPTARG" in
				help)
					show_usage
					exit 0
					;;
				cleanup)
					PACKAGE_CLEANUP_MODE=true
					;;
				delete)
					PACKAGE_DELETION_MODE=true
					;;
				regenerate)
					METADATA_GEN_MODE=true;
					;;
				path)
					# Manually consume the next positional parameter as the
					# option's argument (getopts cannot do this for long opts).
					DEBFILES_DIR_PATH="${!OPTIND}"
					OPTIND=$((OPTIND + 1))
					if [ -z "$DEBFILES_DIR_PATH" ]; then
						msg "[!] Option '--${OPTARG}' requires argument."
						show_usage
						exit 1
					fi
					if [ ! -d "$DEBFILES_DIR_PATH" ]; then
						msg "[!] Directory '$DEBFILES_DIR_PATH' does not exist."
						show_usage
						exit 1
					fi
					;;
				*)
					msg "[!] Invalid option '$OPTARG'."
					show_usage
					exit 1
					;;
			esac
			;;
		h)
			show_usage
			exit 0
			;;
		c)
			PACKAGE_CLEANUP_MODE=true
			;;
		d)
			PACKAGE_DELETION_MODE=true
			;;
		r)
			METADATA_GEN_MODE=true
			;;
		p)
			DEBFILES_DIR_PATH="${OPTARG}"
			if [ ! -d "$DEBFILES_DIR_PATH" ]; then
				msg "[!] Directory '$DEBFILES_DIR_PATH' does not exist."
				show_usage
				exit 1
			fi
			;;
		*)
			msg "[!] Invalid option '-${OPTARG}'."
			show_usage
			exit 1
			;;
	esac
done
shift $((OPTIND - 1))
# Remaining arguments are package names; they are required unless we are
# only regenerating repository metadata.
if [ $# -lt 1 ] && ! $METADATA_GEN_MODE; then
	msg "[!] No packages specified."
	show_usage
	exit 1
fi
# These variables should never be changed.
readonly DEBFILES_DIR_PATH
readonly PACKAGE_DELETION_MODE
readonly PACKAGE_CLEANUP_MODE
readonly TERMUX_PACKAGES_BASEDIR
# Check if no mutually exclusive options used.
if $PACKAGE_CLEANUP_MODE && $METADATA_GEN_MODE; then
	msg "[!] Options '-c|--cleanup' and '-r|--regenerate' are mutually exclusive."
	exit 1
fi
if $PACKAGE_CLEANUP_MODE && $PACKAGE_DELETION_MODE; then
	msg "[!] Options '-c|--cleanup' and '-d|--delete' are mutually exclusive."
	exit 1
fi
if $PACKAGE_DELETION_MODE && $METADATA_GEN_MODE; then
	msg "[!] Options '-d|--delete' and '-r|--regenerate' are mutually exclusive."
	exit 1
fi
# Without Bintray credentials this script is useless.
if [ -z "$BINTRAY_USERNAME" ]; then
	msg "[!] Variable 'BINTRAY_USERNAME' is not set."
	exit 1
fi
if [ -z "$BINTRAY_API_KEY" ]; then
	msg "[!] Variable 'BINTRAY_API_KEY' is not set."
	exit 1
fi
if [ -z "$BINTRAY_GPG_SUBJECT" ]; then
	msg "[!] Variable 'BINTRAY_GPG_SUBJECT' is not set."
	exit 1
fi
process_packages "$@"
# Propagate per-package failures through the exit status.
if $SCRIPT_ERROR_EXIT; then
	exit 1
else
	exit 0
fi
|
#!/bin/bash
#
# Copyright 2020 IBM Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Sign each YAML document of <input-file> with the GPG key belonging to
# <signer-email>, emitting ResourceSignature manifests to <output-file>.
CMDNAME=`basename $0`
# Exactly three positional arguments are required.
if [ $# -ne 3 ]; then
  echo "Usage: $CMDNAME <signer-email> <input-file> <output-file>" 1>&2
  exit 1
fi
if [ ! -e $2 ]; then
  echo "$2 does not exist"
  exit 1
fi
# yq is required for reading and editing YAML documents below.
if ! [ -x "$(command -v yq)" ]; then
  echo 'Error: yq is not installed.' >&2
  exit 1
fi
SIGNER=$1
INPUT_FILE=$2
OUTPUT_FILE=$3
if [ -z $SIGNER ]; then
  echo "Signer email is empty, please provide it."
  exit 1
fi
if [ ! -f $INPUT_FILE ]; then
  echo "Input file does not exist, please create it."
  exit 1
fi
# Scratch directory for intermediate files; defaults to /tmp.
if [ -z "$TMP_DIR" ]; then
  echo "TMP_DIR is empty. Setting /tmp as default"
  TMP_DIR="/tmp"
fi
if [ ! -d $TMP_DIR ]; then
  echo "$TMP_DIR directory does not exist, please create it."
  exit 1
fi
RSC_TEMPLATE=""
# compute signature (and encoded message and certificate)
# Skeleton ResourceSignature manifest; name, labels, message and signature
# are filled in per input document further below.
RSC_TEMPLATE=`cat << EOF
apiVersion: apis.integrityshield.io/v1alpha1
kind: ResourceSignature
metadata:
  annotations:
    integrityshield.io/messageScope: spec
    integrityshield.io/signature: ""
  name: ""
spec:
  data:
  - message: ""
    signature: ""
    type: resource
EOF`
# GNU base64 needs -w 0 to suppress line wrapping; macOS base64 does not
# wrap by default.
# NOTE(review): $base stays unset on any other OSTYPE, which would break
# the pipelines below - confirm the set of supported platforms.
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
  base='base64 -w 0'
elif [[ "$OSTYPE" == "darwin"* ]]; then
  base='base64'
fi
# Major version of yq (the 3.x and 4.x CLIs are incompatible).
YQ_VERSION=$(yq --version 2>&1 | awk '{print $3}' | cut -c 1 )
# message
msg=`cat $INPUT_FILE | gzip -c | $base`
# signature
sig=`cat ${INPUT_FILE} > $TMP_DIR/temp-aaa.yaml; gpg -u $SIGNER --detach-sign --armor --output - $TMP_DIR/temp-aaa.yaml | $base`
sigtime=`date +%s`
# Start from a clean output file.
if [ -f $OUTPUT_FILE ]; then
  rm $OUTPUT_FILE
fi
if [[ $YQ_VERSION == "3" ]]; then
  # yq v3: iterate over all documents rendered as JSON lines.
  RSC_COUNT=`yq r -d'*' ${INPUT_FILE} --tojson | wc -l`
  indx=0
  # NOTE(review): the pipe runs this loop in a subshell, so 'indx' changes
  # are invisible after the loop - currently only used inside it.
  yq r -d'*' ${INPUT_FILE} -j | while read doc;
  do
    # apiVersion with '/' mapped to '_' so it is usable as a label value.
    resApiVer=$(echo $doc | yq r - 'apiVersion' | tr / _)
    if [ -z "$resApiVer" ] || [ "$resApiVer" = "null" ] ; then
      break
    fi
    resKind=$(echo $doc | yq r - 'kind')
    reslowerkind=$(echo $resKind | tr "[:upper:]" "[:lower:]")
    resname=$(echo $doc | yq r - 'metadata.name')
    rsigname="rsig-${reslowerkind}-${resname}"
    # Append a fresh template document and fill in its name and labels.
    echo "$RSC_TEMPLATE" >> $OUTPUT_FILE
    yq w -i -d$indx $OUTPUT_FILE metadata.name $rsigname
    yq w -i -d$indx $OUTPUT_FILE 'metadata.labels."integrityshield.io/sigobject-apiversion"' $resApiVer
    yq w -i -d$indx $OUTPUT_FILE 'metadata.labels."integrityshield.io/sigobject-kind"' $resKind
    yq w -i -d$indx --tag !!str $OUTPUT_FILE 'metadata.labels."integrityshield.io/sigtime"' $sigtime
    indx=$[$indx+1]
    # Separate documents with '---' except after the last one.
    if (( $indx < $RSC_COUNT )) ; then
      echo "---" >> $OUTPUT_FILE
    fi
  done
elif [[ $YQ_VERSION == "4" ]]; then
  # yq v4: select each document by index (di) until apiVersion comes back empty.
  indx=0
  while true
  do
    resApiVer=$(yq eval ".apiVersion | select(di == $indx)" ${INPUT_FILE} | sed 's/\//_/g')
    resKind=$(yq eval ".kind | select(di == $indx)" ${INPUT_FILE} | sed 's/\//_/g')
    reslowerkind=$(echo $resKind | tr '[:upper:]' '[:lower:]')
    resname=$(yq eval ".metadata.name | select(di == $indx)" ${INPUT_FILE})
    rsigname="rsig-${reslowerkind}-${resname}"
    if [ -z "$resApiVer" ]; then
      break
    else
      # Build each ResourceSignature in a temp file, then append it.
      TMP_FILE="$TMP_DIR/${rsigname}.yaml"
      echo "$RSC_TEMPLATE" >> ${TMP_FILE}
      yq eval ".metadata.name = \"$rsigname\"" -i $TMP_FILE
      yq eval ".metadata.labels.\"integrityshield.io/sigobject-apiversion\" = \"$resApiVer\"" -i $TMP_FILE
      yq eval ".metadata.labels.\"integrityshield.io/sigobject-kind\" = \"$resKind\"" -i $TMP_FILE
      yq eval ".metadata.labels.\"integrityshield.io/sigtime\" = \"$sigtime\"" -i $TMP_FILE
      if [ -f $TMP_FILE ]; then
        cat $TMP_FILE >> $OUTPUT_FILE
        rm $TMP_FILE
      fi
      echo "---" >> $OUTPUT_FILE
      indx=$[$indx+1]
    fi
  done
fi
# Strip the trailing '---' separator (sed -i takes a mandatory suffix
# argument on macOS, hence the empty '').
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
  sed -i '$ s/---//g' $OUTPUT_FILE
elif [[ "$OSTYPE" == "darwin"* ]]; then
  sed -i '' '$ s/---//g' $OUTPUT_FILE
fi
# Write the gzipped+base64 message and its signature into every document.
if [[ $YQ_VERSION == "3" ]]; then
  yq w -i -d* $OUTPUT_FILE spec.data.[0].message $msg
  yq w -i -d* $OUTPUT_FILE spec.data.[0].signature $sig
elif [[ $YQ_VERSION == "4" ]]; then
  yq eval ".spec.data.[0].message |= \"$msg\"" -i $OUTPUT_FILE
  yq eval ".spec.data.[0].signature |= \"$sig\"" -i $OUTPUT_FILE
fi
# resource signature spec content
if [[ $YQ_VERSION == "3" ]]; then
  rsigspec=`cat $OUTPUT_FILE | yq r - -j |jq -r '.spec' | yq r - --prettyPrint | $base`
elif [[ $YQ_VERSION == "4" ]]; then
  rsigspec=`yq eval '.spec' $OUTPUT_FILE | $base`
fi
# resource signature signature
rsigsig=`echo -e "$rsigspec" > $TMP_DIR/temp-rsig.yaml; gpg -u $SIGNER --detach-sign --armor --output - $TMP_DIR/temp-rsig.yaml | $base`
if [[ $YQ_VERSION == "3" ]]; then
  yq w -i -d* $OUTPUT_FILE 'metadata.annotations."integrityshield.io/signature"' $rsigsig
elif [[ $YQ_VERSION == "4" ]]; then
  yq eval ".metadata.annotations.\"integrityshield.io/signature\" = \"$rsigsig\"" -i $OUTPUT_FILE
fi
# Remove intermediate files.
if [ -f $TMP_DIR/temp-aaa.yaml ]; then
  rm $TMP_DIR/temp-aaa.yaml
fi
if [ -f $TMP_DIR/temp-rsig.yaml ]; then
  rm $TMP_DIR/temp-rsig.yaml
fi
|
<filename>amaascore/assets/sukuk.py
from __future__ import absolute_import, division, print_function, unicode_literals
from amaascore.assets.asset import Asset
class Sukuk(Asset):
    """A Sukuk instrument, modelled as a fungible Asset with a maturity date."""

    def __init__(self, asset_manager_id, asset_id, maturity_date, asset_issuer_id=None,
                 asset_status='Active', roll_price=True, issue_date=None, display_name='', description='',
                 country_id=None, venue_id=None, currency=None,
                 comments=None, links=None, references=None, *args, **kwargs):
        """Create a Sukuk.

        :param asset_manager_id: identifier of the owning asset manager
        :param asset_id: identifier of this asset
        :param maturity_date: date at which the instrument matures
        All remaining parameters are forwarded unchanged to ``Asset.__init__``;
        ``fungible`` is always True for this asset class.
        """
        if not hasattr(self, 'asset_class'):  # A more specific child class may have already set this
            self.asset_class = 'Sukuk'
        self.maturity_date = maturity_date
        super(Sukuk, self).__init__(asset_manager_id=asset_manager_id, asset_id=asset_id, fungible=True,
                                    asset_issuer_id=asset_issuer_id, asset_status=asset_status,
                                    roll_price=roll_price, display_name=display_name, currency=currency,
                                    description=description, country_id=country_id, venue_id=venue_id,
                                    comments=comments, links=links, references=references,
                                    issue_date=issue_date,
                                    *args, **kwargs)
|
# models.py
class Profile(models.Model):
    # One profile per user; deleting the User also deletes the Profile.
    user = models.OneToOneField(User,related_name='profile',on_delete=models.CASCADE)
    address = models.TextField()
    city = models.CharField(max_length=255)
    # NOTE(review): an IntegerField drops leading zeros and cannot hold '+'
    # prefixes; a CharField is usually safer for phone numbers - confirm.
    phone = models.IntegerField(default=0)
# serializers.py
class ProfileSerializer(serializers.ModelSerializer):
    """Serializes Profile instances: id, the user FK and all scalar fields."""
    class Meta:
        model = Profile
        fields = ('id','user','address','city','phone' )
# views.py
class ProfileView(APIView):
    """API endpoint that creates user profiles."""

    def post(self, request):
        """Create a Profile from the POSTed payload.

        Returns 201 with the serialized profile on success, otherwise 400
        carrying the field-level validation errors.
        """
        serialized = ProfileSerializer(data=request.data)
        if serialized.is_valid():
            serialized.save()
            return Response(serialized.data, status=status.HTTP_201_CREATED)
        # Invalid payload: report the validation errors to the client.
        return Response(serialized.errors, status=status.HTTP_400_BAD_REQUEST)
import Vue from 'vue';
import Vuex from 'vuex';
Vue.use(Vuex);
export default new Vuex.Store({
  state: {
    // Current wizard step (0-based).
    step: 0,
    // Ailment choices currently offered to the user.
    ailments: [
      "<NAME>",
      "Migraines",
      "<NAME>",
      "Stress",
      "Vertigo",
      "Nausea"
    ],
    // Master copy of all ailment choices.
    ailmentsLib: [
      "<NAME>",
      "Migraines",
      "<NAME>",
      "Stress",
      "Vertigo",
      "Nausea"
    ],
    firstName: null, //
    email: null,
    zip: null,
    ailment: [], // List To Be Mailed
    effect: {
      high: null, // 0 - 5
      time: null, // Day - Night
      feeling: [] // List
    },
    personality: [],
    // When true, the UI's "Next" button is disabled.
    disableNext: false
  },
  mutations: {
    // Reset the quiz answers; firstName/email/zip are not reset here -
    // presumably intentional so contact details survive a restart; confirm.
    resetState (state){
      state.step = 0;
      state.ailment = []; // List
      state.effect = {
        high: null, // 0 - 5
        time: null, // Day - Night
        feeling: [] // List
      };
      state.personality = [];
      state.disableNext = false;
    },
    toggleNext (state){
      state.disableNext = !state.disableNext
    },
    setStep (state, n){
      state.step = n;
    },
    setFirstName (state, payload){
      state.firstName = payload;
    },
    setEmail (state, payload){
      state.email = payload;
    },
    setZip (state, payload){
      state.zip = payload;
    },
    // Keep the maximum rating seen so far.
    // NOTE(review): 'rating' is never declared in state above, so it starts
    // undefined and is non-reactive - confirm whether it should be
    // initialised in state (e.g. rating: 0).
    updateRating (state, n) {
      state.rating = n > state.rating ? n : state.rating;
    },
    addAilment (state, payload){
      state.ailment.push(payload)
    },
    removeAilment (state, payload){
      let i = state.ailment.indexOf(payload);
      state.ailment.splice(i, 1);
    },
    setHigh (state, payload) {
      state.effect.high = payload
    },
    setTime (state, payload) {
      state.effect.time = payload
    },
    addFeeling (state, payload){
      state.effect.feeling.push(payload)
    },
    removeFeeling (state, payload){
      let i = state.effect.feeling.indexOf(payload);
      state.effect.feeling.splice(i, 1);
    },
    addPersonality (state, payload){
      state.personality.push(payload)
    },
    removePersonality (state, payload){
      let i = state.personality.indexOf(payload);
      state.personality.splice(i, 1);
    }
  },
  actions: {
    // setRating (context, n) {
    //   context.commit('updateRating', n)
    // }
  }
});
|
import re
def extract_emails(string):
    """Return every email address found in ``string``.

    The previous pattern ``[\\w\\.-]+@[\\w\\.-]+`` also swallowed punctuation
    directly after an address (e.g. a sentence-ending period), returning
    ``another@example.org.`` instead of ``another@example.org``. The domain
    part now requires dot-separated labels ending in a word character.

    :param string: arbitrary text to scan
    :return: list of matched addresses, in order of appearance
    """
    # Local part: word chars plus . + - ; domain: one or more dot-separated labels.
    email_regex = re.compile(r'[\w.+-]+@[\w-]+(?:\.[\w-]+)+')
    return re.findall(email_regex, string)

emails = extract_emails("Here are emails: myname@example.com and another@example.org.")
print(emails)  # prints ['myname@example.com', 'another@example.org']
#!/bin/bash
# Container entrypoint: load secrets, run Django deployment checks and
# collect static files when running with production settings, then exec
# the container command.
#
# NOTE(review): the original shebang was /bin/sh, but the script relies on
# bashisms ('set -o pipefail', 'source', '[[ ]]') that fail under a POSIX
# sh such as dash - it must run under bash.
set -o errexit
set -o pipefail

# Export Docker secrets as environment variables.
source "$HOMEAPP/docker-secrets-to-env-var.sh"

if [[ "${DJANGO_SETTINGS_MODULE}" == "${PROJECT_NAME}.config.production" ]]; then
    python "$HOMEAPP/manage.py" check --deploy
    python "$HOMEAPP/manage.py" collectstatic --noinput
    # You can execute `compilemessages` here too, like this:
    # python $HOMEAPP/manage.py compilemessages --locale=pt_BR
fi

# Replace this shell with the container command (keeps signals/PID 1 sane).
exec "$@"
|
#include <climits>
#include <cstdio>
#include <iostream>

using namespace std;
// Print the largest and the second-largest *distinct* values in arr.
// arr: the input array (read only); arr_size: number of elements in arr.
// Prints an error message for arrays shorter than two elements, and a
// notice when no distinct second-largest value exists.
void print2largest(int arr[], int arr_size)
{
    int i, first, second;
    /* There should be atleast two elements */
    if (arr_size < 2)
    {
        printf(" Invalid Input ");
        return;
    }
    // Sentinel: nothing seen yet for either slot.
    first = second = INT_MIN;
    for (i = 0; i < arr_size ; i++)
    {
        /* If current element is greater than first
           then update both first and second */
        if (arr[i] > first)
        {
            second = first;
            first = arr[i];
        }
        /* If arr[i] is in between first and second
           then update second (skipping duplicates of first) */
        else if (arr[i] > second && arr[i] != first)
            second = arr[i];
    }
    // second stays INT_MIN when all elements are equal - no distinct runner-up.
    if (second == INT_MIN)
        printf("There is no second largest element\n");
    else
        printf("The largest element is %d and second largest element is %d\n", first, second);
}
/* Driver program to test above function */
int main()
{
    // Sample data; expected output: largest 60, second largest 44.
    int arr[] = {10, 7, 20, 35, 60, 44};
    // Element count derived from the array's total byte size.
    int n = sizeof(arr)/sizeof(arr[0]);
    print2largest(arr, n);
    return 0;
}
# Definition for a binary tree node.
class TreeNode(object):
    """A binary-tree node holding a value and optional left/right children."""

    def __init__(self, x):
        """Create a leaf node whose value is ``x``."""
        self.val = x
        self.left = None
        self.right = None

    def __repr__(self):
        """Render the node as just its value, e.g. ``3``."""
        return str(self.val)
class Solution(object):
    """Checks whether a binary tree is a mirror image of itself."""

    def isSymmetric(self, root):
        """
        Iteratively verify symmetry using an explicit stack of mirror pairs.

        Time: N
        Space: h
        :type root: TreeNode
        :rtype: bool
        """
        if root is None:
            return True
        pending = [(root.left, root.right)]
        while pending:
            left, right = pending.pop()
            if left is None and right is None:
                continue
            if left is None or right is None or left.val != right.val:
                return False
            # Outer children mirror each other, as do the inner children.
            pending.append((left.left, right.right))
            pending.append((left.right, right.left))
        return True

    def isSymmetricRecursive(self, root):
        """
        Recursive variant of the symmetry check.

        Time: N
        Space: h
        :type root: TreeNode
        :rtype: bool
        """
        return root is None or self.isSymmetricRecur(root.left, root.right)

    def isSymmetricRecur(self, left, right):
        """Return True when the two subtrees are mirror images of each other."""
        if left is None or right is None:
            # Symmetric only when both sides are absent.
            return left is right
        return (left.val == right.val
                and self.isSymmetricRecur(left.left, right.right)
                and self.isSymmetricRecur(left.right, right.left))
if __name__ == "__main__":
    # Symmetric tree: children (4, 5) on the left mirror (5, 4) on the right.
    root = TreeNode(1)
    root.left = TreeNode(2)
    root.right = TreeNode(2)
    root.left.left = TreeNode(4)
    root.left.right = TreeNode(5)
    root.right.left = TreeNode(5)
    root.right.right = TreeNode(4)
    print(Solution().isSymmetricRecursive(root))  # expected: True
    # Asymmetric tree: (4, 5) on the left does not mirror (6, 4) on the right.
    root2 = TreeNode(1)
    root2.left = TreeNode(2)
    root2.right = TreeNode(2)
    root2.left.left = TreeNode(4)
    root2.left.right = TreeNode(5)
    root2.right.left = TreeNode(6)
    root2.right.right = TreeNode(4)
    print(Solution().isSymmetricRecursive(root2))  # expected: False
|
/*
* Copyright 2021 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.arturopala.validator
import org.scalacheck.Prop
import org.scalacheck.Prop._
import org.scalacheck.Gen
class ValidatorSpec extends munit.ScalaCheckSuite {
import Validator._
case class Foo(bar: String, bazOpt: Option[Int] = None)
property("Validator.all combines provided validators to verify if all checks passes") {
val nonEmptyStringValidator = Validator.check[String](_.nonEmpty, "string must be non-empty")
val emptyStringValidator = Validator.check[String](_.isEmpty(), "string must be empty")
val validate: Validate[String] = nonEmptyStringValidator & emptyStringValidator
forAll { (string: String) =>
Prop.all(
emptyStringValidator("").isValid,
nonEmptyStringValidator("").isInvalid,
emptyStringValidator(s"a$string").isInvalid,
nonEmptyStringValidator(s"a$string").isValid,
Validator(nonEmptyStringValidator, emptyStringValidator).apply(string).isInvalid,
Validator.all(nonEmptyStringValidator, emptyStringValidator).apply(string).isInvalid,
validate(string).isInvalid
)
}
}
property("Validator.all(with error prefix) combines provided validators to verify if all checks passes") {
val nonEmptyStringValidator = Validator.check[String](_.nonEmpty, "string must be non-empty")
val emptyStringValidator = Validator.check[String](_.isEmpty(), "string must be empty")
val validate: Validate[String] =
Validator.allWithPrefix("foo: ", nonEmptyStringValidator, emptyStringValidator)
forAll { (string: String) =>
Prop.all(
emptyStringValidator("").isValid,
("foo: " @: nonEmptyStringValidator)("").errorString == Some("foo: string must be non-empty"),
(emptyStringValidator.withErrorPrefix("@ "))(s"a$string").errorString == Some("@ string must be empty"),
nonEmptyStringValidator(s"a$string").isValid,
Validator.allWithPrefix("foo_", nonEmptyStringValidator, emptyStringValidator).apply(string).errorString ==
Some(if (string.isEmpty) "foo_string must be non-empty" else "foo_string must be empty"),
Validator.allWithPrefix("bar/", nonEmptyStringValidator, emptyStringValidator).apply(string).errorString ==
Some(if (string.isEmpty) "bar/string must be non-empty" else "bar/string must be empty"),
validate(string).errorString ==
Some(if (string.isEmpty) "foo: string must be non-empty" else "foo: string must be empty")
)
}
}
property("Validator.all(with calculated error prefix) combines provided validators to verify if all checks passes") {
val nonEmptyStringValidator = Validator.check[String](_.nonEmpty, "string must be non-empty")
val emptyStringValidator = Validator.check[String](_.isEmpty(), "string must be empty")
val calculatePrefix: String => String = s => s"${s.take(1)}: "
forAll { (string: String) =>
val f = string.take(1)
Prop.all(
Validator
.allWithComputedPrefix(calculatePrefix, nonEmptyStringValidator, emptyStringValidator)
.apply(string)
.errorString ==
Some(if (string.isEmpty) s"$f: string must be non-empty" else s"$f: string must be empty"),
Validator
.allWithComputedPrefix(calculatePrefix, nonEmptyStringValidator, emptyStringValidator)
.apply(string)
.errorString ==
Some(if (string.isEmpty) s"$f: string must be non-empty" else s"$f: string must be empty"),
Validator
.allWithComputedPrefix(calculatePrefix, nonEmptyStringValidator, emptyStringValidator)
.apply(string)
.errorString ==
Some(if (string.isEmpty) s"$f: string must be non-empty" else s"$f: string must be empty")
)
}
}
property("Validator.any combines provided validators to verify if any of the checks passes") {
val hasDigitValidator = Validator.check[String](_.exists(_.isDigit), "some characters must be digits")
val hasLowerCaseValidator =
Validator.check[String](_.exists(_.isLower), "some characters must be lower case")
val validate: Validate[String] = hasDigitValidator | hasLowerCaseValidator
forAllNoShrink(Gen.alphaChar, Gen.numChar) { (a: Char, d: Char) =>
(a.isLower) ==>
Prop.all(
hasDigitValidator(s"$a/$d").isValid,
hasLowerCaseValidator(s"$a!$d").isValid,
hasDigitValidator(s"${a.toUpper}").errorString == Some("some characters must be digits"),
hasLowerCaseValidator(s"${a.toUpper}").errorString == Some("some characters must be lower case"),
validate(s"$a-$d").isValid,
validate(s"$a-$a").isValid,
validate(s"$d-$d").isValid,
validate(s"$d-$a").isValid,
Validator.any(hasDigitValidator, hasLowerCaseValidator).apply(s"$a-$d").isValid,
Validator.any(hasDigitValidator, hasLowerCaseValidator).apply(s"$a-$a").isValid,
Validator.any(hasDigitValidator, hasLowerCaseValidator).apply(s"$d-$d").isValid,
Validator.any(hasDigitValidator, hasLowerCaseValidator).apply(s"$d-$a").isValid,
validate(s"${a.toUpper}" * d.toInt).errorString(", ") == Some(
"some characters must be digits, some characters must be lower case"
),
Validator
.any(hasDigitValidator, hasLowerCaseValidator)
.apply(s"${a.toUpper}" * d.toInt)
.errorString(", ") == Some(
"some characters must be digits, some characters must be lower case"
)
)
}
}
property("Validator.any(with error prefix) combines provided validators to verify if any of the checks passes") {
val hasDigitValidator = Validator.check[String](_.exists(_.isDigit), "some characters must be digits")
val hasLowerCaseValidator =
Validator.check[String](_.exists(_.isLower), "some characters must be lower case")
forAllNoShrink(Gen.alphaChar, Gen.numChar) { (a: Char, d: Char) =>
(a.isLower) ==>
Prop.all(
Validator.anyWithPrefix("foo_", hasDigitValidator, hasLowerCaseValidator).apply(s"$a-$d").isValid,
Validator.anyWithPrefix("foo_", hasDigitValidator, hasLowerCaseValidator).apply(s"$a-$a").isValid,
Validator.anyWithPrefix("foo_", hasDigitValidator, hasLowerCaseValidator).apply(s"$d-$d").isValid,
Validator.anyWithPrefix("foo_", hasDigitValidator, hasLowerCaseValidator).apply(s"$d-$a").isValid,
Validator
.anyWithPrefix("foo_", hasDigitValidator, hasLowerCaseValidator)
.apply(s"${a.toUpper}" * d.toInt)
.errorString(", ") == Some(
"foo_some characters must be digits, foo_some characters must be lower case"
)
)
}
}
property(
"Validator.any(with calculated error prefix) combines provided validators to verify if any of the checks passes"
) {
val hasDigitValidator = Validator.check[String](_.exists(_.isDigit), "some characters must be digits")
val hasLowerCaseValidator =
Validator.check[String](_.exists(_.isLower), "some characters must be lower case")
val calculatePrefix: String => String = s => s"${s.take(1)}_"
forAllNoShrink(Gen.alphaChar, Gen.numChar) { (a: Char, d: Char) =>
(a.isLower) ==>
Prop.all(
Validator
.anyWithComputedPrefix(calculatePrefix, hasDigitValidator, hasLowerCaseValidator)
.apply(s"${a.toUpper}" * d.toInt)
.errorString(", ") == Some(
s"${a.toUpper}_some characters must be digits, ${a.toUpper}_some characters must be lower case"
)
)
}
}
  // conditionally(predicate, ifTrue, ifFalse): applies `ifTrue` when the
  // predicate holds for the input, otherwise `ifFalse`.
  test("Validator.conditionally runs the test and follows with either first or second check") {
    val validateOnlyDigits = Validator.check[String](_.forall(_.isDigit), "all characters must be digits")
    val validateNonEmpty = Validator.check[String](_.nonEmpty, "must be non empty string")
    def validateLength(length: Int) = Validator.check[String](_.length() == length, s"must have $length characters")
    val validateAllUpperCase = Validator.check[String](_.forall(_.isUpper), "all characters must be upper case")
    val validate: Validate[String] =
      Validator.conditionally[String](
        _.headOption.contains('0'),
        validateLength(3) & validateOnlyDigits,
        validateNonEmpty & validateAllUpperCase
      )
    // inputs starting with '0' must be exactly 3 digits; all others must be
    // non-empty and upper case
    assert(validate("A").isValid)
    assert(validate("AZ").isValid)
    assert(validate("ABC").isValid)
    assert(validate("000").isValid)
    assert(validate("012").isValid)
    assert(validate("").errorString == Some("must be non empty string"))
    assert(validate("Az").errorString == Some("all characters must be upper case"))
    assert(validate("az").errorString == Some("all characters must be upper case"))
    assert(validate("a").errorString == Some("all characters must be upper case"))
    assert(validate("0").errorString == Some("must have 3 characters"))
    assert(validate("00").errorString == Some("must have 3 characters"))
    assert(validate("123").errorString == Some("all characters must be upper case"))
    assert(validate("0000").errorString == Some("must have 3 characters"))
  }
  // whenTrue(predicate, next): `next` applies only when the predicate holds;
  // otherwise the input is accepted unconditionally.
  test("Validator.whenTrue runs the test and if true then follows with the next check") {
    val validateOnlyDigits = Validator.check[String](_.forall(_.isDigit), "all characters must be digits")
    def validateLength(length: Int) = Validator.check[String](_.length() == length, s"must have $length characters")
    val validate: Validate[String] =
      Validator.whenTrue[String](_.headOption.contains('0'), validateLength(3) & validateOnlyDigits)
    assert(validate("000").isValid)
    assert(validate("012").isValid)
    // predicate is false for these, so no further check runs
    assert(validate("A").isValid)
    assert(validate("AZ").isValid)
    assert(validate("ABC").isValid)
    assert(validate("").isValid)
    assert(validate("Az").isValid)
    assert(validate("az").isValid)
    assert(validate("a").isValid)
    assert(validate("123").isValid)
    assert(validate("0").errorString == Some("must have 3 characters"))
    assert(validate("00").errorString == Some("must have 3 characters"))
    assert(validate("0000").errorString == Some("must have 3 characters"))
  }
  // whenFalse(predicate, next): `next` applies only when the predicate does
  // NOT hold; inputs satisfying the predicate are accepted unconditionally.
  test("Validator.whenFalse runs the test and if false then tries the next check") {
    val validateNonEmpty = Validator.check[String](_.nonEmpty, "must be non empty string")
    val validateAllUpperCase = Validator.check[String](_.forall(_.isUpper), "all characters must be upper case")
    val validate: Validate[String] =
      Validator.whenFalse[String](_.headOption.contains('0'), validateNonEmpty & validateAllUpperCase)
    assert(validate("A").isValid)
    assert(validate("AZ").isValid)
    assert(validate("ABC").isValid)
    // anything starting with '0' bypasses the checks entirely
    assert(validate("0").isValid)
    assert(validate("00").isValid)
    assert(validate("000").isValid)
    assert(validate("0000").isValid)
    assert(validate("0abc").isValid)
    assert(validate("012").isValid)
    assert(validate("0123").isValid)
    assert(validate("").errorString == Some("must be non empty string"))
    assert(validate("Az").errorString == Some("all characters must be upper case"))
    assert(validate("az").errorString == Some("all characters must be upper case"))
    assert(validate("a").errorString == Some("all characters must be upper case"))
    assert(validate("1").errorString == Some("all characters must be upper case"))
    assert(validate("12").errorString == Some("all characters must be upper case"))
    assert(validate("123").errorString == Some("all characters must be upper case"))
    assert(validate("1ABC").errorString == Some("all characters must be upper case"))
  }
  // when(guard, ifValid, ifInvalid): like `conditionally` but the branch is
  // chosen by running a guard *validator* instead of a plain predicate.
  test("Validator.when runs the guard check and follows with either first or second check") {
    val validateStartsWithZero =
      Validator.check[String](_.headOption.contains('0'), "first character must be a Zero")
    val validateOnlyDigits = Validator.check[String](_.forall(_.isDigit), "all characters must be digits")
    val validateNonEmpty = Validator.check[String](_.nonEmpty, "must be non empty string")
    def validateLength(length: Int) = Validator.check[String](_.length() == length, s"must have $length characters")
    val validateAllUpperCase = Validator.check[String](_.forall(_.isUpper), "all characters must be upper case")
    val validate: Validate[String] =
      Validator.when(
        validateStartsWithZero,
        validateLength(3) & validateOnlyDigits,
        validateNonEmpty & validateAllUpperCase
      )
    assert(validate("A").isValid)
    assert(validate("AZ").isValid)
    assert(validate("ABC").isValid)
    assert(validate("000").isValid)
    assert(validate("012").isValid)
    assert(validate("").errorString == Some("must be non empty string"))
    assert(validate("Az").errorString == Some("all characters must be upper case"))
    assert(validate("az").errorString == Some("all characters must be upper case"))
    assert(validate("a").errorString == Some("all characters must be upper case"))
    assert(validate("0").errorString == Some("must have 3 characters"))
    assert(validate("00").errorString == Some("must have 3 characters"))
    assert(validate("123").errorString == Some("all characters must be upper case"))
    assert(validate("0000").errorString == Some("must have 3 characters"))
  }
test("Validator.whenValid runs the guard check and if valid then follows with the next check") {
val validateStartsWithZero =
Validator.check[String](_.headOption.contains('0'), "first character must be a Zero")
val validateOnlyDigits = Validator.check[String](_.forall(_.isDigit), "all characters must be digits")
def validateLength(length: Int) = Validator.check[String](_.length() == length, s"must have $length characters")
val validate1: Validate[String] =
Validator.whenValid(validateStartsWithZero, validateLength(3) & validateOnlyDigits).debug
val validate2: Validate[String] =
validateStartsWithZero.andWhenValid(validateLength(3) & validateOnlyDigits)
val validate3: Validate[String] =
validateStartsWithZero ? (validateLength(3) & validateOnlyDigits)
def runtWith(validate: Validate[String]) = {
assert(validate("000").isValid)
assert(validate("012").isValid)
assert(validate("A").errorString == Some("first character must be a Zero"))
assert(validate("AZ").errorString == Some("first character must be a Zero"))
assert(validate("ABC").errorString == Some("first character must be a Zero"))
assert(validate("").errorString == Some("first character must be a Zero"))
assert(validate("Az").errorString == Some("first character must be a Zero"))
assert(validate("az").errorString == Some("first character must be a Zero"))
assert(validate("a").errorString == Some("first character must be a Zero"))
assert(validate("123").errorString == Some("first character must be a Zero"))
assert(validate("0").errorString == Some("must have 3 characters"))
assert(validate("00").errorString == Some("must have 3 characters"))
assert(validate("0000").errorString == Some("must have 3 characters"))
}
runtWith(validate1)
runtWith(validate2)
runtWith(validate3)
}
test("Validator.whenInvalid runs the guard check and if invalid then tries the next check") {
val validateStartsWithZero =
Validator.check[String](_.headOption.contains('0'), "first character must be a Zero")
val validateNonEmpty = Validator.check[String](_.nonEmpty, "must be non empty string")
val validateAllUpperCase = Validator.check[String](_.forall(_.isUpper), "all characters must be upper case")
val validate1: Validate[String] =
Validator.whenInvalid(validateStartsWithZero, validateNonEmpty & validateAllUpperCase)
val validate2: Validate[String] =
validateStartsWithZero.andWhenInvalid(validateNonEmpty & validateAllUpperCase)
val validate3: Validate[String] =
validateStartsWithZero ?! (validateNonEmpty & validateAllUpperCase)
def runtWith(validate: Validate[String]) = {
assert(validate("A").isValid)
assert(validate("AZ").isValid)
assert(validate("ABC").isValid)
assert(validate("0").isValid)
assert(validate("00").isValid)
assert(validate("000").isValid)
assert(validate("0000").isValid)
assert(validate("012").isValid)
assert(validate("0123").isValid)
assert(validate("").errorString == Some("must be non empty string"))
assert(validate("Az").errorString == Some("all characters must be upper case"))
assert(validate("az").errorString == Some("all characters must be upper case"))
assert(validate("a").errorString == Some("all characters must be upper case"))
assert(validate("1").errorString == Some("all characters must be upper case"))
assert(validate("12").errorString == Some("all characters must be upper case"))
assert(validate("123").errorString == Some("all characters must be upper case"))
}
runtWith(validate1)
runtWith(validate2)
runtWith(validate3)
}
  // `*` (product) pairs two validators into a validator of tuples, applying
  // each component validator to its slot.
  property("Validator.product combines provided validators to verify tuples of values") {
    val hasDigitValidator = Validator.check[Char](_.isDigit, "character must be a digit")
    val hasLowerCaseValidator =
      Validator.check[Char](_.isLower, "character must be lower case")
    val validate: Validate[(Char, Char)] = hasDigitValidator * hasLowerCaseValidator
    forAllNoShrink(Gen.alphaChar, Gen.numChar) { (a: Char, d: Char) =>
      (a.isLower && !a.isDigit) ==>
        Prop.all(
          hasDigitValidator(d).isValid,
          hasLowerCaseValidator(a).isValid,
          hasDigitValidator(a).errorString == Some("character must be a digit"),
          hasLowerCaseValidator(a.toUpper).errorString == Some("character must be lower case"),
          validate.apply((d, a)).isValid,
          // both component errors are reported, joined with the default separator
          validate.apply((a, d)).errorString == Some("character must be a digit,character must be lower case")
        )
    }
  }
property("Validator.check returns Valid only if condition fulfilled") {
val validate =
Validator.check[Foo]((foo: Foo) => foo.bar.startsWith("a"), "foo.bar must start with A")
Prop.all(
forAll { (string: String) =>
validate(Foo(s"a$string")).isValid
validate(Foo(s"a$string")).errorString.isEmpty
},
forAll { (string: String, char: Char) =>
(char != 'a') ==>
(validate(Foo(s"$char$string")).errorString == Some("foo.bar must start with A"))
}
)
}
property("Validator.checkEquals returns Valid only if values are the same") {
val validate =
Validator.checkEquals[Foo, Int](_.bar.toInt, _.bazOpt.getOrElse(0), "foo.bar must be the same as foo.baz")
forAll { (int: Int) =>
validate(Foo(int.toString(), Some(int))).isValid
validate(Foo(int.toString(), Some(int - 1))).isInvalid
validate(Foo(int.toString(), Some(int + 1))).isInvalid
}
}
property("Validator.checkNotEquals returns Valid only if values are not the same") {
val validate =
Validator.checkNotEquals[Foo, Int](_.bar.toInt, _.bazOpt.getOrElse(0), "foo.bar must be not the same as foo.baz")
forAll { (int: Int) =>
validate(Foo(int.toString(), Some(int))).isInvalid
validate(Foo(int.toString(), Some(int - 1))).isValid
validate(Foo(int.toString(), Some(int + 1))).isValid
}
}
  // checkIsDefined(f, error): valid only when f(entity) returns Some.
  property("Validator.checkIsDefined returns Valid only if condition returns Some") {
    val validate: Validate[Option[Int]] =
      Validator.checkIsDefined[Option[Int]](identity, "option must be defined")
    Prop.all(
      forAll { (int: Int) =>
        validate(Some(int)).isValid
      },
      validate(None).errorString == Some("option must be defined")
    )
  }
property("Validator.checkIsEmpty returns Valid only if condition returns None") {
val validate: Validate[Option[Int]] =
Validator.checkIsEmpty[Option[Int]](identity, "option must be defined")
Prop.all(
forAll { (int: Int) =>
validate(Some(int)).isInvalid
},
validate(None).isValid
)
}
  // checkFromEither(f): valid when f returns Right; Left's value becomes the error.
  property("Validator.checkFromEither returns Valid only if condition returns Right") {
    val validate: Validate[Int] =
      Validator.checkFromEither[Int]((i: Int) => if (i > 0) Right(i) else Left("must be positive"))
    forAll { (int: Int) =>
      if (int > 0)
        validate(int).isValid
      else
        validate(int).errorString == Some("must be positive")
    }
  }
  // withErrorPrefix prepends the given text to every reported error.
  property("Validator.checkFromEither(with error prefix) returns Valid only if condition returns Right") {
    val validate: Validate[Int] =
      Validator
        .checkFromEither[Int]((i: Int) => if (i > 0) Right(i) else Left("must be positive"))
        .withErrorPrefix("integer ")
    forAll { (int: Int) =>
      if (int > 0)
        validate(int).isValid
      else
        validate(int).errorString == Some("integer must be positive")
    }
  }
  // checkProperty(extractor, validator): validates an extracted field of the entity.
  property("Validator.checkProperty returns Valid only if extracted property passes check") {
    val nonEmptyStringValidator = Validator.check[String](_.nonEmpty, "string must be non-empty")
    val validate: Validate[Foo] =
      Validator.checkProperty[Foo, String]((foo: Foo) => foo.bar, nonEmptyStringValidator)
    forAll { (string: String) =>
      if (string.nonEmpty)
        validate(Foo(string)).isValid
      else
        validate(Foo(string)).errorString == Some("string must be non-empty")
    }
  }
  property("Validator.checkProperty(with error prefix) returns Valid only if nested validator returns Valid") {
    val nonEmptyStringValidator = Validator.check[String](_.nonEmpty, "string must be non-empty")
    val validate: Validate[Foo] =
      Validator
        .checkProperty[Foo, String]((foo: Foo) => foo.bar, nonEmptyStringValidator)
        .withErrorPrefix("Foo.bar ")
    forAll { (string: String) =>
      if (string.nonEmpty)
        validate(Foo(string)).isValid
      else
        validate(Foo(string)).errorString == Some("Foo.bar string must be non-empty")
    }
  }
  // checkIfSome(extractor, validator): validates the Option's content;
  // by default None is considered valid.
  property("Validator.checkIfSome returns Valid only if nested validator returns Valid") {
    val positiveIntegerValidator = Validator.check[Int](_ > 0, "must be positive integer")
    val validate: Validate[Foo] =
      Validator.checkIfSome[Foo, Int]((foo: Foo) => foo.bazOpt, positiveIntegerValidator)
    forAll { (int: Int) =>
      Prop.all(
        validate(Foo("", None)).isValid,
        if (int > 0)
          validate(Foo("", Some(int))).isValid
        else
          validate(Foo("", Some(int))).errorString == Some("must be positive integer")
      )
    }
  }
  // isValidIfNone = false flips the treatment of a missing value to invalid.
  property("Validator.checkIfSome(with invalid if None) returns Valid only if nested validator returns Valid") {
    val positiveIntegerValidator = Validator.check[Int](_ > 0, "must be positive integer")
    val validate: Validate[Foo] =
      Validator.checkIfSome[Foo, Int]((foo: Foo) => foo.bazOpt, positiveIntegerValidator, isValidIfNone = false)
    forAll { (int: Int) =>
      Prop.all(
        validate(Foo("", None)).isInvalid,
        if (int > 0)
          validate(Foo("", Some(int))).isValid
        else
          validate(Foo("", Some(int))).errorString == Some("must be positive integer")
      )
    }
  }
  property("Validator.checkIfSome(with error prefix) returns Valid only if nested validator returns Valid") {
    val positiveIntegerValidator = Validator.check[Int](_ > 0, "must be positive integer")
    val validate: Validate[Foo] =
      Validator
        .checkIfSome[Foo, Int]((foo: Foo) => foo.bazOpt, positiveIntegerValidator, isValidIfNone = true)
        .withErrorPrefix("Foo.bazOpt ")
    forAll { (int: Int) =>
      Prop.all(
        validate(Foo("", None)).isValid,
        if (int > 0)
          validate(Foo("", Some(int))).isValid
        else
          validate(Foo("", Some(int))).errorString == Some("Foo.bazOpt must be positive integer")
      )
    }
  }
  property(
    "Validator.checkIfSome(with error prefix and invalid if none) returns Valid only if nested validator returns Valid"
  ) {
    val positiveIntegerValidator = Validator.check[Int](_ > 0, "must be positive integer")
    val validate: Validate[Foo] =
      Validator
        .checkIfSome[Foo, Int]((foo: Foo) => foo.bazOpt, positiveIntegerValidator, isValidIfNone = false)
        .withErrorPrefix("Foo.bazOpt ")
    forAll { (int: Int) =>
      Prop.all(
        validate(Foo("", None)).isInvalid,
        if (int > 0)
          validate(Foo("", Some(int))).isValid
        else
          validate(Foo("", Some(int))).errorString == Some("Foo.bazOpt must be positive integer")
      )
    }
  }
  // checkEach(extractor, validator): applies the validator to every element
  // of the extracted sequence; an empty sequence is valid.
  property(
    "Validator.checkEach returns Valid only if all elements of the sequence passes check"
  ) {
    case class Ints(seq: Seq[Int])
    val negativeIntegerValidator = Validator.check[Int](_ < 0, "must be negative integer")
    val validate: Validate[Ints] =
      Validator.checkEach[Ints, Int]((i: Ints) => i.seq, negativeIntegerValidator)
    Prop.all(
      validate(Ints(Seq.empty)).isValid,
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(Integer.MIN_VALUE, -1))) { (ints: Seq[Int]) =>
        validate(Ints(ints)).isValid
      },
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(0, Integer.MAX_VALUE))) { (ints: Seq[Int]) =>
        validate(Ints(ints)).errorString == Some("must be negative integer")
      }
    )
  }
  // checkEachWithErrorPrefix: the prefix function receives the element index,
  // so each failing element yields a distinct error (one error per element).
  property(
    "Validator.checkEach(with error prefix fx) returns Valid only if all elements of the sequence passes check"
  ) {
    case class Ints(seq: Seq[Int])
    val negativeIntegerValidator = Validator.check[Int](_ < 0, "must be negative integer")
    val validate: Validate[Ints] =
      Validator
        .checkEachWithErrorPrefix[Ints, Int]((i: Ints) => i.seq, negativeIntegerValidator, (i: Int) => s"is[$i] ")
    Prop.all(
      validate(Ints(Seq.empty)).isValid,
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(Integer.MIN_VALUE, -1))) { (ints: Seq[Int]) =>
        validate(Ints(ints)).isValid
      },
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(0, Integer.MAX_VALUE))) { (ints: Seq[Int]) =>
        val result = validate(Ints(ints))
        Prop.all(result.isInvalid, result.errorsCount == ints.size)
      }
    )
  }
property(
"Validator.checkEach(with error prefix fx) returns Valid only if all elements of the sequence passes check"
) {
case class Ints(seq: Seq[Int])
val negativeIntegerValidator = Validator.check[Int](_ < 0, "must be negative integer")
val validate: Validate[Ints] =
Validator.checkEachWithErrorPrefix[Ints, Int](
(i: Ints) => i.seq,
negativeIntegerValidator,
(_: Int) => s"each element of 'is' "
)
Prop.all(
validate(Ints(Seq.empty)).isValid,
forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(Integer.MIN_VALUE, -1))) { (ints: Seq[Int]) =>
validate(Ints(ints)).isValid
},
forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(0, Integer.MAX_VALUE))) { (ints: Seq[Int]) =>
val result = validate(Ints(ints))
Prop.all(
result.isInvalid,
result.errorsCount == 1,
result.errorString == Some("each element of 'is' must be negative integer")
)
}
)
}
  // checkEachIfNonEmpty: like checkEach, but an EMPTY sequence is invalid.
  property(
    "Validator.checkEachIfNonEmpty returns Valid only if all elements of the sequence passes check"
  ) {
    case class Ints(seq: Seq[Int])
    val negativeIntegerValidator = Validator.check[Int](_ < 0, "must be negative integer")
    val validate: Validate[Ints] =
      Validator.checkEachIfNonEmpty[Ints, Int]((i: Ints) => i.seq, negativeIntegerValidator)
    Prop.all(
      validate(Ints(Seq.empty)).isInvalid,
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(Integer.MIN_VALUE, -1))) { (ints: Seq[Int]) =>
        validate(Ints(ints)).isValid
      },
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(0, Integer.MAX_VALUE))) { (ints: Seq[Int]) =>
        validate(Ints(ints)).errorString == Some("must be negative integer")
      }
    )
  }
  // Index-based prefix: each failing element produces a distinct error.
  property(
    "Validator.checkEachIfNonEmpty(with error prefix fx) returns Valid only if all elements of the sequence passes check"
  ) {
    case class Ints(seq: Seq[Int])
    val negativeIntegerValidator = Validator.check[Int](_ < 0, "must be negative integer")
    val validate: Validate[Ints] =
      Validator
        .checkEachIfNonEmptyWithErrorPrefix[Ints, Int](
          (i: Ints) => i.seq,
          negativeIntegerValidator,
          (i: Int) => s"is[$i] "
        )
    Prop.all(
      validate(Ints(Seq.empty)).isInvalid,
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(Integer.MIN_VALUE, -1))) { (ints: Seq[Int]) =>
        validate(Ints(ints)).isValid
      },
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(0, Integer.MAX_VALUE))) { (ints: Seq[Int]) =>
        val result = validate(Ints(ints))
        Prop.all(result.isInvalid, result.errorsCount == ints.size)
      }
    )
  }
property(
"Validator.checkEachIfNonEmpty(with error prefix fx) returns Valid only if all elements of the sequence passes check"
) {
case class Ints(seq: Seq[Int])
val negativeIntegerValidator = Validator.check[Int](_ < 0, "must be negative integer")
val validate: Validate[Ints] =
Validator.checkEachIfNonEmptyWithErrorPrefix[Ints, Int](
(i: Ints) => i.seq,
negativeIntegerValidator,
(_: Int) => s"each element of 'is' "
)
Prop.all(
validate(Ints(Seq.empty)).isInvalid,
forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(Integer.MIN_VALUE, -1))) { (ints: Seq[Int]) =>
validate(Ints(ints)).isValid
},
forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(0, Integer.MAX_VALUE))) { (ints: Seq[Int]) =>
val result = validate(Ints(ints))
Prop.all(
result.isInvalid,
result.errorsCount == 1,
result.errorString == Some("each element of 'is' must be negative integer")
)
}
)
}
  // checkEachIfSome: validates every element of an optional sequence;
  // by default None is considered valid.
  property(
    "Validator.checkEachIfSome returns Valid only if None or all elements of the sequence passes check"
  ) {
    case class Ints(seqOpt: Option[Seq[Int]])
    val positiveIntegerValidator = Validator.check[Int](_ > 0, "must be positive integer")
    val validate: Validate[Ints] =
      Validator.checkEachIfSome[Ints, Int]((i: Ints) => i.seqOpt, positiveIntegerValidator)
    Prop.all(
      validate(Ints(None)).isValid,
      validate(Ints(Some(Seq.empty))).isValid,
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(Integer.MIN_VALUE, 0))) { (ints: Seq[Int]) =>
        validate(Ints(Some(ints))).errorString == Some("must be positive integer")
      },
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(1, Integer.MAX_VALUE))) { (ints: Seq[Int]) =>
        validate(Ints(Some(ints))).isValid
      }
    )
  }
  // isValidIfNone = false: a missing sequence is invalid, an empty one still valid.
  property(
    "Validator.checkEachIfSome returns Valid only if sequence is defined and all elements of the sequence passes check"
  ) {
    case class Ints(seqOpt: Option[Seq[Int]])
    val positiveIntegerValidator = Validator.check[Int](_ > 0, "must be positive integer")
    val validate: Validate[Ints] =
      Validator.checkEachIfSome[Ints, Int]((i: Ints) => i.seqOpt, positiveIntegerValidator, isValidIfNone = false)
    Prop.all(
      validate(Ints(None)).isInvalid,
      validate(Ints(Some(Seq.empty))).isValid,
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(Integer.MIN_VALUE, 0))) { (ints: Seq[Int]) =>
        validate(Ints(Some(ints))).errorString == Some("must be positive integer")
      },
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(1, Integer.MAX_VALUE))) { (ints: Seq[Int]) =>
        validate(Ints(Some(ints))).isValid
      }
    )
  }
  // Index-based prefix: each failing element produces a distinct error.
  property(
    "Validator.checkEachIfSome(with error prefix) returns Valid only if sequence is None or all elements of the sequence passes check"
  ) {
    case class Ints(seqOpt: Option[Seq[Int]])
    val positiveIntegerValidator = Validator.check[Int](_ > 0, "must be positive integer")
    val validate: Validate[Ints] =
      Validator.checkEachIfSomeWithErrorPrefix[Ints, Int](
        (i: Ints) => i.seqOpt,
        positiveIntegerValidator,
        (i: Int) => s"intsOpt[$i] ",
        isValidIfNone = true
      )
    Prop.all(
      validate(Ints(None)).isValid,
      validate(Ints(Some(Seq.empty))).isValid,
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(Integer.MIN_VALUE, 0))) { (ints: Seq[Int]) =>
        validate(Ints(Some(ints))).errorsCount == ints.size
      },
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(1, Integer.MAX_VALUE))) { (ints: Seq[Int]) =>
        validate(Ints(Some(ints))).isValid
      }
    )
  }
  property(
    "Validator.checkEachIfSome(with error prefix) returns Valid only if sequence is defined and all elements of the sequence passes check"
  ) {
    case class Ints(seqOpt: Option[Seq[Int]])
    val positiveIntegerValidator = Validator.check[Int](_ > 0, "must be positive integer")
    val validate: Validate[Ints] =
      Validator.checkEachIfSomeWithErrorPrefix[Ints, Int](
        (i: Ints) => i.seqOpt,
        positiveIntegerValidator,
        (i: Int) => s"intsOpt[$i] ",
        isValidIfNone = false
      )
    Prop.all(
      validate(Ints(None)).isInvalid,
      validate(Ints(Some(Seq.empty))).isValid,
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(Integer.MIN_VALUE, 0))) { (ints: Seq[Int]) =>
        validate(Ints(Some(ints))).errorsCount == ints.size
      },
      forAll(Gen.nonEmptyContainerOf[Seq, Int](Gen.chooseNum(1, Integer.MAX_VALUE))) { (ints: Seq[Int]) =>
        validate(Ints(Some(ints))).isValid
      }
    )
  }
  // checkIfAllDefined(extractors, description): valid only when EVERY
  // extractor returns Some.
  property(
    "Validator.checkIfAllDefined returns Valid only if all of the provided functions returns Some"
  ) {
    case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
    val validate = Validator.checkIfAllDefined[Bar](Seq(_.a, _.b, _.c, _.d), "a, b, c, d")
    Prop.all(
      validate(Bar(None, None, None, None)).isInvalid,
      validate(Bar(Some(""), None, None, None)).isInvalid,
      validate(Bar(None, Some(0), None, None)).isInvalid,
      validate(Bar(None, None, Some(false), None)).isInvalid,
      validate(Bar(None, None, None, Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(-1), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(Some(""), Some(4), Some(true), Some(Seq(1, 2, 3)))).isValid,
      validate(Bar(Some(""), Some(4), Some(true), None)).isInvalid,
      validate(Bar(Some(""), Some(4), None, None)).isInvalid,
      validate(Bar(Some(""), None, None, None)).isInvalid,
      validate(Bar(Some(""), None, None, Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(4), Some(true), None)).isInvalid
    )
  }
  // checkIfAllEmpty(extractors, description): valid only when EVERY
  // extractor returns None.
  property(
    "Validator.checkIfAllEmpty returns Valid only if all of the provided functions returns None"
  ) {
    case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
    val validate = Validator.checkIfAllEmpty[Bar](Seq(_.a, _.b, _.c, _.d), "a, b, c, d")
    Prop.all(
      validate(Bar(None, None, None, None)).isValid,
      validate(Bar(Some(""), None, None, None)).isInvalid,
      validate(Bar(None, Some(0), None, None)).isInvalid,
      validate(Bar(None, None, Some(false), None)).isInvalid,
      validate(Bar(None, None, None, Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(-1), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(Some(""), Some(4), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(Some(""), Some(4), Some(true), None)).isInvalid,
      validate(Bar(Some(""), Some(4), None, None)).isInvalid,
      validate(Bar(Some(""), None, None, None)).isInvalid,
      validate(Bar(Some(""), None, None, Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(4), Some(true), None)).isInvalid
    )
  }
property(
"Validator.checkIfAllOrNoneDefined returns Valid only if all of the provided functions returns None"
) {
case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
val validate = Validator.checkIfAllOrNoneDefined[Bar](Seq(_.a, _.b, _.c, _.d), "a, b, c, d")
Prop.all(
validate(Bar(None, None, None, None)).isValid,
validate(Bar(Some(""), None, None, None)).isInvalid,
validate(Bar(None, Some(0), None, None)).isInvalid,
validate(Bar(None, None, Some(false), None)).isInvalid,
validate(Bar(None, None, None, Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(None, Some(-1), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(Some(""), Some(4), Some(true), Some(Seq(1, 2, 3)))).isValid,
validate(Bar(Some(""), Some(4), Some(true), None)).isInvalid,
validate(Bar(Some(""), Some(4), None, None)).isInvalid,
validate(Bar(Some(""), None, None, None)).isInvalid,
validate(Bar(Some(""), None, None, Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(None, Some(4), Some(true), None)).isInvalid
)
}
  // checkIfAtLeastOneIsDefined: valid when one or more extractors return Some.
  property(
    "Validator.checkIfAtLeastOneIsDefined returns Valid only if at least one of the provided functions returns Some"
  ) {
    case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
    val validate = Validator.checkIfAtLeastOneIsDefined[Bar](Seq(_.a, _.b, _.c, _.d), "a, b, c, d")
    Prop.all(
      validate(Bar(None, None, None, None)).isInvalid,
      validate(Bar(Some(""), None, None, None)).isValid,
      validate(Bar(None, Some(0), None, None)).isValid,
      validate(Bar(None, None, Some(false), None)).isValid,
      validate(Bar(None, None, None, Some(Seq(1, 2, 3)))).isValid,
      validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isValid,
      validate(Bar(None, Some(-1), Some(true), Some(Seq(1, 2, 3)))).isValid,
      validate(Bar(Some(""), Some(4), Some(true), Some(Seq(1, 2, 3)))).isValid,
      validate(Bar(Some(""), Some(4), Some(true), None)).isValid,
      validate(Bar(Some(""), Some(4), None, None)).isValid,
      validate(Bar(Some(""), None, None, None)).isValid,
      validate(Bar(Some(""), None, None, Some(Seq(1, 2, 3)))).isValid,
      validate(Bar(None, Some(4), Some(true), None)).isValid
    )
  }
  // checkIfAtMostOneIsDefined: valid when zero or one extractor returns Some.
  property(
    "Validator.checkIfAtMostOneIsDefined returns Valid only if at most one of the provided functions returns Some"
  ) {
    case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
    val validate = Validator.checkIfAtMostOneIsDefined[Bar](Seq(_.a, _.b, _.c, _.d), "a, b, c, d")
    Prop.all(
      validate(Bar(None, None, None, None)).isValid,
      validate(Bar(Some(""), None, None, None)).isValid,
      validate(Bar(None, Some(0), None, None)).isValid,
      validate(Bar(None, None, Some(false), None)).isValid,
      validate(Bar(None, None, None, Some(Seq(1, 2, 3)))).isValid,
      validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(-1), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(Some(""), Some(4), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(Some(""), Some(4), Some(true), None)).isInvalid,
      validate(Bar(Some(""), Some(4), None, None)).isInvalid,
      validate(Bar(Some(""), None, None, Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(4), Some(true), None)).isInvalid
    )
  }
  // checkIfOnlyOneIsDefined: valid when EXACTLY one extractor returns Some
  // (unlike atMostOne, all-None is invalid).
  property(
    "Validator.checkIfOnlyOneIsDefined returns Valid if only one of the provided functions returns Some"
  ) {
    case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
    val validate = Validator.checkIfOnlyOneIsDefined[Bar](Seq(_.a, _.b, _.c, _.d), "a, b, c, d")
    Prop.all(
      validate(Bar(None, None, None, None)).isInvalid,
      validate(Bar(Some(""), None, None, None)).isValid,
      validate(Bar(None, Some(0), None, None)).isValid,
      validate(Bar(None, None, Some(false), None)).isValid,
      validate(Bar(None, None, None, Some(Seq(1, 2, 3)))).isValid,
      validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(-1), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(Some(""), Some(4), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(Some(""), Some(4), Some(true), None)).isInvalid,
      validate(Bar(Some(""), Some(4), None, None)).isInvalid,
      validate(Bar(Some(""), None, None, Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(4), Some(true), None)).isInvalid
    )
  }
  // checkIfOnlyOneSetIsDefined with a single set: all members of the set
  // must be defined (equivalent to checkIfAllDefined here).
  property(
    "Validator.checkIfOnlyOneSetIsDefined returns Valid if only one of the provided set of functions have all results defined"
  ) {
    case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
    val validate = Validator.checkIfOnlyOneSetIsDefined[Bar](Seq(Set(_.a, _.b, _.c, _.d)), "a and b and c and d")
    Prop.all(
      validate(Bar(None, None, None, None)).isInvalid,
      validate(Bar(Some(""), None, None, None)).isInvalid,
      validate(Bar(None, Some(0), None, None)).isInvalid,
      validate(Bar(None, None, Some(false), None)).isInvalid,
      validate(Bar(None, None, None, Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(-1), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(Some(""), Some(4), Some(true), Some(Seq(1, 2, 3)))).isValid,
      validate(Bar(Some(""), Some(4), Some(true), None)).isInvalid,
      validate(Bar(Some(""), Some(4), None, None)).isInvalid,
      validate(Bar(Some(""), None, None, Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(4), Some(true), None)).isInvalid
    )
  }
property(
"Validator.checkIfOnlyOneSetIsDefined returns Valid if only one of the provided set of functions have all results defined"
) {
case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
val validate = Validator.checkIfOnlyOneSetIsDefined[Bar](
Seq(
Set(_.c, _.d),
Set(_.a, _.b)
),
"(a and b) or (c and d)"
)
Prop.all(
validate(Bar(None, None, None, None)).isInvalid,
validate(Bar(Some(""), None, None, None)).isInvalid,
validate(Bar(None, Some(0), None, None)).isInvalid,
validate(Bar(None, None, Some(false), None)).isInvalid,
validate(Bar(None, None, None, Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isValid,
validate(Bar(None, Some(-1), Some(true), Some(Seq(1, 2, 3)))).isValid,
validate(Bar(Some(""), Some(4), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(Some(""), Some(4), Some(true), None)).isValid,
validate(Bar(Some(""), Some(4), None, None)).isValid,
validate(Bar(Some(""), None, None, Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(None, Some(4), Some(true), None)).isInvalid
)
}
  // checkIfAllTrue(predicates, error): valid only when EVERY predicate holds.
  property(
    "Validator.checkIfAllTrue returns Valid only if all of the provided functions return true"
  ) {
    case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
    val validate = Validator
      .checkIfAllTrue[Bar](
        Seq(_.a.isDefined, _.b.exists(_ > 0), _.c.contains(false), _.d.exists(_.size > 1)),
        "a must be defined or b must be gt zero or c must contain false or d must be a sequence of at least two elements"
      )
    Prop.all(
      validate(Bar(None, None, None, None)).isInvalid,
      validate(Bar(Some(""), None, None, None)).isInvalid,
      validate(Bar(None, Some(1), None, None)).isInvalid,
      validate(Bar(None, None, Some(false), None)).isInvalid,
      validate(Bar(None, None, None, Some(Seq(1, 2)))).isInvalid,
      validate(Bar(None, Some(0), None, None)).isInvalid,
      validate(Bar(None, None, Some(true), None)).isInvalid,
      validate(Bar(None, None, None, Some(Seq(1)))).isInvalid,
      validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isInvalid,
      validate(Bar(None, Some(0), Some(false), Some(Seq(1)))).isInvalid,
      validate(Bar(None, Some(0), Some(true), Some(Seq(1)))).isInvalid,
      validate(Bar(Some(""), Some(-1), Some(true), Some(Seq.empty))).isInvalid,
      validate(Bar(Some(""), Some(0), Some(true), None)).isInvalid,
      validate(Bar(Some(""), Some(0), None, None)).isInvalid,
      validate(Bar(Some(""), None, None, None)).isInvalid,
      validate(Bar(Some(""), None, None, Some(Seq(1)))).isInvalid,
      validate(Bar(None, Some(1), Some(true), None)).isInvalid,
      validate(Bar(None, Some(0), Some(true), None)).isInvalid,
      validate(Bar(None, Some(0), Some(false), None)).isInvalid,
      // the only case where all four predicates hold simultaneously
      validate(Bar(Some(""), Some(1), Some(false), Some(Seq(1, 2)))).isValid
    )
  }
// Truth table for Validator.checkIfAllFalse: Valid only when EVERY predicate is false.
// Fixes: removed a leftover `.debug` call on the validator, and corrected the
// property description (it previously claimed "return true" for the all-false combinator).
property(
"Validator.checkIfAllFalse returns Valid only if all of the provided functions return false"
) {
case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
// Predicates: a defined, b > 0, c contains false, d has more than one element.
val validate = Validator
.checkIfAllFalse[Bar](
Seq(_.a.isDefined, _.b.exists(_ > 0), _.c.contains(false), _.d.exists(_.size > 1)),
"a must be defined or b must be gt zero or c must contain false or d must be a sequence of at least two elements"
)
Prop.all(
validate(Bar(None, None, None, None)).isValid,
validate(Bar(Some(""), None, None, None)).isInvalid,
validate(Bar(None, Some(1), None, None)).isInvalid,
validate(Bar(None, None, Some(false), None)).isInvalid,
validate(Bar(None, None, None, Some(Seq(1, 2)))).isInvalid,
validate(Bar(None, Some(0), None, None)).isValid,
validate(Bar(None, None, Some(true), None)).isValid,
validate(Bar(None, None, None, Some(Seq(1)))).isValid,
validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(None, Some(0), Some(false), Some(Seq(1)))).isInvalid,
validate(Bar(None, Some(0), Some(true), Some(Seq(1)))).isValid,
validate(Bar(Some(""), Some(-1), Some(true), Some(Seq.empty))).isInvalid,
validate(Bar(Some(""), Some(0), Some(true), None)).isInvalid,
validate(Bar(Some(""), Some(0), None, None)).isInvalid,
validate(Bar(Some(""), None, None, None)).isInvalid,
validate(Bar(Some(""), None, None, Some(Seq(1)))).isInvalid,
validate(Bar(None, Some(1), Some(true), None)).isInvalid,
validate(Bar(None, Some(0), Some(true), None)).isValid,
validate(Bar(None, Some(0), Some(false), None)).isInvalid,
validate(Bar(Some(""), Some(1), Some(false), Some(Seq(1, 2)))).isInvalid
)
}
// Truth table for Validator.checkIfAtLeastOneIsTrue: Valid when >= 1 predicate holds.
property(
"Validator.checkIfAtLeastOneIsTrue returns Valid if only if at least one of the provided functions return true"
) {
case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
// Predicates: a defined, b > 0, c contains false, d has more than one element.
val validate = Validator
.checkIfAtLeastOneIsTrue[Bar](
Seq(_.a.isDefined, _.b.exists(_ > 0), _.c.contains(false), _.d.exists(_.size > 1)),
"a must be defined or b must be gt zero or c must contain false or d must be a sequence of at least two elements"
)
Prop.all(
validate(Bar(None, None, None, None)).isInvalid,
validate(Bar(Some(""), None, None, None)).isValid,
validate(Bar(None, Some(1), None, None)).isValid,
validate(Bar(None, None, Some(false), None)).isValid,
validate(Bar(None, None, None, Some(Seq(1, 2)))).isValid,
validate(Bar(None, Some(0), None, None)).isInvalid,
validate(Bar(None, None, Some(true), None)).isInvalid,
validate(Bar(None, None, None, Some(Seq(1)))).isInvalid,
validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isValid,
validate(Bar(None, Some(0), Some(false), Some(Seq(1)))).isValid,
validate(Bar(None, Some(0), Some(true), Some(Seq(1)))).isInvalid,
validate(Bar(Some(""), Some(-1), Some(true), Some(Seq.empty))).isValid,
validate(Bar(Some(""), Some(0), Some(true), None)).isValid,
validate(Bar(Some(""), Some(0), None, None)).isValid,
validate(Bar(Some(""), None, None, None)).isValid,
validate(Bar(Some(""), None, None, Some(Seq(1)))).isValid,
validate(Bar(None, Some(1), Some(true), None)).isValid,
validate(Bar(None, Some(0), Some(true), None)).isInvalid,
validate(Bar(None, Some(0), Some(false), None)).isValid
)
}
// Truth table for Validator.checkIfAtMostOneIsTrue: Valid when 0 or 1 predicates hold.
property(
"Validator.checkIfAtMostOneIsTrue returns Valid only if at most one of the provided functions return true"
) {
case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
// Predicates: a defined, b > 0, c contains false, d has more than one element.
val validate = Validator
.checkIfAtMostOneIsTrue[Bar](
Seq(_.a.isDefined, _.b.exists(_ > 0), _.c.contains(false), _.d.exists(_.size > 1)),
"a must be defined or b must be gt zero or c must contain false or d must be a sequence of at least two elements"
)
Prop.all(
validate(Bar(None, None, None, None)).isValid,
validate(Bar(Some(""), None, None, None)).isValid,
validate(Bar(None, Some(1), None, None)).isValid,
validate(Bar(None, None, Some(false), None)).isValid,
validate(Bar(None, None, None, Some(Seq(1, 2)))).isValid,
validate(Bar(None, Some(0), None, None)).isValid,
validate(Bar(None, None, Some(true), None)).isValid,
validate(Bar(None, None, None, Some(Seq(1)))).isValid,
validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isValid,
validate(Bar(None, Some(0), Some(false), Some(Seq(1)))).isValid,
validate(Bar(None, Some(0), Some(true), Some(Seq(1)))).isValid,
validate(Bar(Some(""), Some(-1), Some(true), Some(Seq.empty))).isValid,
validate(Bar(Some(""), Some(0), Some(true), None)).isValid,
validate(Bar(Some(""), Some(0), None, None)).isValid,
validate(Bar(Some(""), None, None, None)).isValid,
validate(Bar(Some(""), None, None, Some(Seq(1)))).isValid,
validate(Bar(None, Some(1), Some(true), None)).isValid,
validate(Bar(None, Some(0), Some(true), None)).isValid,
// Two or more predicates true must be Invalid.
validate(Bar(None, Some(1), Some(false), None)).isInvalid,
validate(Bar(Some(""), Some(1), Some(true), None)).isInvalid,
validate(Bar(Some(""), Some(0), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(Some(""), Some(1), Some(true), Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(Some(""), Some(1), Some(false), Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(None, Some(0), Some(false), None)).isValid
)
}
// Truth table for Validator.checkIfOnlyOneIsTrue: Valid when EXACTLY one predicate holds.
property(
"Validator.checkIfOnlyOneIsTrue returns Valid if only one of the provided functions return true"
) {
case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
// Predicates: a defined, b > 0, c contains false, d has more than one element.
val validate = Validator
.checkIfOnlyOneIsTrue[Bar](
Seq(_.a.isDefined, _.b.exists(_ > 0), _.c.contains(false), _.d.exists(_.size > 1)),
"a must be defined or b must be gt zero or c must contain false or d must be a sequence of at least two elements"
)
Prop.all(
validate(Bar(None, None, None, None)).isInvalid,
validate(Bar(Some(""), None, None, None)).isValid,
validate(Bar(None, Some(1), None, None)).isValid,
validate(Bar(None, None, Some(false), None)).isValid,
validate(Bar(None, None, None, Some(Seq(1, 2)))).isValid,
validate(Bar(None, Some(0), None, None)).isInvalid,
validate(Bar(None, None, Some(true), None)).isInvalid,
validate(Bar(None, None, None, Some(Seq(1)))).isInvalid,
validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isValid,
validate(Bar(None, Some(0), Some(false), Some(Seq(1)))).isValid,
validate(Bar(None, Some(0), Some(true), Some(Seq(1)))).isInvalid,
validate(Bar(Some(""), Some(-1), Some(true), Some(Seq.empty))).isValid,
validate(Bar(Some(""), Some(0), Some(true), None)).isValid,
validate(Bar(Some(""), Some(0), None, None)).isValid,
validate(Bar(Some(""), None, None, None)).isValid,
validate(Bar(Some(""), None, None, Some(Seq(1)))).isValid,
validate(Bar(None, Some(1), Some(true), None)).isValid,
validate(Bar(None, Some(0), Some(true), None)).isInvalid,
validate(Bar(None, Some(0), Some(false), None)).isValid
)
}
// checkIfOnlyOneSetIsTrue with a SINGLE set: Valid only when all predicates
// inside that one set hold (equivalent to checkIfAllTrue for this input).
property(
"Validator.checkIfOnlyOneSetIsTrue returns Valid if only one of the provided set of functions is all true"
) {
case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
val validate = Validator
.checkIfOnlyOneSetIsTrue[Bar](
Seq(Set(_.a.isDefined, _.b.exists(_ > 0), _.c.contains(false), _.d.exists(_.size > 1))),
"a must be defined or b must be gt zero or c must contain false or d must be a sequence of at least two elements"
)
Prop.all(
validate(Bar(None, None, None, None)).isInvalid,
validate(Bar(Some(""), None, None, None)).isInvalid,
validate(Bar(None, Some(1), None, None)).isInvalid,
validate(Bar(None, None, Some(false), None)).isInvalid,
validate(Bar(None, None, None, Some(Seq(1, 2)))).isInvalid,
validate(Bar(None, Some(0), None, None)).isInvalid,
validate(Bar(None, None, Some(true), None)).isInvalid,
validate(Bar(None, None, None, Some(Seq(1)))).isInvalid,
validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(None, Some(0), Some(false), Some(Seq(1)))).isInvalid,
validate(Bar(None, Some(0), Some(true), Some(Seq(1)))).isInvalid,
validate(Bar(Some(""), Some(-1), Some(true), Some(Seq.empty))).isInvalid,
validate(Bar(Some(""), Some(0), Some(true), None)).isInvalid,
validate(Bar(Some(""), Some(0), None, None)).isInvalid,
validate(Bar(Some(""), None, None, None)).isInvalid,
validate(Bar(Some(""), None, None, Some(Seq(1)))).isInvalid,
validate(Bar(None, Some(1), Some(true), None)).isInvalid,
validate(Bar(None, Some(0), Some(true), None)).isInvalid,
validate(Bar(None, Some(0), Some(false), None)).isInvalid,
// Only the case where the entire (single) set is true validates.
validate(Bar(Some(""), Some(1), Some(false), Some(Seq(1, 2)))).isValid
)
}
// checkIfOnlyOneSetIsTrue with TWO sets: Valid only when exactly one of the
// sets has all of its predicates true.
// Fix: this property's name was byte-identical to the previous property's,
// which makes the two indistinguishable in test reports (and can shadow one
// another depending on the runner); disambiguated with a suffix.
property(
"Validator.checkIfOnlyOneSetIsTrue returns Valid if only one of the provided set of functions is all true (multiple sets)"
) {
case class Bar(a: Option[String], b: Option[Int], c: Option[Boolean], d: Option[Seq[Int]])
// Set 1: a defined AND b > 0.  Set 2: c contains false AND d.size > 1.
val validate = Validator
.checkIfOnlyOneSetIsTrue[Bar](
Seq(
Set(_.a.isDefined, _.b.exists(_ > 0)),
Set(_.c.contains(false), _.d.exists(_.size > 1))
),
"a must be defined or b must be gt zero or c must contain false or d must be a sequence of at least two elements"
)
Prop.all(
validate(Bar(None, None, None, None)).isInvalid,
validate(Bar(Some(""), None, None, None)).isInvalid,
validate(Bar(None, Some(1), None, None)).isInvalid,
validate(Bar(None, None, Some(false), None)).isInvalid,
validate(Bar(None, None, None, Some(Seq(1, 2)))).isInvalid,
validate(Bar(None, Some(0), None, None)).isInvalid,
validate(Bar(None, None, Some(true), None)).isInvalid,
validate(Bar(None, None, None, Some(Seq(1)))).isInvalid,
validate(Bar(None, None, Some(true), Some(Seq(1, 2, 3)))).isInvalid,
validate(Bar(None, Some(0), Some(false), Some(Seq(1)))).isInvalid,
validate(Bar(None, Some(0), Some(true), Some(Seq(1)))).isInvalid,
validate(Bar(Some(""), Some(-1), Some(true), Some(Seq.empty))).isInvalid,
validate(Bar(Some(""), Some(0), Some(true), None)).isInvalid,
validate(Bar(Some(""), Some(0), None, None)).isInvalid,
validate(Bar(Some(""), None, None, None)).isInvalid,
validate(Bar(Some(""), None, None, Some(Seq(1)))).isInvalid,
validate(Bar(None, Some(1), Some(true), None)).isInvalid,
validate(Bar(None, Some(0), Some(true), None)).isInvalid,
validate(Bar(None, Some(0), Some(false), None)).isInvalid,
// Both sets fully true -> Invalid (must be exactly one).
validate(Bar(Some(""), Some(1), Some(false), Some(Seq(1, 2)))).isInvalid,
// Exactly one set fully true -> Valid.
validate(Bar(Some(""), Some(0), Some(false), Some(Seq(1, 2)))).isValid,
validate(Bar(Some(""), Some(1), Some(true), Some(Seq(1, 2)))).isValid
)
}
}
|
/*
* Copyright 2015 Samsung Electronics All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.oic.simulator.client;
/**
 * Enum to represent the supported connectivity types.
 *
 * <p>Each constant wraps the bit-flag value of the corresponding native
 * connectivity constant; adapter bits, security/IP flags and IPv6 scope
 * values share the same integer namespace, mirroring the native API.
 */
public enum SimulatorConnectivityType {
    SIMULATOR_CT_DEFAULT(0),
    /** IPv4 and IPv6, including 6LoWPAN. */
    SIMULATOR_CT_ADAPTER_IP(1 << 16),
    /** GATT over Bluetooth LE. */
    SIMULATOR_CT_ADAPTER_GATT_BTLE(1 << 17),
    /** RFCOMM over Bluetooth EDR. */
    SIMULATOR_CT_ADAPTER_RFCOMM_BTEDR(1 << 18),
    /** Remote Access over XMPP. */
    SIMULATOR_CT_ADAPTER_REMOTE_ACCESS(1 << 19),
    /** Insecure transport is the default (subject to change). */
    /** secure the transport path. */
    SIMULATOR_CT_FLAG_SECURE(1 << 4),
    /** IPv4 & IPv6 autoselection is the default. */
    /** IP adapter only. */
    SIMULATOR_CT_IP_USE_V6(1 << 5),
    /** IP adapter only. */
    SIMULATOR_CT_IP_USE_V4(1 << 6),
    /**
     * Link-Local multicast is the default multicast scope for IPv6. These are
     * placed here to correspond to the IPv6 address bits.
     */
    /** IPv6 Interface-Local scope(loopback). */
    SIMULATOR_CT_SCOPE_INTERFACE(0x1),
    /** IPv6 Link-Local scope (default). */
    SIMULATOR_CT_SCOPE_LINK(0x2),
    /** IPv6 Realm-Local scope. */
    SIMULATOR_CT_SCOPE_REALM(0x3),
    /** IPv6 Admin-Local scope. */
    SIMULATOR_CT_SCOPE_ADMIN(0x4),
    /** IPv6 Site-Local scope. */
    SIMULATOR_CT_SCOPE_SITE(0x5),
    /** IPv6 Organization-Local scope. */
    SIMULATOR_CT_SCOPE_ORG(0x8),
    /** IPv6 Global scope. */
    SIMULATOR_CT_SCOPE_GLOBAL(0xE);

    /** Native bit-flag value; immutable per constant (now final). */
    private final int value;

    private SimulatorConnectivityType(int value) {
        this.value = value;
    }

    /**
     * @return the native integer value backing this constant.
     */
    public int getValue() {
        return this.value;
    }

    /**
     * Method to get the {@link SimulatorConnectivityType} from an integer
     * value.
     *
     * @param value
     *            Integral value of {@link SimulatorConnectivityType}.
     * @return {@link SimulatorConnectivityType} corresponding to the given
     *         value, or {@code null} if no constant matches.
     */
    public static SimulatorConnectivityType getConnectivityType(int value) {
        for (SimulatorConnectivityType type : SimulatorConnectivityType.values()) {
            if (type.getValue() == value) {
                return type;
            }
        }
        return null;
    }
}
#!/bin/bash
. /opt/farm/scripts/init
. /opt/farm/scripts/functions.install
# Set (or append) one sshd_config option.
#   $1 = config file path, $2 = option keyword, $3 = option value
# If the keyword is absent it is appended on a fresh line; otherwise the
# existing line is rewritten in place. On BSDs without GNU "sed -i" a temp
# file is used — and (fix) removed afterwards instead of being leaked.
set_sshd_option() {
file=$1
key=$2
value=$3
if ! grep -q "^$key" "$file"; then
# Keyword not present: append "key value" after a separating blank line.
echo >>"$file"
echo "$key $value" >>"$file"
elif [ "$OSTYPE" = "freebsd" ] || [ "$OSVER" = "netbsd-6" ]; then
# BSD sed lacks a portable -i: rewrite through a temp file, then clean up.
sed -e "s/^\($key\)[ ].*/\\1 $value/" "$file" >"$file.$$"
cat "$file.$$" >"$file"
rm -f "$file.$$"
else
sed -i -e "s/^\($key\)[ ].*/\\1 $value/" "$file"
fi
}
# OEM appliances manage sshd themselves; do not touch their config.
if [ "$HWTYPE" = "oem" ]; then
echo "skipping secure sshd setup on oem platform"
exit 0
fi
echo "setting up secure sshd configuration"
chmod 0700 /root/.ssh
file="/etc/ssh/sshd_config"
# Checksum before editing so the sshd reload can be skipped when nothing changed.
oldmd5=`md5sum $file`
save_original_config $file
# Hardening baseline; see sshd_config(5) for each keyword.
set_sshd_option $file Protocol 2
set_sshd_option $file MaxAuthTries 1
set_sshd_option $file LoginGraceTime 60
set_sshd_option $file ClientAliveCountMax 2
set_sshd_option $file ClientAliveInterval 60
set_sshd_option $file HostbasedAuthentication no
set_sshd_option $file PubkeyAuthentication yes
set_sshd_option $file PermitEmptyPasswords no
set_sshd_option $file PermitRootLogin without-password
set_sshd_option $file StrictModes yes
set_sshd_option $file UseDNS no
set_sshd_option $file Compression no
set_sshd_option $file X11Forwarding no
set_sshd_option $file TCPKeepAlive no
set_sshd_option $file LogLevel INFO
# Privilege separation support varies by OS release:
#   nosandbox.conf    — releases without the "sandbox" mode (use plain "yes")
#   noseparation.conf — releases where the option must not be set at all
if grep -qFx $OSVER /opt/farm/ext/secure-sshd/config/nosandbox.conf; then
set_sshd_option $file UsePrivilegeSeparation yes
elif ! grep -qFx $OSVER /opt/farm/ext/secure-sshd/config/noseparation.conf; then
if [ "$OSTYPE" != "debian" ] && [ "$OSTYPE" != "redhat" ] && [ "$OSTYPE" != "netbsd" ]; then
set_sshd_option $file UsePrivilegeSeparation yes
else
set_sshd_option $file UsePrivilegeSeparation sandbox
fi
fi
# MaxSessions is not understood by every sshd build; skip where listed.
if ! grep -qFx $OSVER /opt/farm/ext/secure-sshd/config/nomaxsessions.conf; then
set_sshd_option $file MaxSessions 2
fi
# Password authentication is tri-state: disable, enable, or leave untouched.
if [ "$USE_PASSWORD_AUTHENTICATION" = "disable" ]; then
set_sshd_option $file PasswordAuthentication no
elif [ "$USE_PASSWORD_AUTHENTICATION" = "enable" ]; then
set_sshd_option $file PasswordAuthentication yes
fi
newmd5=`md5sum $file`
# Reload sshd only when the configuration actually changed, using the
# platform-specific service mechanism; unknown platforms are left alone.
if [ "$oldmd5" = "$newmd5" ]; then
echo "skipping sshd restart, configuration has not changed"
else
case "$OSTYPE" in
debian)
service ssh reload
;;
redhat | suse)
service sshd reload
;;
freebsd | netbsd)
/etc/rc.d/sshd restart
;;
*)
;;
esac
fi
#!/bin/sh
# Bootstrap the Terraform remote-state backend: a resource group, a
# locally-redundant storage account, and the "tfstate" blob container.
RESOURCE_GROUP_NAME="devopstamops-rg"
STORAGE_ACCOUNT_NAME="devopstamopssa"

# Resource group in UK South
az group create --location uksouth --name "$RESOURCE_GROUP_NAME"

# Standard_LRS storage account inside that group
az storage account create \
  --name "$STORAGE_ACCOUNT_NAME" \
  --resource-group "$RESOURCE_GROUP_NAME" \
  --location uksouth \
  --sku Standard_LRS

# Blob container that will hold the terraform.tfstate files
az storage container create --name tfstate --account-name "$STORAGE_ACCOUNT_NAME"
<gh_stars>1-10
import {
  handleActions
} from 'redux-actions';
import {
  addNumber, orginalAddNumber
} from '../Actions/Second.js';

// Initial slice state for the "second" reducer.
const initState = {
  testNumber: 1
};

// Reducer built from a Map of action-creator -> handler pairs.
// Only `orginalAddNumber` is handled here; `addNumber` is imported for
// parity with the action module but has no handler in this slice.
// Fixes: removed leftover `console.log` debug statement and the
// commented-out dead handler; added missing semicolons.
const secondState = handleActions(
  new Map([
    [
      orginalAddNumber,
      // Accumulate the numeric payload onto testNumber.
      (state, action) => ({ testNumber: state.testNumber + action.payload })
    ]
  ]),
  initState
);

export default secondState;
'use strict';
const Joi = require('joi');
Joi.objectId = require('joi-objectid')(Joi);
const {
create,
update,
list,
listWithFirstIntervention,
listWithSubscriptions,
listBySector,
listWithBilledEvents,
listWithIntervention,
show,
remove,
addSubscription,
updateSubscription,
deleteSubscription,
getMandates,
updateMandate,
getCustomerQuotes,
createCustomerQuote,
uploadFile,
deleteCertificates,
getMandateSignatureRequest,
saveSignedMandate,
createHistorySubscription,
createFunding,
updateFunding,
deleteFunding,
getQRCode,
} = require('../controllers/customerController');
const { FUNDING_FREQUENCIES, FUNDING_NATURES, SITUATION_OPTIONS, STOP_REASONS } = require('../models/Customer');
const {
authorizeCustomerDelete,
authorizeCustomerUpdate,
authorizeFundingDeletion,
authorizeCustomerGet,
authorizeCustomerGetBySector,
authorizeSubscriptionCreation,
authorizeSubscriptionUpdate,
authorizeSubscriptionDeletion,
} = require('./preHandlers/customers');
const { CIVILITY_OPTIONS } = require('../models/schemaDefinitions/identity');
const { addressValidation, objectIdOrArray, phoneNumberValidation, formDataPayload } = require('./validations/utils');
const { fundingValidation } = require('./validations/customer');
// Hapi plugin declaring every /customers route. Each route pairs an auth
// scope with a Joi validation schema and (usually) a pre-handler that
// enforces resource-level authorization before the controller runs.
exports.plugin = {
name: 'routes-customers',
register: async (server) => {
// POST / — create a customer (title + lastname + primary address required).
server.route({
method: 'POST',
path: '/',
options: {
auth: { scope: ['customers:create'] },
validate: {
payload: Joi.object().keys({
identity: Joi.object().keys({
title: Joi.string().valid(...CIVILITY_OPTIONS).required(),
firstname: Joi.string().allow(null, ''),
lastname: Joi.string().required(),
}).required(),
contact: Joi.object().keys({ primaryAddress: addressValidation.required() }).required(),
}),
},
},
handler: create,
});
// PUT /{_id} — partial update; stoppedAt requires stopReason, and a
// customer cannot be stopped and archived in the same call (oxor).
server.route({
method: 'PUT',
path: '/{_id}',
options: {
auth: { scope: ['customers:edit', 'customer-{params._id}'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
payload: Joi.object().keys({
referent: Joi.objectId().allow(null, ''),
identity: Joi.object().keys({
title: Joi.string().valid(...CIVILITY_OPTIONS),
firstname: Joi.string().allow('', null),
lastname: Joi.string(),
birthDate: Joi.date().allow(''),
}).min(1),
email: Joi.string().email(),
contact: Joi.object().keys({
phone: phoneNumberValidation.allow('', null),
primaryAddress: addressValidation,
secondaryAddress: Joi.alternatives().try(addressValidation, {}),
accessCodes: Joi.string().allow('', null),
others: Joi.string().allow('', null),
}).min(1),
followUp: Joi.object().keys({
situation: Joi.string().valid(...SITUATION_OPTIONS),
environment: Joi.string().allow('', null),
objectives: Joi.string().allow('', null),
misc: Joi.string().allow('', null),
}).min(1),
payment: Joi.object().keys({
bankAccountOwner: Joi.string(),
iban: Joi.string(),
bic: Joi.string(),
}).min(1),
stoppedAt: Joi.date(),
archivedAt: Joi.date(),
stopReason: Joi.string().valid(...STOP_REASONS),
})
.and('stoppedAt', 'stopReason')
.oxor('stoppedAt', 'archivedAt'),
},
pre: [{ method: authorizeCustomerUpdate }],
},
handler: update,
});
// GET / — list customers, optionally filtered by archived flag.
server.route({
method: 'GET',
path: '/',
options: {
auth: { scope: ['customers:read'] },
validate: { query: Joi.object({ archived: Joi.boolean() }) },
},
handler: list,
});
// GET /first-intervention — customers annotated with their first intervention.
server.route({
method: 'GET',
path: '/first-intervention',
options: {
auth: { scope: ['customers:read'] },
},
handler: listWithFirstIntervention,
});
// GET /subscriptions — customers with their subscriptions populated.
server.route({
method: 'GET',
path: '/subscriptions',
options: {
auth: { scope: ['customers:read'] },
},
handler: listWithSubscriptions,
});
// GET /sectors — customers grouped by sector over an optional date range.
server.route({
method: 'GET',
path: '/sectors',
options: {
auth: { scope: ['customers:read'] },
validate: {
query: Joi.object().keys({ sector: objectIdOrArray, startDate: Joi.date(), endDate: Joi.date() }),
},
pre: [{ method: authorizeCustomerGetBySector }],
},
handler: listBySector,
});
// GET /billed-events — customers having billed events (billing scope).
server.route({
method: 'GET',
path: '/billed-events',
options: {
auth: { scope: ['customers:administrative:edit'] },
},
handler: listWithBilledEvents,
});
// GET /with-intervention — customers having at least one intervention.
server.route({
method: 'GET',
path: '/with-intervention',
options: {
auth: { scope: ['events:read'] },
},
handler: listWithIntervention,
});
// GET /{_id} — fetch one customer.
server.route({
method: 'GET',
path: '/{_id}',
options: {
auth: { scope: ['customers:read', 'customer-{params._id}'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
},
pre: [{ method: authorizeCustomerGet }],
},
handler: show,
});
// DELETE /{_id} — remove a customer.
server.route({
method: 'DELETE',
path: '/{_id}',
options: {
auth: { scope: ['customers:create'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
},
pre: [{ method: authorizeCustomerDelete }],
},
handler: remove,
});
// GET /{_id}/qrcode — QR code for the customer.
server.route({
method: 'GET',
path: '/{_id}/qrcode',
options: {
auth: { scope: ['customers:read', 'customer-{params._id}'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
},
pre: [{ method: authorizeCustomerGet }],
},
handler: getQRCode,
});
// POST /{_id}/subscriptions — add a subscription; each version needs
// weeklyHours or weeklyCount (Joi .or).
server.route({
method: 'POST',
path: '/{_id}/subscriptions',
options: {
auth: { scope: ['customers:administrative:edit'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
payload: Joi.object({
service: Joi.string().required(),
versions: Joi.array().items(Joi.object({
unitTTCRate: Joi.number().min(0).required(),
weeklyHours: Joi.number().min(0),
weeklyCount: Joi.number().integer().min(0),
evenings: Joi.number().min(0),
saturdays: Joi.number().min(0),
sundays: Joi.number().min(0),
}).or('weeklyHours', 'weeklyCount')),
}),
},
pre: [{ method: authorizeSubscriptionCreation }],
},
handler: addSubscription,
});
// PUT /{_id}/subscriptions/{subscriptionId} — update a subscription version.
server.route({
method: 'PUT',
path: '/{_id}/subscriptions/{subscriptionId}',
options: {
auth: { scope: ['customers:administrative:edit'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required(), subscriptionId: Joi.objectId().required() }),
payload: Joi.object({
unitTTCRate: Joi.number().min(0).required(),
weeklyHours: Joi.number().min(0),
weeklyCount: Joi.number().integer().min(0),
evenings: Joi.number().min(0),
saturdays: Joi.number().min(0),
sundays: Joi.number().min(0),
}).or('weeklyHours', 'weeklyCount'),
},
pre: [{ method: authorizeSubscriptionUpdate }],
},
handler: updateSubscription,
});
// DELETE /{_id}/subscriptions/{subscriptionId} — remove a subscription.
server.route({
method: 'DELETE',
path: '/{_id}/subscriptions/{subscriptionId}',
options: {
auth: { scope: ['customers:administrative:edit'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required(), subscriptionId: Joi.objectId().required() }),
},
pre: [{ method: authorizeSubscriptionDeletion }],
},
handler: deleteSubscription,
});
// GET /{_id}/mandates — list the customer's payment mandates.
server.route({
method: 'GET',
path: '/{_id}/mandates',
options: {
auth: { scope: ['customers:administrative:edit'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
},
pre: [{ method: authorizeCustomerGet }],
},
handler: getMandates,
});
// PUT /{_id}/mandates/{mandateId} — update a mandate.
server.route({
method: 'PUT',
path: '/{_id}/mandates/{mandateId}',
options: {
auth: { scope: ['customers:administrative:edit'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required(), mandateId: Joi.objectId().required() }),
},
pre: [{ method: authorizeCustomerUpdate }],
},
handler: updateMandate,
});
// POST /{_id}/mandates/{mandateId}/esign — start an e-signature request
// (customer-scoped auth only: the customer signs their own mandate).
server.route({
method: 'POST',
path: '/{_id}/mandates/{mandateId}/esign',
options: {
auth: { scope: ['customer-{params._id}'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required(), mandateId: Joi.objectId().required() }),
payload: Joi.object({
fileId: Joi.string().required(),
customer: Joi.object().keys({
name: Joi.string().required(),
email: Joi.string().email().required(),
}).required(),
fields: Joi.object().required(),
redirect: Joi.string(),
redirectDecline: Joi.string(),
}),
},
pre: [{ method: authorizeCustomerUpdate }],
},
handler: getMandateSignatureRequest,
});
// GET /{_id}/quotes — list the customer's quotes.
server.route({
method: 'GET',
path: '/{_id}/quotes',
options: {
auth: { scope: ['customers:administrative:edit'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
},
pre: [{ method: authorizeCustomerGet }],
},
handler: getCustomerQuotes,
});
// POST /{_id}/quotes — create a quote from a snapshot of subscriptions.
server.route({
method: 'POST',
path: '/{_id}/quotes',
options: {
auth: { scope: ['customers:administrative:edit'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
payload: Joi.object().keys({
subscriptions: Joi.array().items(Joi.object().keys({
service: Joi.object().keys({
name: Joi.string().required(),
nature: Joi.string().required(),
surcharge: Joi.object().keys({ evening: Joi.number(), sunday: Joi.number() }),
}).required(),
unitTTCRate: Joi.number().min(0),
weeklyHours: Joi.number().min(0),
weeklyCount: Joi.number().integer().min(0),
evenings: Joi.number().min(0),
saturdays: Joi.number().min(0),
sundays: Joi.number().min(0),
billingItemsTTCRate: Joi.number().min(0),
serviceBillingItems: Joi.array().items(Joi.string()),
}).or('weeklyHours', 'weeklyCount')).required(),
}),
},
pre: [{ method: authorizeCustomerUpdate }],
},
handler: createCustomerQuote,
});
// POST /{_id}/gdrive/{driveId}/upload — multipart upload to Google Drive;
// quoteId/mandateId are required or forbidden depending on `type`.
server.route({
method: 'POST',
path: '/{_id}/gdrive/{driveId}/upload',
handler: uploadFile,
options: {
auth: { scope: ['customers:administrative:edit', 'customer-{params._id}'] },
payload: formDataPayload(),
validate: {
params: Joi.object({ _id: Joi.objectId().required(), driveId: Joi.string().required() }),
payload: Joi.object({
fileName: Joi.string().required(),
file: Joi.any().required(),
type: Joi.string().valid('signedQuote', 'signedMandate', 'financialCertificates').required(),
quoteId: Joi.string().when('type', { is: 'signedQuote', then: Joi.required(), otherwise: Joi.forbidden() }),
mandateId: Joi.string().when(
'type',
{ is: 'signedMandate', then: Joi.required(), otherwise: Joi.forbidden() }
),
}),
},
pre: [{ method: authorizeCustomerUpdate }],
},
});
// PUT /{_id}/certificates — delete a financial certificate by drive id.
server.route({
method: 'PUT',
path: '/{_id}/certificates',
options: {
auth: { scope: ['customers:administrative:edit', 'customer-{params._id}'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
payload: Joi.object().keys({ driveId: Joi.string().required() }),
},
pre: [{ method: authorizeCustomerUpdate }],
},
handler: deleteCertificates,
});
// POST /{_id}/mandates/{mandateId}/savesigneddoc — persist a signed mandate.
server.route({
method: 'POST',
path: '/{_id}/mandates/{mandateId}/savesigneddoc',
options: {
auth: { scope: ['customer-{params._id}'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required(), mandateId: Joi.objectId().required() }),
},
pre: [{ method: authorizeCustomerUpdate }],
},
handler: saveSignedMandate,
});
// POST /{_id}/subscriptionshistory — customer-signed snapshot of their
// subscriptions plus the helper's identity.
server.route({
method: 'POST',
path: '/{_id}/subscriptionshistory',
options: {
auth: { scope: ['customer-{params._id}'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
payload: Joi.object().keys({
subscriptions: Joi.array().items(Joi.object().keys({
subscriptionId: Joi.objectId().required(),
service: Joi.string().required(),
unitTTCRate: Joi.number().min(0).required(),
weeklyCount: Joi.number().min(0).integer().required(),
startDate: Joi.date(),
weeklyHours: Joi.number().min(0),
evenings: Joi.number().min(0),
saturdays: Joi.number().min(0),
sundays: Joi.number().min(0),
}).or('weeklyCount', 'weeklyHours')).required(),
helper: Joi.object().keys({
firstname: Joi.string().allow(null, ''),
lastname: Joi.string(),
title: Joi.string().allow(null, ''),
}).required(),
}),
},
pre: [{ method: authorizeCustomerUpdate }],
},
handler: createHistorySubscription,
});
// POST /{_id}/fundings — add third-party funding for a subscription.
server.route({
method: 'POST',
path: '/{_id}/fundings',
options: {
auth: { scope: ['customers:administrative:edit'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required() }),
payload: Joi.object().keys({
nature: Joi.string().valid(...FUNDING_NATURES).required(),
thirdPartyPayer: Joi.objectId().required(),
subscription: Joi.objectId().required(),
frequency: Joi.string().valid(...FUNDING_FREQUENCIES).required(),
versions: Joi.array().items(Joi.object().keys({ ...fundingValidation })),
}),
},
pre: [{ method: authorizeCustomerUpdate }],
},
handler: createFunding,
});
// PUT /{_id}/fundings/{fundingId} — update a funding version.
server.route({
method: 'PUT',
path: '/{_id}/fundings/{fundingId}',
options: {
auth: { scope: ['customers:administrative:edit'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required(), fundingId: Joi.objectId().required() }),
payload: Joi.object().keys({
subscription: Joi.objectId().required(),
...fundingValidation,
}),
},
pre: [{ method: authorizeCustomerUpdate }],
},
handler: updateFunding,
});
// DELETE /{_id}/fundings/{fundingId} — remove a funding (two pre-handlers:
// customer-level then funding-level authorization).
server.route({
method: 'DELETE',
path: '/{_id}/fundings/{fundingId}',
options: {
auth: { scope: ['customers:administrative:edit'] },
validate: {
params: Joi.object({ _id: Joi.objectId().required(), fundingId: Joi.objectId().required() }),
},
pre: [{ method: authorizeCustomerUpdate }, { method: authorizeFundingDeletion }],
},
handler: deleteFunding,
});
},
};
|
<gh_stars>0
# Rails integration for MultiSessionStore.
module MultiSessionStore
  class Railtie < Rails::Railtie
    # App-configurable options bag (e.g. config.multi_session_store.exclude_paths).
    config.multi_session_store = ActiveSupport::OrderedOptions.new

    initializer 'multi_session_store.add_middleware' do |app|
      # Install the subsession generator ahead of the session store itself.
      app.config.middleware.insert_before(
        ActionDispatch::Session::MultiSessionStore,
        MultiSessionStore::SubsessionGeneratorMiddleware,
        exclude_paths: app.config.multi_session_store.exclude_paths
      )
    end

    config.to_prepare do
      # Mix default_url_options support into controllers exactly once per reload.
      unless ApplicationController.ancestors.include?(MultiSessionStore::DefaultUrlOptions)
        ApplicationController.prepend(MultiSessionStore::DefaultUrlOptions)
      end
    end
  end
end
|
package com.createchance.imageeditor.transitions;
import com.createchance.imageeditor.drawers.StereoViewerTransDrawer;
/**
 * Stereo viewer transition.
 *
 * @author createchance
 * @date 2019/1/1
 */
public class StereoViewerTransition extends AbstractTransition {
    private static final String TAG = "StereoViewerTransition";

    /** Zoom factor handed to the drawer; fixed for this transition (now final). */
    private final float mZoom = 0.88f;
    /** Corner radius handed to the drawer; fixed for this transition (now final). */
    private final float mCornerRadius = 0.22f;

    public StereoViewerTransition() {
        super(StereoViewerTransition.class.getSimpleName(), TRANS_STEREO_VIEWER);
    }

    /** Instantiates the concrete drawer that renders this transition. */
    @Override
    protected void getDrawer() {
        mDrawer = new StereoViewerTransDrawer();
    }

    /** Pushes this transition's parameters into the drawer before drawing. */
    @Override
    protected void setDrawerParams() {
        super.setDrawerParams();
        ((StereoViewerTransDrawer) mDrawer).setZoom(mZoom);
        ((StereoViewerTransDrawer) mDrawer).setCornerRadius(mCornerRadius);
    }
}
|
# Prompt for a short prefix so globally-unique resource names don't collide.
# Fixes: read -r (don't mangle backslashes), and quoted expansions throughout.
echo "Please enter a resource prefix (to ensure unique FQDNs)"
read -r prefix
# this demo is based on the exercise from DP-200 course Module 6
# https://github.com/MicrosoftLearning/DP-200-Implementing-an-Azure-Data-Solution/blob/master/instructions/dp-200-06_instructions.md
myLocation=westeurope
myResourceGroup="demos-az204-module10-rg"
storageName="${prefix}az204mod10capturestor"
containerName="capture"
ehnsName="${prefix}-module10-ehns"
ehName="phoneanalysis"
ehSapName="phoneanalysis-sap"
# create resource group
az group create -n "$myResourceGroup" -l "$myLocation"
# create a storage account to dump capture in later
# (assigning the JSON output to a variable also keeps it off the terminal)
storage=$(az storage account create -g "$myResourceGroup" -n "$storageName")
storageConnString=$(az storage account show-connection-string \
-g "$myResourceGroup" \
-n "$storageName" \
--query "connectionString" -o tsv)
# create event hubs namespace
# sku is standard by default, so we can test capture functionality
ehns=$(az eventhubs namespace create -n "$ehnsName" -g "$myResourceGroup")
# create event hub
eh=$(az eventhubs eventhub create \
-g "$myResourceGroup" \
--namespace-name "$ehnsName" \
-n "$ehName")
# create sap (shared access policy with Send + Listen rights)
sap=$(az eventhubs eventhub authorization-rule create \
-g "$myResourceGroup" \
--namespace-name "$ehnsName" \
--eventhub-name "$ehName" \
-n "$ehSapName" \
--rights Send Listen)
# connection string for the SAP, to paste into the data generator config
ehConnString=$(az eventhubs eventhub authorization-rule keys list \
-g "$myResourceGroup" \
-n "$ehSapName" \
--eventhub-name "$ehName" \
--namespace-name "$ehnsName" \
-o tsv --query 'primaryConnectionString')
# set the connection string value in the telcodatagen.config.exe in the datagenerator subfolder
# then do a test: run telcodatagen.exe with the following parameters:
# 1000 0.2 2 (1000 messages per hour, 20% simulated to be fraudulent calls, run for 2 hours)
# you can now use the WPF app to look at the events coming in,
# or you can follow the rest of the DP-200 module 6 exercise to use a stream analytics job
<gh_stars>0
package main
import (
"context"
"flag"
"io"
"log"
"net"
"net/http"
"os"
"os/signal"
"syscall"
"time"
)
// main is the process entry point; it delegates all work to run and
// terminates with a non-zero status when run reports an error.
func main() {
	err := run()
	if err != nil {
		log.Fatalf("error: %v", err)
	}
}
// run parses the -socket flag, then issues one ping per second over the
// configured unix socket until SIGINT or SIGTERM arrives. Individual ping
// failures are logged and do not stop the loop.
func run() error {
	var socketPath string
	flag.StringVar(&socketPath, "socket", "ping.sock", "Ping (receiver) socket")
	flag.Parse()

	// HTTP client whose transport always dials the unix socket, ignoring
	// whatever host appears in the request URL.
	var d net.Dialer
	httpClient := http.Client{
		Transport: &http.Transport{
			DialContext: func(ctx context.Context, _ string, _ string) (net.Conn, error) {
				return d.DialContext(ctx, "unix", socketPath)
			},
		},
	}

	stop := make(chan os.Signal, 1)
	signal.Notify(stop, syscall.SIGINT, syscall.SIGTERM)

	for {
		if err := ping(&httpClient); err != nil {
			log.Printf("ping: %v", err)
		}
		select {
		case <-stop:
			return nil
		case <-time.After(1 * time.Second):
			// Send next ping.
		}
	}
}
// ping sends one POST /ping over the client's transport, logs the response
// body, and returns any request/transport/read error.
func ping(client *http.Client) error {
	req, err := http.NewRequest(http.MethodPost, "http://unix/ping", nil)
	if err != nil {
		return err
	}

	resp, err := client.Do(req)
	if err != nil {
		return err
	}
	defer func() {
		if cerr := resp.Body.Close(); cerr != nil {
			log.Printf("close: %v", cerr)
		}
	}()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return err
	}
	log.Printf("received '%s'", string(body))
	return nil
}
|
# Append the Lua, user-local, and LuaRocks binary directories (Travis CI
# layout -- note TRAVIS_BUILD_DIR) to PATH for subsequent build steps.
export PATH=${PATH}:$HOME/.lua:$HOME/.local/bin:${TRAVIS_BUILD_DIR}/install/luarocks/bin
|
#!/bin/sh
# Concatenates the EWA JavaScript sources into per-group bundles
# (core/ui/frames/misc), merges them into one combined bundle, minifies it
# with the Google Closure compiler, and publishes the results one level up
# as ewa.js / ewa.min.js / ewa.min.map.
echo create EWA.js
# $(...) instead of backticks, and quoting throughout, so paths containing
# spaces do not break the script.
sh_dir=$(dirname "$0")
src="${sh_dir}/../src/main/resources/EmpScriptV2/EWA_STYLE/js/source/src"
target="${sh_dir}/../src/main/resources/EmpScriptV2/EWA_STYLE/js/source"
echo "DIR:${sh_dir}"
echo "SRC:${src}"
echo "TARGET:${target}"
cat "${src}"/core/*.js > "${target}/EWA.js"
echo create EWA_UI.js
cat "${src}"/ui/*.js > "${target}/EWA_UI.js"
echo create EWA_FRAME.js
cat "${src}"/frames/*.js > "${target}/EWA_FRAME.js"
echo create EWA_MISC.js
cat "${src}"/misc/*.js > "${target}/EWA_MISC.js"
echo create EWA_ALL.js
cat "${target}/EWA.js" "${target}/EWA_UI.js" "${target}/EWA_FRAME.js" "${target}/EWA_MISC.js" > "${target}/EWA_ALL.js"
echo start compress with google
#echo start compress EWA_ALL.js
java -jar "${sh_dir}/compiler.jar" --js "${target}/EWA_ALL.js" --js_output_file "${target}/EWA_ALL.min.js" --create_source_map "${target}/EWA_ALL.min.map"
# Append the source-map pointer; the map file is renamed to ewa.min.map by
# the mv below, so the URL references the final name.
echo "//# sourceMappingURL=ewa.min.map" >> "${target}/EWA_ALL.min.js"
echo start combine EWA_ALL.min.2.0.js
mv "${target}/EWA_ALL.min.js" "${target}/../ewa.min.js"
mv "${target}/EWA_ALL.js" "${target}/../ewa.js"
mv "${target}/EWA_ALL.min.map" "${target}/../ewa.min.map"
|
#!/usr/bin/env bash
set -o xtrace

# Pick the CPack package generator (and, on Linux, a distro name embedded in
# the package file name) based on the host OS.
DISTRO_CFG=""
if [[ "$OSTYPE" == "linux-gnu" ]]; then
    CPACK_TYPE="TBZ2"
    distro=$(lsb_release -i -c -s|tr '\n' '_')
    DISTRO_CFG="-DPAPER_DISTRO_NAME=${distro}"
elif [[ "$OSTYPE" == "darwin"* ]]; then
    CPACK_TYPE="DragNDrop"
elif [[ "$OSTYPE" == "cygwin" ]]; then
    CPACK_TYPE="TBZ2"
elif [[ "$OSTYPE" == "msys" ]]; then
    CPACK_TYPE="NSIS" #?
elif [[ "$OSTYPE" == "win32" ]]; then
    CPACK_TYPE="NSIS"
elif [[ "$OSTYPE" == "freebsd"* ]]; then
    CPACK_TYPE="TBZ2"
    DISTRO_CFG="-DPAPER_DISTRO_NAME='freebsd'"
else
    CPACK_TYPE="TBZ2"
fi

# SIMD=1 enables CPU-specific optimizations; otherwise build the bundled
# generic crypto++ instead.
if [[ ${SIMD} -eq 1 ]]; then
    SIMD_CFG="-DPAPER_SIMD_OPTIMIZATIONS=ON"
    CRYPTOPP_CFG=""
    echo SIMD and other optimizations enabled
    echo local CPU:
    cat /proc/cpuinfo # TBD for macOS
else
    SIMD_CFG=""
    CRYPTOPP_CFG="-DCRYPTOPP_CUSTOM=ON"
fi

# At most one sanitizer build can be active at a time.
if [[ ${ASAN_INT} -eq 1 ]]; then
    SANITIZERS="-DPAPER_ASAN_INT=ON"
elif [[ ${ASAN} -eq 1 ]]; then
    SANITIZERS="-DPAPER_ASAN=ON"
elif [[ ${TSAN} -eq 1 ]]; then
    SANITIZERS="-DPAPER_TSAN=ON"
else
    SANITIZERS=""
fi

# BOOST_ROOT is a path string, so test it with -n. The previous
# '[[ "${BOOST_ROOT}" -ne "" ]]' performed an *arithmetic* comparison and
# errored out whenever BOOST_ROOT actually contained a path.
if [[ -n "${BOOST_ROOT-}" ]]; then
    BOOST_CFG="-DBOOST_ROOT='${BOOST_ROOT}'"
else
    BOOST_CFG=""
fi

BUSYBOX_BASH=${BUSYBOX_BASH-0}

if [[ ${FLAVOR-_} == "_" ]]; then
    FLAVOR=""
fi

set -o nounset

# Configure, build, install, and package in a flavor-specific build dir.
run_build() {
    build_dir=build_${FLAVOR}
    mkdir ${build_dir}
    cd ${build_dir}
    cmake -GNinja \
        -DPAPER_GUI=ON \
        -DCMAKE_BUILD_TYPE=Release \
        -DCMAKE_VERBOSE_MAKEFILE=ON \
        -DCMAKE_INSTALL_PREFIX="../install" \
        ${CRYPTOPP_CFG} \
        ${DISTRO_CFG} \
        ${SIMD_CFG} \
        -DBOOST_ROOT=/usr/local/boost \
        ${BOOST_CFG} \
        ${SANITIZERS} \
        ..
    # NOTE(review): ${BOOST_CFG} above can repeat -DBOOST_ROOT after the
    # hardcoded /usr/local/boost default; the later definition wins --
    # confirm this precedence is intended.
    cmake --build ${PWD} -- -v
    cmake --build ${PWD} -- install -v
    cpack -G ${CPACK_TYPE} ${PWD}
    sha1sum *.tar* > SHA1SUMS
}

run_build
package de.lmu.cis.ocrd.ml.features;
import de.lmu.cis.ocrd.ml.OCRToken;
/**
 * Base class for features that yield a single real-valued ({@code REAL})
 * result. Subclasses supply the computation via
 * {@link #doCalculate(OCRToken, int, int)}; this class guarantees that the
 * value handed back to callers is finite.
 */
public abstract class NamedDoubleFeature extends NamedFeature {
	private static final long serialVersionUID = -2596016620596603718L;

	protected NamedDoubleFeature(String name) {
		super(name);
	}

	/**
	 * Computes the feature value and rejects non-finite results.
	 *
	 * @throws RuntimeException if the computed value is NaN or infinite
	 */
	@Override
	public final Object calculate(OCRToken token, int i, int n) {
		final double value = doCalculate(token, i, n);
		// Double.isFinite(v) == !(isNaN(v) || isInfinite(v)); the boxed
		// return keeps the Object-typed interface of NamedFeature.
		if (!Double.isFinite(value)) {
			throw new RuntimeException("token: " + token.toString() + " (" + i + ") (" + n + ") isNAN() or isInfinite()");
		}
		return value;
	}

	/** Every double feature is declared with the ARFF class string REAL. */
	@Override
	public final String getClasses() {
		return "REAL";
	}

	/** Computes the raw (unvalidated) feature value for the given token. */
	protected abstract double doCalculate(OCRToken token, int i, int n);
}
|
#!/bin/bash
# Iconizer shell script by Stefan Herold
# This is a simple tool to generate all necessary app icon sizes from one vector file.
# To use: specify the path to your vector graphic (PDF format)
# Example: sh iconizer.sh MyVectorGraphic.pdf
# Requires ImageMagick: http://www.imagemagick.org/
# Requires jq: https://stedolan.github.io/jq/
# set -e
# set -x

CONTENTS_FILE="Contents.json"

if [ $# -ne 2 ]
then
    # printf renders \n portably; bash's echo (without -e) printed it literally.
    printf "\nUsage: sh iconizer.sh file.pdf FolderName\n"
elif [ ! -e "$1" ]
then
    echo "Did not find file $1, expected path to a vector image file."
elif [ "${1: -4}" != ".pdf" ]
then
    echo "File $1 is not a vector image file! Expected PDF file."
elif [ ! -e "./$2/$CONTENTS_FILE" ]
then
    echo "Did not find $2/$CONTENTS_FILE, expected folder which contains $CONTENTS_FILE"
else
    echo "Creating icons from $1 and updating $2/$CONTENTS_FILE..."
    i=0
    while :
    do
        image=$(jq ".images[$i]" "./$2/$CONTENTS_FILE")
        # Stop as soon as the images array is exhausted (jq yields the
        # literal "null") -- checked *before* parsing fields out of the
        # entry, which the original did only afterwards.
        if [[ "$image" == "null" ]]; then
            break
        fi
        scale=$(echo $image | jq ".scale" | cut -d "\"" -f 2 | cut -d "x" -f 1 )
        sizePT=$(echo $image | jq ".size" | cut -d "\"" -f 2 | cut -d "x" -f 1 )
        sizePX=$(bc -l <<< "scale=1; $sizePT*$scale")
        newFileName="appicon_${sizePX}.png"
        # Record the generated filename back into Contents.json (via a temp
        # file, since jq cannot edit in place).
        jq ".images[$i].filename = \"$newFileName\"" "./$2/$CONTENTS_FILE" > tmp.$$.json && mv tmp.$$.json "./$2/$CONTENTS_FILE"
        if [ -e "$2/$newFileName" ]; then
            echo "File $newFileName already created... Continue"
            i=$(( $i + 1 ))
            continue
        fi
        # The original message omitted the size value after "of size".
        echo -n "Creating $newFileName of size $sizePX and update $CONTENTS_FILE..."
        convert -density 400 "$1" -scale "$sizePX" "$2/$newFileName"
        echo " ✅"
        i=$(( $i + 1 ))
    done
fi
#!/bin/bash
# One-shot provisioning script for the ACRV APC 2017 workstation:
# installs ROS Kinetic and dependencies, builds cv_bridge and PCL 1.8 from
# source, clones and builds the acrv_apc_2017 workspace.
# NOTE: steps are strictly order-dependent (ROS repo -> apt packages ->
# source builds -> catkin_make); do not reorder.
# After clean install first run: sudo apt-get update && sudo apt-get upgrade && sudo apt-get dist-upgrade
# Then run: sudo reboot
# MAKE SURE NO REALSENSE IS PLUGGED INTO YOUR COMPUTER
# Make sure acrv_apc_2017 is cloned into ~/ros_ws/src/
sudo apt-get install -y ntp
sudo apt-get install -y ntpdate
# ROS install section
sudo sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list'
sudo apt-key adv --keyserver hkp://ha.pool.sks-keyservers.net:80 --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116
sudo apt-get update
sudo apt-get install -y ros-kinetic-desktop-full
# rosdep init fails if the default sources list already exists, so guard it.
if [ ! -e "/etc/ros/rosdep/sources.list.d/20-default.list" ]; then
	sudo rosdep init
fi
rosdep update
# Build dependencies for librealsense / PCL / the workspace packages.
sudo apt-get install -y libssl-dev
sudo apt-get install -y libeigen3-dev
sudo apt-get install -y libnlopt-dev
sudo apt-get install -y libxmlrpc-c++8-dev
sudo apt-get install -y libudev-dev
sudo apt-get install -y libusb-1.0-0-dev
sudo apt-get install -y pkg-config
sudo apt-get install -y libglfw3-dev
sudo apt-get install -y git-core
sudo apt-get install -y git
sudo apt-get install -y cmake
sudo apt-get install -y python-argparse
sudo apt-get install -y python-wstool
sudo apt-get install -y python-vcstools
sudo apt-get install -y python-rosdep
sudo apt-get install -y python-sklearn
sudo apt-get install -y python-termcolor
sudo apt-get install -y python-rosinstall
sudo apt-get install -y ros-kinetic-pcl-conversions
sudo apt-get install -y ros-kinetic-ar-track-alvar
sudo apt-get install -y ros-kinetic-image-transport
sudo apt-get install -y ros-kinetic-opencv3
sudo apt-get install -y ros-kinetic-position-controller
sudo apt-get install -y ros-kinetic-joint-trajectory-controller
sudo apt-get install -y ros-kinetic-joint-state-controller
sudo apt-get install -y ros-kinetic-rosserial-python
sudo apt-get install -y ros-kinetic-control-msgs
sudo apt-get install -y ros-kinetic-joystick-drivers
sudo apt-get install -y ros-kinetic-gazebo-ros-control
sudo apt-get install -y ros-kinetic-effort-controllers
sudo apt-get install -y ros-kinetic-moveit-*
sudo apt-get install -y ros-kinetic-tf2
sudo apt-get install -y ros-kinetic-ros-control
sudo apt-get install -y ros-kinetic-trac-ik
# sudo apt-get install -y ros-kinetic-librealsense
. "/opt/ros/kinetic/setup.bash"
## Not needed anymore due to custom version being used
# Librealsense stuff
# mkdir -p "${HOME}/co"
# cd "${HOME}/co"
# git clone https://github.com/IntelRealSense/librealsense.git
# cd "./librealsense"
# mkdir build
# cd "./build"
# cmake ../ -DBUILD_EXAMPLES=true
# make
# sudo make install
# cd ../
# sudo cp config/99-realsense-libusb.rules /etc/udev/rules.d/
# sudo udevadm control --reload-rules && udevadm trigger
# ./scripts/patch-realsense-ubuntu-xenial.sh
# Build cv_bridge from source
# Has to be done due to cv_bridge from ros-kinetic packages wanting OpenCV3-3.1.0
# But OpenCV from ros-kinetic packages is 3.2.0
mkdir -p "${HOME}/ros_ws/src/"
cd "${HOME}/ros_ws/src"
git clone https://github.com/ros-perception/vision_opencv.git
mkdir "./vision_opencv/cv_bridge/build"
cd "./vision_opencv/cv_bridge/build"
cmake ..
make
sudo make install
# PCL stuff: build PCL 1.8.0 from source with GPU/CUDA modules enabled.
mkdir -p "${HOME}/co"
cd "${HOME}/co"
git clone https://github.com/PointCloudLibrary/pcl.git
cd "./pcl"
git checkout tags/pcl-1.8.0 -b local-1.8.0
mkdir -p "./build"
cd "./build"
cmake .. -DCMAKE_BUILD_TYPE=Release -DBUILD_GPU=ON -DBUILD_CUDA=ON
make -j3
sudo make install
# ACRV_APC stuff
# sudo apt-get install -y libeigen3-dev libnlopt-dev libxmlrpc-c++8-dev libudev-dev python-sklearn python-termcolor
cd "${HOME}/ros_ws/src"
git clone https://bitbucket.org/acrv/acrv_apc_2017.git
git clone https://github.com/code-iai/iai_robots.git
# Kinect will run off of Juxi's nuc. iai_kinect2 requires OpenCV2 which is not available for ros-kinetic
# Install the segmentation library.
cd "./acrv_apc_2017"
git checkout cartesian
cd "./segmentation"
mkdir build
cd build
cmake ..
make
sudo make install
# Convenience things
# Build and profile the ROS workspace
cd "${HOME}/ros_ws"
catkin_make -j3
rospack profile
# Scales
sudo apt-get install -y libhidapi-dev
# Dynamixels
sudo apt-get install -y ros-kinetic-dynamixel-*
|
#!/usr/bin/env bash
# Builds SPDK twice (shared libraries, then static archives) and exercises
# the external hello_world / nvme example apps against each linkage variant.
# Quoted $(dirname "$0") so the script also works from a path with spaces.
test_root=$(readlink -f "$(dirname "$0")")
rootdir="$test_root/../.."
source "$rootdir/test/common/autotest_common.sh"

set -e

SPDK_DIR=$1

# Allow the caller to override how much hugepage memory setup.sh reserves.
if [ -z "$EXTERNAL_MAKE_HUGEMEM" ]; then
	EXTERNAL_MAKE_HUGEMEM=$HUGEMEM
fi

sudo HUGEMEM="$EXTERNAL_MAKE_HUGEMEM" $SPDK_DIR/scripts/setup.sh

# WITH_DPDK stays unquoted at use sites on purpose: when unset it must
# expand to nothing, not to an empty argument.
if [ -n "$SPDK_RUN_EXTERNAL_DPDK" ]; then
	WITH_DPDK="--with-dpdk=$SPDK_RUN_EXTERNAL_DPDK"
fi

make -C $SPDK_DIR clean
$SPDK_DIR/configure --with-shared --without-isal --without-ocf --disable-asan $WITH_DPDK
make -C $SPDK_DIR -j$(nproc)

export SPDK_HEADER_DIR="$SPDK_DIR/include"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:"$test_root/passthru"
_sudo="sudo -E --preserve-env=PATH LD_LIBRARY_PATH=$LD_LIBRARY_PATH"

# The default target is to make both the app and bdev and link them against the combined SPDK shared library libspdk.so.
run_test "external_make_hello_bdev_shared_combo" make -C $test_root hello_world_bdev_shared_combo
run_test "external_run_hello_bdev_shared_combo" $_sudo $test_root/hello_world/hello_bdev \
	--json $test_root/hello_world/bdev_external.json -b TestPT

make -C $test_root clean

# Make just the application linked against the combined SPDK shared library libspdk.so.
run_test "external_make_hello_no_bdev_shared_combo" make -C $test_root hello_world_no_bdev_shared_combo
run_test "external_run_hello_no_bdev_shared_combo" $_sudo $test_root/hello_world/hello_bdev \
	--json $test_root/hello_world/bdev.json -b Malloc0

make -C $test_root clean

# Make both the application and bdev against individual SPDK shared libraries.
run_test "external_make_hello_bdev_shared_iso" make -C $test_root hello_world_bdev_shared_iso
run_test "external_run_hello_bdev_shared_iso" $_sudo $test_root/hello_world/hello_bdev \
	--json $test_root/hello_world/bdev_external.json -b TestPT

make -C $test_root clean

# Make just the application linked against individual SPDK shared libraries.
run_test "external_make_hello_no_bdev_shared_iso" make -C $test_root hello_world_no_bdev_shared_iso
run_test "external_run_hello_no_bdev_shared_iso" $_sudo $test_root/hello_world/hello_bdev \
	--json $test_root/hello_world/bdev.json -b Malloc0

# Make the basic NVMe driver linked against individual shared SPDK libraries.
run_test "external_make_nvme_shared" make -C $test_root nvme_shared
run_test "external_run_nvme_shared" $_sudo $test_root/nvme/identify.sh

make -C $test_root clean
make -C $SPDK_DIR clean

# Second pass: rebuild SPDK without shared libraries for the static variants.
$SPDK_DIR/configure --without-shared --without-isal --without-ocf --disable-asan $WITH_DPDK
make -C $SPDK_DIR -j$(nproc)

# Make both the application and bdev against individual SPDK archives.
run_test "external_make_hello_bdev_static" make -C $test_root hello_world_bdev_static
run_test "external_run_hello_bdev_static" $_sudo $test_root/hello_world/hello_bdev \
	--json $test_root/hello_world/bdev_external.json -b TestPT

make -C $test_root clean

# Make just the application linked against individual SPDK archives.
run_test "external_make_hello_no_bdev_static" make -C $test_root hello_world_no_bdev_static
run_test "external_run_hello_no_bdev_static" $_sudo $test_root/hello_world/hello_bdev \
	--json $test_root/hello_world/bdev.json -b Malloc0

# Make the basic NVMe driver statically linked against individual SPDK archives.
run_test "external_make_nvme_static" make -C $test_root nvme_static
run_test "external_run_nvme_static" $_sudo $test_root/nvme/identify.sh

make -C $test_root clean
make -C $SPDK_DIR -j$(nproc) clean

# Release the hugepages reserved at the top.
sudo HUGEMEM="$HUGEMEM" $SPDK_DIR/scripts/setup.sh reset
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.