text
stringlengths 1
1.05M
|
|---|
package com.engg.digitalorg.managers;
import com.engg.digitalorg.model.entity.Url;
import com.engg.digitalorg.repository.UrlRepository;
import com.engg.digitalorg.util.BaseConversion;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Optional;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import static org.testng.Assert.assertEquals;
/**
* The type Url manager test.
*/
/**
 * Unit tests for {@link UrlManager}.
 */
public class UrlManagerTest {

    @Mock
    private UrlRepository mockUrlRepository;

    @Mock
    private BaseConversion mockBaseConversion;

    @InjectMocks
    private UrlManager urlManagerUnderTest;

    /** Initializes the Mockito annotations before every test method. */
    @BeforeMethod
    public void setUp() {
        initMocks(this);
    }

    /**
     * getOriginalUrl should decode the short URL to an id, look the record up
     * by that id, and return its long URL (deleting the record once expired).
     */
    @Test
    public void testGetOriginalUrl() {
        // Bug fix: the original stubbed decode("input"), which never matched
        // the "shortUrl" argument passed below and only worked through
        // Mockito's default return value of 0. Stub the actual input.
        when(mockBaseConversion.decode("shortUrl")).thenReturn(0);

        // Configure UrlRepository.findById(...) to return a stored URL whose
        // expiry date (2019-01-01) is already in the past.
        final Url url1 = new Url();
        url1.setId(0);
        url1.setLong_url("long_url");
        url1.setCreated_date(new GregorianCalendar(2019, Calendar.JANUARY, 1).getTime());
        url1.setCard_id(0);
        url1.setShort_url("short_url");
        url1.setExpires_date(new GregorianCalendar(2019, Calendar.JANUARY, 1).getTime());
        when(mockUrlRepository.findById(0)).thenReturn(Optional.of(url1));

        // Run the test
        final String result = urlManagerUnderTest.getOriginalUrl("shortUrl");

        // Verify the results. TestNG's assertEquals takes (actual, expected);
        // the original compared against the literal "result", which a URL
        // loaded from the repository can never equal. The manager is expected
        // to return the stored long URL — TODO(review): confirm against
        // UrlManager.getOriginalUrl.
        assertEquals(result, "long_url");
        verify(mockUrlRepository).delete(any(Url.class));
    }
}
|
import got from 'got';
import cbor from 'cbor';
import memoize from 'memoizee';
import { path } from 'ramda';
import { map, fromPairs } from 'lodash';
import { inflate } from 'pako';
import { Schema, Validator, ValidatorResult, RewriteFunction } from 'jsonschema';
import { Logger, BadRequestError, invariant, threadP } from '@navch/common';
import { Decoder, DecodeResult, PayloadRecord } from '../types';
/**
* According to the HCERT spec, the QR code content SHALL be prefixed by the Context
* Identifier string "HC1:".
*
* https://github.com/ehn-dcc-development/hcert-spec/blob/main/hcert_spec.md
*/
const HCERT_PATTERN = '^HC1:?(.+)$';
const DCC_JSON_SCHEMA_REPO = 'https://raw.githubusercontent.com/ehn-dcc-development/ehn-dcc-schema';
const DCC_JSON_SCHEMA_FILE = 'DCC.combined-schema.json';
const DCC_VALUESETS_REPO = 'https://raw.githubusercontent.com/ehn-dcc-development/ehn-dcc-valuesets';
const DCC_VALUESETS = {
'valuesets/country-2-codes.json': 'country-2-codes.json',
'valuesets/disease-agent-targeted.json': 'disease-agent-targeted.json',
'valuesets/test-manf.json': 'test-manf.json',
'valuesets/test-result.json': 'test-result.json',
'valuesets/test-type.json': 'test-type.json',
'valuesets/vaccine-mah-manf.json': 'vaccine-mah-manf.json',
'valuesets/vaccine-medicinal-product.json': 'vaccine-medicinal-product.json',
'valuesets/vaccine-prophylaxis.json': 'vaccine-prophylaxis.json',
};
const schemaCacheOptions = {
max: 10,
maxAge: 3600000, // 1 hour
promise: true, // remove rejected result
normalizer: ([_, version]: [Logger, string | undefined]) => version ?? 'default',
};
/**
 * Downloads the DCC combined JSON schema for the given release version
 * (falls back to the `main` branch when no version is given).
 *
 * Failures are logged here for context, but the rejection still propagates
 * to the caller through the second `await` on the same promise.
 */
async function fetchJsonSchema(logger: Logger, version?: string) {
  const branch = version ? `release/${version}` : 'main';
  const uri = `${DCC_JSON_SCHEMA_REPO}/${branch}/${DCC_JSON_SCHEMA_FILE}`;
  const request = got(uri).json<Schema>();
  await request.catch(err => {
    logger.error('Failed to download DCC JSON schema', err);
  });
  return await request;
}
/**
 * Downloads every DCC valueset file listed in DCC_VALUESETS and returns a
 * map from valueset key to its parsed JSON document.
 *
 * Download failures are logged for context; the rejection still propagates
 * to the caller via the `await request` below.
 */
async function fetchValuesets(logger: Logger, version?: string) {
  const branch = version ? `release/${version}` : 'main';
  const pairs = map(DCC_VALUESETS, async (filePath, key) => {
    const uri = `${DCC_VALUESETS_REPO}/${branch}/${filePath}`;
    const request = got(uri).json<Schema>();
    await request.catch(err => {
      logger.error('Failed to download DCC JSON Schema Valueset', err);
    });
    // Bug fix: the original asserted `as [string, string]`, but the resolved
    // value is the parsed JSON document (Schema), not a string.
    return [key, await request] as [string, Schema];
  });
  return fromPairs(await Promise.all(pairs));
}
/**
 * Decoder for EU Digital COVID Certificate (HCERT) QR payloads.
 *
 * Pipeline: Base45 > Zlib > COSE > CBOR > JSON, validated against the
 * published DCC JSON schema.
 */
export class HCERTDecoder implements Decoder {
  constructor(readonly logger: Logger) {
    // Bind instance methods so they can be passed around as callbacks.
    this.decode = this.decode.bind(this);
    this.validate = this.validate.bind(this);
    this.validateOrThrow = this.validateOrThrow.bind(this);
  }

  readonly validator = new Validator();
  // Remote schema/valueset downloads are memoized (1h TTL, keyed by version).
  readonly fetchJsonSchemaCached = memoize(fetchJsonSchema, schemaCacheOptions);
  readonly fetchValuesetsCached = memoize(fetchValuesets, schemaCacheOptions);

  /** True when the input carries the HCERT "HC1:" context identifier. */
  public isMatch(input: string): boolean {
    return Boolean(input.match(HCERT_PATTERN));
  }

  /**
   * Validates a decoded DCC payload against the published JSON schema.
   * Fields annotated with a `valueset-uri` are rewritten to their valueset
   * entry before validation.
   */
  async validate(record: PayloadRecord): Promise<ValidatorResult> {
    const version = record?.ver as string | undefined;
    const schema = await this.fetchJsonSchemaCached(this.logger, version);
    // NOTE(review): valuesets are fetched from the default branch regardless
    // of the schema version — confirm whether they should track `version`.
    const valuesets = await this.fetchValuesetsCached(this.logger);
    const rewrite: RewriteFunction = (field, fieldSchema) => {
      const valuesetKey = fieldSchema['valueset-uri'];
      if (!valuesetKey) {
        return field;
      }
      const valueset = valuesets[valuesetKey];
      invariant(valueset, `No valueset found with [${valuesetKey}]`);
      const value = path(['valueSetValues', field], valueset);
      // Bug fix: the original re-checked `valueset` here instead of `value`,
      // so a field missing from the valueset slipped through as `undefined`.
      invariant(value, `No value found from [${valuesetKey}] with key [${field}]`);
      return value;
    };
    return this.validator.validate(record, schema, { rewrite });
  }

  /** Like {@link validate}, but throws BadRequestError on invalid payloads. */
  async validateOrThrow<T extends PayloadRecord>(record: PayloadRecord): Promise<T> {
    const result = await this.validate(record);
    if (!result.valid) {
      const errors = result.errors.map(err => err.toString());
      throw new BadRequestError(`Invalid DCC payload: ${JSON.stringify(errors)}`);
    }
    return result.instance as T;
  }

  // Base45 > Zlib > COSE > CBOR > JSON
  //
  // https://github.com/mozq/dencode-web
  // https://github.com/ehn-dcc-development/hcert-spec
  // https://github.com/ehn-dcc-development/ehn-sign-verify-javascript-trivial
  async decode(input: string): Promise<DecodeResult> {
    this.logger.debug('Decoding HCERT payload', { input });
    const base45 = require('base45-js'); // missing type definitions
    const payload = input.match(HCERT_PATTERN)?.[1];
    if (!payload) {
      // Fixed typo in error message: "confirm" -> "conform".
      throw new Error(`Payload does not conform to HCERT format`);
    }
    return await threadP(
      payload,
      // Base45 to COSE
      async data => {
        return base45.decode(data);
      },
      // Decompress COSE
      async (buffer: Buffer) => {
        // Zlib magic headers:
        //
        // 78 01 - No Compression/low
        // 78 9C - Default Compression
        // 78 DA - Best Compression
        //
        if (buffer[0] == 0x78) {
          return inflate(buffer);
        }
        return Uint8Array.from(buffer);
      },
      // COSE to CBOR to JSON
      //
      // https://github.com/ehn-dcc-development/hcert-spec/blob/main/hcert_spec.md#331-cwt-structure-overview
      async buffer => {
        const coseData = cbor.decode(buffer);
        const cborData = cbor.decode(coseData.value[2]);
        // CWT claim -260 holds the HCERT map; the EU DCC sits under key 1.
        const hcert = Object.fromEntries(cborData.get(-260))[1];
        await this.validateOrThrow(hcert);
        const meta = {
          iss: cborData.get(1), // Issuer, ISO 3166-1 alpha-2
          iat: cborData.get(6) * 1000, // Issued At
          exp: cborData.get(4) * 1000, // Expiration Time
          kind: hcert.v ? 'Vaccination' : hcert.t ? 'Test' : 'Recovery',
        };
        return { raw: Object.fromEntries(cborData), data: hcert, meta };
      }
    ).catch(err => {
      this.logger.error('Failed to decode HCERT input', { err });
      throw new Error(`Invalid HCERT payload: ${err}`);
    });
  }
}
|
<filename>my-first-miniprogram/miniprogram/pages/myCart/myCart.js
// pages/myCart/myCart.js
Page({
  /**
   * Initial page data: cart line items and the running total price.
   */
  data: {
    cart: [],
    totalPrice: 0,
  },

  /** Lifecycle: page load — populate the cart. */
  onLoad: function (options) {
    this.getCartData();
  },

  /** Lifecycle: first render complete. */
  onReady: function () {
  },

  /** Lifecycle: page shown — refresh, the global cart may have changed. */
  onShow: function () {
    this.getCartData();
  },

  /** Lifecycle: page hidden. */
  onHide: function () {
  },

  /** Lifecycle: page unload. */
  onUnload: function () {
  },

  /** Pull-down refresh handler. */
  onPullDownRefresh: function () {
  },

  /** Reached-bottom handler. */
  onReachBottom: function () {
  },

  /** Share handler. */
  onShareAppMessage: function () {
  },

  /**
   * Copies the global cart into page data and recomputes the total price
   * as sum(price * count) over all entries.
   */
  getCartData: function () {
    this.setData({
      cart: getApp().globalData.cart
    })
    var total = 0;
    this.data.cart.forEach(item => {
      total += item.item.price * item.count;
    });
    this.setData({
      totalPrice: total
    })
  },

  removeItem: function (e) {
    this.caculateCount(e, false);
  },

  addItem: function (e) {
    this.caculateCount(e, true);
  },

  /**
   * Adjusts the count of the tapped cart entry by +/-1 and removes the
   * entry when its count reaches zero.
   * (Method name intentionally kept as "caculateCount" so existing
   * references keep working.)
   */
  caculateCount: function (e, isAdd) {
    var selectedItem = e.currentTarget.dataset.item;
    var cart = getApp().globalData.cart;
    cart.forEach(item => {
      if (item.item.id == selectedItem.item.id) {
        if (isAdd) {
          item.count++;
        } else {
          item.count--;
          if (item.count == 0) {
            var index = cart.indexOf(item);
            // Bug fix: splice(index) removed EVERY entry from `index` to the
            // end of the cart; splice(index, 1) removes only this entry.
            cart.splice(index, 1);
          }
        }
      }
    })
    this.getCartData();
  },

  navigateToDetails: function () {
    wx.navigateTo({
      url: '../goodsDetail/goodsDetail',
      success: function (res) {
      },
      fail: function () {
      },
      complete: function () {
      }
    })
  },

  pay: function () {
    // NOTE(review): wx.requestPayment is called with no parameters; a real
    // payment needs timeStamp/nonceStr/package/signType/paySign from the
    // backend — confirm intended behaviour.
    wx.requestPayment({})
  }
})
|
import pandas as pd
import numpy as np
# Minimal regression pipeline: load a CSV, split train/test, standardize the
# features, fit linear regression and predict on the held-out set.
# Read data from csv
data = pd.read_csv('dataset.csv')
# Separate features and target: all columns but the last are features,
# the last column is the target.
X = data.iloc[:, 0:-1].values
Y = data.iloc[:, -1].values
# Train-test split (fixed random_state for reproducibility)
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.20, random_state=0)
# Feature Scaling — fit the scaler on the training data only, then apply
# the same transform to the test data (avoids test-set leakage).
from sklearn.preprocessing import StandardScaler
sc_X = StandardScaler()
X_train = sc_X.fit_transform(X_train)
X_test = sc_X.transform(X_test)
# Train a model
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
regressor.fit(X_train, Y_train)
# predict on the (scaled) test features
Y_pred = regressor.predict(X_test)
|
import React from 'react'
import styled from 'styled-components'
import Img from 'gatsby-image'
// import { Img } from '../../utils/styles'
import { useStaticQuery, graphql, Link } from 'gatsby'
// Fixed, full-viewport wrapper pinned behind the page content (z-index -10).
const Wrapper = styled.div`
width: 100%;
position: fixed;
height: 100vh;
z-index: -10;
left: 0;
margin: 0;
top: 7vh;
`
// Gatsby fluid image stretched to fill the wrapper.
const SplashImage = styled(Img)`
position: relative;
height: 100%;
width: 100%;
`
// Headline overlaid on the splash image; centered and smaller on narrow screens.
const OverText = styled.h2`
text-align: left;
font-size: 5rem;
width: 100vw;
color: white;
position: absolute;
top: 40vh;
left: 15vw;
@media (max-width: 768px) {
left: 0;
text-align: center;
}
@media (max-width: 576px) {
font-size: 2.5rem;
}
`
// Styled Gatsby Link used as the call-to-action.
// NOTE(review): where it is rendered below the Link has no `to` prop —
// confirm the intended destination.
const Links = styled(Link)`
text-decoration: underline;
color: white;
font-size: 3rem;
@media (max-width: 768px) {
text-align: center;
}
`
// Hero splash section: statically-queried unsplash.jpg with overlaid headline.
export const Splash = () => {
const images = useStaticQuery(
graphql`
{
splash: file(relativePath: { eq: "unsplash.jpg" }) {
childImageSharp {
fluid(maxWidth: 1066) {
...GatsbyImageSharpFluid_noBase64
}
}
}
}
`
)
return (
<Wrapper>
<SplashImage fluid={images.splash.childImageSharp.fluid} />
<OverText>
New Kool Sunglasses: <br /> <Links> Shop Now</Links>{' '}
</OverText>
</Wrapper>
)
}
|
def evaluate(expression):
    """Evaluate an infix arithmetic expression string and return a float.

    Relies on helpers defined elsewhere in this module:
    ``infix_to_postfix``, ``Stack``, ``is_operand`` and ``operate``.
    """
    # Converting expression to postfix
    tokens = infix_to_postfix(expression)
    # Initializing stack
    stack = Stack()
    # Iterating over the token list
    for token in tokens:
        # If token is an operand, push it to stack
        if is_operand(token):
            stack.push(float(token))
        # If token is an operator, pop two elements from stack and evaluate them using token
        else:
            val2 = stack.pop()
            val1 = stack.pop()
            stack.push(operate(token, val1, val2))
    # Stack contains just the final result which is the required output
    return stack.pop()
# Call evaluate
result = evaluate("5 * 6 + 4 / 2 - 15")
# Output: 17.0 under standard operator precedence (30 + 2 - 15).
# NOTE(review): the original comment claimed -2.0 — verify against the actual
# infix_to_postfix/operate implementations.
|
#!/bin/bash
# Loads FABRIC/PLATFORM environment variables and starts the Node server.
set -e

# Current user (kept for debugging; not otherwise used).
IAM=$USER
#echo $IAM

# Load the variables into THIS shell so node inherits them.
# Bug fix: the original also executed `./exportVariables.sh` afterwards;
# a subshell cannot modify our environment, so that call was redundant
# (and would re-run any side effects the script has).
source exportVariables.sh

# Show what was exported, for the logs.
env | grep FABRIC
env | grep PLATFORM

node server.js
|
<filename>util/util.go
package util
import (
"bufio"
"errors"
"fmt"
"os"
"reflect"
"strconv"
"strings"
"github.com/rivo/uniseg"
)
/*
* // Makerange creates a sequence of number (range)
* // Ref. https://stackoverflow.com/questions/39868029
* func MakeRange(min, max int) []int {
* if min == max {
* return []int{}
* }
* a := make([]int, max-min+1)
* for i := range a {
* a[i] = min + i
* }
* return a
* }
* */
// MakeRange returns the sequence of integers in the half-open interval
// [start, end). When end == start an empty slice is returned; when
// end < start the loop never runs and a nil slice is returned (unchanged
// from the original behaviour).
func MakeRange(start, end int) []int {
	if end == start {
		return []int{}
	}
	var r []int
	for i := start; i < end; i++ {
		// Bug fix: the original appended `start` on every iteration,
		// producing a slice of identical values instead of a range.
		r = append(r, i)
	}
	return r
}
// StringIndex reports the starting index of sub within s.
// When sub does not occur in s, it returns -1 together with an error.
func StringIndex(s, sub string) (index int, err error) {
	pos := strings.Index(s, sub)
	if pos < 0 {
		return -1, errors.New("Index not found")
	}
	return pos, nil
}
// ToASCII converts a string to its ASCII-quoted form: ASCII runes pass
// through unchanged while other runes become backslash escapes.
// Ref. https://stackoverflow.com/questions/12668681
func ToASCII(s string) string {
	var b strings.Builder
	for _, r := range s {
		// QuoteRuneToASCII wraps the rune in single quotes; strip them.
		q := strconv.QuoteRuneToASCII(r)
		b.WriteString(q[1 : len(q)-1])
	}
	return b.String()
}
// ToGrapheme converts a string to a concatenation of the %x-formatted rune
// groups of its grapheme clusters (uniseg does the cluster segmentation).
// TODO: should we include this func?
// Ref: https://github.com/golang/go/issues/14820
func ToGrapheme(s string) string {
	gr := uniseg.NewGraphemes(s)
	var str []string
	for gr.Next() {
		// Format the current cluster's runes as hex.
		s := fmt.Sprintf("%x", gr.Runes())
		str = append(str, s)
	}
	return strings.Join(str, "")
}
// Ref. https://stackoverflow.com/questions/14000534
// RuneGen is a generator of runes: each call yields the next rune.
type RuneGen func() rune

// MapRune lifts f over a rune generator: the returned generator yields
// f(next) for every rune produced by the source generator.
func MapRune(source RuneGen, transform func(rune) rune) RuneGen {
	mapped := func() rune {
		next := source()
		return transform(next)
	}
	return mapped
}
// MinMax returns the smallest and largest value in the given int slice.
// NOTE: like the original, it panics on an empty slice (array[0]).
func MinMax(array []int) (int, int) {
	min, max := array[0], array[0]
	for _, v := range array[1:] {
		if v > max {
			max = v
		}
		if v < min {
			min = v
		}
	}
	return min, max
}
// StringInSlice reports whether a occurs anywhere in list.
func StringInSlice(a string, list []string) bool {
	for i := range list {
		if list[i] == a {
			return true
		}
	}
	return false
}
// FileSize returns the length in bytes of the given file using `os.Stat()`.
// Any stat error (missing file, permissions, ...) is returned with size 0.
// Ref. https://stackoverflow.com/questions/17133590
func FileSize(filepath string) (int64, error) {
	fi, err := os.Stat(filepath)
	if err != nil {
		return 0, err
	}
	// get the size
	return fi.Size(), nil
}
// ReadAllLn reads all line by line from a file using bufio.scanner
func ReadAllLn(filepath string, keepBreakLine bool) ([]string, error) {
var lines []string
file, err := os.Open(filepath)
if err != nil {
return nil, err
}
defer file.Close()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
l := scanner.Text()
if keepBreakLine {
lines = append(lines, fmt.Sprintf("%v\n", l))
}
lines = append(lines, l)
}
if err := scanner.Err(); err != nil {
return nil, err
}
return lines, nil
}
// Zip zips 2 slices in first and second arguments to third argument
// Ref: https://stackoverflow.com/questions/26957040
//
// Usage Example
// a := []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 0}
// b := []int{0, 9, 8, 7, 6, 5, 4, 3, 2, 1}
// c := [][2]int{}
//
// e := zip(a, b, &c)
//
// if e != nil {
// fmt.Println(e)
// return
// }
//
// fmt.Println(c)
func Zip(a, b, c interface{}) error {
	ta, tb, tc := reflect.TypeOf(a), reflect.TypeOf(b), reflect.TypeOf(c)
	// a and b must both be slices, and of the exact same type.
	if ta.Kind() != reflect.Slice || tb.Kind() != reflect.Slice || ta != tb {
		return fmt.Errorf("zip: first two arguments must be slices of the same type")
	}
	if tc.Kind() != reflect.Ptr {
		return fmt.Errorf("zip: third argument must be pointer to slice")
	}
	// Unwrap (possibly nested) pointers to reach the destination slice type.
	for tc.Kind() == reflect.Ptr {
		tc = tc.Elem()
	}
	if tc.Kind() != reflect.Slice {
		return fmt.Errorf("zip: third argument must be pointer to slice")
	}
	eta, _, etc := ta.Elem(), tb.Elem(), tc.Elem()
	// The destination's element type must be [2]T where T is the element
	// type of the input slices.
	if etc.Kind() != reflect.Array || etc.Len() != 2 {
		return fmt.Errorf("zip: third argument's elements must be an array of length 2")
	}
	if etc.Elem() != eta {
		return fmt.Errorf("zip: third argument's elements must be an array of elements of the same type that the first two arguments are slices of")
	}
	va, vb, vc := reflect.ValueOf(a), reflect.ValueOf(b), reflect.ValueOf(c)
	for vc.Kind() == reflect.Ptr {
		vc = vc.Elem()
	}
	if va.Len() != vb.Len() {
		return fmt.Errorf("zip: first two arguments must have same length")
	}
	// Build one [2]T pair per index and append it to the destination slice.
	for i := 0; i < va.Len(); i++ {
		ea, eb := va.Index(i), vb.Index(i)
		tt := reflect.New(etc).Elem()
		tt.Index(0).Set(ea)
		tt.Index(1).Set(eb)
		vc.Set(reflect.Append(vc, tt))
	}
	return nil
}
// MinMaxFloat64 returns min and max from the input float64 array.
// (The original doc comment named the wrong function, "MinMax".)
// NOTE: panics on an empty slice (array[0]), like its int counterpart.
func MinMaxFloat64(array []float64) (float64, float64) {
	var max float64 = array[0]
	var min float64 = array[0]
	for _, value := range array {
		if max < value {
			max = value
		}
		if min > value {
			min = value
		}
	}
	return min, max
}
|
#ifndef CONNECTION_H_
#define CONNECTION_H_
#include "typedef.h"
// Abstract transport interface: concrete subclasses implement socket setup
// and raw byte I/O; the int/long helpers are declared here and defined
// elsewhere in terms of write/read.
class connection {
public:
	// Running byte counter — presumably incremented by writes issued with
	// count_band == true; confirm in the concrete implementations.
	unsigned long bandwidth = 0u;
	virtual ~connection();
	// Endpoint setup (server listen / client connect).
	virtual void init_server(int port) = 0;
	virtual void init_client(const char *ip, int port) = 0;
	virtual void set_no_delay() = 0;
	// Raw byte I/O primitives.
	virtual void write(const uchar *data, unsigned long bytes,
	bool count_band = true) = 0;
	virtual void read(uchar *data, unsigned long bytes) = 0;
	// virtual void fwrite(const uchar *data, unsigned long bytes,
	// bool count_band = true) = 0;
	// virtual void fread(uchar *data, unsigned long bytes) = 0;
	virtual void flush() = 0;
	virtual void close() = 0;
	// Typed convenience helpers built on top of write/read.
	void write_int(int n, bool count_band = true);
	int read_int();
	void write_long(long n, bool count_band = true);
	long read_long();
};
#endif /* CONNECTION_H_ */
|
<reponame>dylandoamaral/qush
import fs from "fs";
import { isRight, isLeft } from "fp-ts/lib/Either";
import { validateSource, validateArgumentsCoherence, validateSources, validateArgumentExistence, validateArgumentsExistence } from "./validator";
import minimist from "minimist";
import config from "../../asset/default.config.json";
// NOTE(review): this test touches the real filesystem — it creates and
// removes a temp file in the working directory.
test("the validation of the sources", () => {
  const file = "./testfile.txt";
  fs.writeFileSync(file, "");
  // An existing file validates; a missing one does not.
  expect(isRight(validateSource(file))).toEqual(true);
  expect(isLeft(validateSource("./testnofile.txt"))).toEqual(true);
  // validateSources fails as soon as any listed source is missing.
  expect(isRight(validateSources([file]))).toEqual(true);
  expect(isLeft(validateSources([file, "./testnofile.txt"]))).toEqual(true);
  fs.unlinkSync(file);
});
// "a p <message>" is expected to be a coherent invocation per the default config.
test("the validation of the arguments coherence", () => {
  const args = minimist(["a", "p", "my commit"]);
  expect(isRight(validateArgumentsCoherence(args)(config))).toEqual(true);
});
test("the validation of the arguments existence", () => {
  const args = minimist(["a", "p", "my commit"]);
  // "a" is declared by the first default instruction; "no" is not.
  expect(isRight(validateArgumentExistence("a")(config.instructions[0]))).toEqual(true);
  expect(isLeft(validateArgumentExistence("no")(config.instructions[0]))).toEqual(true);
  expect(isRight(validateArgumentsExistence(args)(config))).toEqual(true);
});
|
// Toggles the 'is-scrolled' class on .Header once the page has scrolled
// 800px past the header's original offset.
// Cleanup: removed leftover debug console.log calls (which fired on every
// scroll event) and the unused `previousScroll` variable.
var topbar = function () {
  var header = $('.Header'),
      originalTop = header.offset().top;

  $(window).scroll(function (e) {
    var currentScroll = $(this).scrollTop();
    if (currentScroll >= originalTop + 800) {
      header.addClass('is-scrolled');
    } else {
      header.removeClass('is-scrolled');
    }
  });
};

module.exports = topbar;
|
<filename>src/components/ShareIcon/ShareIcon.tsx
import React, { useCallback } from 'react';
import { GiShare } from "react-icons/gi";
import { colors } from '../../constants/colors';
import { useGoogleAnalytics } from '../../hooks/useGoogleAnalytics';
import "./ShareIcon.css";
interface ShareIconProps {
  title: string;
  color?: string;
  size?: number;
  // NOTE(review): backgroundColor is accepted but currently unused.
  backgroundColor?: string;
}

/**
 * Icon button that invokes the Web Share API (when available) with the
 * current page URL and reports the share to Google Analytics.
 */
const ShareIcon: React.FC<ShareIconProps> = ({ title, color, size, backgroundColor }) => {
  const { fireEvent } = useGoogleAnalytics();

  const shareHandler = useCallback(async () => {
    // navigator.share only exists in secure contexts on supporting
    // browsers; silently do nothing elsewhere.
    if (typeof navigator !== "undefined" && navigator.share) {
      try {
        await navigator.share({
          title: title,
          url: location.href,
        });
        fireEvent("Share", {
          event_category: "Sharing",
          event_label: "Share site URL",
          siteURL: location.href,
        });
      } catch (e) {
        // A user cancelling the share sheet also rejects; just log it.
        console.log(e);
      }
    }
    // Bug fix: deps were [] so the callback captured stale `title` and
    // `fireEvent` values after prop/hook updates.
  }, [title, fireEvent]);

  return (
    <GiShare
      className="shareIcon"
      color={color || colors.primary}
      size={size || 32}
      onClick={shareHandler}
    />
  );
};

export default ShareIcon;
|
<gh_stars>10-100
#include "rubynized_rapidjson.hpp"
#include <ruby.h>
// Allocator routing RapidJSON allocations through Ruby's ruby_x* heap
// functions, so buffers are accounted for by the Ruby runtime.
void *RubyCrtAllocator::Malloc(size_t size) {
	// Return nullptr for zero-byte requests (presumably to sidestep
	// ruby_xmalloc(0) — TODO confirm the motivation).
	if (size)
		return ruby_xmalloc(size);
	else
		return nullptr;
}
// The middle parameter (original size) is unused — ruby_xrealloc does not
// need it.
void *RubyCrtAllocator::Realloc(void *originalPtr, size_t, size_t newSize) {
	// Reallocating to zero frees the block, per the usual allocator contract.
	if (newSize == 0) {
		ruby_xfree(originalPtr);
		return nullptr;
	}
	return ruby_xrealloc(originalPtr, newSize);
}
void RubyCrtAllocator::Free(void *ptr) {
	ruby_xfree(ptr);
}
|
package top.mowang.cloud;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.netflix.hystrix.dashboard.EnableHystrixDashboard;
/**
* SpringCloud-Demo
*
* @author : <NAME>
* @website : https://mowangblog.top
* @date : 2021/11/01 12:33
**/
@SpringBootApplication
@EnableHystrixDashboard
public class HystrixDashboardMainApplication {
	// Boots the Spring context with the Hystrix Dashboard UI enabled.
	public static void main(String[] args) {
		SpringApplication.run(HystrixDashboardMainApplication.class,args);
	}
}
|
def armstrong_numbers(lst):
    """Return the numbers from ``lst`` that are Armstrong numbers.

    A number is an Armstrong (narcissistic) number when the sum of its
    digits, each raised to the power of the digit count, equals the number
    itself. The original implementation hard-coded the exponent 3, so it was
    only correct for three-digit input; using the digit count generalizes it
    to any width while keeping the same results for three-digit numbers.
    Negative numbers are never Armstrong numbers (matching the original,
    whose digit loop skipped non-positive values).
    """
    armstrong_nums = []
    for num in lst:
        digits = str(abs(num))
        power = len(digits)
        # Sum of each digit raised to the number-of-digits power.
        if num == sum(int(d) ** power for d in digits):
            armstrong_nums.append(num)
    return armstrong_nums


if __name__ == '__main__':
    lst = [153, 371, 974]
    print("Armstrong numbers:", armstrong_numbers(lst))
|
#!/bin/sh
# Provisioning script for an Amazon Linux benchmark/web host: installs build
# tools, the MySQL client, sysbench (from source), Python, Apache+PHP,
# phpMyAdmin and two PHP benchmark suites, then starts the web server.
echo "**Install jq**"
sudo yum install -y jq
echo "**Installing git, compiler, depends...**"
sudo yum install -y git
sudo yum -y install git gcc make automake libtool openssl-devel ncurses-compat-libs
echo "Installing MySQL Client"
sudo yum install -y https://dev.mysql.com/get/mysql57-community-release-el7-11.noarch.rpm
sudo yum install -y mysql-community-client
sudo yum -y install mysql-community-devel mysql-community-client mysql-community-common
echo "compile sysbench"
git clone https://github.com/akopytov/sysbench
cd sysbench
./autogen.sh
./configure
make
sudo make install
echo "Install python"
sudo yum install -y python3-pip
echo "Install Apache and PHP"
# Give ec2-user group-write access to the web root.
# NOTE(review): /var/www may not exist until httpd is installed below —
# verify the ordering of these steps.
sudo usermod -a -G apache ec2-user
sudo chown -R ec2-user:apache /var/www
sudo chmod 2775 /var/www && find /var/www -type d -exec sudo chmod 2775 {} \;
find /var/www -type f -exec sudo chmod 0664 {} \;
sudo yum install -y httpd httpd-tools mod_ssl
sudo yum install amazon-linux-extras -y
sudo amazon-linux-extras enable php7.4
sudo yum clean metadata
sudo yum install -y php php-common php-pear
sudo yum install -y php-{cgi,curl,mbstring,gd,mysqlnd,gettext,json,xml,fpm,intl,zip}
# Drop a phpinfo page into the web root as a smoke test.
echo "<?php phpinfo(); ?>" > info.php
sudo mv info.php /var/www/html/
echo "Install MyPHPAdmin"
cd /var/www/html
wget https://www.phpmyadmin.net/downloads/phpMyAdmin-latest-all-languages.tar.gz
mkdir phpMyAdmin && tar -xvzf phpMyAdmin-latest-all-languages.tar.gz -C phpMyAdmin --strip-components 1
rm phpMyAdmin-latest-all-languages.tar.gz
echo "**Install PHP Benchmark**"
# Back to the home directory for the benchmark checkouts.
cd
git clone https://github.com/mysqlonarm/benchmark-suites
git clone https://github.com/vanilla-php/benchmark-php.git
cd benchmark-php
sudo mv benchmark.php /var/www/html/
echo "**Start Web Server**"
sudo systemctl start httpd
|
// User-facing error messages for the git / source-control provider layer.
// Grammar fixes below change the message text — confirm no tests or clients
// match the old strings verbatim.
export const REPO_NAME_TAKEN_ERROR_MESSAGE = 'Repository already exists';
export const INVALID_SOURCE_CONTROL_ERROR_MESSAGE =
  'Invalid source control service';
export const MISSING_TOKEN_ERROR = `App Missing a Github token. You should first complete the authorization process`;
export const GIT_REPOSITORY_EXIST =
  'Git Repository already connected to another App';
export const INVALID_GIT_REPOSITORY_ID = 'Git Repository does not exist';
export const UNSUPPORTED_GIT_ORGANIZATION_TYPE =
  'Creation of repositories in a personal account is not supported';
|
from flask import Flask, render_template
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
app = Flask(__name__)
# NOTE(review): hard-coded secret key — in production this should come from
# configuration/environment; it protects the CSRF token used by FlaskForm.
app.config['SECRET_KEY'] = 'secret_key'
# Complete the form class
class MyForm(FlaskForm):
    # Create a StringField called 'input_string' that requires data to be filled out
    input_string = StringField('input_string', validators=[DataRequired()])
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render the form; on a valid POST, echo the submitted string back."""
    form = MyForm()
    if form.validate_on_submit():
        # If the form is submitted and valid, display the entered string
        return f'The entered string is: {form.input_string.data}'
    return render_template('index.html', form=form)
if __name__ == '__main__':
    app.run(debug=True)
|
# Since these tests test a multitude of microservices, offer
# the ability to choose which app to test from the feature.
# (possibly do same as https://github.com/alphagov/smokey/blob/master/features/support/base_urls.rb)
Given(/^app (.*?)$/) do |app|
  # Map the (case-insensitive) app name from the feature file to its base
  # URL, read from the environment. Unknown names fail the scenario early.
  case app
  when /^[Hh]ome.*/ then @app = ENV['URL_HOME']
  when /^[Ll]ocations.*/ then @app = ENV['URL_LOCATIONS']
  when /^[Pp]eople.*/ then @app = ENV['URL_PEOPLE']
  when /^[Pp]rototypes.*/ then @app = ENV['URL_PROTOTYPES']
  # when /^[Xx]xx.*/ then @app = ENV['URL_XXX']
  else
    raise 'Invalid app ' + app
  end
end
|
<filename>cupcakes/db/migrate/20191204155430_create_users.rb
# Bug fix: the class was missing its terminal `end`, which is a syntax error.
# NOTE(review): despite the CreateUsers name (and the create_users filename),
# this migration creates the :cupcakes table — confirm whether the table name
# or the migration name is wrong; the table is left unchanged here because
# renaming it would alter the schema.
class CreateUsers < ActiveRecord::Migration
  def change
    create_table :cupcakes do |t|
      t.string :name
      t.string :url
    end
  end
end
|
#!/bin/bash
# Sidecar proxy launcher: waits for a ready-file, obtains a Consul ACL token
# (either directly from a file, or via `consul login`), registers the
# service, then execs the requested proxy (envoy or the builtin one).
#
# Usage: <ready_file> <proxy_type> <mode> [mode flags...] [extra proxy args]
set -euo pipefail
ready_file="${1:-}"
shift
proxy_type="${1:-}"
shift
echo "launching a '${proxy_type}' sidecar proxy"
mode="${1:-}"
shift
# wait until ready
while : ; do
  if [[ -f "${ready_file}" ]]; then
    break
  fi
  echo "waiting for system to be ready at ${ready_file}..."
  sleep 0.1
done
api_args=()
agent_tls=""
service_register_file=""
case "${mode}" in
  direct)
    # direct mode flags: -t <token file>, -r <register file>, -e (agent TLS)
    token_file=""
    while getopts ":t:r:e" opt; do
      case "${opt}" in
        e)
          agent_tls=1
          ;;
        t)
          token_file="$OPTARG"
          ;;
        r)
          service_register_file="$OPTARG"
          ;;
        \?)
          echo "invalid option: -$OPTARG" >&2
          exit 1
          ;;
        :)
          echo "invalid option: -$OPTARG requires an argument" >&2
          exit 1
          ;;
      esac
    done
    shift $((OPTIND - 1))
    if [[ -z "${token_file}" ]]; then
      echo "missing required argument -t <BOOT_TOKEN_FILE>" >&2
      exit 1
    fi
    if [[ -z "${service_register_file}" ]]; then
      echo "missing required argument -r <SERVICE_REGISTER_FILE>" >&2
      exit 1
    fi
    api_args+=( -token-file "${token_file}" )
    ;;
  login)
    # login mode flags: -t <bearer token file>, -s <token sink file>,
    # -r <register file>, -e (agent TLS)
    bearer_token_file=""
    token_sink_file=""
    while getopts ":t:s:r:e" opt; do
      case "${opt}" in
        e)
          agent_tls=1
          ;;
        t)
          bearer_token_file="$OPTARG"
          ;;
        s)
          token_sink_file="$OPTARG"
          ;;
        r)
          service_register_file="$OPTARG"
          ;;
        \?)
          echo "invalid option: -$OPTARG" >&2
          exit 1
          ;;
        :)
          echo "invalid option: -$OPTARG requires an argument" >&2
          exit 1
          ;;
      esac
    done
    shift $((OPTIND - 1))
    if [[ -z "${bearer_token_file}" ]]; then
      echo "missing required argument -t <BEARER_TOKEN_FILE>" >&2
      exit 1
    fi
    if [[ -z "${token_sink_file}" ]]; then
      echo "missing required argument -s <TOKEN_SINK_FILE>" >&2
      exit 1
    fi
    if [[ -z "${service_register_file}" ]]; then
      echo "missing required argument -r <SERVICE_REGISTER_FILE>" >&2
      exit 1
    fi
    #TODO: handle api_args[@] here somehow
    consul login \
      -method=minikube \
      -bearer-token-file="${bearer_token_file}" \
      -token-sink-file="${token_sink_file}" \
      -meta "host=$(hostname)"
    echo "Wrote new token to ${token_sink_file}"
    api_args+=( -token-file "${token_sink_file}" )
    ;;
  *)
    echo "unknown mode: $mode" >&2
    exit 1
    ;;
esac
# Point the HTTP/gRPC endpoints at the TLS ports when -e was given.
grpc_args=()
if [[ -n "$agent_tls" ]]; then
  api_args+=(
    -ca-file /tls/consul-agent-ca.pem
    -http-addr https://127.0.0.1:8501
  )
  grpc_args+=( -grpc-addr https://127.0.0.1:8502 )
else
  api_args+=( -http-addr http://127.0.0.1:8500 )
  grpc_args+=( -grpc-addr http://127.0.0.1:8502 )
fi
# Poll until the obtained token is actually usable against the ACL API.
while : ; do
  if consul acl token read "${api_args[@]}" -self &> /dev/null ; then
    break
  fi
  echo "waiting for ACLs to work..."
  sleep 0.1
done
echo "Registering service..."
consul services register "${api_args[@]}" "${service_register_file}"
echo "Launching proxy..."
case "${proxy_type}" in
  envoy)
    # Dump the bootstrap config for debugging, then exec the real proxy.
    consul connect envoy -bootstrap "${grpc_args[@]}" "${api_args[@]}" "$@" > /tmp/envoy.config
    exec consul connect envoy "${grpc_args[@]}" "${api_args[@]}" "$@"
    ;;
  builtin)
    # TODO: handle agent tls?
    exec consul connect proxy "${api_args[@]}" "$@"
    ;;
  *)
    echo "unknown proxy type: ${proxy_type}" >&2
    exit 1
esac
|
<gh_stars>0
import React, { Fragment } from "react";
import { Route, Switch } from "react-router-dom";
import Home from "../views/home/App";
// Top-level router: maps the root path to the Home view.
function HomeRouter() {
  return (
    <Fragment>
      <Switch>
        <Route exact path="/" component={Home} />
      </Switch>
    </Fragment>
  );
}

export default HomeRouter;
|
This code has a time complexity of O(N^2): the inner loop runs N times for each of the N iterations of the outer loop. The running time therefore grows with the square of the input size, which makes this an inefficient solution for large inputs.
|
<filename>packages/preact/lib/index.js<gh_stars>10-100
/**
* @typedef {import('preact').ComponentChildren} ComponentChildren
* @typedef {import('mdx/types').MDXComponents} Components
*
* @typedef Props
* Configuration.
* @property {Components} [components]
* Mapping of names for JSX components to Preact components.
* @property {boolean} [disableParentContext=false]
* Turn off outer component context.
* @property {ComponentChildren} [children]
* Children.
*
* @callback MergeComponents
* @param {Components} currentComponents
* Current components from the context.
* @returns {Components}
* Merged components.
*/
import {createContext, h} from 'preact'
import {useContext} from 'preact/hooks'
/**
* @type {import('preact').Context<Components>}
* @deprecated
* This export is marked as a legacy feature.
* That means it’s no longer recommended for use as it might be removed
* in a future major release.
*
* Please use `useMDXComponents` to get context based components and
* `MDXProvider` to set context based components instead.
*/
export const MDXContext = createContext({})
/**
* @param {import('react').ComponentType<any>} Component
* @deprecated
* This export is marked as a legacy feature.
* That means it’s no longer recommended for use as it might be removed
* in a future major release.
*
* Please use `useMDXComponents` to get context based components instead.
*/
export function withMDXComponents(Component) {
  return boundMDXComponent
  /**
   * @param {Record<string, unknown> & {components?: Components}} props
   * @returns {JSX.Element}
   */
  function boundMDXComponent(props) {
    const allComponents = useMDXComponents(props.components)
    // NOTE(review): the merged map is passed under the prop name
    // `allComponents` ({...props, allComponents}) — confirm consumers expect
    // that key rather than `components`.
    // @ts-expect-error: React + Preact in this repo mess with TS.
    return h(Component, {...props, allComponents})
  }
}
/**
* Get current components from the MDX Context.
*
* @param {Components|MergeComponents} [components]
* Additional components to use or a function that takes the current
* components and filters/merges/changes them.
* @returns {Components}
* Current components.
*/
/**
 * Resolve the active MDX components: the inherited context map, either
 * transformed by a merge function or shallow-merged with extra components.
 */
export function useMDXComponents(components) {
  const inherited = useContext(MDXContext)
  // A function argument performs a custom merge over the inherited map.
  return typeof components === 'function'
    ? components(inherited)
    : {...inherited, ...components}
}
/**
* Provider for MDX context
*
* @param {Props} props
* @returns {JSX.Element}
*/
/**
 * Context provider that makes MDX components available to descendants.
 * When disableParentContext is set, inherited components are dropped and
 * only the explicitly passed ones are provided.
 */
export function MDXProvider({components, children, disableParentContext}) {
  // Call the hook unconditionally (hook-order rule), then pick the map.
  const merged = useMDXComponents(components)
  const value = disableParentContext ? components || {} : merged
  // @ts-expect-error: preact types are wrong.
  return h(MDXContext.Provider, {value}, children)
}
|
<filename>setup.py
import json

from setuptools import setup, find_packages

# Single source of truth for the package version.
with open('devilry/version.json') as versionfile:
    version = json.load(versionfile)

setup(
    name="devilry",
    version=version,
    url='http://devilry.org',
    license='BSD',
    zip_safe=False,
    author=('<NAME>, <NAME>, <NAME>, <NAME>, '
            '<NAME>, <NAME>, <NAME>, <NAME>\u00F8rken, <NAME>'),
    author_email='<EMAIL>',
    include_package_data=True,
    description="A system for handling electronic deliveries. See https://github.com/devilry/devilry-django.",
    packages=find_packages(exclude=["devilry_rest"]),
    install_requires=[
        'setuptools',
        'pyyaml==5.4.*',
        'Markdown==2.6.*',
        'Pygments==2.7.*',
        'flup==1.0.*',
        'gunicorn==19.9.*',
        'django-crispy-forms==1.10.*',
        'openpyxl==1.6.*',
        'django==3.2.*',
        'URLObject==2.4.*',
        'mimeparse==0.1.*',
        'numpy==1.19.*',
        'anyjson==0.3.*',
        'rq==1.8.*',
        'django-rq==2.4.*',
        'redis==3.5.*',
        'python-dateutil==2.8.*',
        'pytz==2018.9.*',
        # Bug fix: the original spec '>=0.19.0<1.0.0' was missing the comma
        # between the two clauses and is not a valid PEP 440 specifier.
        'httplib2>=0.19.0,<1.0.0',
        'dj-static==0.0.*',
        'dj-database-url==0.3.*',
        'html5lib==0.9999999',
        'psycopg2==2.8.*',
        'django_cradmin>=9.0.0,<10.0.0',
        'cradmin_legacy>=4.0.1,<5.0.0',
        'ievv_opensource>=8.0.0,<9.0.0',
        'xlsxwriter==1.1.*',
        'arrow==0.12.*',
        'detektor==1.1.0-beta.012',
        'html2text==2018.1.*',
        'djangorestframework==3.12.*',
        # For django-allauth
        # Bug fix: '>=0.44.*' is invalid — PEP 440 allows wildcards only
        # with == / != — so use an inclusive lower bound instead.
        'django-allauth>=0.44.0',
        'certifi==2017.11.*',
        'chardet==3.0.*',
        'idna==2.6.*',
        'oauthlib==2.0.*',
        'python-openid==2.2.*',
        'requests==2.19.*',
        'requests-oauthlib==0.8.*',
        'urllib3==1.22.*',
        'pycountry==17.9.*',
        'six==1.15.*'
    ]
)
|
<filename>filters/coalesce_test.go<gh_stars>0
package filters
import (
"testing"
"github.com/abesto/easyssh/target"
"github.com/abesto/easyssh/util"
"github.com/stretchr/testify/assert"
)
// TestCoalesceStringViaMake checks that building a coalesce filter from its
// string form logs the expected MakeFromString and Make debug messages.
func TestCoalesceStringViaMake(t *testing.T) {
	util.WithLogAssertions(t, func(l *util.MockLogger) {
		input := "(coalesce ip host hostname)"    // raw expression string
		structs := "[coalesce ip host hostname]"  // intermediate parsed form
		final := "<coalesce [ip host hostname]>"  // final filter representation
		l.ExpectDebugf("MakeFromString %s -> %s", input, structs)
		l.ExpectDebugf("Make %s -> %s", structs, final)
		Make(input)
	})
}
// TestCoalesceMakeWithoutArgument checks that Make panics with a descriptive
// message when the coalesce expression lists no coalescer names.
func TestCoalesceMakeWithoutArgument(t *testing.T) {
	util.WithLogAssertions(t, func(l *util.MockLogger) {
		l.ExpectDebugf("MakeFromString %s -> %s", "(coalesce)", "[coalesce]")
		util.ExpectPanic(t, "<coalesce []> requires at least 1 argument(s), got 0: []",
			func() { Make("(coalesce)") })
	})
}
// TestCoalesceFilterWithoutSetArgs checks that calling Filter on a coalesce
// value whose arguments were never set panics with the same message as an
// argument-less Make.
func TestCoalesceFilterWithoutSetArgs(t *testing.T) {
	util.WithLogAssertions(t, func(l *util.MockLogger) {
		util.ExpectPanic(t, "<coalesce []> requires at least 1 argument(s), got 0: []",
			func() { (&coalesce{}).Filter([]target.Target{}) })
	})
}
// TestCoalesceSetArgs verifies that Make parses the coalescer names into
// coalesceOrder in declaration order.
func TestCoalesceSetArgs(t *testing.T) {
	input := "(coalesce host hostname)"
	f := Make(input).(*coalesce)
	// assert.Equal takes (t, expected, actual); the arguments were previously
	// swapped, which produces misleading failure messages. Pass/fail behavior
	// is unchanged, and this now matches TestCoalesceOperation's ordering.
	assert.Equal(t, []string{"host", "hostname"}, f.coalesceOrder)
}
// TestUnknownCoalescer checks that Make panics, naming the first unknown
// coalescer name and its index, when an unrecognized name appears in the
// expression.
func TestUnknownCoalescer(t *testing.T) {
	util.ExpectPanic(t, "Unknown target coalescer foobar (index 1) in filter <coalesce [ip foobar hostname barbaz]>", func() {
		Make("(coalesce ip foobar hostname barbaz)")
	})
}
// TestCoalesceOperation checks that filtering a single target records the
// coalesce order on the target, and that SSHTarget() then yields the expected
// value (here the Host field, since the filter is "(coalesce host)").
func TestCoalesceOperation(t *testing.T) {
	f := Make("(coalesce host)").(*coalesce)
	cases := []struct {
		expected string
		target   target.Target
	}{
		// Host is set alongside other fields; only Host should win.
		{"foo", target.Target{Host: "foo", IP: "0.0.0.0", Hostname: "notfoo"}},
		// Host is the only field set.
		{"bar", target.Target{Host: "bar"}},
	}
	for _, c := range cases {
		target := f.Filter([]target.Target{c.target})[0]
		assert.Equal(t, []string{"host"}, target.CoalesceOrder)
		assert.Equal(t, c.expected, target.SSHTarget())
	}
}
|
package com.acgist.snail.net.torrent.peer;
import java.util.Random;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.acgist.snail.config.PeerConfig;
import com.acgist.snail.config.SystemConfig;
import com.acgist.snail.utils.NumberUtils;
import com.acgist.snail.utils.PeerUtils;
import com.acgist.snail.utils.StringUtils;
/**
 * <p>Peer Service</p>
 * <p>Manages the client's PeerId</p>
 *
 * @author acgist
 */
public final class PeerService {

    private static final Logger LOGGER = LoggerFactory.getLogger(PeerService.class);

    // Eagerly-created singleton instance.
    private static final PeerService INSTANCE = new PeerService();

    /**
     * <p>Returns the singleton instance.</p>
     *
     * @return PeerService singleton
     */
    public static final PeerService getInstance() {
        return INSTANCE;
    }

    /**
     * <p>Length of the version segment in the PeerId prefix: {@value}</p>
     */
    private static final int VERSION_LENGTH = 4;

    /**
     * <p>PeerId client prefix: {@value}</p>
     * <p>AS=ACGIST Snail</p>
     */
    private static final String PEER_ID_PREFIX = "AS";

    /**
     * <p>PeerId</p>
     * <p>20-byte system ID (length comes from PeerConfig.PEER_ID_LENGTH)</p>
     */
    private final byte[] peerId;

    /**
     * <p>URL-encoded PeerId used for HTTP transport</p>
     */
    private final String peerIdUrl;

    private PeerService() {
        // Both values are computed once at singleton construction time.
        this.peerId = this.buildPeerId();
        this.peerIdUrl = this.buildPeerIdUrl();
        LOGGER.debug("PeerId:{}", new String(this.peerId));
        LOGGER.debug("PeerIdUrl:{}", this.peerIdUrl);
    }

    /**
     * <p>Builds the PeerId</p>
     * <p>Layout: a "-ASXXXX-" prefix (client id + version) followed by random
     * bytes up to PeerConfig.PEER_ID_LENGTH.</p>
     *
     * @return PeerId
     */
    private byte[] buildPeerId() {
        final byte[] peerIds = new byte[PeerConfig.PEER_ID_LENGTH];
        final StringBuilder builder = new StringBuilder(8);
        // Prefix: -ASXXXX-
        builder.append("-").append(PEER_ID_PREFIX);
        final String version = SystemConfig.getVersion().replace(".", "");
        if(version.length() > VERSION_LENGTH) {
            // Truncate long version strings to the fixed segment width.
            builder.append(version.substring(0, VERSION_LENGTH));
        } else {
            builder.append(version);
            // Zero-pad short version strings to the fixed segment width.
            builder.append("0".repeat(VERSION_LENGTH - version.length()));
        }
        builder.append("-");
        // Suffix: random bytes
        final String peerIdPrefix = builder.toString();
        System.arraycopy(peerIdPrefix.getBytes(), 0, peerIds, 0, peerIdPrefix.length());
        final Random random = NumberUtils.random();
        for (int index = peerIdPrefix.length(); index < PeerConfig.PEER_ID_LENGTH; index++) {
            peerIds[index] = (byte) random.nextInt(SystemConfig.UNSIGNED_BYTE_MAX);
        }
        return peerIds;
    }

    /**
     * <p>Builds the URL-encoded PeerId</p>
     *
     * @return URL-encoded PeerId
     */
    private String buildPeerIdUrl() {
        // Standard encoding
        return PeerUtils.urlEncode(this.peerId);
        // Encode all bytes (alternative kept for reference)
        // return PeerUtils.urlEncode(this.peerIdHex());
    }

    /**
     * <p>Gets the PeerId</p>
     *
     * @return PeerId
     */
    public byte[] peerId() {
        return this.peerId;
    }

    /**
     * <p>Gets the PeerId as a hex string</p>
     *
     * @return hex-encoded PeerId
     */
    public String peerIdHex() {
        return StringUtils.hex(this.peerId);
    }

    /**
     * <p>Gets the URL-encoded PeerId</p>
     *
     * @return URL-encoded PeerId
     */
    public String peerIdUrl() {
        return this.peerIdUrl;
    }

}
|
#!/bin/bash
# Copyright 2016 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Wrapper script for collecting code coverage during test execution.
#
# Expected environment:
# COVERAGE_MANIFEST - mandatory, location of the instrumented file manifest
# LCOV_MERGER - mandatory, location of the LcovMerger
# COVERAGE_DIR - optional, location of the coverage temp directory
# COVERAGE_OUTPUT_FILE - optional, location of the final lcov file
# VERBOSE_COVERAGE - optional, print debug info from the coverage scripts
#
# Script expects that it will be started in the execution root directory and
# not in the test's runfiles directory.
# Enable shell command tracing when coverage debugging is requested.
if [[ -n "$VERBOSE_COVERAGE" ]]; then
  set -x
fi
# resolve_links: follows every symlink in $1 and echoes the final path,
# keeping the path absolute. Returns non-zero (and echoes nothing) when the
# path does not exist.
function resolve_links() {
  local name="$1"
  [ -e "$name" ] || return 1
  local target
  while [ -L "$name" ]; do
    target=$(readlink "$name")
    case "$target" in
      /*) name="$target" ;;                     # absolute link target: use as-is
      *)  name="$(dirname "$name")/$target" ;;  # relative target: resolve from parent dir
    esac
  done
  echo "$name"
}
# Fail fast: the instrumented-file manifest is mandatory.
if [[ -z "$COVERAGE_MANIFEST" ]]; then
  echo --
  echo Coverage runner: \$COVERAGE_MANIFEST is not set
  echo Current environment:
  env | sort
  exit 1
fi
# When collect_coverage.sh is used, test runner must be instructed not to cd
# to the test's runfiles directory.
export ROOT="$PWD"
if [[ "$COVERAGE_MANIFEST" != /* ]]; then
  # Canonicalize the path to coverage manifest so that tests can find it.
  export COVERAGE_MANIFEST="$ROOT/$COVERAGE_MANIFEST"
fi
# write coverage data outside of the runfiles tree
export COVERAGE_DIR=${COVERAGE_DIR:-"$ROOT/coverage"}
# make COVERAGE_DIR an absolute path
if ! [[ $COVERAGE_DIR == $ROOT* ]]; then
  COVERAGE_DIR=$ROOT/$COVERAGE_DIR
fi
mkdir -p "$COVERAGE_DIR"
COVERAGE_OUTPUT_FILE=${COVERAGE_OUTPUT_FILE:-"$COVERAGE_DIR/_coverage.dat"}
# make COVERAGE_OUTPUT_FILE an absolute path
if ! [[ $COVERAGE_OUTPUT_FILE == $ROOT* ]]; then
  COVERAGE_OUTPUT_FILE=$ROOT/$COVERAGE_OUTPUT_FILE
fi
# Java
# --------------------------------------
export JAVA_COVERAGE_FILE=$COVERAGE_DIR/jvcov.dat
# Let tests know that it is a coverage run
export COVERAGE=1
export BULK_COVERAGE_RUN=1
# Sanity-check that the merger binary exists before running any tests.
for name in "$LCOV_MERGER"; do
  if [[ ! -e $name ]]; then
    echo --
    echo Coverage runner: cannot locate file $name
    exit 1
  fi
done
# Setting up the environment for executing the C++ tests.
if [[ -z "$GCOV_PREFIX_STRIP" ]]; then
  # TODO: GCOV_PREFIX_STRIP=3 is incorrect on MacOS in the default setup
  export GCOV_PREFIX_STRIP=3
fi
export GCOV_PREFIX="${COVERAGE_DIR}"
export LLVM_PROFILE_FILE="${COVERAGE_DIR}/%h-%p-%m.profraw"
# In coverage mode for Java, we need to merge the runtime classpath before
# running the tests. JacocoCoverageRunner uses this merged jar in order
# to get coverage data.
#
# Merge the classpath using SingleJar and save it in the environment
# variable JACOCO_METADATA_JAR. The jars on the runtime classpath are listed
# in the file $JAVA_RUNTIME_CLASSPATH_FOR_COVERAGE.
#
# We need to merge the jars here because the merged jar can be an input
# too large (the combined merged jars for several big tests in a run
# can go over 10G). Not merging the jars and making
# JacocoCoverageRunner read every individual jar goes over the shutdown hook
# time limit in the coverage runner (~few seconds).
#
# SINGLE_JAR_TOOL Exec path of SingleJar.
#
# JAVA_RUNTIME_CLASSPATH_FOR_COVERAGE Exec path of a file that contains the
# relative paths of the jars on the runtime
# classpath delimited by newline.
if [[ ! -z "${JAVA_RUNTIME_CLASSPATH_FOR_COVERAGE}" ]]; then
  JAVA_RUNTIME_CLASSPATH_FOR_COVERAGE="${PWD}/${JAVA_RUNTIME_CLASSPATH_FOR_COVERAGE}"
  SINGLE_JAR_TOOL="${PWD}/${SINGLE_JAR_TOOL}"
  # Create a paramsfile for invoking SingleJar.
  mkdir -p "${COVERAGE_DIR}"
  single_jar_params_file="${COVERAGE_DIR}/runtime_classpath.paramsfile"
  touch "$single_jar_params_file"
  # Export JACOCO_METADATA_JAR in order for JacocoCoverageRunner to be able
  # to read it.
  export JACOCO_METADATA_JAR="${COVERAGE_DIR}/coverage-runtime_merged_instr.jar"
  echo -e "--output ${JACOCO_METADATA_JAR}\n--sources" >> "$single_jar_params_file"
  # Append the runfiles prefix to all the relative paths found in
  # JAVA_RUNTIME_CLASSPATH_FOR_COVERAGE, to invoke SingleJar with the
  # absolute paths.
  RUNFILES_PREFIX="$TEST_SRCDIR/$TEST_WORKSPACE/"
  cat "$JAVA_RUNTIME_CLASSPATH_FOR_COVERAGE" | sed "s@^@$RUNFILES_PREFIX@" >> "$single_jar_params_file"
  # Invoke SingleJar. This will create JACOCO_METADATA_JAR.
  "${SINGLE_JAR_TOOL}" "@$single_jar_params_file"
fi
# Run the test itself only in the test spawn; the coverage spawn skips this.
if [[ "$IS_COVERAGE_SPAWN" == "0" ]]; then
  # TODO(bazel-team): cd should be avoided.
  cd "$TEST_SRCDIR/$TEST_WORKSPACE"
  # Always create the coverage report.
  if [[ "$SPLIT_COVERAGE_POST_PROCESSING" == "0" ]]; then
    touch $COVERAGE_OUTPUT_FILE
  fi
  # Execute the test.
  "$@"
  TEST_STATUS=$?
  if [[ $TEST_STATUS -ne 0 ]]; then
    echo --
    echo Coverage runner: Not collecting coverage for failed test.
    echo The following commands failed with status $TEST_STATUS
    echo "$@"
    exit $TEST_STATUS
  fi
fi
# ------------------EXPERIMENTAL---------------------
# After this point we can run the code necessary for the coverage spawn
if [[ "$SPLIT_COVERAGE_POST_PROCESSING" == "1" && "$IS_COVERAGE_SPAWN" == "0" ]]; then
  exit 0
fi
if [[ "$SPLIT_COVERAGE_POST_PROCESSING" == "1" && "$IS_COVERAGE_SPAWN" == "1" ]]; then
  touch $COVERAGE_OUTPUT_FILE
fi
# TODO(bazel-team): cd should be avoided.
cd $ROOT
# Call the C++ code coverage collection script.
if [[ "$CC_CODE_COVERAGE_SCRIPT" ]]; then
  eval "${CC_CODE_COVERAGE_SCRIPT}"
fi
# Export the command line that invokes LcovMerger with the flags:
# --coverage_dir The absolute path of the directory where the
# intermediate coverage reports are located.
# CoverageOutputGenerator will search for files with
# the .dat and .gcov extension under this directory and
# will merge everything it found in the output report.
#
# --output_file The absolute path of the merged coverage report.
#
# --filter_sources Filters out the sources that match the given regexes
# from the final coverage report. This is needed
# because some coverage tools (e.g. gcov) do not have
# any way of specifying what sources to exclude when
# generating the code coverage report (in this case the
# syslib sources).
#
# --source_file_manifest The absolute path of the coverage source file
# manifest. CoverageOutputGenerator uses this file to
# keep only the sources found in the manifest (that is,
# only the sources of targets matched by
# --instrumentation_filter, excluding test targets
# unless --instrument_test_targets).
if [[ "$IS_COVERAGE_SPAWN" == "1" ]]; then
  COVERAGE_DIR=$(resolve_links $COVERAGE_DIR)
  COVERAGE_MANIFEST=$(resolve_links $COVERAGE_MANIFEST)
fi
LCOV_MERGER_CMD="${LCOV_MERGER} --coverage_dir=${COVERAGE_DIR} \
--output_file=${COVERAGE_OUTPUT_FILE} \
--filter_sources=/usr/bin/.+ \
--filter_sources=/usr/lib/.+ \
--filter_sources=/usr/include.+ \
--filter_sources=.*external/.+ \
--source_file_manifest=${COVERAGE_MANIFEST}"
if [[ $COVERAGE_REPORTED_TO_ACTUAL_SOURCES_FILE ]]; then
  LCOV_MERGER_CMD="$LCOV_MERGER_CMD\
 --sources_to_replace_file=$ROOT/$COVERAGE_REPORTED_TO_ACTUAL_SOURCES_FILE"
fi
if [[ $DISPLAY_LCOV_CMD ]] ; then
  echo "Running lcov_merger"
  echo $LCOV_MERGER_CMD
  echo "-----------------"
fi
# JAVA_RUNFILES is set to the runfiles of the test, which does not necessarily
# contain a JVM (it does only if the test has a Java binary somewhere). So let
# the LCOV merger discover where its own runfiles tree is.
JAVA_RUNFILES= exec $LCOV_MERGER_CMD
|
#!/bin/sh
# Recreate the restsql service-sdk container, linking it to the mysqld
# container and mapping the host log directory into the container.

# Remove any previous container; best-effort (fails harmlessly if absent).
docker rm -f restsql
echo "running service-sdk, expecting mysql, container-based /etc/opt/restsql, mapped /var/log/restsql"
docker run --restart=always -d --link mysqld:mysql -p 8080:8080 --name restsql --volume /private/var/log/restsql:/var/log/restsql restsql/service-sdk
echo "sleeping 6s"
# Fixed: POSIX sleep takes a plain number of seconds; the "6s" suffix is a
# GNU/busybox extension that fails under strictly POSIX /bin/sh.
sleep 6
docker logs restsql
|
<reponame>palmerhargreaves/sp2backend
/**
* Created by kostet on 09.10.2018.
*/
// Widget for uploading an activity/company-type image via a shared form.
// Copies data-* attributes from the clicked trigger into the form's inputs.
var ActivityCompanyTypeImage = function(config) {
    $.extend(this, config);
    this.form = '#form-activity-company-image';
}
ActivityCompanyTypeImage.prototype = {
    // Wire up event handlers; returns the instance for chaining.
    start: function() {
        this.initEvents();
        return this;
    },
    initEvents: function() {
        $(document).on('click', '.js-show-activity-company-type-upload-image', $.proxy(this.onShowActivityCompanyTypeImage, this));
    },
    // Transfer the trigger element's data attributes into the hidden inputs.
    onShowActivityCompanyTypeImage: function(e) {
        var trigger = $(e.currentTarget);
        var form = this.getForm();
        $('[name*="activity_id"]', form).val(trigger.data('activity-id'));
        $('[name*="company_type_id"]', form).val(trigger.data('company-id'));
        $('[name*="id"]:last', form).val(trigger.data('company-type-image-id'));
    },
    getForm: function() {
        return $(this.form);
    }
}
|
<filename>blingfirecompile.library/inc/FAMergeSets.h
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

#ifndef _FA_MERGE_SETS_H_
#define _FA_MERGE_SETS_H_

#include "FAConfig.h"
#include "FAArray_cont_t.h"

namespace BlingFire
{

class FAAllocatorA;

///
/// Creates a system of non-intersecting sets by merging pairs of sets.
/// Can be used to find connected components.
///
/// Union-find style interface: Prepare() creates singleton sets, Merge()
/// unions the sets containing two elements, GetSet() returns the set index
/// for an element (rank and path-compression bookkeeping live in m_e2p /
/// m_stack below).
///
class FAMergeSets {

public:
    FAMergeSets (FAAllocatorA * pAlloc);

public:
    // makes the object ready, creates Count sets of one element each
    void Prepare (const int Count);
    // merges two sets by two elements
    void Merge (const int e1, const int e2);
    // returns set idx for the element e
    const int GetSet (const int E);

private:
    /// faster m_e2p access (raw pointer into m_e2p's storage)
    int * m_pE2P;
    /// maps e -> p
    /// if p > 0 then p is parent index + 1
    /// if p <= 0 then e is the root of a set and |p| is a rank value
    FAArray_cont_t < int > m_e2p;
    /// stack for path compression
    FAArray_cont_t < int > m_stack;
};

}

#endif
|
/// <summary>
/// Compacts <paramref name="arr"/> in place so that every value not equal to
/// <paramref name="element"/> is moved to the front, preserving relative
/// order. Positions at and beyond the kept count retain their previous
/// values, exactly as before.
/// </summary>
/// <param name="arr">Array to compact in place.</param>
/// <param name="element">Value to remove.</param>
void RemoveElement(int[] arr, int element)
{
    // Two-pointer in-place compaction: removes the O(n) temporary array the
    // previous implementation allocated, while producing an identical final
    // array state (kept values at the front, untouched tail).
    int write = 0;
    for (int read = 0; read < arr.Length; read++)
    {
        if (arr[read] != element)
        {
            arr[write++] = arr[read];
        }
    }
}
|
#!/bin/bash
# Pre-commit hook: runs clang-format on every staged .cpp/.h file so that only
# formatted code is committed, while preserving unstaged local modifications.
set -oue pipefail

# Require clang-format to be installed and at least version 7.
if [ ! $(which clang-format) ]
then
    echo "Error: program 'clang-format' not found!"
    exit 1
fi
if [ "$(clang-format --version | sed 's/.*version //;s/\..*//')" -lt "7" ]
then
    echo "Error: program 'clang-format' must be version 7 or later!"
    exit 1
fi

# Some of the git tools can set this variable, which can cause problems
# https://magit.vc/manual/magit/My-Git-hooks-work-on-the-command_002dline-but-not-inside-Magit.html
unset GIT_LITERAL_PATHSPECS

#Change the delimiter used by 'for' so we can handle spaces in names
IFS=$'\n'
for FILE in $(git diff --cached --name-only "*.cpp" "*.h")
do
    # Store the current version in case it has been modified
    TMPFILE="$(mktemp)"
    cp "$FILE" "$TMPFILE"
    # Temporarily remove modified versions, if they exist
    git checkout -q "$FILE"
    # Format
    clang-format -i -style=file "$FILE"
    # Add the formatted file
    git add "$FILE"
    # Restore the old, possibly modified version
    mv "$TMPFILE" "$FILE"
    # Format the uncached version to keep it in sync with the cached
    clang-format -i -style=file "$FILE"
done

# Don't commit if there are no changes to commit
# (this test's exit status is the hook's exit status: non-zero aborts the
# commit when formatting left nothing staged)
test "$(git diff --cached)" != ""
|
// In-place selection sort: on each pass, move the smallest remaining element
// into the next position. Returns the same (mutated) array.
function sort(arr) {
  for (let start = 0; start < arr.length; start++) {
    // Locate the smallest element in the unsorted suffix.
    let smallest = start;
    for (let probe = start + 1; probe < arr.length; probe++) {
      if (arr[probe] < arr[smallest]) {
        smallest = probe;
      }
    }
    // Swap it into place via destructuring (no-op when already in place).
    [arr[start], arr[smallest]] = [arr[smallest], arr[start]];
  }
  return arr;
}
let sortedArray = sort([1, 8, 3, 4, 9]);
console.log(sortedArray); // [1, 3, 4, 8, 9]
|
#! /bin/bash
# SLURM batch job: runs the rexi_fd_par benchmark (m=512, 168 MPI ranks,
# 14 OpenMP threads per rank) on the mpp2 cluster and logs to the -o file.
#SBATCH -o /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_03_scalability_rexi_fd_high_res/run_rexi_fd_par_m0512_t014_n0128_r0168_a1.txt
###SBATCH -e /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_03_scalability_rexi_fd_high_res/run_rexi_fd_par_m0512_t014_n0128_r0168_a1.err
#SBATCH -J rexi_fd_par_m0512_t014_n0128_r0168_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=168
#SBATCH --cpus-per-task=14
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=03:00:00

#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1

# Intel OpenMP thread pinning: one thread per core, compact placement.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=14

echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo

# Swap the default modules for the tool chain this binary was built against.
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5

cd /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_03_scalability_rexi_fd_high_res
cd ../../../
. local_software/env_vars.sh

# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T0"

# Launch: 168 ranks, 2 ranks per node, 14 OpenMP threads each.
time -p mpiexec.hydra -genv OMP_NUM_THREADS 14 -envall -ppn 2 -n 168 ./build/rexi_fd_par_m_tno_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 128 -U 0 -S 0 --use-specdiff-for-complex-array 0 --rexi-h 0.8 --timestepping-mode 1 --staggering 0 --rexi-m=512 -C -5.0
|
#!/bin/bash
#
# Copyright 2020- IBM Inc. All rights reserved
# SPDX-License-Identifier: Apache2.0
#
# Load the user-configurable uninstall properties (AIOPS_PROJECT and the
# DELETE_* flags consumed later in this script).
. ./uninstall-cp4waiops-props.sh

# Namespaces and CR names used throughout the uninstall.
export OPERATORS_NAMESPACE=openshift-operators
export IBM_COMMON_SERVICES_NAMESPACE=ibm-common-services
export KNATIVE_SERVING_NAMESPACE=knative-serving
export KNATIVE_EVENTING_NAMESPACE=knative-eventing
export ZENSERVICE_CR_NAME=iaf-zen-cpdservice

# SLEEP TIMES
SLEEP_SHORT_LOOP=5s
SLEEP_MEDIUM_LOOP=15s
SLEEP_LONG_LOOP=30s
SLEEP_EXTRA_LONG_LOOP=40s

# Tracing prefixes
INFO="[INFO]"
WARNING="[WARNING]"
ERROR="[ERROR]"
# log: prints a message behind a tracing prefix ([INFO]/[WARNING]/[ERROR]).
# $1 - tracing prefix
# $2 - message
# $3 - optional extra options forwarded to echo (e.g. -n)
log () {
    local prefix=$1
    local message=$2
    local options=$3
    if [[ -z $options ]]; then
        echo "$prefix $message"
    else
        echo $options "$prefix $message"
    fi
}
# display_help: prints usage information for this uninstall script.
display_help() {
    echo "**************************************** Usage ********************************************"
    echo ""
    echo " This script is used to uninstall Cloud Pak for Watson AIOps."
    echo " The following prereqs are required before you run this script: "
    echo " - oc CLI is installed and you have logged into the cluster using oc login"
    echo " - Update uninstall-cp4waiops-props.sh with components that you want to uninstall"
    echo ""
    echo " Usage:"
    echo " ./uninstall-cp4waiops.sh -h -s"
    echo " -h Prints out the help message"
    echo " -s Skip asking for confirmations"
    echo ""
    echo "*******************************************************************************************"
}
# check_oc_resource_exists: echoes "true" when the given resource exists in
# the namespace, "false" otherwise (also left in $resource_exists, as before).
# $1 - resource kind, $2 - resource name, $3 - namespace
check_oc_resource_exists() {
    local resource=$1
    local resource_name=$2
    local namespace=$3
    resource_exists="false"
    oc get $resource $resource_name -n $namespace > /dev/null 2>&1 && resource_exists="true"
    echo "$resource_exists"
}
# unsubscribe: removes an operator subscription and its installed
# ClusterServiceVersion, then waits for all copied CSVs to disappear.
# $1 - subscription name (may be empty when $3 is given)
# $2 - namespace containing the subscription
# $3 - label selector used to look up the subscription when $1 is empty
unsubscribe () {
    local operator_name=$1
    local dest_namespace=$2
    local operator_label=$3

    # Resolve the subscription by label when no name was given, else by name.
    if [[ ( -z "$operator_name" ) && ( ! -z "$operator_label" ) ]]; then
        operator_name=$(oc get subscription.operators.coreos.com -n $dest_namespace -l $operator_label -o name)
        if [[ ! -z "$operator_name" ]]; then
            operator_exists="true"
        else
            operator_exists="false"
        fi
    elif [[ ( ! -z "$operator_name" ) ]]; then
        operator_exists=$( check_oc_resource_exists "subscription.operators.coreos.com" $operator_name $dest_namespace )
    else
        log $ERROR "operator_name and operator_label are empty, please provide one of them and try again"
        exit 1
    fi

    if [[ "$operator_exists" == "true" ]]; then
        # Normalize to the fully-qualified resource name expected by oc delete.
        if [[ "$operator_name" != "subscription.operators.coreos.com"* ]]; then
            operator_name="subscription.operators.coreos.com/"$operator_name
        fi

        # Get ClusterServiceVersion
        CSV=$(oc get $operator_name -n $dest_namespace --ignore-not-found --output=jsonpath={.status.installedCSV})

        # Delete Subscription
        log $INFO "Deleting the subscription $operator_name"
        #oc delete subscription.operators.coreos.com $operator_name -n $dest_namespace
        oc delete $operator_name -n $dest_namespace

        # Delete the Installed ClusterServiceVersion
        if [[ ! -z "$CSV" ]]; then
            log $INFO "Deleting the clusterserviceversion $CSV"
            oc delete clusterserviceversion $CSV -n $dest_namespace

            log $INFO "Waiting for the deletion of all the ClusterServiceVersions $CSV for the subscription of the operator $operator_name"
            # Wait for the Copied ClusterServiceVersions to cleanup
            # (bounded to ~10 iterations of $SLEEP_LONG_LOOP).
            if [ -n "$CSV" ] ; then
                LOOP_COUNT=0
                while [ `oc get clusterserviceversions --all-namespaces --field-selector=metadata.name=$CSV --ignore-not-found | wc -l` -gt 0 ]
                do
                    sleep $SLEEP_LONG_LOOP
                    LOOP_COUNT=`expr $LOOP_COUNT + 1`
                    if [ $LOOP_COUNT -gt 10 ] ; then
                        log $ERROR "There was an error in deleting the ClusterServiceVersions $CSV for the subscription of the operator $operator_name "
                        break
                    fi
                done
            fi

            log $INFO "Deletion of all the ClusterServiceVersions $CSV for the subscription of the operator $operator_name completed successfully."
        else
            log $WARNING "The ClusterServiceVersion for the operator $operator_name does not exists, skipping the deletion of the ClusterServiceVersion for operator $operator_name"
        fi
    else
        log $WARNING "The subscription for the operator $operator_name does not exists, skipping the unsubscription of the operator $operator_name"
    fi
}
# delete_installation_instance: deletes the given Installation CR and waits
# for it and its known operandrequests to disappear.
# $1 - name of the installations.orchestrator.aiops.ibm.com CR
# $2 - project (namespace) containing the CR
delete_installation_instance () {
    local installation_name=$1
    local project=$2
    if [ `oc get installations.orchestrator.aiops.ibm.com $installation_name -n $project --ignore-not-found | wc -l` -gt 0 ] ; then
        log $INFO "Found installation CR $installation_name to delete."
        # Fixed: this message previously referenced the unset variable $resource.
        log $INFO "Waiting for installation $installation_name instances to be deleted. This will take a while...."
        oc delete installations.orchestrator.aiops.ibm.com $installation_name -n $project --ignore-not-found;
        LOOP_COUNT=0
        while [ `oc get installations.orchestrator.aiops.ibm.com $installation_name -n $project --ignore-not-found | wc -l` -gt 0 ]
        do
            sleep $SLEEP_EXTRA_LONG_LOOP
            LOOP_COUNT=`expr $LOOP_COUNT + 1`
            if [ $LOOP_COUNT -gt 20 ] ; then
                log $ERROR "Timed out waiting for installation instance $installation_name to be deleted"
                exit 1
            else
                log $INFO "Waiting for installation instance to get deleted... Checking again in $SLEEP_LONG_LOOP seconds"
            fi
        done
        log $INFO "$installation_name instance got deleted successfully!"

        log $INFO "Checking if operandrequests are all deleted "
        # Fixed: reset the counter so this timeout does not inherit the count
        # accumulated by the deletion loop above.
        LOOP_COUNT=0
        while [ `oc get operandrequests ibm-aiops-ai-manager -n $project --ignore-not-found --no-headers | wc -l` -gt 0 ] ||
              [ `oc get operandrequests ibm-aiops-aiops-foundation -n $project --ignore-not-found --no-headers | wc -l` -gt 0 ] ||
              [ `oc get operandrequests ibm-aiops-application-manager -n $project --ignore-not-found --no-headers | wc -l` -gt 0 ] ||
              [ `oc get operandrequests iaf-system-common-service -n $project --ignore-not-found --no-headers | wc -l` -gt 0 ]
        do
            sleep $SLEEP_LONG_LOOP
            LOOP_COUNT=`expr $LOOP_COUNT + 1`
            if [ $LOOP_COUNT -gt 20 ] ; then
                log $ERROR "Timed out waiting for operandrequests to be deleted"
                exit 1
            else
                log $INFO "Found following operandrequests in the project: "
                log $INFO "$(oc get operandrequests -n $project --no-headers)"
                log $INFO "Waiting for operandrequests instances to get deleted... Checking again in $SLEEP_LONG_LOOP seconds"
            fi
        done
        log $INFO "Expected operandrequests got deleted successfully!"
    else
        log $INFO "The $installation_name installation instance is not found, skipping the deletion of $installation_name."
    fi
}
# delete_zenservice_instance: deletes the given zenservice CR and waits for it
# and its related operandrequests to disappear.
# $1 - zenservice CR name
# $2 - project (namespace) containing the CR
delete_zenservice_instance () {
    local zenservice_name=$1
    local project=$2
    if [ `oc get zenservice $zenservice_name -n $project --ignore-not-found | wc -l` -gt 0 ] ; then
        log $INFO "Found zenservice CR $zenservice_name to delete."
        oc delete zenservice $zenservice_name -n $project --ignore-not-found;
        # Fixed: this message previously referenced the unset variable $resource.
        log $INFO "Waiting for zenservice $zenservice_name instances to be deleted...."
        LOOP_COUNT=0
        while [ `oc get zenservice $zenservice_name -n $project --ignore-not-found | wc -l` -gt 0 ]
        do
            sleep $SLEEP_EXTRA_LONG_LOOP
            LOOP_COUNT=`expr $LOOP_COUNT + 1`
            if [ $LOOP_COUNT -gt 20 ] ; then
                log $ERROR "Timed out waiting for zenservice instance $zenservice_name to be deleted"
                exit 1
            else
                log $INFO "Waiting for zenservice instance to get deleted... Checking again in $SLEEP_LONG_LOOP seconds"
            fi
        done
        log $INFO "$zenservice_name instance got deleted successfully!"

        log $INFO "Checking if operandrequests are all deleted "
        # Fixed: reset the counter so this timeout does not inherit the count
        # accumulated by the deletion loop above.
        LOOP_COUNT=0
        while [ `oc get operandrequests ibm-commonui-request -n ibm-common-services --ignore-not-found --no-headers | wc -l` -gt 0 ] ||
              [ `oc get operandrequests ibm-iam-request -n ibm-common-services --ignore-not-found --no-headers | wc -l` -gt 0 ] ||
              [ `oc get operandrequests ibm-mongodb-request -n ibm-common-services --ignore-not-found --no-headers | wc -l` -gt 0 ] ||
              [ `oc get operandrequests management-ingress -n ibm-common-services --ignore-not-found --no-headers | wc -l` -gt 0 ] ||
              [ `oc get operandrequests platform-api-request -n ibm-common-services --ignore-not-found --no-headers| wc -l` -gt 0 ] ||
              [ `oc get operandrequests ibm-iam-service -n ${project} --ignore-not-found --no-headers | wc -l` -gt 0 ]
        do
            sleep $SLEEP_LONG_LOOP
            LOOP_COUNT=`expr $LOOP_COUNT + 1`
            if [ $LOOP_COUNT -gt 10 ] ; then
                log $ERROR "Timed out waiting for operandrequests to be deleted"
                exit 1
            else
                log $INFO "Found following operandrequests in the project: "
                log $INFO "$(oc get operandrequests -n ibm-common-services --no-headers)"
                log $INFO "Waiting for zenservice related operandrequests instances to get deleted... Checking again in $SLEEP_LONG_LOOP seconds"
            fi
        done
        log $INFO "Expected operandrequests got deleted successfully!"
    else
        log $INFO "The $zenservice_name zenservice instance is not found, skipping the deletion of $zenservice_name."
    fi
}
# delete_project: deletes an OpenShift project after confirming no
# operandrequests or cemprobes remain, then waits for the namespace to go.
# $1 - project name
delete_project () {
    local project=$1
    if [ `oc get project $project --ignore-not-found | wc -l` -gt 0 ] ; then
        log $INFO "Found project $project to delete."
        # Refuse to delete while dependent resources are still present.
        if [ `oc get operandrequests -n $project --ignore-not-found --no-headers| wc -l` -gt 0 ]; then
            log $ERROR "Found operandrequests in the project. Please review the remaining operandrequests before deleting the project."
            exit 0
        fi
        if [ `oc get cemprobes -n $project --ignore-not-found --no-headers| wc -l` -gt 0 ]; then
            log $ERROR "Found cemprobes in the project. Please review the remaining cemprobes before deleting the project."
            exit 0
        fi
        # Clear finalizers on the admin rolebinding so deletion can proceed.
        oc patch -n $project rolebinding/admin -p '{"metadata": {"finalizers":null}}'
        oc delete ns $project --ignore-not-found;
        log $INFO "Waiting for $project to be deleted...."
        LOOP_COUNT=0
        while [ `oc get project $project --ignore-not-found | wc -l` -gt 0 ]
        do
            sleep $SLEEP_EXTRA_LONG_LOOP
            LOOP_COUNT=`expr $LOOP_COUNT + 1`
            if [ $LOOP_COUNT -gt 20 ] ; then
                log $ERROR "Timed out waiting for project $project to be deleted"
                exit 1
            else
                log $INFO "Waiting for project $project to get deleted... Checking again in $SLEEP_LONG_LOOP seconds"
            fi
        done
        log $INFO "Project $project got deleted successfully!"
    else
        log $INFO "Project $project is not found, skipping the deletion of $project."
    fi
}
# delete_iaf_bedrock: uninstalls the IAF and Bedrock (foundational services)
# components: unsubscribes the operators, waits for operandrequests to drain,
# removes namespacescopes/deployments/services/webhooks, deletes the
# ibm-common-services project and finally the IAF/Bedrock CRDs.
delete_iaf_bedrock () {
    log $INFO "Starting uninstall of IAF & Bedrock components"

    oc patch -n ibm-common-services rolebinding/admin -p '{"metadata": {"finalizers":null}}'
    oc delete rolebinding admin -n ibm-common-services --ignore-not-found

    unsubscribe "" $OPERATORS_NAMESPACE "operators.coreos.com/ibm-automation.openshift-operators"
    unsubscribe "" $OPERATORS_NAMESPACE "operators.coreos.com/ibm-automation-ai.openshift-operators"
    unsubscribe "" $OPERATORS_NAMESPACE "operators.coreos.com/ibm-automation-core.openshift-operators"
    unsubscribe "" $OPERATORS_NAMESPACE "operators.coreos.com/ibm-automation-elastic.openshift-operators"
    unsubscribe "" $OPERATORS_NAMESPACE "operators.coreos.com/ibm-automation-eventprocessing.openshift-operators"
    unsubscribe "" $OPERATORS_NAMESPACE "operators.coreos.com/ibm-automation-flink.openshift-operators"
    unsubscribe "" $OPERATORS_NAMESPACE "operators.coreos.com/ibm-automation.openshift-operators"
    unsubscribe "" $OPERATORS_NAMESPACE "operators.coreos.com/ibm-common-service-operator.openshift-operators"

    oc delete operandrequest iaf-operator -n openshift-operators --ignore-not-found
    oc delete operandrequest iaf-core-operator -n openshift-operators --ignore-not-found

    # Note : Verify there are no operandrequests & operandbindinfo at this point before proceeding. It may take a few minutes for them to go away.
    log $INFO "Checking if operandrequests are all deleted "
    # Fixed: LOOP_COUNT was never initialized here, so the timeout depended on
    # whatever value a previously-called function left behind.
    LOOP_COUNT=0
    while [ `oc get operandrequests -A --ignore-not-found --no-headers| wc -l` -gt 0 ]
    do
        sleep $SLEEP_LONG_LOOP
        LOOP_COUNT=`expr $LOOP_COUNT + 1`
        if [ $LOOP_COUNT -gt 30 ] ; then
            log $ERROR "Timed out waiting for all operandrequests to be deleted. Cannot proceed with uninstallation til all operandrequests in ibm-common-services project are deleted."
            exit 1
        else
            log $INFO "Found following operandrequests in the project: $(oc get operandrequests -A --ignore-not-found --no-headers)"
            log $INFO "Waiting for operandrequests instances to get deleted... Checking again in $SLEEP_LONG_LOOP seconds"
        fi
    done
    log $INFO "Expected operandrequests got deleted successfully!"

    # Deleting operandbindinfo before namespacescopes as seen in iaf internal uninstall script
    oc delete operandbindinfo --all -n ibm-common-services --ignore-not-found
    oc delete namespacescopes common-service -n ibm-common-services --ignore-not-found
    oc delete namespacescopes nss-managedby-odlm -n ibm-common-services --ignore-not-found
    oc delete namespacescopes odlm-scope-managedby-odlm -n ibm-common-services --ignore-not-found
    oc delete namespacescopes nss-odlm-scope -n ibm-common-services --ignore-not-found

    unsubscribe "ibm-cert-manager-operator" $IBM_COMMON_SERVICES_NAMESPACE ""
    unsubscribe "ibm-namespace-scope-operator" $IBM_COMMON_SERVICES_NAMESPACE ""
    unsubscribe "operand-deployment-lifecycle-manager-app" $IBM_COMMON_SERVICES_NAMESPACE ""

    oc delete deployment cert-manager-cainjector -n ibm-common-services --ignore-not-found
    oc delete deployment cert-manager-controller -n ibm-common-services --ignore-not-found
    oc delete deployment cert-manager-webhook -n ibm-common-services --ignore-not-found
    oc delete deployment configmap-watcher -n ibm-common-services --ignore-not-found
    oc delete deployment ibm-common-service-webhook -n ibm-common-services --ignore-not-found
    oc delete deployment meta-api-deploy -n ibm-common-services --ignore-not-found
    oc delete deployment secretshare -n ibm-common-services --ignore-not-found
    oc delete service cert-manager-webhook -n ibm-common-services --ignore-not-found
    oc delete service ibm-common-service-webhook -n ibm-common-services --ignore-not-found
    oc delete service meta-api-svc -n ibm-common-services --ignore-not-found
    oc delete apiservice v1beta1.webhook.certmanager.k8s.io --ignore-not-found
    oc delete apiservice v1.metering.ibm.com --ignore-not-found
    oc delete ValidatingWebhookConfiguration cert-manager-webhook --ignore-not-found
    oc delete MutatingWebhookConfiguration cert-manager-webhook ibm-common-service-webhook-configuration namespace-admission-config --ignore-not-found

    delete_project $IBM_COMMON_SERVICES_NAMESPACE

    delete_crd_group "IAF_CRDS"
    delete_crd_group "BEDROCK_CRDS"
}
# delete_crd_group: deletes every CRD listed in the named CRD group array.
# $1 - one of CP4WAIOPS_CRDS, KONG_CRDS, CAMELK_CRDS, IAF_CRDS, BEDROCK_CRDS
#      (the name of a bash array variable defined in uninstall-cp4waiops-props.sh).
# Unknown group names are silently ignored, matching the previous behavior.
delete_crd_group () {
    local crd_group=$1
    case "$crd_group" in
        "CP4WAIOPS_CRDS"|"KONG_CRDS"|"CAMELK_CRDS"|"IAF_CRDS"|"BEDROCK_CRDS")
            # A nameref (bash 4.3+) to the group's array collapses the five
            # previously-duplicated, identical case arms into one loop.
            local -n crd_list="$crd_group"
            for CRD in ${crd_list[@]}; do
                log $INFO "Deleting CRD $CRD.."
                oc delete crd $CRD --ignore-not-found
            done
            ;;
    esac
}
# analyze_script_properties: when DELETE_ALL=true, forces every individual
# DELETE_* flag to "true" so a full teardown is performed.
analyze_script_properties(){
    if [[ $DELETE_ALL == "true" ]]; then
        local flag
        for flag in DELETE_PVCS DELETE_SECRETS DELETE_CONFIGMAPS \
                    DELETE_KONG_CRDS DELETE_CAMELK_CRDS DELETE_ZENSERVICE \
                    DELETE_AIOPS_PROJECT DELETE_IAF; do
            printf -v "$flag" 'true'
        done
    fi
}
# display_script_properties: echoes the effective uninstall flags (after
# analyze_script_properties may have expanded DELETE_ALL) for audit purposes.
display_script_properties(){
    log $INFO "##### Properties in uninstall-cp4waiops-props.sh #####"
    log $INFO
    if [[ $DELETE_ALL == "true" ]]; then
        log $INFO "The script uninstall-cp4waiops-props.sh has 'DELETE_ALL=true', hence the script will execute wih below values: "
    else
        log $INFO "The script uninstall-cp4waiops-props.sh has the properties with below values: "
    fi
    log $INFO "AIOPS_PROJECT=$AIOPS_PROJECT"
    log $INFO "INSTALLATION_CR_NAME=$INSTALLATION_CR_NAME"
    log $INFO "DELETE_PVCS=$DELETE_PVCS"
    log $INFO "DELETE_SECRETS=$DELETE_SECRETS"
    log $INFO "DELETE_CONFIGMAPS=$DELETE_CONFIGMAPS"
    log $INFO "DELETE_KONG_CRDS=$DELETE_KONG_CRDS"
    log $INFO "DELETE_CAMELK_CRDS=$DELETE_CAMELK_CRDS"
    log $INFO "DELETE_ZENSERVICE=$DELETE_ZENSERVICE"
    log $INFO "DELETE_AIOPS_PROJECT=$DELETE_AIOPS_PROJECT"
    log $INFO "DELETE_IAF=$DELETE_IAF"
    log $INFO
    log $INFO "##### Properties in uninstall-cp4waiops-props.sh #####"
}
# check_additional_installation_exists: aborts the uninstall (exit 1) when any
# Installation CR still exists anywhere in the cluster.
check_additional_installation_exists(){
    log $INFO "Checking if any additional installation resources found in the cluster."
    installation_returned_value=$(oc get installations.orchestrator.aiops.ibm.com -A)
    if [[ ! -z $installation_returned_value ]] ; then
        log $ERROR "Some additional installation cr found in the cluster, please delete the installation cr's and try again."
        log $ERROR "Remaining installation cr found : "
        oc get installations.orchestrator.aiops.ibm.com -A
        exit 1
    else
        log $INFO "No additional installation resources found in the cluster."
    fi
}
|
/**
 * Thin JSON gateway for a REST collection endpoint (DD 3.5 characters).
 * All requests are sent to the URL supplied at construction time.
 */
export default class ServerGatewayDD35 {
  constructor(url) {
    this.url = url;
  }

  // GET the collection. Resolves with parsed JSON on success; on a non-OK
  // response the response object is logged and undefined is returned.
  get = async () => {
    const response = await fetch(this.url, { headers: { 'Content-type': 'application/json' } });
    if (!response.ok) {
      console.error(response);
      return;
    }
    return response.json();
  };

  // POST a new character as JSON to the collection URL.
  createCharacter = async (character) => {
    const headers = new Headers({ 'Content-type': 'application/json' });
    await fetch(this.url, { method: 'post', headers: headers, body: JSON.stringify(character) });
  };

  // DELETE the character with the given id (id appended to the base URL).
  deleteCharacter = async (id) => {
    await fetch(this.url + id, { method: 'delete', headers: { 'Content-type': 'application/json' } });
  };
}
|
<gh_stars>1-10
package nrsc
import (
"fmt"
"io/ioutil"
"net/http"
"os"
"os/exec"
"strings"
"testing"
"time"
)
const (
port = 9888
)
var root string
// testDir returns a per-user, per-host scratch directory name of the form
// <tempdir>/nrsc-test-<user>-<host>; falls back to "localhost" when the
// hostname cannot be determined.
func testDir() string {
	host := "localhost"
	if h, err := os.Hostname(); err == nil {
		host = h
	}
	return strings.Join([]string{os.TempDir(), "/nrsc-test-", os.Getenv("USER"), "-", host}, "")
}
// TestMask verifies that resources matching the mask regexp (".gif$",
// set in the generated server below) are refused with 401 Unauthorized
// rather than served.
func TestMask(t *testing.T) {
	resp := getResp(t, "/static/i.gif")
	if resp.StatusCode != http.StatusUnauthorized {
		t.Fatalf("got masked resource - %d\n", resp.StatusCode)
	}
}

// TestText checks that a plain-text resource is served with the expected
// size and content-type headers.
func TestText(t *testing.T) {
	expected := map[string]string{
		"Content-Size": "12",
		"Content-Type": "text/plain",
	}
	checkPath(t, "ht.txt", expected)
}

// TestSub checks that resources in sub-directories are served too
// ("image/" is a prefix match, see checkHeaders).
func TestSub(t *testing.T) {
	expected := map[string]string{
		"Content-Size": "1150",
		"Content-Type": "image/",
	}
	checkPath(t, "sub/favicon.ico", expected)
}
// TestTempalte (sic — typo for "Template"; the exported name is kept since
// `go test -run` patterns may reference it) verifies that "/" serves the
// rendered template with the expected body.
func TestTempalte(t *testing.T) {
	server := startServer(t)
	if server == nil {
		t.Fatalf("can't start server")
	}
	defer server.Process.Kill()
	url := fmt.Sprintf("http://localhost:%d", port)
	resp, err := http.Get(url)
	if err != nil {
		t.Fatalf("can't GET / - %s", err)
	}
	// NOTE(review): resp.Body is never closed; tolerable in a short-lived
	// test process but worth fixing.
	data, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		t.Fatalf("can't read body - %s", err)
	}
	if string(data) != "The number is 7\n" {
		t.Fatalf("bad template reply - %s", string(data))
	}
}
// createMain writes the templated test-server source (const code below)
// into <root>/main.go, substituting the listen port via Fprintf.
func createMain() error {
	filename := fmt.Sprintf("%s/main.go", root)
	file, err := os.Create(filename)
	if err != nil {
		return err
	}
	defer file.Close()
	fmt.Fprintf(file, code, port)
	return nil
}

// initDir recreates the scratch build directory and seeds it with main.go.
func initDir() error {
	// Ignore error value, since it might not be there
	os.RemoveAll(root)
	err := os.Mkdir(root, 0777)
	if err != nil {
		return err
	}
	return createMain()
}

// get fetches a static resource path from the running test server.
func get(path string) (*http.Response, error) {
	url := fmt.Sprintf("http://localhost:%d/static/%s", port, path)
	return http.Get(url)
}
// startServer launches the packed test binary and polls "/" for up to two
// seconds until the HTTP server answers. On timeout it kills the process,
// fails the test and returns nil.
func startServer(t *testing.T) *exec.Cmd {
	cmd := exec.Command(fmt.Sprintf("%s/nrsc-test", root))
	// Ignore errors, test will fail anyway if server not running
	cmd.Start()
	// Wait for server
	url := fmt.Sprintf("http://localhost:%d", port)
	start := time.Now()
	for time.Since(start) < time.Duration(2*time.Second) {
		_, err := http.Get(url)
		if err == nil {
			return cmd
		}
		time.Sleep(time.Second / 10)
	}
	if cmd.Process != nil {
		cmd.Process.Kill()
	}
	t.Fatalf("can't connect to server")
	return nil
}
// init prepares everything the tests need before they run: builds the nrsc
// packer tool, builds the test server inside a scratch dir, and packs the
// static test resources into the server binary.
func init() {
	root = testDir()
	if err := initDir(); err != nil {
		panic(err)
	}
	// NOTE(review): Getwd/Chdir errors are ignored below; a failed chdir
	// would make the subsequent `go build` run in the wrong directory.
	cwd, _ := os.Getwd()
	path := func(name string) string {
		return fmt.Sprintf("%s/%s", cwd, name)
	}
	os.Chdir("nrsc")
	cmd := exec.Command("go", "build")
	if err := cmd.Run(); err != nil {
		fmt.Printf("error building nrsc: %s\n", err)
		panic(err)
	}
	// Build the test server inside the scratch dir; restore cwd on exit.
	os.Chdir(root)
	defer os.Chdir(cwd)
	testExe := "nrsc-test"
	cmd = exec.Command("go", "build", "-o", testExe)
	if err := cmd.Run(); err != nil {
		fmt.Printf("error building: %s\n", err)
		panic(err)
	}
	// Pack the static test resources into the freshly built binary.
	cmd = exec.Command(path("nrsc/nrsc"), testExe, path("test-resources"))
	if err := cmd.Run(); err != nil {
		fmt.Printf("error packing: %s\n", err)
		panic(err)
	}
}
// checkHeaders asserts that each expected header value is a prefix of the
// served value (prefix matching lets e.g. "image/" cover "image/x-icon"),
// and that a Last-Modified header is always present.
func checkHeaders(t *testing.T, expected map[string]string, headers http.Header) {
	for key := range expected {
		v1 := expected[key]
		v2 := headers.Get(key)
		if !strings.HasPrefix(v2, v1) {
			t.Fatalf("bad header %s: %s <-> %s", key, v1, v2)
		}
	}
	key := "Last-Modified"
	value := headers.Get(key)
	if value == "" {
		t.Fatalf("no %s header", key)
	}
}
// getResp starts the server, GETs the given static path and returns the
// response. The deferred Kill means the server is gone once this returns,
// so callers should only inspect status/headers already received.
func getResp(t *testing.T, path string) *http.Response {
	server := startServer(t)
	if server == nil {
		return nil
	}
	defer server.Process.Kill()
	resp, err := get(path)
	if err != nil {
		t.Fatalf("%s\n", err)
		return nil
	}
	return resp
}

// checkPath fetches a static path and validates status plus headers.
// NOTE(review): resp.Body is never closed; acceptable in this short test.
func checkPath(t *testing.T, path string, expected map[string]string) {
	resp := getResp(t, path)
	if resp.StatusCode != http.StatusOK {
		t.Fatalf("bad reply - %s", resp.Status)
	}
	checkHeaders(t, expected, resp.Header)
}
// code is the Go source of the throw-away test server; createMain writes it
// with fmt.Fprintf, so "%d" is replaced by the listen port and literal
// percent signs inside the template are doubled ("%%s").
const code = `
package main
import (
	"fmt"
	"net/http"
	"os"
	"regexp"
	"bitbucket.org/tebeka/nrsc"
)
type params struct {
	Number int
}
func indexHandler(w http.ResponseWriter, req *http.Request) {
	t, err := nrsc.LoadTemplates(nil, "t.html")
	if err != nil {
		http.NotFound(w, req)
	}
	if err = t.Execute(w, params{7}); err != nil {
		http.NotFound(w, req)
	}
}
func main() {
	nrsc.Handle("/static/")
	nrsc.Mask(regexp.MustCompile(".gif$"))
	http.HandleFunc("/", indexHandler)
	if err := http.ListenAndServe(":%d", nil); err != nil {
		fmt.Fprintf(os.Stderr, "error: %%s\n", err)
		os.Exit(1)
	}
}
`
|
#!/usr/bin/env bash
# Check how XML/JSON associates to YANG spec in different situations
# In more detail, xml/json usually goes through these steps:
# 1. Parse syntax, eg map JSON/XML concrete syntax to cxobj trees
# 2. Populate/match cxobj tree X with yang statements, ie bind each cxobj node to yang_stmt nodes
# a. X is a top-level node (XML and JSON)
# b. X is a not a top-level node (XML and JSON)
# 3. Sort children
# 4. Validation (optional)
# These tests are for cases 2a and 2b primarily. They occur somewhat differently in XML and JSON.
# Magic line must be first in script (see README.md)
s="$_" ; . ./lib.sh || if [ "$s" = $0 ]; then exit 0; else return 0; fi

# Utility binaries under test (overridable via environment).
: ${clixon_util_xml:="clixon_util_xml"}
: ${clixon_util_json:="clixon_util_json"}

APPNAME=example

# Scratch files: clixon config, YANG module, XML/JSON payloads, top tree.
cfg=$dir/conf_match.xml
fyang=$dir/match.yang
fxml=$dir/x.xml
fjson=$dir/x.json
ftop=$dir/top.xml

cat <<EOF > $cfg
<clixon-config xmlns="http://clicon.org/config">
<CLICON_CONFIGFILE>$cfg</CLICON_CONFIGFILE>
<CLICON_YANG_DIR>/usr/local/share/clixon</CLICON_YANG_DIR>
<CLICON_YANG_DIR>$dir</CLICON_YANG_DIR>
<CLICON_YANG_DIR>$IETFRFC</CLICON_YANG_DIR>
<CLICON_YANG_MAIN_FILE>$fyang</CLICON_YANG_MAIN_FILE>
<CLICON_CLISPEC_DIR>/usr/local/lib/$APPNAME/clispec</CLICON_CLISPEC_DIR>
<CLICON_CLI_DIR>/usr/local/lib/$APPNAME/cli</CLICON_CLI_DIR>
<CLICON_NETCONF_DIR>/usr/local/lib/$APPNAME/netconf</CLICON_NETCONF_DIR>
<CLICON_BACKEND_DIR>/usr/local/lib/$APPNAME/backend</CLICON_BACKEND_DIR>
<CLICON_CLI_MODE>$APPNAME</CLICON_CLI_MODE>
<CLICON_SOCK>/usr/local/var/$APPNAME/$APPNAME.sock</CLICON_SOCK>
<CLICON_BACKEND_PIDFILE>/usr/local/var/$APPNAME/$APPNAME.pidfile</CLICON_BACKEND_PIDFILE>
<CLICON_XMLDB_DIR>$dir</CLICON_XMLDB_DIR>
<CLICON_MODULE_LIBRARY_RFC7895>true</CLICON_MODULE_LIBRARY_RFC7895>
</clixon-config>
EOF

# YANG model: container "a" with an inner list also named "a" (deliberately
# the same name, to catch false positives between cases 2a/2b) plus anyxml.
cat <<EOF > $fyang
module match{
yang-version 1.1;
prefix m;
namespace "urn:example:match";
container a {
description "Top level";
presence true;
list a {
description "Note same as parent to catch false positives 2a/2b";
key k;
leaf k{
type uint32;
}
}
anyxml any;
}
}
EOF

new "test params: -f $cfg"

cat <<EOF > $ftop
<a xmlns="urn:example:match"><a><k>0</k></a></a>
EOF
cat <<EOF > $fxml
<a xmlns="urn:example:match"><a><k>43</k></a></a>
EOF
new "2a XML Add a/a/k on top"
expectpart "$($clixon_util_xml -vy $fyang -f $fxml)" 0 '^$'

# Subtree without namespace (maybe shouldnt work?)
cat <<EOF > $fxml
<a><k>42</k></a>
EOF
new "2b XML Add a/k under a without namespace"
expectpart "$($clixon_util_xml -vy $fyang -f $fxml -t $ftop -T m:a)" 0 '^$'

# Subtree with namespace
cat <<EOF > $fxml
<a xmlns="urn:example:match"><k>42</k></a>
EOF
new "2b XML Add a/k under a"
expectpart "$($clixon_util_xml -vy $fyang -f $fxml -t $ftop -T m:a)" 0 '^$'

new "XML Add a/k on top, should fail"
expectpart "$($clixon_util_xml -vy $fyang -f $fxml 2> /dev/null)" 255 '^$'

cat <<EOF > $fxml
<a xmlns="urn:example:match"><a><k>43</k></a></a>
EOF
new "2b XML Add a/a/k under a should fail"
expectpart "$($clixon_util_xml -vy $fyang -f $fxml -t $ftop -T m:a 2> /dev/null)" 255 '^$'

# Anyxml
cat <<EOF > $fxml
<any xmlns="urn:example:match"><kalle>hej</kalle></any>
EOF
new "XML Add any under a"
expectpart "$($clixon_util_xml -vy $fyang -f $fxml -t $ftop -T m:a)" 0 '^$'

cat <<EOF > $fxml
<a xmlns="urn:example:match"><any><kalle>hej</kalle></any></a>
EOF
new "XML Add any on top"
expectpart "$($clixon_util_xml -vy $fyang -f $fxml)" 0 '^$'

# OK, same thing with JSON!
cat <<EOF > $fjson
{"match:a":{"a":{"k":43}}}
EOF
new "2a JSON Add a/a/k on top"
expectpart "$($clixon_util_xml -Jvy $fyang -f $fjson)" 0 '^$'

# Subtree with namespace
cat <<EOF > $fjson
{"match:a":{"k":43}}
EOF
new "2b JSON Add a/k under a"
expectpart "$($clixon_util_xml -Jvy $fyang -f $fjson -t $ftop -T m:a)" 0 '^$'

new "JSON Add a/k on top, should fail"
expectpart "$($clixon_util_xml -Jvy $fyang -f $fjson 2> /dev/null)" 255 '^$'

cat <<EOF > $fjson
{"match:a":{"a":{"k":43}}}
EOF
new "2b JSON Add a/a/k under a should fail"
expectpart "$($clixon_util_xml -Jvy $fyang -f $fjson -t $ftop -T m:a 2> /dev/null)" 255 '^$'

# Anyxml
cat <<EOF > $fjson
{"match:any":{"kalle":"hej"}}
EOF
new "JSON Add any under a"
expectpart "$($clixon_util_xml -Jvy $fyang -f $fjson -t $ftop -T m:a)" 0 '^$'

cat <<EOF > $fjson
{"match:a":{"any":{"kalle":"hej"}}}
EOF
new "JSON Add any on top"
expectpart "$($clixon_util_xml -Jvy $fyang -f $fjson)" 0 '^$'

# Clean up scratch files.
rm -rf $dir
|
import os
import pickle
import tempfile
from dagster import ModeDefinition, execute_pipeline, graph, op, pipeline, solid
from dagster.core.definitions.version_strategy import VersionStrategy
from dagster.core.execution.api import create_execution_plan
from dagster.core.instance import DagsterInstance
from dagster.core.storage.fs_io_manager import fs_io_manager
from dagster.core.test_utils import instance_for_test
def define_pipeline(io_manager):
    """Build a two-solid pipeline (solid_a -> solid_b) wired to *io_manager*.

    solid_a emits a small list and solid_b consumes it, which exercises
    both the write and read paths of the IO manager under test.
    """
    @solid
    def solid_a(_context):
        return [1, 2, 3]
    @solid
    def solid_b(_context, _df):
        return 1
    @pipeline(mode_defs=[ModeDefinition("local", resource_defs={"io_manager": io_manager})])
    def asset_pipeline():
        solid_b(solid_a())
    return asset_pipeline
def test_fs_io_manager():
    """End-to-end check that fs_io_manager pickles each solid output under
    <base_dir>/<run_id>/<step>/<output name> and reloads it downstream."""
    with tempfile.TemporaryDirectory() as tmpdir_path:
        io_manager = fs_io_manager.configured({"base_dir": tmpdir_path})
        pipeline_def = define_pipeline(io_manager)
        result = execute_pipeline(pipeline_def)
        assert result.success
        # One handled-output event per solid.
        handled_output_events = list(filter(lambda evt: evt.is_handled_output, result.event_list))
        assert len(handled_output_events) == 2
        filepath_a = os.path.join(tmpdir_path, result.run_id, "solid_a", "result")
        assert os.path.isfile(filepath_a)
        with open(filepath_a, "rb") as read_obj:
            assert pickle.load(read_obj) == [1, 2, 3]
        # Only solid_b loads an upstream input (solid_a's output).
        loaded_input_events = list(filter(lambda evt: evt.is_loaded_input, result.event_list))
        assert len(loaded_input_events) == 1
        assert "solid_a" == loaded_input_events[0].event_specific_data.upstream_step_key
        filepath_b = os.path.join(tmpdir_path, result.run_id, "solid_b", "result")
        assert os.path.isfile(filepath_b)
        with open(filepath_b, "rb") as read_obj:
            assert pickle.load(read_obj) == 1
def test_fs_io_manager_base_dir():
    """Without explicit config, fs_io_manager falls back to the Dagster
    instance's storage directory as its base dir."""
    with tempfile.TemporaryDirectory() as tmpdir_path:
        instance = DagsterInstance.ephemeral(tempdir=tmpdir_path)
        io_manager = fs_io_manager
        pipeline_def = define_pipeline(io_manager)
        result = execute_pipeline(pipeline_def, instance=instance)
        assert result.success
        assert result.result_for_solid("solid_a").output_value() == [1, 2, 3]
        # The pickle lands under the instance storage dir, not tmpdir_path.
        with open(
            os.path.join(instance.storage_directory(), result.run_id, "solid_a", "result"), "rb"
        ) as read_obj:
            assert pickle.load(read_obj) == [1, 2, 3]
def test_fs_io_manager_memoization():
    """With a constant version strategy, the second run of the job should be
    fully memoized: zero steps to execute and no re-entry into my_op."""
    recorder = []
    @op
    def my_op():
        recorder.append("entered")
    @graph
    def my_graph():
        my_op()
    class MyVersionStrategy(VersionStrategy):
        def get_solid_version(self, solid_def):
            # Constant version => outputs always considered up to date.
            return "foo"
    with tempfile.TemporaryDirectory() as temp_dir:
        with instance_for_test(temp_dir=temp_dir) as instance:
            my_job = my_graph.to_job(version_strategy=MyVersionStrategy())
            unmemoized_plan = create_execution_plan(my_job, instance=instance)
            assert len(unmemoized_plan.step_keys_to_execute) == 1
            result = my_job.execute_in_process(instance=instance)
            assert result.success
            assert len(recorder) == 1
            # Second plan/run: everything memoized, op body not re-run.
            execution_plan = create_execution_plan(my_job, instance=instance)
            assert len(execution_plan.step_keys_to_execute) == 0
            result = my_job.execute_in_process(instance=instance)
            assert result.success
            assert len(recorder) == 1
|
#!/bin/bash
#
# coverage.sh
#
# Generate coverage figures
#
# @author Kealan McCusker <kealanmccusker@gmail.com>
# ------------------------------------------------------------------------------
# NOTES:
CURRENTDIR=${PWD}

# coverage - capture lcov counters from the Coverage build tree and render
# an HTML report into target/Coverage/coverage/.
function coverage()
{
    echo "coverage"
    # Fail fast if the Coverage build tree is missing instead of running
    # lcov in whatever directory we happen to be in (the original did not
    # check the cd result).
    cd "$CURRENTDIR/target/Coverage" || { echo "missing $CURRENTDIR/target/Coverage" >&2; return 1; }
    # -p: do not fail when the coverage/ output dir already exists.
    mkdir -p coverage
    lcov --capture --initial --directory ./src --output-file coverage/libmpc.info
    lcov --no-checksum --directory ./src --capture --output-file coverage/libmpc.info
    genhtml -o coverage -t "LIBPAILLIER Test Coverage" coverage/libmpc.info
}

coverage
|
#!/bin/sh
#***********************************************************************
#* GNU Lesser General Public License
#*
#* This file is part of the GFDL Flexible Modeling System (FMS).
#*
#* FMS is free software: you can redistribute it and/or modify it under
#* the terms of the GNU Lesser General Public License as published by
#* the Free Software Foundation, either version 3 of the License, or (at
#* your option) any later version.
#*
#* FMS is distributed in the hope that it will be useful, but WITHOUT
#* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
#* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
#* for more details.
#*
#* You should have received a copy of the GNU Lesser General Public
#* License along with FMS. If not, see <http://www.gnu.org/licenses/>.
#***********************************************************************
# This is part of the GFDL FMS package. This is a shell script to
# execute tests in the test_fms/fms2_io directory.
# Set common test settings.
. ../test_common.sh
# make an input.nml for mpp_init to read
# NOTE(review): this writes the literal three lines "EOF", "&dummy", "EOF";
# presumably only the &dummy namelist line matters — confirm.
printf "EOF\n&dummy\nEOF" | cat > input.nml
# run the tests (6 ranks; skipped when the netcdf version is unsuitable)
run_test test_atmosphere_io 6 $netcdf_version_skip
|
///<reference path='.\rule.ts' />
///<reference path='.\consequences\consequence.ts' />
///<reference path='..\compilation\conditionVisitor.ts' />
module Treaty {
export module Rules {
        /**
         * Fluent builder contract for assembling a rule from a name,
         * condition expressions and consequence expressions.
         * (Legacy-TS note: `bool` is the pre-1.0 spelling of `boolean`.)
         */
        export interface IBuildRule {
            named(name: string): IBuildRule;
            when(instanceType: string, expression: (instance) => bool): IBuildRule;
            then(instanceType: string, expression: (instance) => void): IBuildRule;
            build(): Rule;
        }
        /**
         * Entry point for creating rule builders; shares one
         * ExpressionParser instance across every rule it creates.
         */
        export class RuleFactory {
            private expressionParser: Treaty.Compilation.ExpressionParser = new Treaty.Compilation.ExpressionParser();

            // Starts a new fluent rule definition.
            public rule(): IBuildRule {
                return new RuleBuilder(this.expressionParser);
            }
        }
        /**
         * Accumulates a rule name, condition builders and consequence
         * builders, then assembles them into a Rule via build().
         */
        export class RuleBuilder implements IBuildRule {
            private name: string;
            private conditionBuilders: ConditionBuilder[] = [];
            private consequenceBuilders: ConsequenceBuilder[] = [];

            constructor (private expressionParser: Treaty.Compilation.ExpressionParser) { }

            // Sets the rule's name.
            public named(name: string): IBuildRule {
                this.name = name;
                return this;
            }

            // Adds a condition over instances of the given type.
            public when(instanceType: string, expression: (instance) => bool): IBuildRule {
                this.conditionBuilders.push(new ConditionBuilder(Type.create(instanceType), expression));
                return this;
            }

            // Adds an action to run when all conditions match.
            public then(instanceType: string, expression: (instance) => void ): IBuildRule {
                this.consequenceBuilders.push(new ConsequenceBuilder(Type.create(instanceType), expression));
                return this;
            }

            // Expands every registered builder (one builder may yield
            // several conditions) and assembles the final Rule.
            public build(): Rule {
                var conditions: ICondition[] = [];
                var consequences: IConsequence[] = [];
                this.conditionBuilders.forEach(builder => {
                    builder.build(this.expressionParser).forEach(condition => {
                        conditions.push(condition);
                    });
                });
                this.consequenceBuilders.forEach(builder => {
                    builder.build().forEach(consequence => {
                        consequences.push(consequence);
                    });
                });
                return new Rule(this.name, conditions, consequences);
            }
        }
        /**
         * Parses a boolean condition expression (via the shared
         * ExpressionParser) into one or more ICondition instances for
         * the given instance type.
         */
        export class ConditionBuilder {
            private conditionParser = new Treaty.Compilation.ConditionParser();

            constructor (private instanceType: Treaty.Type, private expression: (instance) => bool) { }

            public build(expressionParser: Treaty.Compilation.ExpressionParser): ICondition[] {
                var script = expressionParser.parse(this.expression);
                return this.conditionParser.parse(this.instanceType, script);
            }
        }
export class ConsequenceBuilder {
constructor (private instanceType: Treaty.Type, private consequence: (instance) => void) { }
public build(): IConsequence[] {
var consequences = new IConsequence[];
consequences.push(new Treaty.Rules.Consequences.DelegateConsequence(this.instanceType, this.consequence));
return consequences;
}
}
}
}
|
#
# _ _ ___ _____ _ _____________ _____ _ _ _____ ___ _ _
# | | | | / _ \/ __ \| | / /_ _| ___ \ _ | \ | |_ _|/ _ \ | \ | |
# | |_| |/ /_\ \ / \/| |/ / | | | |_/ / | | | \| | | | / /_\ \| \| |
# | _ || _ | | | \ | | | /| | | | . ` | | | | _ || . ` |
# | | | || | | | \__/\| |\ \ | | | |\ \\ \_/ / |\ |_| |_| | | || |\ |
# \_| |_/\_| |_/\____/\_| \_/ \_/ \_| \_|\___/\_| \_/\___/\_| |_/\_| \_/
#
# ~ Tools For Hacking by Σαμθελ ισονγ (Admin of Termux Android Hackers) ~
clear
# NOTE(review): every step below assumes passwordless sudo. Making /etc and
# /usr/share/doc executable changes system directory permissions and looks
# unnecessary for a plain clone + install — confirm before running.
sudo chmod +x /etc/
clear
sudo chmod +x /usr/share/doc
clear
# Remove any previous hacktronian installation.
sudo rm -rf /usr/share/doc/hacktronian/
clear
cd /etc/
clear
# Recreate a clean TermuxHackz working directory under /etc.
sudo rm -rf /etc/TermuxHackz
clear
mkdir TermuxHackz
clear
cd TermuxHackz
clear
# Fetch and run the upstream installer.
git clone https://github.com/TermuxHackz/hacktronian
clear
cd hacktronian
clear
sudo chmod +x install.sh
clear
./install.sh
clear
|
# to calc FID score in SimGAN
DATADIR=../../datasets/pytorch_models
mkdir -p $DATADIR
# Download both published Inception-v3 checkpoints used by FID
# implementations; -c resumes partial downloads into $DATADIR.
for PTH in 'inception_v3_google-1a9a5a14.pth' 'inception_v3_google-0cc3c7bd.pth'
do
    wget -O $DATADIR/$PTH -c https://download.pytorch.org/models/$PTH --no-check-certificate
done
|
#!/bin/bash
# Image identity: <maintainer>/<name>:<tag>
var_maintainer="localbuild"
var_imagename="postgres14"
var_tag="latest"
# Stage the postgres TLS certs into the build context.
if [[ ! -e ./postgres/tls/postgres ]]; then
    mkdir -p ./postgres/tls/postgres
fi
# relative to ./docker/db
cp ../../certs/tls/postgres/* ./postgres/tls/postgres/
# NOTE(review): `return` is only valid when this script is sourced; when
# executed directly a failed cd prints an error and continues — confirm.
cd postgres || return
# Optional args: $1 build type label (default "default"), $2 extra image tag.
var_image_build_type="${1}"
var_extra_image_name="${2}"
if [[ "${var_image_build_type}" == "" ]]; then
    var_image_build_type="default"
fi
var_use_image_name="${var_maintainer}/${var_imagename}"
# Colored one-line printers (227/208/048/196 are 256-color codes).
function yellow() { printf "\x1b[38;5;227m%s\e[0m " "${@}"; printf "\n"; }
function warn() { printf "\x1b[38;5;208m%s\e[0m " "${@}"; printf "\n"; }
function green() { printf "\x1b[38;5;048m%s\e[0m " "${@}"; printf "\n"; }
function red() { printf "\x1b[38;5;196m%s\e[0m " "${@}"; printf "\n"; }

export VERBOSE="0"
export DATE_FMT="%Y-%m-%dT%H:%M:%SZ"

# debug - only print if VERBOSE != 0
function debug() {
    cur_date="$(date -u +"${DATE_FMT}")"
    local log_str="${cur_date} DEBUG ${*}"
    if [[ ${VERBOSE} -ne 0 ]]; then
        echo "${log_str}"
    fi
} # debug - end

# info - informational logging, also gated on VERBOSE != 0.
function info() {
    cur_date="$(date -u +"${DATE_FMT}")"
    local log_str="${cur_date} INFO ${*}"
    if [[ ${VERBOSE} -ne 0 ]]; then
        echo "${log_str}"
    fi
} # info - end

# err - always printed, in red.
function err() {
    cur_date="$(date -u +"${DATE_FMT}")"
    local log_str="${cur_date} ERROR ${*}"
    red "${log_str}"
} # err - end

# trace - lowest-level logging, gated on VERBOSE != 0.
function trace() {
    cur_date="$(date -u +"${DATE_FMT}")"
    local log_str="${cur_date} TRACE ${*}"
    if [[ ${VERBOSE} -ne 0 ]]; then
        echo "${log_str}"
    fi
} # trace - end

# crit - always printed, via the orange warn color.
function crit() {
    cur_date="$(date -u +"${DATE_FMT}")"
    local log_str="${cur_date} CRITICAL ${*}"
    warn "${log_str}"
} # crit - end

# good - success message in green.
# NOTE(review): gated on SILENT, which is never set in this script, so the
# -eq test compares against an empty value — confirm intended behavior.
function good() {
    cur_date="$(date -u +"${DATE_FMT}")"
    local log_str="${cur_date} SUCCESS ${*}"
    if [[ ${SILENT} -eq 0 ]]; then
        green "${log_str}"
    fi
} # good - end

# banner_log - section header in yellow, always printed.
function banner_log() {
    cur_date="$(date -u +"${DATE_FMT}")"
    local log_str="${cur_date} HEADER ${*}"
    yellow "${log_str}"
} # banner_log - end
# build_docker_image_with_tags - build the image from the current directory,
# then apply ":${var_tag}" and (optionally) one extra caller-supplied tag.
# On any docker failure: cd back up and exit 1.
function build_docker_image_with_tags() {
    banner_log "--------------------------------------------------------"
    banner_log "building new docker image=${var_use_image_name}:${var_tag}"
    docker build --rm -t "${var_use_image_name}" .
    var_last_status=$?
    if [[ "${var_last_status}" == "0" ]]; then
        if [[ "${var_tag}" != "" ]]; then
            # Resolve the id of the image we just built.
            # NOTE(review): this parses human-readable `docker images`
            # output; `docker images -q ${var_use_image_name}:latest`
            # would be more robust — confirm before changing.
            info "docker images | grep \"${var_use_image_name} \" | grep latest | awk '{print \$3}'"
            var_image_csum=$(docker images | grep "${var_use_image_name} " | grep latest | awk '{print $3}' | head -1)
            if [[ "${var_image_csum}" != "" ]]; then
                docker tag "${var_image_csum}" "${var_use_image_name}:${var_tag}"
                var_last_status=$?
                if [[ "${var_last_status}" != "0" ]]; then
                    err "failed to tag image=${var_use_image_name} with tag=${var_tag} with command:"
                    echo "docker tag ${var_image_csum} ${var_use_image_name}:${var_tag}"
                    cd ..
                    exit 1
                else
                    info "build successful tagged image=${var_use_image_name} with tag=${var_tag}"
                fi
                # Optional second tag from the command line.
                if [[ "${var_extra_image_name}" != "" ]]; then
                    info "setting the docker tag"
                    info "docker tag ${var_image_csum} ${var_extra_image_name}"
                    docker tag "${var_image_csum}" "${var_extra_image_name}"
                    var_last_status=$?
                    if [[ "${var_last_status}" != "0" ]]; then
                        err "failed to tag image=${var_use_image_name} with tag=${var_tag} with command:"
                        echo "docker tag ${var_image_csum} ${var_extra_image_name}"
                        cd ..
                        exit 1
                    else
                        info "added additional docker tag: ${var_extra_image_name}"
                    fi
                fi
            else
                err "build failed to find latest image=${var_use_image_name} with tag=${var_tag}"
                cd ..
                exit 1
            fi
        fi
    else
        err "build failed with exit code: ${var_last_status}"
        cd ..
        exit 1
    fi
    if [[ "${var_extra_image_name}" != "" ]]; then
        good "docker build successful build_type=${var_image_build_type} ${var_use_image_name}:${var_tag} extra_name=${var_extra_image_name}"
    else
        good "docker build successful build_type=${var_image_build_type} ${var_use_image_name}:${var_tag}"
    fi
} # build_docker_image_with_tags - end
# run_main - script entry point: build and tag the image, then leave the
# postgres/ directory and exit successfully.
function run_main() {
    info "run_main - begin"
    build_docker_image_with_tags
    info "run_main - end"
} # run_main - end

run_main
cd ..
exit 0
|
<gh_stars>1-10
// (C) 2019-2020 GoodData Corporation
import { IDataset } from "../fromModel/ldm/datasets";
/**
 * Service for querying the datasets defined in a workspace.
 *
 * @public
 */
export interface IWorkspaceDatasetsService {
    /**
     * Retrieve all datasets defined in the workspace.
     *
     * @returns promise resolving to the workspace datasets
     */
    getDatasets(): Promise<IDataset[]>;
}
|
#!/bin/bash
# Print the supported sub-commands.
# NOTE(review): "uasge" typo is in the user-visible string, left untouched
# here (doc-only change).
function usage() {
    echo "uasge: $0 {start|restart|kill|toc|build|publish|release_src|release|help|-h}"
}

# Kill all running teedoc python3 processes.
# NOTE(review): when no server is running, xargs receives no pids; confirm
# xargs' behavior (-r would skip the kill entirely).
function kill_teedoc() {
    ps aux | grep teedoc | grep python3 | awk '{print $2}' | xargs kill -9 >/dev/null
}

# Build the static site.
function teedoc_build() {
    teedoc build
}

# Rebuild, kill any running server, then serve again in the background.
function restart() {
    teedoc_build
    kill_teedoc
    teedoc serve &
}

# Serve the already-built site in the background.
function start() {
    teedoc serve &
}
# Read every sidebar.yml and generate the table of contents.
function creat_toc() {
    echo "生成目录"
    python3 script/auto_creat_toc.py --out_file Quecpython_toc.yml --input_file docs/Quecpython_intro/zh/config.json --action toc
}

# Package the built site: build first if out/ is missing, then tar+bzip2
# the result and move the archive to the parent directory.
function teedoc_release() {
    release_filename=Community-document-$(date "+%Y%m%d-%H%M")
    if [ ! -d "out" ]; then
        teedoc_build
    fi
    tar cf ${release_filename}.tar out
    tar jcf ${release_filename}.tar.bz2 ${release_filename}.tar
    rm -rf ${release_filename}.tar
    mv ${release_filename}.tar.bz2 ..
}

# NOTE(review): this function looks unfinished — release_filename is never
# used and the bare path /home/dist/teedoc is executed as a command; confirm
# the intended source-release behavior.
function teedoc_src_release() {
    release_filename=Community-document-src-$(date "+%Y%m%d-%H%M")
    /home/dist/teedoc
}

# Flatten the generated doc/ tree, then rsync it either to the remote
# server (when $1 matches the known target) or to the local web root.
function teedoc_publish_to_server() {
    OUT_DOC_TOP_DIC=./out/doc/
    cp ${OUT_DOC_TOP_DIC}/doc/*.html ${OUT_DOC_TOP_DIC}
    cp ${OUT_DOC_TOP_DIC}/doc/*.ico ${OUT_DOC_TOP_DIC}
    cp ${OUT_DOC_TOP_DIC}/doc/*.json ${OUT_DOC_TOP_DIC}
    case $1 in
    "root@192.168.25.215:/home/dist/doc")
        echo "确认发布到服务器: "
        sudo chmod -R 777 ${OUT_DOC_TOP_DIC}
        rsync ${OUT_DOC_TOP_DIC}/ $1 -r -i
        ;;
    *)
        # Publish into the local /www/wwwroot/test.com/doc folder.
        echo "发布到 /www/wwwroot/test.com/doc 文件夹"
        sudo chmod -R 777 ${OUT_DOC_TOP_DIC}
        rsync ${OUT_DOC_TOP_DIC}/ /www/wwwroot/test.com/doc -r -i > /dev/null
        ;;
    esac
}

# Overwrite duplicated documents based on the index config.
function copy_file() {
    python3 script/auto_creat_toc.py --input_file pages/index/zh/config.json --action copy
}
# Dispatch on the first CLI argument; with no argument at all, just build.
case $1 in
"start")
    start
    ;;
"restart")
    restart
    ;;
"kill")
    kill_teedoc
    ;;
"--help")
    usage
    ;;
"build")
    teedoc_build
    ;;
"-h")
    usage
    ;;
"release")
    teedoc_release
    ;;
"release_src")
    teedoc_src_release
    ;;
"publish")
    teedoc_publish_to_server $2
    ;;
"toc")
    # Generate the table of contents.
    creat_toc
    ;;
"copy")
    # Overwrite duplicated documents.
    copy_file
    ;;
*)
    if [ $# = 0 ]; then
        # No arguments: default to a build.
        teedoc_build
    else
        # Unknown argument.
        # NOTE(review): `exit -1` is non-portable (becomes 255); confirm.
        echo "ERROR: $0 $1 错误的参数"
        usage
        exit -1
    fi
    ;;
esac
|
def stringArrayToObjects(array):
    """Map each string in *array* to a dict with keys ``'string'`` (the
    value itself) and ``'length'`` (its length).

    Bug fix: the original used the undefined bare names ``string`` and
    ``length`` as dict keys (NameError at runtime) and called the demo
    below with smart quotes (SyntaxError); both are corrected here.
    """
    return [{'string': elem, 'length': len(elem)} for elem in array]

result = stringArrayToObjects(['Hello', 'World'])
print(result)
|
import random
def generatePassword(n, charset):
    """Return a password of ``n`` characters drawn uniformly from *charset*.

    Note: the ``random`` module is not cryptographically secure; prefer
    ``secrets.choice`` when these passwords protect anything sensitive.
    """
    # join over a generator replaces the original O(n^2) string
    # concatenation with manual randint indexing.
    return ''.join(random.choice(charset) for _ in range(n))

# 62-character alphanumeric alphabet.
alnum = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
randompassword = generatePassword(20, alnum)
print(randompassword)
|
#!/bin/bash
mkdir -p spigot_bin
pushd spigot_bin
# BuildTools fetches and builds the Spigot server jars for each revision.
wget "https://hub.spigotmc.org/jenkins/job/BuildTools/lastSuccessfulBuild/artifact/target/BuildTools.jar" -O BuildTools.jar
java -jar ./BuildTools.jar --rev 1.13.2
java -jar ./BuildTools.jar --rev 1.12.2
popd
# Build the upgrader plugin against both server versions.
pushd YamlUpgrader_v1_12_R1
gradle clean build
popd
pushd YamlUpgrader_v1_13_R2
gradle clean build
popd
# Collect the built jars next to this script.
cp YamlUpgrader_*/build/libs/*.jar .
|
<gh_stars>0
package model;
/**
 * An ordered event attached to a task; instances sort ascending by
 * {@code orderNum} (lower values come earlier).
 */
public class Happening implements Comparable<Happening>{
    // Position in the overall ordering; lower orderNum sorts earlier.
    private final int orderNum;
    private final HappeningType type;
    private final Task task;

    /**
     * @param orderNum ordering key (lower = earlier)
     * @param type     kind of happening
     * @param task     the task this happening belongs to
     */
    public Happening(int orderNum, HappeningType type, Task task){
        this.orderNum = orderNum;
        this.type = type;
        this.task = task;
    }

    public int getOrderNum(){
        return orderNum;
    }

    public HappeningType getType(){
        return type;
    }

    public Task getTask(){
        return task;
    }

    @Override
    public int compareTo(Happening h2) {
        // Bug fix: Integer.compare avoids the integer-overflow defect of
        // the original subtraction (this.orderNum - h2.orderNum), which
        // returns the wrong sign near Integer.MIN_VALUE/MAX_VALUE.
        return Integer.compare(this.orderNum, h2.orderNum); // Lower orderNum --> earlier
    }

    @Override
    public String toString(){
        return "Happening:{id:"+orderNum+",type="+type.toString()+",task:"+task+"}";
    }
}
|
<filename>src/mleko/brzdac/crawler/pojo/Directory.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package mleko.brzdac.crawler.pojo;
/**
*
* @author mleko
*/
public class Directory {
private String path;
public Directory(String path) {
if (!path.endsWith("/")) {
path = path + "/";
}
this.path = path;
}
@Override
public int hashCode() {
int hash = 5;
hash = 97 * hash + (this.getPath() != null ? this.getPath().toLowerCase().hashCode() : 0);
return hash;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Directory other = (Directory) obj;
return !((this.path == null) ? (other.path != null) : !this.path.equalsIgnoreCase(other.path));
}
/**
* @return the path
*/
public String getPath() {
return path;
}
}
|
#include "reversiview.h"
#define ButtonHeight 40
#define ButtonWidth 160
#define ButtonSpacing 30
#define ButtonXPos 300
#define InitialButtonPosition 100
/**
* \file reversiview.cpp
* \brief Reversi View class defintion
*
* Contains the iniatialization of the reversi game view and and the implementation of its functions.
* \author <NAME>
*/
// Fixed-size (750x500) main-menu view: a title label plus four stacked
// buttons, wired to the menu slots.
ReversiView::ReversiView()
{
    this->setFixedSize(750,500);
    // The menu is a fixed canvas; disable both scroll bars.
    this->setHorizontalScrollBarPolicy((Qt::ScrollBarAlwaysOff));
    this->setVerticalScrollBarPolicy((Qt::ScrollBarAlwaysOff));
    titleLabel = new QLabel("Reversi",this);
    titleLabel->setGeometry(ButtonXPos,10,500,50);
    QFont titleFont( "Arial", 32, QFont::Bold);
    titleLabel->setFont(titleFont);
    titleLabel->setAttribute(Qt::WA_TranslucentBackground);
    titleLabel->setStyleSheet("QLabel { color : red; }");
    // Buttons stacked vertically, ButtonSpacing apart.
    enterGame = positionButton("Play",InitialButtonPosition);
    gameSettings = positionButton("Game Settings",InitialButtonPosition+ButtonHeight+ButtonSpacing);
    howToPlay = positionButton("How To Play",InitialButtonPosition+ButtonHeight*2+2*ButtonSpacing);
    exit = positionButton("Exit",InitialButtonPosition+ButtonHeight*3+3*ButtonSpacing);
    connect(enterGame, SIGNAL(clicked(bool)), this, SLOT(StartGame()));
    connect(exit, SIGNAL(clicked(bool)), this, SLOT(ExitGame()));
    connect(howToPlay, SIGNAL(clicked(bool)), this, SLOT(showHelp()));
    // NOTE(review): gameSettings has no clicked() connection here — confirm
    // whether settings are unimplemented or wired elsewhere.
}

// Creates a menu button with the shared width/height at the given y offset.
QPushButton* ReversiView::positionButton(QString buttonText,int ypos){
    auto button = new QPushButton(buttonText,this);
    button->setObjectName(buttonText);
    button->setGeometry(QRect(ButtonXPos, ypos, ButtonWidth, ButtonHeight));
    return button;
}

// Attaches the game scene and makes it this view's active scene.
void ReversiView::setGameScene(ReversiScene* scene){
    gameScene = scene;
    this->setScene(scene);
}
// Hides all menu widgets and hands control to the game scene.
void ReversiView::StartGame(){
    titleLabel->hide();
    howToPlay->hide();
    exit->hide();
    gameSettings->hide();
    enterGame->hide();
    gameScene->StartGame();
}

// Forwards the quit request to whoever owns this view.
void ReversiView::ExitGame(){
    emit gameExited();
}

// Restores all menu widgets and resets the scene to its menu state.
void ReversiView::MainMenu(){
    titleLabel->show();
    howToPlay->show();
    exit->show();
    gameSettings->show();
    enterGame->show();
    gameScene->MainMenu();
}

// Keeps the whole scene visible on resize (aspect ratio may distort).
void ReversiView::resizeEvent(QResizeEvent *event){
    QGraphicsView::resizeEvent(event);
    fitInView(sceneRect(), Qt::IgnoreAspectRatio);
}
// Shows the modal "How to play" dialog.
void ReversiView::showHelp(){
    // Bug fix: the original heap-allocated the QMessageBox with no parent
    // and never deleted it, leaking one dialog per click. A stack-allocated
    // box is destroyed when exec() returns.
    QMessageBox messageBox;
    messageBox.setWindowTitle("Help");
    messageBox.setText("Press on the cell to place your stone, if the move is not valid a popup will show up. Your stone should outflank some of your"
                " opponent stones, and if no move is valid, your turn is skipped. The game ends when the 2 players cant make a move, and the player with "
                "the bigger number of stones wins. Enjoy!");
    messageBox.exec();
}
|
<gh_stars>0
package fr.unice.polytech.si3.qgl.soyouz.classes.marineland.entities.onboard;
import com.fasterxml.jackson.annotation.JsonIgnore;
/**
 * Base class for entities that sit on the ship's deck.
 */
public abstract class DeckEntity extends OnboardEntity
{
    /**
     * Constructor.
     *
     * @param x Abscissa of the entity.
     * @param y Ordinate of the entity.
     */
    protected DeckEntity(int x, int y)
    {
        super(x, y);
    }

    /**
     * For the boat ToString.
     *
     * @return the char associated to the entity ('E' unless overridden).
     */
    @JsonIgnore
    public char getChar()
    {
        return 'E';
    }
}
|
import numpy as np
import skimage.io as iio
from imlib import dtype
def imread(path, as_gray=False, **kwargs):
    """Read the image at *path* and rescale it to float in [-1.0, 1.0].

    uint8/uint16 inputs are mapped linearly from their full integer range;
    float32/float64 inputs are assumed to lie in [0, 1] and are stretched
    to [-1, 1].

    Raises:
        Exception: for any other input dtype.
    """
    # Pass as_gray by keyword so it cannot be shadowed/shifted by kwargs
    # and the call is robust to skimage signature changes.
    image = iio.imread(path, as_gray=as_gray, **kwargs)
    if image.dtype == np.uint8:
        image = image / 127.5 - 1
    elif image.dtype == np.uint16:
        image = image / 32767.5 - 1
    elif image.dtype in [np.float32, np.float64]:
        image = image * 2 - 1.0
    else:
        # Typo fixed: "Inavailable" -> "Unsupported".
        raise Exception("Unsupported image dtype: %s!" % image.dtype)
    return image
def imwrite(image, path, quality=95, **plugin_args):
    """Save *image* to *path* after scaling it by 1 / |max - min|.

    NOTE(review): despite the original "Normalizing between 0 and 1"
    comment, this does NOT shift by the minimum, so the result is not
    confined to [0, 1]; a constant image (max == min) divides by zero,
    and ``quality`` is accepted but never used — confirm intent.
    """
    # np.sqrt(np.square(x)) is simply abs(x): scale by 1/|value range|.
    img_min, img_max = np.amin(image), np.amax(image)
    img = image * (1/np.sqrt(np.square(img_max-img_min)))
    iio.imsave(path, dtype.im2uint(img), **plugin_args)
def imshow(image):
    """Show a [-1.0, 1.0] image."""
    # dtype.im2uint maps [-1, 1] floats back to uint8 for display.
    iio.imshow(dtype.im2uint(image))

# Re-export skimage.io's show() so callers can flush the display queue.
show = iio.show
|
<filename>lib/car/obj/src/cals_r.c
/* **** Notes
Go for months
//*/
# define CALEND
# define CAR
# include "../../../incl/config.h"
/*
 * cals_r - populates the cals_t calendar buffers for a run of months,
 * then delegates the per-month work to cals_r_r.
 *
 * NOTE(review): heavily macro-obfuscated; R(), THEFIRST, CLI_BASE,
 * CALS_OBJS and cv_wk_mo come from config.h, which is not visible here.
 * Every comment below is a best-effort reading — confirm against the
 * macro definitions before relying on them.
 */
signed(__cdecl cals_r(signed(arg),cals_t(*argp))) {
    auto time_t t;
    auto signed i,r;
    auto signed short mo;
    auto signed short flag;  /* NOTE(review): declared but never used here */
    if(!argp) return(0x00);  /* guard: no output structure */
    if(!arg) return(0x00);   /* guard: zero month count */
    if(arg<(0x00)) arg = (0x01+(~arg));  /* two's-complement abs(arg) */
    /* Seed weekday/month data from the first configured day; 0 on failure. */
    r = cv_wk_mo(*(THEFIRST+(R(day,*argp))),CLI_BASE+(R(t,*argp)),*(CLI_BASE+(R(t,*argp))));
    if(!r) return(0x00);
    /* Replicate the base time slot across all CALS_OBJS entries. */
    t = (*(CLI_BASE+(R(t,*argp))));
    i = (CALS_OBJS);
    while(i) *(--i+(R(t,*argp))) = (t);
    /* Same replication for the week-1 slot. */
    t = (*(CLI_BASE+(R(wk1,*argp))));
    i = (CALS_OBJS);
    while(i) *(--i+(R(wk1,*argp))) = (t);
    arg++;
    mo = (~0x00);  /* month marker starts at -1 */
    r = cals_r_r(mo,arg,argp);  /* process the months; propagate result */
    return(r);
}
|
#!/usr/bin/env bash
# install-base.sh -- Packer provisioning script that bootstraps a minimal
# Arch Linux system onto a blank disk: single root partition, syslinux
# bootloader, and a 'tinkerbell' user prepared for Vagrant-style SSH
# access.  Runs as root inside the Packer-built VM.  DESTRUCTIVE: wipes
# the whole target disk.
# stop on errors
set -eu
# Packer exposes the builder type; the QEMU builder presents the disk
# as a virtio device (vda), other builders as sda.
if [[ $PACKER_BUILDER_TYPE == "qemu" ]]; then
DISK='/dev/vda'
else
DISK='/dev/sda'
fi
FQDN='tinkerbell'
KEYMAP='us'
LANGUAGE='en_US.UTF-8'
# Pre-hashed (DES crypt) password, consumed later by usermod/useradd.
PASSWORD=$(/usr/bin/openssl passwd -crypt 'tinkerbell')
TIMEZONE='UTC'
CONFIG_SCRIPT='/usr/local/bin/arch-config.sh'
# Single partition layout: partition 1 spans the whole disk.
ROOT_PARTITION="${DISK}1"
TARGET_DIR='/mnt'
# COUNTRY may be injected by the Packer environment; default to US.
COUNTRY=${COUNTRY:-US}
MIRRORLIST="https://archlinux.org/mirrorlist/?country=${COUNTRY}&protocol=http&protocol=https&ip_version=4&use_mirror_status=on"
echo ">>>> install-base.sh: Clearing partition table on ${DISK}.."
/usr/bin/sgdisk --zap ${DISK}
echo ">>>> install-base.sh: Destroying magic strings and signatures on ${DISK}.."
/usr/bin/dd if=/dev/zero of=${DISK} bs=512 count=2048
/usr/bin/wipefs --all ${DISK}
echo ">>>> install-base.sh: Creating /root partition on ${DISK}.."
# --new=1:0:0 -> partition number 1, default start, default (max) end.
/usr/bin/sgdisk --new=1:0:0 ${DISK}
echo ">>>> install-base.sh: Setting ${DISK} bootable.."
# Attribute 2 on partition 1 = "legacy BIOS bootable" flag for syslinux.
/usr/bin/sgdisk ${DISK} --attributes=1:set:2
echo ">>>> install-base.sh: Creating /root filesystem (ext4).."
# ^64bit disables the ext4 64bit feature -- presumably for syslinux
# compatibility; confirm before changing.
/usr/bin/mkfs.ext4 -O ^64bit -F -m 0 -q -L root ${ROOT_PARTITION}
echo ">>>> install-base.sh: Mounting ${ROOT_PARTITION} to ${TARGET_DIR}.."
/usr/bin/mount -o noatime,errors=remount-ro ${ROOT_PARTITION} ${TARGET_DIR}
echo ">>>> install-base.sh: Setting pacman ${COUNTRY} mirrors.."
# Fetch ranked mirror list and uncomment every Server entry.
curl -s "$MIRRORLIST" | sed 's/^#Server/Server/' > /etc/pacman.d/mirrorlist
echo ">>>> install-base.sh: Bootstrapping the base installation.."
/usr/bin/pacstrap ${TARGET_DIR} base base-devel linux
# Need to install netctl as well: https://github.com/archlinux/arch-boxes/issues/70
# Can be removed when Vagrant's Arch plugin will use systemd-networkd: https://github.com/hashicorp/vagrant/pull/11400
echo ">>>> install-base.sh: Installing basic packages.."
/usr/bin/arch-chroot ${TARGET_DIR} pacman -S --noconfirm gptfdisk openssh syslinux dhcpcd netctl
echo ">>>> install-base.sh: Configuring syslinux.."
/usr/bin/arch-chroot ${TARGET_DIR} syslinux-install_update -i -a -m
# Point the stock syslinux config at our actual root partition and
# shorten the boot menu timeout.
/usr/bin/sed -i "s|sda3|${ROOT_PARTITION##/dev/}|" "${TARGET_DIR}/boot/syslinux/syslinux.cfg"
/usr/bin/sed -i 's/TIMEOUT 50/TIMEOUT 10/' "${TARGET_DIR}/boot/syslinux/syslinux.cfg"
echo ">>>> install-base.sh: Generating the filesystem table.."
/usr/bin/genfstab -p ${TARGET_DIR} >> "${TARGET_DIR}/etc/fstab"
echo ">>>> install-base.sh: Generating the system configuration script.."
# Create an empty executable file, then fill it via the heredoc below.
/usr/bin/install --mode=0755 /dev/null "${TARGET_DIR}${CONFIG_SCRIPT}"
CONFIG_SCRIPT_SHORT=`basename "$CONFIG_SCRIPT"`
# The heredoc is UNQUOTED, so every ${...} below expands NOW, in this
# shell; the resulting fully-expanded script is executed later inside
# the chroot.
cat <<-EOF > "${TARGET_DIR}${CONFIG_SCRIPT}"
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Configuring hostname, timezone, and keymap.."
	echo '${FQDN}' > /etc/hostname
	/usr/bin/ln -s /usr/share/zoneinfo/${TIMEZONE} /etc/localtime
	echo 'KEYMAP=${KEYMAP}' > /etc/vconsole.conf
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Configuring locale.."
	/usr/bin/sed -i 's/#${LANGUAGE}/${LANGUAGE}/' /etc/locale.gen
	/usr/bin/locale-gen
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Creating initramfs.."
	/usr/bin/mkinitcpio -p linux
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Setting root pasword.."
	/usr/bin/usermod --password ${PASSWORD} root
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Configuring network.."
	# Disable systemd Predictable Network Interface Names and revert to traditional interface names
	# https://wiki.archlinux.org/index.php/Network_configuration#Revert_to_traditional_interface_names
	/usr/bin/ln -s /dev/null /etc/udev/rules.d/80-net-setup-link.rules
	/usr/bin/systemctl enable dhcpcd@eth0.service
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Configuring sshd.."
	/usr/bin/sed -i 's/#UseDNS yes/UseDNS no/' /etc/ssh/sshd_config
	/usr/bin/systemctl enable sshd.service
	# Workaround for https://bugs.archlinux.org/task/58355 which prevents sshd to accept connections after reboot
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Adding workaround for sshd connection issue after reboot.."
	/usr/bin/pacman -S --noconfirm rng-tools
	/usr/bin/systemctl enable rngd
	# Vagrant-specific configuration
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Creating tinkerbell user.."
	/usr/bin/useradd --password ${PASSWORD} --comment 'Vagrant User' --create-home --user-group tinkerbell
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Configuring sudo.."
	echo 'Defaults env_keep += "SSH_AUTH_SOCK"' > /etc/sudoers.d/10_tinkerbell
	echo 'tinkerbell ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers.d/10_tinkerbell
	/usr/bin/chmod 0440 /etc/sudoers.d/10_tinkerbell
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Configuring ssh access for tinkerbell.."
	/usr/bin/install --directory --owner=tinkerbell --group=tinkerbell --mode=0700 /home/tinkerbell/.ssh
	/usr/bin/curl --output /home/tinkerbell/.ssh/authorized_keys --location https://raw.github.com/mitchellh/vagrant/master/keys/vagrant.pub
	/usr/bin/chown tinkerbell:tinkerbell /home/tinkerbell/.ssh/authorized_keys
	/usr/bin/chmod 0600 /home/tinkerbell/.ssh/authorized_keys
	echo ">>>> ${CONFIG_SCRIPT_SHORT}: Cleaning up.."
	/usr/bin/pacman -Rcns --noconfirm gptfdisk
EOF
echo ">>>> install-base.sh: Entering chroot and configuring system.."
/usr/bin/arch-chroot ${TARGET_DIR} ${CONFIG_SCRIPT}
rm "${TARGET_DIR}${CONFIG_SCRIPT}"
# http://comments.gmane.org/gmane.linux.arch.general/48739
echo ">>>> install-base.sh: Adding workaround for shutdown race condition.."
# /root/poweroff.timer is presumably uploaded by an earlier Packer
# provisioner -- confirm against the Packer template.
/usr/bin/install --mode=0644 /root/poweroff.timer "${TARGET_DIR}/etc/systemd/system/poweroff.timer"
echo ">>>> install-base.sh: Completing installation.."
/usr/bin/sleep 3
/usr/bin/umount ${TARGET_DIR}
# Turning network interfaces down to make sure SSH session was dropped on host.
# More info at: https://www.packer.io/docs/provisioners/shell.html#handling-reboots
echo '==> Turning down network interfaces and rebooting'
for i in $(/usr/bin/netstat -i | /usr/bin/tail +3 | /usr/bin/awk '{print $1}'); do /usr/bin/ip link set ${i} down; done
/usr/bin/systemctl reboot
echo ">>>> install-base.sh: Installation complete!"
#!/bin/bash
# Build the project documentation site into ./public.
# Fails fast on any error.
set -e
# --ignore-engines: skip Node engine-version checks during install.
yarn install --ignore-engines
# -p: succeed even when ./public already exists (plain `mkdir public`
# aborted the script on re-runs because of `set -e`).
mkdir -p public
yarn documentation:build
/*
*
*/
package net.community.chest.javaagent.dumper;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.DateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import net.community.chest.io.FileUtil;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* <P>Copyright as per GPLv2</P>
* @author <NAME>.
* @since Jul 25, 2011 1:47:00 PM
*/
/**
 * Unit tests for {@code DumperClassFileTransformer}'s configuration
 * resolution: output-folder value lookup (options map, system property,
 * default), output-folder creation, and configuration-location URLs.
 *
 * Extends {@link Assert} so assertion methods are inherited rather than
 * statically imported.
 */
public class DumperClassFileTransformerTest extends Assert {
// Shared options map, cleared before each test in setUp().
private final Map<String,String> optsMap=new HashMap<String,String>();
public DumperClassFileTransformerTest ()
{
super();
}
/**
 * Resets shared state before each test: empties the options map and
 * removes the output-root-folder system property so tests cannot leak
 * into each other through JVM-global state.
 */
@Before
public void setUp ()
{
if (optsMap.size() > 0)
optsMap.clear();
System.clearProperty(DumperClassFileTransformer.OUTPUT_ROOTFOLDER_PROP);
}
/** The options-map entry should win when present. */
@Test
public void testResolveOutputFileValueFromOptionsMap ()
{
final String TEST_NAME="testResolveOutputFileValueFromOptionsMap";
optsMap.put(DumperClassFileTransformer.OUTPUT_ROOTFOLDER_PROP, TEST_NAME);
assertEquals("Mismatched option value", TEST_NAME, DumperClassFileTransformer.resolveOutputFileValue(optsMap));
}
/**
 * With an empty options map, the system property should be consulted.
 * First asserts the property is unset (guards against leakage), then
 * sets it and expects it back.  NOTE(review): the property set here is
 * cleared by the next test's setUp(), not by this test itself.
 */
@Test
public void testResolveOutputFileValueFromSysProp ()
{
final String TEST_NAME="testResolveOutputFileValueFromSysProp";
assertNull("Unexpected definition of property=" + DumperClassFileTransformer.OUTPUT_ROOTFOLDER_PROP,
System.getProperty(DumperClassFileTransformer.OUTPUT_ROOTFOLDER_PROP));
System.setProperty(DumperClassFileTransformer.OUTPUT_ROOTFOLDER_PROP, TEST_NAME);
assertEquals("Mismatched property value", TEST_NAME, DumperClassFileTransformer.resolveOutputFileValue(optsMap));
}
/**
 * With neither option nor property set, the default is
 * {@code <user.dir>/<transformer simple class name>}.
 */
@Test
public void testResolveDefaultOutputFileValue ()
{
assertEquals("Mismatched default value",
System.getProperty("user.dir") + File.separator + DumperClassFileTransformer.class.getSimpleName(),
DumperClassFileTransformer.resolveOutputFileValue(optsMap));
}
/**
 * Resolving an output root folder over a path occupied by a regular
 * file must throw {@link IllegalStateException} (expected via the
 * annotation); the fail() call only runs if no exception was thrown.
 */
@Test(expected=IllegalStateException.class)
public void testResolveExistingOutputFilePath () throws IOException
{
File dummyFile=new File(System.getProperty("java.io.tmpdir"), "testResolveExistingOutputFilePath.txt");
Writer w=new FileWriter(dummyFile);
try
{
w.write(DateFormat.getDateTimeInstance().format(new Date(System.currentTimeMillis())));
}
finally
{
w.close();
}
File result=DumperClassFileTransformer.resolveOutputRootFolder(dummyFile.getAbsolutePath());
fail("Unexpected output file path resolution: " + result.getAbsolutePath());
}
/**
 * Resolving a non-existent folder path should create the folder and
 * return it readable and writable.
 */
@Test
public void testResolveOutputFilePath ()
{
File tmpDir=new File(System.getProperty("java.io.tmpdir")),
tstDir=new File(tmpDir, "testResolveOutputFilePath");
if (tstDir.exists() && (!tstDir.delete()))
fail("Failed to clear test folder: " + tstDir.getAbsolutePath());
File rootFolder=DumperClassFileTransformer.resolveOutputRootFolder(tstDir.getAbsolutePath());
assertEquals("Resolved folder does not match original", tstDir.getAbsolutePath(), rootFolder.getAbsolutePath());
assertTrue("Test folder not re-created: " + tstDir.getAbsolutePath(), tstDir.exists() && tstDir.canRead() && tstDir.canWrite());
}
/**
 * Configuration-location resolution, three cases in order: no option
 * (falls back to the default classpath resource), option set to a
 * filesystem path, and option set to an explicit URL.  Each case is
 * scoped in its own block; the options map carries over between cases
 * by design (the later put() overwrites the same key).
 */
@Test
public void testResolveConfigurationLocation () throws MalformedURLException
{
final Class<?> anchor=getClass();
{
final URL defaultURL=anchor.getResource(DumperClassFileTransformer.DEFAULT_CONFIG_RESOURCE);
assertNotNull("Not found default configuration resource", defaultURL);
final URL resURL=DumperClassFileTransformer.resolveConfigurationLocation(anchor, optsMap);
assertNotNull("Not resolved default configuration resource", resURL);
assertEquals("Mismatched default resource location", defaultURL.toExternalForm(), resURL.toExternalForm());
}
final File tmpDir=new File(System.getProperty("java.io.tmpdir"));
final URL tmpURL=FileUtil.toURL(tmpDir);
{
optsMap.put(DumperClassFileTransformer.CONFIG_URL_PROP, tmpDir.getAbsolutePath());
final URL resURL=DumperClassFileTransformer.resolveConfigurationLocation(anchor, optsMap);
assertNotNull("Not resolved file configuration resource", resURL);
assertEquals("Mismatched file resource location", tmpURL.toExternalForm(), resURL.toExternalForm());
}
{
optsMap.put(DumperClassFileTransformer.CONFIG_URL_PROP, tmpURL.toExternalForm());
final URL resURL=DumperClassFileTransformer.resolveConfigurationLocation(anchor, optsMap);
assertNotNull("Not resolved URL configuration resource", resURL);
assertEquals("Mismatched URL resource location", tmpURL.toExternalForm(), resURL.toExternalForm());
}
}
}
|
#!/bin/bash
# Launch hostapd with the local configuration file, forwarding any extra
# command-line arguments.  "$@" is quoted so arguments containing
# whitespace are passed through intact (unquoted $@ word-splits them);
# exec replaces this shell so hostapd receives signals directly.
exec hostapd hostapd.conf "$@"
def calculate_weighted_centroid(points):
    """Compute the weighted centroid of 2-D weighted points.

    Parameters
    ----------
    points : iterable of (x, y, weight) triples
        Any iterable is accepted; it is materialized once so generators
        work too.

    Returns
    -------
    tuple of (float, float)
        The (x, y) centroid, each coordinate rounded to 2 decimal places.

    Raises
    ------
    ValueError
        If ``points`` is empty or the weights sum to zero -- previously
        this surfaced as a cryptic ZeroDivisionError.
    """
    points = list(points)
    total_weight = sum(point[2] for point in points)
    if not points or total_weight == 0:
        raise ValueError("weighted centroid undefined: total weight is zero")
    x_sum = sum(point[0] * point[2] for point in points)
    y_sum = sum(point[1] * point[2] for point in points)
    x_centroid = round(x_sum / total_weight, 2)
    y_centroid = round(y_sum / total_weight, 2)
    return x_centroid, y_centroid
<reponame>KathiaRangel/bluelatex
[
{
"key":"_Username_",
"value":"Username",
"description":"Username"
},
{
"key":"_Password_",
"value":"Password",
"description":"Password"
},
{
"key":"_Login_",
"value":"Login",
"description":"Login"
},
{
"key":"_LoginTitle_",
"value":"Login",
"description":"The title of the login page"
},
{
"key":"_RegisterTitle_",
"value":"Register now",
"description":"The title of the register page"
},
{
"key":"_Required_",
"value":"Required",
"description":"Required"
},
{
"key":"_FirstName_",
"value":"First-name",
"description":"First-name"
},
{
"key":"_LastName_",
"value":"Last-name",
"description":"Last-name"
},
{
"key":"_Email_",
"value":"Email",
"description":"Email"
},
{
"key": "_Error_format_",
"value": "Invalid format",
"description": "Field of a form is not correct"
},
{
"key": "_Username_error_format_",
"value": "Invalid format: \":\" is not allowed in user names",
"description": "Field of a form is not correct"
},
{
"key":"_Register_",
"value":"Register",
"description":"Register"
},
{
"key":"_Affiliation_",
"value":"Affiliation",
"description":"Affiliation"
},
{
"key":"_Registration_Password_will_sent_in_email_",
"value":"A confirmation email will be sent with instructions to define your password",
"description":"Registration Password will be sent in email"
},
{
"key":"_Registration_Success_",
"value":"You are now registered.",
"description":"Registration success"
},
{
"key":"_Check_Mailbox_",
"value":"Please check your mailbox to define your password.",
"description":"Check mailbox"
},
{
"key":"_Registration_Some_parameters_are_missing_",
"value":"Some parameters are missing",
"description":"Error registration"
},
{
"key":"_Registration_The_captcha_did_not_verify_",
"value":"The captcha did not verify",
"description":"Error registration"
},
{
"key":"_Registration_User_with_the_same_username_already_exists_",
"value":"A user with the same name already exists",
"description":"Error registration"
},
{
"key":"_Registration_Something_wrong_happened_",
"value":"Something wrong happened on the server side and the user could not be registered",
"description":"Error registration"
},
{
"key":"_Login_Some_parameters_are_missing_",
"value":"Some parameters are missing",
"description":"Login error"
},
{
"key":"_Reset_Wait_email_confirm_request_",
"value":"Please check your mailbox",
"description":"Reset password succed"
},
{
"key":"_Login_Wrong_username_and_or_password_",
"value":"Wrong username and/or password",
"description":"Login error"
},
{
"key":"_Login_Something_wrong_happened_",
"value":"Something wrong happened on the server side",
"description":"Login error"
},
{
"key":"_Password_Lost_",
"value":"<PASSWORD>",
"description":"Login page"
},
{
"key": "_Confirm_Password_",
"value":"Confirm password",
"description":"Reset password page"
},
{
"key":"_ChangePasswordTitle_",
"value":"Change password",
"description":"Reset password page"
},
{
"key":"_Reset_User_not_found",
"value":"User not found",
"description":"Reset page"
},
{
"key":"_ResetTitle_",
"value":"Forgot your password?",
"description":"Reset page title"
},
{
"key":"_Reset_",
"value":"Reset",
"description":"Reset password page"
},
{
"key":"_Submit_",
"value":"Submit",
"description":"Send a form"
},
{
"key":"_Cancel_",
"value":"Cancel",
"description":"Reset a form"
},
{
"key":"_Yes_",
"value":"Yes",
"description":"Yes"
},
{
"key":"_No_",
"value":"No",
"description":"No"
},
{
"key":"_On_",
"value":"On",
"description":"On"
},
{
"key":"_Off_",
"value":"Off",
"description":"Off"
},
{
"key":"_New_paper_",
"value":"New paper",
"description":"New paper"
},
{
"key":"_Title_",
"value":"Title",
"description":"Title"
},
{
"key":"_Paper_name_",
"value":"Paper name",
"description":"Paper name"
},
{
"key":"_Name_",
"value":"Name",
"description":"Name"
},
{
"key":"_Template_",
"value":"Template",
"description":"Template"
},
{
"key":"_Visibility_",
"value":"Visibility",
"description":"Visibility"
},
{
"key":"_Create_",
"value":"Create",
"description":"Create"
},
{
"key":"_Edit_paper_",
"value":"Edit paper",
"description":"Edit paper"
},
{
"key":"_Edit_",
"value":"Edit",
"description":"Edit"
},
{
"key":"_Display_",
"value":"Display",
"description":"Editor settings: Display"
},
{
"key":"_Theme_",
"value":"Theme",
"description":"Ace settings: Theme"
},
{
"key":"_Font_size_",
"value":"Font size",
"description":"Ace settings: Font size"
},
{
"key":"_Key_binding_",
"value":"Key binding",
"description":"Ace settings: Key binding"
},
{
"key":"_Wrap_mode_",
"value":"Wrap mode",
"description":"Ace settings: Show wrap"
},
{
"key":"_Full_line_selection_",
"value":"Full line selection",
"description":"Ace settings: Full lline selection"
},
{
"key":"_Highlight_active_line_",
"value":"Highlight active line",
"description":"Ace settings: Highlight active line"
},
{
"key":"_Show_invisibles_",
"value":"Show invisibles",
"description":"Ace settings: Show invisibles"
},
{
"key":"_Show_indent_guides_",
"value":"Show indent guides",
"description":"Ace settings: Show indent guides"
},
{
"key":"_Show_print_margin_",
"value":"Show print margin",
"description":"Ace settings: Show print margin"
},
{
"key":"_Use_soft_tab_",
"value":"User soft tab",
"description":"Ace settings: User soft tab"
},
{
"key":"_Highlight_selected_word_",
"value":"Highlight selected word",
"description":"Ace settings: Highlight selected word"
},
{
"key":"_Enable_behaviours_",
"value":"Enable auto-pairing of special characters (', \", ...)",
"description":"Ace settings: Enable behaviours"
},
{
"key":"_Fade_fold_widgets_",
"value":"Fade fold widgets",
"description":"Ace settings: Fade fold widgets"
},
{
"key":"_Incremental_search_",
"value":"Incremental search",
"description":"Ace settings: Incremental search"
},
{
"key":"_Profile_",
"value":"Profile",
"description":"Profile"
},
{
"key":"_Logout_",
"value":"Logout",
"description":"Logout"
},
{
"key":"_Papers_",
"value":"Papers",
"description":"Papers"
},
{
"key":"_Search..._",
"value":"Search...",
"description":"Search placeholder"
},
{
"key":"_Id_",
"value":"Id",
"description":"id"
},
{
"key":"_Date_",
"value":"Date",
"description":"Date"
},
{
"key":"_Creation_date_",
"value":"Creation date",
"description":"Creation date"
},
{
"key":"_Authors_",
"value":"Authors",
"description":"Authors"
},
{
"key":"_Author_",
"value":"Author",
"description":"Author"
},
{
"key":"_User_",
"value":"User",
"description":"User"
},
{
"key":"_No_author_",
"value":"No author",
"description":"No author"
},
{
"key":"_Sort_by_",
"value":"Sort by",
"description":"Sort by"
},
{
"key":"_Delete_",
"value":"Delete",
"description":"Delete"
},
{
"key":"_Delete_paper_confirm_",
"value":"Are you sure you want to remove the paper: \"\\s\"?",
"description":"Delete paper confirmation"
},
{
"key":"_Delete_synchronized_file_confirm_",
"value":"Are you sure you want to remove the file: \"\\s\"?",
"description":"Delete synchronized file confirmation"
},
{
"key":"_Delete_resource_confirm_",
"value":"Are you sure you want to remove the resource: \"\\s\"?",
"description":"Delete resource confirmation"
},
{
"key":"_Ascending_",
"value":"Ascending",
"description":"Ascending"
},
{
"key":"_Descending_",
"value":"Descending",
"description":"Descending"
},
{
"key":"_Date_filter_",
"value":"Date filter",
"description":"Date filter"
},
{
"key":"_No_paper_found_",
"value":"No paper found",
"description":"No paper found"
},
{
"key":"_Today_",
"value":"Today",
"description":"Today"
},
{
"key":"_Yesterday_",
"value":"Yesterday",
"description":"Yesterday"
},
{
"key":"_This_week_",
"value":"This week",
"description":"This week"
},
{
"key":"_Last_week_",
"value":"Last week",
"description":"Last week"
},
{
"key":"_This_month_",
"value":"This month",
"description":"This month"
},
{
"key":"_This_year_",
"value":"This year",
"description":"This year"
},
{
"key":"_Role_filter_",
"value":"Role filter",
"description":"Role filter"
},
{
"key":"_Tags_",
"value":"Tags",
"description":"Tags"
},
{
"key":"_Tag_filter_",
"value":"Tag filter",
"description":"Tag filter"
},
{
"key":"_All_",
"value":"All",
"description":"All"
},
{
"key":"_Reviewer_",
"value":"Reviewer",
"description":"Reviewer"
},
{
"key":"_No_reviewer_",
"value":"No reviewer",
"description":"No reviewer"
},
{
"key":"_Reviewers_",
"value":"Reviewers",
"description":"Reviewers"
},
{
"key":"_Options_",
"value":"Options",
"description":"Options"
},
{
"key":"_Delete_paper_Something_wrong_happened_",
"value":"Something wrong happened",
"description":"Delete paper: Something wrong happened"
},
{
"key":"_Delete_paper_Authenticated_user_has_no_sufficient_rights_to_delete_the_paper_",
"value":"Authenticated user has no sufficient rights to delete the paper",
"description":"Delete paper: Authenticated user has no sufficient rights to delete the paper"
},
{
"key":"_Delete_paper_User_must_be_authentified",
"value":"User must be authentified",
"description":"Delete paper: Delete paper User must be authentified"
},
{
"key":"_List_Papers_Not_connected_",
"value":"User must be authentified",
"description":"Delete paper: Delete paper User must be authentified"
},
{
"key":"_List_Papers_Something_wrong_happened_",
"value":"Something wrong happened",
"description":"List paper: Something wrong happened"
},
{
"key":"_Edit_profile_",
"value":"Edit profile",
"description":"Edit profile"
},
{
"key":"_Save_",
"value":"Save",
"description":"Save"
},
{
"key":"_Reset_password_",
"value":"Reset password",
"description":"Reset password"
},
{
"key":"_Goodbye_",
"value":"Goodbye",
"description":"Goodbye"
},
{
"key":"_Role_",
"value":"Role",
"description":"Role"
},
{
"key":"_Exit_paper_confirm_",
"value":"Are you sure you want to exit the paper?",
"description":"Exit paper confirm"
},{
"key":"_Install_itsalltext_",
"value":"Please install <a href='https://addons.mozilla.org/fr/firefox/addon/its-all-text/'>It's all text</a>",
"description":"itsalltext not present invit to install it"
},
{
"key":"_New_resource_",
"value":"Upload a resource",
"description":"Upload a new Resource tiltle"
},
{
"key":"_Resources_",
"value":"Resources",
"description":"Resources"
},
{
"key":"_Resource_",
"value":"Resource",
"description":"Resource"
},
{
"key":"_File_",
"value":"File",
"description":"File"
},
{
"key":"_Line_",
"value":"Line",
"description":"Line"
},
{
"key":"_No_resource_found_",
"value":"No resource found",
"description":"No resource found"
},
{
"key":"_Compile_",
"value":"Compile",
"description":"Compile"
},
{
"key":"_Compiler_",
"value":"Compiler",
"description":"Compiler"
},
{
"key":"_Download_",
"value":"Download",
"description":"Download"
},
{
"key":"_Cleanup_",
"value":"Cleanup",
"description":"Cleanup"
},
{
"key":"_Compilation_dir_cleaned_up_",
"value":"Compilation directory cleaned up",
"description":"Cleaned up"
},
{
"key":"_Share_",
"value":"Share",
"description":"Share"
},
{
"key":"_Download_ZIP_",
"value":"Download ZIP",
"description":"Download ZIP"
},
{
"key":"_Download_Log_",
"value":"Download Log",
"description":"Download log"
},
{
"key":"_Download_PDF_",
"value":"Download PDF",
"description":"Download PDF"
},
{
"key":"_Emacs_users_",
"value":"Emacs users",
"description":"Emacs users"
},
{
"key":"_Interval_",
"value":"Interval",
"description":"Interval"
},
{
"key":"_Compiler_Interval_",
"value":"Compiler interval",
"description":"Compiler Interval"
},
{
"key":"_Upload_",
"value":"Upload",
"description":"Upload"
},
{
"key":"_Cancel_upload_",
"value":"Cancel upload",
"description":"Cancel upload"
},
{
"key":"_Select_a_file_",
"value":"Select a file",
"description":"Select a file"
},
{
"key":"_or_drag_a_file_",
"value":"or drag a file",
"description":"or drag a file"
},
{
"key":"_New_file_",
"value":"New file",
"description":"New file"
},
{
"key":"_Filename_",
"value":"Filename",
"description":"Filename"
},
{
"key":"_Get_info_paper_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Get_info_paper_Paper_not_found_",
"value":"Paper not found",
"description":"Paper not found"
},
{
"key":"_Get_info_paper_Something_wrong_happened_",
"value":"Unable to get paper info",
"description":"Unable to get paper info"
},
{
"key":"_Get_roles_paper_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Get_roles_paper_Paper_not_found_",
"value":"Paper not found",
"description":"Paper not found"
},
{
"key":"_Get_roles_paper_Something_wrong_happened_",
"value":"Unable to get paper roles",
"description":"Unable to get paper roles"
},
{
"key":"_Get_users_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Get_users_Something_wrong_happened_",
"value":"Unable to list users",
"description":"Unable to list users"
},
{
"key":"_Edit_paper_Some_parameters_are_missing_",
"value":"Please complete all fields",
"description":"Some fields are missing"
},
{
"key":"_Edit_paper_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Edit_paper_Paper_not_found_",
"value":"Unable to find the paper",
"description":"Edit paper: Paper not found"
},
{
"key":"_Edit_paper_Something_wrong_happened_",
"value":"Unable to edit paper",
"description":"Unable to edit paper"
},
{
"key":"_New_paper_Some_parameters_are_missing_",
"value":"Please complete all fields",
"description":"New paper: some fields are missing"
},
{
"key":"_New_paper_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_New_paper_Something_wrong_happened_",
"value":"Unable to create the new paper",
"description":"Unable to create the new paper"
},
{
"key":"_No_synchronized_file_found_",
"value":"No synchronized file found",
"description":"No synchronized file found"
},{
"key":"_Get_synchronized_resource_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Get_synchronized_resource_Something_wrong_happened_",
"value":"Unable to get synchronized resource",
"description":"Unable to get synchronized resource"
},
{
"key":"_Get_resources_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Get_resources_Something_wrong_happened_",
"value":"Unable to list resources",
"description":"Unable to list resources"
},
{
"key":"_Get_compiler_Unable_to_get_compiler_info_",
"value":"Unable to get compiler info",
"description":"Unable to get compiler info"
},
{
"key":"_Get_compilers_Unable_to_get_compiler_list_",
"value":"Unable to list compilers",
"description":"Unable to list compilers"
},
{
"key":"_Upload_resource_Some_parameters_are_missing_",
"value":"Please complete all fields",
"description":"New resource: some fields are missing"
},
{
"key":"_Upload_resource_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Upload_resource_Something_wrong_happened_",
"value":"Unable to upload resource",
"description":"Unable to upload resource"
},
{
"key":"_Delete_resource_Some_parameters_are_missing_",
"value":"Please complete all fields",
"description":"Delete resource: some fields are missing"
},
{
"key":"_Delete_resource_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Delete_resource_Something_wrong_happened_",
"value":"Unable to delete the resource",
"description":"Unable to delete the resource"
},
{
"key":"_Logout_Something_wrong_happened_",
"value":"Unable to logout",
"description":"Unable to logout"
},
{
"key":"_Logout_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Edit_profile_Some_parameters_are_missing_",
"value":"Please complete all fields",
"description":"Edit profile: some fields are missing"
},
{
"key":"_Edit_profile_Not_connected_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Edit_profile_Something_wrong_happened_",
"value":"Unable to edit profile",
"description":"Unable to edit profile"
},
{
"key":"_Delete_account_",
"value":"Delete account",
"description":"Delete account"
},
{
"key":"_Delete_account_confirm_",
"value":"Are you sure you want to delete your account?",
"description":"Delete account: confirmation message"
},
{
"key":"_Remove_user_Captcha_not_verify_or_user_not_authenticated_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Remove_user_The_captcha_did_not_verify_",
"value":"Captcha veritication not correct",
"description":"Captcha veritication not correct"
},
{
"key":"_Remove_user_The_user_still_owns_papers_",
"value":"You can't remove your account because you have still papers",
"description":"Remove user: The user still has owns papers"
},
{
"key":"_Remove_user_Something_wrong_happened_",
"value":"Unable to remove user",
"description":"Unable to remove user"
},
{
"key":"_Edit_profile_success_",
"value":"Profile edited",
"description":"Edit profile: success"
},
{
"key":"_Edit_profile_No_enough_data_",
"value":"Please complete all fields",
"description":"Edit profile: some fields are missing"
},
{
"key":"_Edit_profile_User_must_be_authenticated_",
"value":"Your are not connected",
"description":"User not connected"
},
{
"key":"_Edit_profile_Not_authorized_to_modifiy_the_user_data_",
"value":"Your are not authorized to modifify the data of this user",
"description":"_Edit_profile_Not_authorized_to_modifiy_the_user_data_"
},
{
"key":"_Edit_profile_User_does_not_exist_",
"value":"User not found",
"description":"User not found"
},
{
"key":"_Edit_profile_No_revision_obsolete_revision_was_provided_in_the_request_",
"value":"The current revision is obsolete. Please refresh the page.",
"description":"_Edit_profile_No_revision_obsolete_revision_was_provided_in_the_request_"
},
{
"key":"_Errors_",
"value":"Errors",
"description":"Errors"
},
{
"key":"_Warnings_",
"value":"Warnings",
"description":"Warnings"
},
{
"key":"_Clone_me_",
"value":"Clone me!",
"description":"Clone me in the header"
},
{
"key":"_Report_an_issue_",
"value":"Report an issue!",
"description":"Report an issue in the header"
},
{
"key":"_Captcha_",
"value":"Captcha",
"description":"Registration captcha label"
},
{
"key":"_username_tooltip_",
"value":"The unique name of the user.",
"description":"tooltip: Username"
},
{
"key":"_first_name_tooltip_",
"value":"The first name of the user.",
"description":"tooltip: Firstname"
},
{
"key":"_last_name_tooltip_",
"value":"The last name of the user.",
"description":"tooltip: lastname"
},
{
"key":"_password_tooltip_",
"value":"Choose your password.",
"description":"tooltip: password"
},
{
"key":"_confirm_password_tooltip_",
"value":"Confirm your chosen password.",
"description":"tooltip: confirm password"
},
{
"key":"_files_tooltip_",
"value":"All files of the paper.",
"description":"tooltip paper page: Files tab"
},
{
"key":"_toc_tooltip_",
"value":"Table of content of the current file.",
"description":"tooltip paper page: TOC tab"
},
{
"key":"_debug_tooltip_",
"value":"Debug.",
"description":"tooltip paper page: debug tab"
},
{
"key":"_compile_tooltip_",
"value":"Compile the document.",
"description":"tooltip paper page: compile"
},
{
"key":"_cleanup_tooltip_",
"value":"Cleanup the compilation directory.",
"description":"tooltip paper page: cleanup"
},
{
"key":"_share_tooltip_",
"value":"Manage paper permissions",
"description":"tooltip share"
},
{
"key":"_download_pdf_tooltip_",
"value":"Download the generated PDF.",
"description":"tooltip paper page: download pdf"
},
{
"key":"_download_zip_tooltip_",
"value":"Download the document source files as zip archive.",
"description":"tooltip paper page: download zip"
},
{
"key":"_download_log_tooltip_",
"value":"Download document compilation log.",
"description":"tooltip paper page: compile"
},
{
"key":"_scale_tooltip_",
"value":"Change the scale factor of the preview.",
"description":"tooltip paper page: scale"
},
{
"key":"_new_file_tooltip_",
"value":"Create a new synchronized file.",
"description":"tooltip paper page: new file"
},
{
"key":"_upload_file_tooltip_",
"value":"Upload a resource.",
"description":"tooltip paper page: upload resource"
},
{
"key":"_delete_resource_tooltip_",
"value":"Delete the resource.",
"description":"tooltip paper page: delete resource"
},
{
"key":"_view_resource_tooltip_",
"value":"Display the resource.",
"description":"tooltip paper page: view resource"
},
{
"key":"_download_resource_tooltip_",
"value":"Download the resource.",
"description":"tooltip paper page: download resource"
},
{
"key":"_delete_file_tooltip_",
"value":"Delete the file.",
"description":"tooltip paper page: delete file"
},
{
"key":"_paper_name_tooltip_",
"value":"The paper name allows you to identify it quickly in the paper list.",
"description":"tooltip new paper page: paper name"
},
{
"key":"_paper_title_tooltip_",
"value":"The title of the paper.",
"description":"tooltip new paper page: paper title"
},
{
"key":"_paper_template_tooltip_",
"value":"The template of the paper.",
"description":"tooltip paper page: paper template"
},
{
"key":"_paper_visbility_tooltip_",
"value":"private papers can only be edited by authors, whereas public ones can be edited by anybody (even unregistered people).",
"description":"tooltip paper page: private paper"
},
{
"key":"_email_tooltip_",
"value":"The email address used to contact you and notify you about changes.",
"description":"tooltip profile: email"
},
{
"key":"_affiliation_tooltip_",
"value":"The affiliation is your university/lab or company name.",
"description":"tooltip profile: affiliation"
},
{
"key":"_captcha_tooltip_",
"value":"The captcha is used to verify that you are not a robot.",
"description":"tooltip register: captcha"
},
{
"key":"_paper_authors_tooltip_",
"value":"Authors are users which are allowed to edit and read the paper.",
"description":"tooltip edit paper: authors"
},
{
"key":"_paper_reveiwers_tooltip_",
"value":"Reviewers are users which are allowed to read the paper.",
"description":"tooltip edit paper: reveiwers"
},
{
"key":"_compiler_type_tooltip_",
"value":"The compiler used to compile the paper.",
"description":"tooltip edit paper: compiler"
},
{
"key":"_compiler_interval_tooltip_",
"value":"The interval between two compilations (in seconds).",
"description":"tooltip edit paper: interval"
},
{
"key":"_synctex_tooltip_",
"value":"Whether the compiler generates SyncTeX data.",
"description":"tooltip edit paper: affiliation"
}
]
|
<filename>src/main/java/com/ait/lienzo/ks/client/views/components/AlignDistributeViewComponent.java
/*
* Copyright (c) 2018 Ahome' Innovation Technologies. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ait.lienzo.ks.client.views.components;
import com.ait.lienzo.client.core.shape.Circle;
import com.ait.lienzo.client.core.shape.Layer;
import com.ait.lienzo.client.core.shape.Rectangle;
import com.ait.lienzo.client.core.shape.Star;
import com.ait.lienzo.client.core.shape.Text;
import com.ait.lienzo.client.core.shape.wires.AlignAndDistribute;
import com.ait.lienzo.ks.client.views.AbstractViewComponent;
import com.ait.lienzo.shared.core.types.ColorName;
public class AlignDistributeViewComponent extends AbstractViewComponent
{
    public AlignDistributeViewComponent()
    {
        // Layer hosting every shape that takes part in align/distribute.
        final Layer shapes = new Layer();

        // Red 100x100 square.
        final Rectangle redSquare = new Rectangle(100, 100);
        redSquare.setX(100);
        redSquare.setY(300);
        redSquare.setDraggable(true);
        redSquare.setFillColor("#CC0000");
        redSquare.setFillAlpha(0.75);
        redSquare.setStrokeWidth(2);
        redSquare.setStrokeColor(ColorName.BLACK);
        shapes.add(redSquare);

        // Green circle, radius 50.
        final Circle greenCircle = new Circle(50);
        greenCircle.setX(320);
        greenCircle.setY(325);
        greenCircle.setDraggable(true);
        greenCircle.setFillColor("#00CC00");
        greenCircle.setFillAlpha(0.75);
        greenCircle.setStrokeWidth(2);
        greenCircle.setStrokeColor(ColorName.BLACK);
        shapes.add(greenCircle);

        // Olive 100x100 square.
        final Rectangle oliveSquare = new Rectangle(100, 100);
        oliveSquare.setX(500);
        oliveSquare.setY(250);
        oliveSquare.setDraggable(true);
        oliveSquare.setFillColor("#AACC00");
        oliveSquare.setFillAlpha(0.75);
        oliveSquare.setStrokeWidth(2);
        oliveSquare.setStrokeColor(ColorName.BLACK);
        shapes.add(oliveSquare);

        // Teal rounded rectangle.
        final Rectangle roundedRect = new Rectangle(300, 150);
        roundedRect.setCornerRadius(8);
        roundedRect.setX(50);
        roundedRect.setY(50);
        roundedRect.setDraggable(true);
        roundedRect.setFillColor("#55CCAA");
        roundedRect.setFillAlpha(0.75);
        roundedRect.setStrokeWidth(2);
        roundedRect.setStrokeColor(ColorName.BLACK);
        shapes.add(roundedRect);

        // Bold pink "Align" label.
        final Text alignLabel = new Text("Align");
        alignLabel.setX(500);
        alignLabel.setY(500);
        alignLabel.setDraggable(true);
        alignLabel.setFontSize(96);
        alignLabel.setFontStyle("bold");
        alignLabel.setFillColor(ColorName.HOTPINK);
        alignLabel.setFillAlpha(0.75);
        alignLabel.setStrokeWidth(2);
        alignLabel.setStrokeColor(ColorName.BLACK);
        shapes.add(alignLabel);

        // Five-pointed purple star.
        final Star purpleStar = new Star(5, 50, 100);
        purpleStar.setX(250);
        purpleStar.setY(550);
        purpleStar.setDraggable(true);
        purpleStar.setFillColor(ColorName.DARKORCHID);
        purpleStar.setFillAlpha(0.75);
        purpleStar.setStrokeWidth(2);
        purpleStar.setStrokeColor(ColorName.BLACK);
        shapes.add(purpleStar);

        getLienzoPanel().add(shapes);
        getLienzoPanel().setBackgroundLayer(getBackgroundLayer());
        getWorkingContainer().add(getLienzoPanel());

        // Register every shape with the align-and-distribute index so guide
        // lines are drawn while the user drags them around.
        final AlignAndDistribute guides = new AlignAndDistribute(shapes);
        guides.setStrokeWidth(2);
        guides.setStrokeColor(ColorName.DARKBLUE.getValue());
        guides.addShape(redSquare);
        guides.addShape(greenCircle);
        guides.addShape(oliveSquare);
        guides.addShape(roundedRect);
        guides.addShape(alignLabel);
        guides.addShape(purpleStar);
    }
}
|
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.dataimport.netsuite;
import org.ofbiz.base.util.GeneralException;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.service.DispatchContext;
import org.ofbiz.service.ServiceUtil;
import org.opentaps.common.util.UtilMessage;
import org.opentaps.dataimport.ImportDecoder;
import org.opentaps.dataimport.OpentapsImporter;
import java.util.Map;
/**
* Import Net Suite objects into opentaps.
*/
/**
 * Services that import NetSuite export tables (items, customers, customer
 * addresses) into opentaps using the generic {@code OpentapsImporter}.
 */
public class NetSuiteImportServices {

    public static String module = NetSuiteImportServices.class.getName();

    /**
     * Imports NetSuiteItem rows. When context.parentCategoryId is given, it
     * must reference an existing ProductCategory or the service errors out.
     * On success the count of imported rows is returned as "importedRecords".
     */
    public static Map importItems(DispatchContext dctx, Map context) {
        Delegator delegator = dctx.getDelegator();
        int imported = 0;
        String parentCategoryId = (String) context.get("parentCategoryId");
        try {
            // find or create the parent category that we'll put all created categories in
            // NOTE(review): the code below only *finds* the category and fails
            // when it is missing; it never creates one — confirm the comment.
            if (parentCategoryId != null) {
                Map findMap = UtilMisc.toMap("productCategoryId", parentCategoryId);
                GenericValue parentCategory = delegator.findByPrimaryKey("ProductCategory", findMap);
                if (parentCategory == null) {
                    return ServiceUtil.returnError("Parent Category ["+parentCategoryId+"] not found.");
                }
            }
            OpentapsImporter importer = new OpentapsImporter("NetSuiteItem", dctx, new NetSuiteItemDecoder(parentCategoryId));
            importer.configure(context);
            importer.setOrderBy("itemId"); // ordering is important because of dependencies between items
            imported += importer.runImport();
        } catch (GenericEntityException e) {
            return UtilMessage.createAndLogServiceError(e, module);
        }
        Map result = ServiceUtil.returnSuccess();
        result.put("importedRecords", imported);
        return result;
    }

    /**
     * Imports NetSuiteCustomer rows, first importing the customer-type and
     * sales-order-type enumerations they reference. Returns the customer
     * count as "customersImported".
     */
    public static Map importCustomers(DispatchContext dctx, Map context) {
        int imported = 0;
        try {
            // create the customer decoder first, since it performs useful validation
            ImportDecoder customerDecoder = new NetSuiteCustomerDecoder(context, dctx.getDelegator());
            // import the enumerations first
            OpentapsImporter importer = new OpentapsImporter("NetSuiteCustomerType", dctx, new NetSuiteEnumDecoder("PARTY_INDUSTRY"));
            importer.runImport();
            importer = new OpentapsImporter("NetSuiteSalesOrderType", dctx, new NetSuiteEnumDecoder("ORDER_SALES_CHANNEL"));
            importer.runImport();
            // import the customers
            importer = new OpentapsImporter("NetSuiteCustomer", dctx, customerDecoder);
            importer.configure(context);
            importer.setOrderBy("customerId"); // ordering is important because of parent relationships
            imported += importer.runImport();
        } catch (GeneralException e) {
            return UtilMessage.createAndLogServiceError(e, module);
        }
        Map result = ServiceUtil.returnSuccess();
        result.put("customersImported", imported);
        return result;
    }

    /**
     * Imports NetSuiteAddressBook rows via NetSuiteAddressDecoder, which is
     * given the calling userLogin from the context. Returns the count as
     * "importedRecords".
     */
    public static Map importCustomerAddresses(DispatchContext dctx, Map context) {
        int imported = 0;
        try {
            OpentapsImporter importer = new OpentapsImporter("NetSuiteAddressBook", dctx, new NetSuiteAddressDecoder((GenericValue) context.get("userLogin")));
            importer.configure(context);
            imported = importer.runImport();
        } catch (GeneralException e) {
            return UtilMessage.createAndLogServiceError(e, module);
        }
        Map result = ServiceUtil.returnSuccess();
        result.put("importedRecords", imported);
        return result;
    }
}
|
<reponame>sgkandale/garbage-lb
import React from 'react'
export default function Delete(props) {
return <>
Delete Cluster
</>
}
|
# Tokenize every C sample with the scanner, writing <name>.c.lex next to it.
# Fixed: iterate the glob directly instead of word-splitting `ls` output
# (breaks on whitespace in names), and quote "$src" in the redirections.
for src in samples/*.c; do ./scanner < "$src" > "$src.lex"; done
|
<gh_stars>0
import json
import tkinter
from tkinter import NW
import cv2
from PIL import Image, ImageTk
from pyzbar.pyzbar import decode
from mqtt_client import MqttClient
TOPIC_CONNECT = 'ttm4115/team_1/project/connect'
class QrReader:
    """Tkinter widget that continuously scans camera frames for QR codes.

    Each decoded QR payload is published over MQTT to ``TOPIC_CONNECT``
    together with this reader's office name.
    """

    def __init__(self, frame, heigth, width, office_name, cap):
        # NOTE(review): "heigth" is a typo for "height"; kept as-is because
        # renaming the parameter would break keyword-argument callers.
        self.__mqtt_client = MqttClient("QrReader")
        self.cap = cap                  # frame source (e.g. cv2.VideoCapture)
        self.gui_window = frame         # parent tkinter container for the canvas
        self.image = None               # last rendered frame; held to avoid GC
        self.canvas = None              # created lazily in capture_video()
        self.height = heigth
        self.width = width
        self.office_name = office_name

    def __read_barcodes(self, frame):
        # Decode every QR/barcode found in the frame, draw a green box around
        # each hit, and publish the decoded payload over MQTT.
        barcodes = decode(frame)
        for barcode in barcodes:
            x, y, w, h = barcode.rect
            barcode_info = barcode.data.decode('utf-8')
            cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
            self.send_msg(barcode_info, self.office_name, TOPIC_CONNECT)
        return frame

    def capture_video(self):
        # Create the display canvas and start the self-rescheduling loop.
        self.canvas = tkinter.Canvas(self.gui_window, bg='white', borderwidth=0)
        self.canvas.pack(fill=tkinter.BOTH, expand=tkinter.YES)
        self.update_qr_frame()

    def update_qr_frame(self):
        # Get the latest frame and convert image format
        ret, frame = self.cap.read()
        # NOTE(review): `ret` is never checked — a failed read yields frame=None
        # and crashes in resize. Also cv2.resize expects dsize=(width, height);
        # passing (self.height, self.width) swaps them — confirm with callers.
        frame = cv2.resize(frame, (self.height, self.width))
        frame = self.__read_barcodes(frame)
        self.image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)  # to RGB
        self.image = Image.fromarray(self.image)  # to PIL format
        self.image = ImageTk.PhotoImage(self.image)  # to ImageTk format
        # Update image
        self.canvas.create_image(0, 0, anchor=NW, image=self.image)
        # Re-schedule ourselves every 10 ms via the tkinter event loop.
        self.canvas.after(10, self.update_qr_frame)

    def stop_capture(self):
        # Release the camera and tear down any OpenCV windows.
        self.cap.release()
        cv2.destroyAllWindows()

    def send_msg(self, qr, sender, where):
        # Publish {"qr": ..., "sender": ...} as JSON to the given topic.
        command = {"qr": qr, "sender": sender}
        payload = json.dumps(command)
        self.__mqtt_client.publish(where, payload)
|
package Kth_Smallest_Element_in_a_BST;
import Others.Tree;
import Others.TreeNode;
import java.util.ArrayList;
import java.util.List;
public class Solution {

    /**
     * Returns the k-th smallest value (1-based) in a binary search tree.
     *
     * Improvement over the original list-collecting version: the in-order
     * walk stops as soon as the k-th node is reached instead of always
     * visiting the whole tree and materialising every value in a list.
     *
     * @param root root of the BST (may be null)
     * @param k    1-based rank of the element to fetch
     * @return the k-th smallest value
     * @throws IndexOutOfBoundsException if the tree holds fewer than k nodes
     *         or k is not positive (same exception type the original
     *         {@code list.get(k - 1)} raised)
     */
    public int kthSmallest(TreeNode root, int k) {
        int[] state = {k, 0}; // state[0] = nodes still to skip, state[1] = answer
        if (!inorder(root, state)) {
            throw new IndexOutOfBoundsException("tree has fewer than " + k + " elements");
        }
        return state[1];
    }

    // In-order traversal that decrements state[0] for each visited node and
    // stores the answer in state[1]; returns true once the k-th node is found
    // so the remaining subtrees are skipped.
    private boolean inorder(TreeNode node, int[] state) {
        if (node == null) {
            return false;
        }
        if (inorder(node.left, state)) {
            return true;
        }
        if (--state[0] == 0) {
            state[1] = node.val;
            return true;
        }
        return inorder(node.right, state);
    }

    public static void main(String[] args) {
        Solution s = new Solution();
        // In-order of {4,2,6,1,3,5,7} is 1..7, so the 5th smallest is 5.
        System.out.println(s.kthSmallest(new Tree(new Object[]{4, 2, 6, 1, 3, 5, 7}).root, 5));
    }
}
|
#!/bin/bash
# Submits the sample "taxidata" Spark job to an EMR-on-EKS virtual cluster,
# using pod template files for driver/executor pod configuration.
# You can use pod template files to define the driver or executor pod’s configurations that Spark configurations do not support.
# see Pod Template (https://spark.apache.org/docs/3.0.0-preview/running-on-kubernetes.html#pod-template).
# Fixed: quote all variable expansions passed to the aws CLI so values with
# spaces/globs cannot be word-split by the shell.

# INPUT VARIABLES
EMR_ON_EKS_ROLE_ID="aws001-preprod-test-eks-emr-eks-data-team-a"     # Replace EMR IAM role with your ID
EKS_CLUSTER_ID='aws001-preprod-test-eks'                             # Replace cluster id with your id
EMR_ON_EKS_NAMESPACE='emr-data-team-a'                               # Replace namespace with your namespace
EMR_VIRTUAL_CLUSTER_NAME="$EKS_CLUSTER_ID-$EMR_ON_EKS_NAMESPACE"
JOB_NAME='taxidata'
S3_BUCKET='s3://<enter-your-bucket-name>'                            # Create your own s3 bucket and replace this value
CW_LOG_GROUP="/emr-on-eks-logs/${EMR_VIRTUAL_CLUSTER_NAME}/${EMR_ON_EKS_NAMESPACE}" # Create CW Log group if not exist
SPARK_JOB_S3_PATH="${S3_BUCKET}/${EMR_VIRTUAL_CLUSTER_NAME}/${EMR_ON_EKS_NAMESPACE}/${JOB_NAME}"

# Step1: COPY POD TEMPLATES TO S3 Bucket
aws s3 sync ./spark-scripts/ "${SPARK_JOB_S3_PATH}/"

# FIND ROLE ARN and EMR VIRTUAL CLUSTER ID
EMR_ROLE_ARN=$(aws iam get-role --role-name "$EMR_ON_EKS_ROLE_ID" --query Role.Arn --output text)
VIRTUAL_CLUSTER_ID=$(aws emr-containers list-virtual-clusters --query "virtualClusters[?name=='${EMR_VIRTUAL_CLUSTER_NAME}' && state=='RUNNING'].id" --output text)

# Execute Spark job
if [[ $VIRTUAL_CLUSTER_ID != "" ]]; then
    echo "Found Cluster $EMR_VIRTUAL_CLUSTER_NAME; Executing the Spark job now..."
    aws emr-containers start-job-run \
      --virtual-cluster-id "$VIRTUAL_CLUSTER_ID" \
      --name "$JOB_NAME" \
      --execution-role-arn "$EMR_ROLE_ARN" \
      --release-label emr-6.3.0-latest \
      --job-driver '{
        "sparkSubmitJobDriver": {
          "entryPoint": "'"$SPARK_JOB_S3_PATH"'/scripts/spark-taxi-trip-data.py",
          "entryPointArguments": ["'"$SPARK_JOB_S3_PATH"'/input/taxi-trip-data/",
            "'"$SPARK_JOB_S3_PATH"'/output/taxi-trip-data/", "taxidata"
          ],
          "sparkSubmitParameters": "--conf spark.executor.instances=2 --conf spark.executor.memory=20G --conf spark.executor.cores=6 --conf spark.driver.cores=4"
        }
      }' \
      --configuration-overrides '{
        "applicationConfiguration": [
          {
            "classification": "spark-defaults",
            "properties": {
              "spark.hadoop.hive.metastore.client.factory.class":"com.amazonaws.glue.catalog.metastore.AWSGlueDataCatalogHiveClientFactory",
              "spark.driver.memory":"10G",
              "spark.kubernetes.driver.podTemplateFile":"'"$SPARK_JOB_S3_PATH"'/pod-templates/spark-driver-pod-template.yaml",
              "spark.kubernetes.executor.podTemplateFile":"'"$SPARK_JOB_S3_PATH"'/pod-templates/spark-executor-pod-template.yaml",
              "spark.kubernetes.executor.podNamePrefix":"taxidata",
              "spark.dynamicAllocation.enabled":"true",
              "spark.dynamicAllocation.shuffleTracking.enabled":"true",
              "spark.dynamicAllocation.minExecutors":"5",
              "spark.dynamicAllocation.maxExecutors":"100",
              "spark.dynamicAllocation.initialExecutors":"10"
            }
          }
        ],
        "monitoringConfiguration": {
          "persistentAppUI":"ENABLED",
          "cloudWatchMonitoringConfiguration": {
            "logGroupName":"'"$CW_LOG_GROUP"'",
            "logStreamNamePrefix":"'"$JOB_NAME"'"
          },
          "s3MonitoringConfiguration": {
            "logUri":"'"$SPARK_JOB_S3_PATH"'/logs/"
          }
        }
      }'
else
    echo "Cluster is not in running state $EMR_VIRTUAL_CLUSTER_NAME"
fi
|
package com.evoluta.orders.infrastructure.respository;
import com.evoluta.orders.application.response.OrderLineDto;
import java.util.List;
/**
 * Placeholder implementation of {@link OrderLineRepository}.
 *
 * NOTE(review): every method is currently a stub — callers receive
 * {@code null} from findAll()/save() and {@code false} from the boolean
 * queries until this class is actually implemented.
 */
public class OrderLineRepositoryImpl implements OrderLineRepository{

    @Override
    public List<OrderLineDto> findAll() {
        // TODO: not yet implemented — returns null, not an empty list.
        return null;
    }

    @Override
    public OrderLineDto save(OrderLineDto orderLine) {
        // TODO: not yet implemented — the given DTO is discarded.
        return null;
    }

    @Override
    public boolean findById(Long id) {
        // TODO: not yet implemented — always reports "not found".
        return false;
    }

    @Override
    public boolean deleteById(Long id) {
        // TODO: not yet implemented — always reports "nothing deleted".
        return false;
    }
}
|
import React from 'react';
import Typography from '@mui/material/Typography';
import Button from '@mui/material/Button';
import { useTheme } from '@mui/material/styles';
/** Props accepted by {@link ButtonComponent}. */
interface Props {
  /** Label rendered inside the button (displayed uppercased). */
  text: string;
  /** When true, the border uses the primary palette colour (search-bar variant). */
  isSearchBar?: boolean;
  /** When true, renders the transparent "clear all" variant with contrast text. */
  isClearAll?: boolean;
  // NOTE(review): catch-all so arbitrary props (onClick, disabled, ...) can be
  // spread onto the MUI Button below; `any` disables type checking for them.
  [x: string]: any;
}

/**
 * Rounded, outlined MUI button with an uppercase Inter-font label.
 * All props beyond the three named ones are forwarded to the underlying
 * <Button> via {...rest}.
 */
const ButtonComponent = ({
  text,
  isSearchBar,
  isClearAll,
  ...rest
}: Props): JSX.Element => {
  const theme = useTheme();
  return (
    <Button
      variant="contained"
      color="secondary"
      sx={{
        // NOTE(review): 'none' is not a valid background-color value —
        // presumably meant to leave the default background; confirm intent.
        backgroundColor: isClearAll ? 'transparent' : 'none',
        borderRadius: 30,
        border: 2,
        borderColor: isSearchBar ? 'primary.main' : 'secondary.main',
        my: 1,
        '&:hover': {
          border: 2,
        },
      }}
      {...rest}
    >
      <Typography
        fontFamily={'Inter'}
        variant="button"
        color={
          isClearAll
            ? theme.palette.getContrastText(theme.palette.primary.light)
            : 'text.secondary'
        }
        sx={{
          textTransform: 'uppercase',
          letterSpacing: 1.2,
          fontWeight: 400,
        }}
      >
        {text}
      </Typography>
    </Button>
  );
};

export default ButtonComponent;
|
def getFirstNCharacters(string, n):
    """Return the first ``n`` characters of ``string``.

    Replaces the original character-by-character loop with a slice.
    Negative ``n`` yields ``""`` (matching the original, where ``range(n)``
    was empty). Unlike the original, ``n`` larger than ``len(string)``
    returns the whole string instead of raising ``IndexError`` — a safe
    generalization for callers that only want "at most n" characters.
    """
    # max() clamps negative n to 0 so string[:-k] semantics never kick in.
    return string[:max(0, n)]
|
<filename>routine/channel_range_test.go
package routine
import (
"fmt"
"testing"
)
// rRangeChannel demonstrates that ranging over a closed buffered channel
// still drains every value sent before close, then terminates cleanly.
func rRangeChannel() {
	items := make(chan string, 2)
	items <- "one"
	items <- "two"
	close(items)
	for item := range items {
		fmt.Println(item)
	}
}
// TestRRangeChannel runs the demo, then ranges over a second closed buffered
// channel and checks every buffered value survives the close.
func TestRRangeChannel(t *testing.T) {
	rRangeChannel()

	filled := make(chan string, 3)
	for i := 0; i < 3; i++ {
		filled <- "str"
	}
	close(filled)
	for got := range filled {
		assertEq("str", got)
	}
}
|
<reponame>kariminf/KSimpleNLG
/*
* The contents of this file are subject to the Mozilla Public License
* Version 1.1 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* The Original Code is "Simplenlg".
*
* The Initial Developer of the Original Code is <NAME>, <NAME> and <NAME>.
* Portions created by <NAME>, <NAME> and <NAME> are Copyright (C) 2010-11 The University of Aberdeen. All Rights Reserved.
*
* Contributor(s): <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
*/
package simplenlg.framework;
/**
* Enum type listing language constants available along
* with their ISO 639-1 two letter code. It also has methods
* to compare Strings with these constants.
*
* @author vaudrypl
*
*/
public enum Language {
    ENGLISH("en"), FRENCH("fr");

    /** ISO 639-1 two letter code for this language. */
    final public String code;

    /** Language callers may fall back to when a lookup returns null. */
    final public static Language DEFAULT_LANGUAGE = ENGLISH;

    Language(String code)
    {
        this.code = code;
    }

    /**
     * @param code ISO 639-1 code to test (case-insensitive)
     * @return true if this language has this code; false when code is null
     *         (previously a null argument raised a NullPointerException)
     */
    public boolean isCode(String code) {
        return code != null && this.code.equals( code.toLowerCase() );
    }

    /**
     * @param code ISO 639-1 code to look up (case-insensitive)
     * @return the language constant corresponding to this code,
     *         null if no constant matches this code or code is null
     *
     * Note : You can use the DEFAULT_LANGUAGE constant if this
     * method returns null.
     */
    public static Language convertCodeToLanguage(String code) {
        if (code == null) {
            return null;
        }
        for (Language language : Language.values()) {
            if (language.isCode(code)) {
                return language; // direct return instead of break + temp var
            }
        }
        return null;
    }
}
|
#!/usr/bin/env bash
# End-to-end benchmark/smoke test for bazel-remote: builds the project
# against a local cache (HTTP and gRPC, cold and hot, with and without a
# minio S3 backend) and asserts a minimum remote-cache hit rate.
#set -x
set -e
set -u
set -o pipefail

# Run everything from the repository root regardless of invocation dir.
SRC_ROOT=$(dirname "$0")/..
SRC_ROOT=$(realpath "$SRC_ROOT")
cd "$SRC_ROOT"

HTTP_PORT=8082
# A build phase passes only if its remote-cache hit rate is >= this percent.
min_acceptable_hit_rate=95
overall_result=success
summary=""

### Begin minio setup.
# Download the minio server and mc client binaries on first run only.
if [ ! -e minio ]
then
    wget https://dl.min.io/server/minio/release/linux-amd64/minio
    chmod +x minio
fi
if [ ! -e mc ]
then
    wget https://dl.min.io/client/mc/release/linux-amd64/mc
    chmod +x mc
fi
rm -rf miniocachedir
# Stop any minio instance left over from a previous run (best effort).
for p in $(pidof minio)
do
    kill -HUP $p && sleep 2 || true
done
./minio server miniocachedir &
minio_pid=$!
sleep 2
./mc config host add myminio http://127.0.0.1:9000 minioadmin minioadmin
./mc mb myminio/bazel-remote
### End minio setup.
# Polls the given server PID's /status HTTP endpoint for up to ~10 seconds.
# Returns as soon as the endpoint answers; if the server never comes up it is
# killed and the whole script exits with an error.
# Fixed: removed the unused `running=false` local and quoted "$server_pid".
wait_for_startup() {
    server_pid="$1"
    for i in $(seq 1 10)
    do
        sleep 1
        # Stop polling early if the server process already died.
        ps -p "$server_pid" > /dev/null || break
        if wget --inet4-only -d -O - "http://127.0.0.1:$HTTP_PORT/status"
        then
            return
        fi
    done
    echo "Error: bazel-remote took too long to start"
    kill -9 "$server_pid"
    exit 1
}
echo -n "Building test binary (no cache): "
ti=$(date +%s)
bazel build //:bazel-remote 2> /dev/null
tf=$(date +%s)
duration=$((tf - ti))
echo "${duration}s"
# Copy the binary somewhere known, so we can run it manually.
bazel run --run_under "cp -f " //:bazel-remote $(pwd)/
echo "Starting test cache"
test_cache_dir=./bazel-remote-tmp-cache
rm -rf $test_cache_dir
./bazel-remote --max_size 1 --dir "$test_cache_dir" --http_address "0.0.0.0:$HTTP_PORT" \
--s3.endpoint 127.0.0.1:9000 \
--s3.bucket bazel-remote \
--s3.prefix files \
--s3.auth_method access_key \
--s3.access_key_id minioadmin \
--s3.secret_access_key minioadmin \
--s3.disable_ssl \
> log.stdout 2> log.stderr &
test_cache_pid=$!
echo "Test cache pid: $test_cache_pid"
wait_for_startup "$test_cache_pid"
bazel clean 2> /dev/null
echo -n "Build with cold cache (HTTP, populating minio): "
ti=$(date +%s)
bazel build //:bazel-remote "--remote_cache=http://127.0.0.1:$HTTP_PORT" \
2> http_cold
tf=$(date +%s)
duration=$(expr $tf - $ti)
echo "${duration}s"
grep process http_cold
bazel clean 2> /dev/null
echo "Restarting test cache"
kill -9 $test_cache_pid
sleep 1
./bazel-remote --max_size 1 --dir $test_cache_dir --http_address "0.0.0.0:$HTTP_PORT" \
> log.stdout 2> log.stderr &
test_cache_pid=$!
echo "Test cache pid: $test_cache_pid"
wait_for_startup "$test_cache_pid"
testsection="hot HTTP"
echo -n "Build with hot cache ($testsection): "
ti=$(date +%s)
bazel build //:bazel-remote "--remote_cache=http://127.0.0.1:$HTTP_PORT" \
--execution_log_json_file=http_hot.json \
2> http_hot
tf=$(date +%s)
duration=$(expr $tf - $ti)
echo "${duration}s"
grep process http_hot
hits=$(grep -c '"remoteCacheHit": true,' http_hot.json || true) # TODO: replace these with jq one day.
misses=$(grep -c '"remoteCacheHit": false,' http_hot.json || true)
hit_rate=$(awk -vhits=$hits -vmisses=$misses 'BEGIN { printf "%0.2f", hits*100/(hits+misses) }' </dev/null)
result=$(awk -vhit_rate=$hit_rate -vmin=$min_acceptable_hit_rate 'BEGIN {if (hit_rate >= min) print "success" ; else print "failure";}' < /dev/null)
[ "$result" = "failure" ] && overall_result=failure
echo "hit rate: ${hit_rate}% (hits: $hits misses: $misses) => $result"
summary+="\n$testsection: hit rate: ${hit_rate}% (hits: $hits misses: $misses) => $result"
echo "Restarting test cache"
kill -9 $test_cache_pid
sleep 1
rm -rf $test_cache_dir
./bazel-remote --max_size 1 --dir $test_cache_dir --http_address "0.0.0.0:$HTTP_PORT" \
--s3.endpoint 127.0.0.1:9000 \
--s3.bucket bazel-remote \
--s3.prefix files \
--s3.auth_method access_key \
--s3.access_key_id minioadmin \
--s3.secret_access_key minioadmin \
--s3.disable_ssl \
> log.stdout 2> log.stderr &
test_cache_pid=$!
echo "Test cache pid: $test_cache_pid"
wait_for_startup "$test_cache_pid"
bazel clean 2> /dev/null
testsection="cold HTTP, hot minio"
echo -n "Build with hot cache ($testsection): "
ti=$(date +%s)
bazel build //:bazel-remote "--remote_cache=http://127.0.0.1:$HTTP_PORT" \
--execution_log_json_file=http_hot_minio.json \
2> http_hot
tf=$(date +%s)
duration=$(expr $tf - $ti)
echo "${duration}s"
grep process http_hot
hits=$(grep -c '"remoteCacheHit": true,' http_hot_minio.json || true) # TODO: replace these with jq one day.
misses=$(grep -c '"remoteCacheHit": false,' http_hot_minio.json || true)
hit_rate=$(awk -vhits=$hits -vmisses=$misses 'BEGIN { printf "%0.2f", hits*100/(hits+misses) }' </dev/null)
result=$(awk -vhit_rate=$hit_rate -vmin=$min_acceptable_hit_rate 'BEGIN {if (hit_rate >= min) print "success" ; else print "failure";}' < /dev/null)
[ "$result" = "failure" ] && overall_result=failure
echo "hit rate: ${hit_rate}% (hits: $hits misses: $misses) => $result"
summary+="\n$testsection: hit rate: ${hit_rate}% (hits: $hits misses: $misses) => $result"
echo "Restarting test cache"
kill -9 $test_cache_pid
sleep 1
rm -rf $test_cache_dir
./bazel-remote --max_size 1 --dir $test_cache_dir --http_address "0.0.0.0:$HTTP_PORT" \
> log.stdout 2> log.stderr &
test_cache_pid=$!
echo "Test cache pid: $test_cache_pid"
wait_for_startup "$test_cache_pid"
bazel clean 2> /dev/null
echo -n "Build with cold cache (gRPC): "
ti=$(date +%s)
bazel build //:bazel-remote --remote_cache=grpc://127.0.0.1:9092 \
2> grpc_cold
tf=$(date +%s)
duration=$(expr $tf - $ti)
echo "${duration}s"
grep process grpc_cold
bazel clean 2> /dev/null
testsection="hot gRPC"
echo -n "Build with hot cache ($testsection): "
ti=$(date +%s)
bazel build //:bazel-remote --remote_cache=grpc://127.0.0.1:9092 \
--execution_log_json_file=grpc_hot.json \
2> grpc_hot
tf=$(date +%s)
duration=$(expr $tf - $ti)
echo "${duration}s"
grep process grpc_hot
hits=$(grep -c '"remoteCacheHit": true,' grpc_hot.json || true) # TODO: replace these with jq one day.
misses=$(grep -c '"remoteCacheHit": false,' grpc_hot.json || true)
hit_rate=$(awk -vhits=$hits -vmisses=$misses 'BEGIN { printf "%0.2f", hits*100/(hits+misses) }' </dev/null)
result=$(awk -vhit_rate=$hit_rate -vmin=$min_acceptable_hit_rate 'BEGIN {if (hit_rate >= min) print "success" ; else print "failure";}' < /dev/null)
[ "$result" = "failure" ] && overall_result=failure
echo "hit rate: ${hit_rate}% (hits: $hits misses: $misses) => $result"
summary+="\n$testsection: hit rate: ${hit_rate}% (hits: $hits misses: $misses) => $result"
echo "Restarting test cache"
kill -9 $test_cache_pid
sleep 1
rm -rf $test_cache_dir
./bazel-remote --max_size 1 --dir $test_cache_dir --http_address "0.0.0.0:$HTTP_PORT" \
--s3.endpoint 127.0.0.1:9000 \
--s3.bucket bazel-remote \
--s3.prefix files \
--s3.auth_method access_key \
--s3.access_key_id minioadmin \
--s3.secret_access_key minioadmin \
--s3.disable_ssl \
> log.stdout 2> log.stderr &
test_cache_pid=$!
echo "Test cache pid: $test_cache_pid"
wait_for_startup "$test_cache_pid"
bazel clean 2> /dev/null
testsection="cold gRPC, hot minio"
echo -n "Build with hot cache ($testsection): "
ti=$(date +%s)
bazel build //:bazel-remote --remote_cache=grpc://127.0.0.1:9092 \
--execution_log_json_file=grpc_hot.json \
2> grpc_hot
tf=$(date +%s)
duration=$(expr $tf - $ti)
echo "${duration}s"
grep process grpc_hot
hits=$(grep -c '"remoteCacheHit": true,' grpc_hot.json || true) # TODO: replace these with jq one day.
misses=$(grep -c '"remoteCacheHit": false,' grpc_hot.json || true)
hit_rate=$(awk -vhits=$hits -vmisses=$misses 'BEGIN { printf "%0.2f", hits*100/(hits+misses) }' </dev/null)
result=$(awk -vhit_rate=$hit_rate -vmin=$min_acceptable_hit_rate 'BEGIN {if (hit_rate >= min) print "success" ; else print "failure";}' < /dev/null)
[ "$result" = "failure" ] && overall_result=failure
echo "hit rate: ${hit_rate}% (hits: $hits misses: $misses) => $result"
summary+="\n$testsection: hit rate: ${hit_rate}% (hits: $hits misses: $misses) => $result"
kill -9 $test_cache_pid
echo "Stopping minio"
kill -9 $minio_pid
echo -e "\n##########"
echo -e "$summary\n"
echo "Done ($overall_result)"
echo "##########"
if [ "$overall_result" != "success" ]
then
exit 1
fi
|
#!/bin/sh
# For every .bicep file under main/: print its path (-print) and dump its
# contents (-exec cat), then keep only the lines mentioning /modules/.
# NOTE(review): the grep also matches the printed *paths* when they contain
# "/modules/", not just file contents — confirm that is intended.
find main -name '*.bicep' -print -exec cat {} \; | grep /modules/
|
<gh_stars>1-10
package org.bf2.cos.fleetshard.api;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Objects;
import io.fabric8.kubernetes.api.model.Condition;
/**
 * Helpers for reading and mutating the Kubernetes-style {@link Condition}
 * list on a {@link ManagedConnector}'s status.
 */
public final class ManagedConnectorConditions {

    private ManagedConnectorConditions() {
        // static utility class, no instances
    }

    /** Removes all conditions, if any are present. */
    public static void clearConditions(ManagedConnector connector) {
        if (connector.getStatus().getConditions() != null) {
            connector.getStatus().getConditions().clear();
        }
    }

    /**
     * Adds or replaces the condition of the given type.
     *
     * @return true when the condition list was modified
     */
    public static boolean setCondition(ManagedConnector connector, Type type, Status status, String reason, String message) {
        return setCondition(connector, newCondition(type, status, reason, message));
    }

    public static boolean setCondition(ManagedConnector connector, Type type, boolean status, String reason, String message) {
        return setCondition(connector, type, status ? Status.True : Status.False, reason, message);
    }

    /** Convenience overload: the message defaults to the reason. */
    public static boolean setCondition(ManagedConnector connector, Type type, Status status, String reason) {
        return setCondition(connector, type, status, reason, reason);
    }

    public static boolean setCondition(ManagedConnector connector, Type type, boolean status, String reason) {
        return setCondition(connector, type, status ? Status.True : Status.False, reason, reason);
    }

    /**
     * Convenience overload: reason and message default to the type name.
     * Previously this duplicated the Condition construction inline; it now
     * delegates to the canonical overload for consistency.
     */
    public static boolean setCondition(ManagedConnector connector, Type type, Status status) {
        return setCondition(connector, type, status, type.name(), type.name());
    }

    public static boolean setCondition(ManagedConnector connector, Type type, boolean status) {
        return setCondition(connector, type, status ? Status.True : Status.False);
    }

    /**
     * Adds the condition, or replaces an existing condition of the same type
     * when its status, reason or message changed. The list is re-sorted by
     * last transition time after every call.
     *
     * @return true when the list was modified
     */
    public static boolean setCondition(ManagedConnector connector, Condition condition) {
        if (connector.getStatus().getConditions() == null) {
            connector.getStatus().setConditions(new ArrayList<>());
        }
        for (int i = 0; i < connector.getStatus().getConditions().size(); i++) {
            final Condition current = connector.getStatus().getConditions().get(i);
            if (Objects.equals(current.getType(), condition.getType())) {
                boolean update = !Objects.equals(condition.getStatus(), current.getStatus())
                    || !Objects.equals(condition.getReason(), current.getReason())
                    || !Objects.equals(condition.getMessage(), current.getMessage());
                if (update) {
                    connector.getStatus().getConditions().set(i, condition);
                }
                connector.getStatus().getConditions().sort(Comparator.comparing(Condition::getLastTransitionTime));
                return update;
            }
        }
        connector.getStatus().getConditions().add(condition);
        connector.getStatus().getConditions().sort(Comparator.comparing(Condition::getLastTransitionTime));
        return true;
    }

    /** @return true when a condition of the given type exists. */
    public static boolean hasCondition(ManagedConnector connector, Type type) {
        if (connector.getStatus().getConditions() == null) {
            return false;
        }
        return connector.getStatus().getConditions().stream().anyMatch(
            c -> Objects.equals(c.getType(), type.name()));
    }

    /** @return true when a condition of the given type and status exists. */
    public static boolean hasCondition(ManagedConnector connector, Type type, Status status) {
        if (connector.getStatus().getConditions() == null) {
            return false;
        }
        return connector.getStatus().getConditions().stream().anyMatch(
            c -> Objects.equals(c.getType(), type.name())
                && Objects.equals(c.getStatus(), status.name()));
    }

    /** @return true when a condition matching type, status and reason exists. */
    public static boolean hasCondition(ManagedConnector connector, Type type, Status status, String reason) {
        if (connector.getStatus().getConditions() == null) {
            return false;
        }
        return connector.getStatus().getConditions().stream().anyMatch(
            c -> Objects.equals(c.getType(), type.name())
                && Objects.equals(c.getStatus(), status.name())
                && Objects.equals(c.getReason(), reason));
    }

    /** @return true when a condition matching all four fields exists. */
    public static boolean hasCondition(ManagedConnector connector, Type type, Status status, String reason, String message) {
        if (connector.getStatus().getConditions() == null) {
            return false;
        }
        return connector.getStatus().getConditions().stream().anyMatch(
            c -> Objects.equals(c.getType(), type.name())
                && Objects.equals(c.getStatus(), status.name())
                && Objects.equals(c.getReason(), reason)
                && Objects.equals(c.getMessage(), message));
    }

    // Builds a Condition stamped with the current transition time; shared by
    // all setCondition overloads so construction logic exists exactly once.
    private static Condition newCondition(Type type, Status status, String reason, String message) {
        Condition condition = new Condition();
        condition.setType(type.name());
        condition.setStatus(status.name());
        condition.setReason(reason);
        condition.setMessage(message);
        condition.setLastTransitionTime(Conditions.now());
        return condition;
    }

    /** Well-known condition types used by the fleet shard. */
    public enum Type {
        Error,
        Ready,
        Initialization,
        Augmentation,
        Monitor,
        Deleting,
        Deleted,
        Stop,
        Stopping,
        Migrate,
        Resync,
    }

    /** Kubernetes-style tri-state condition status. */
    public enum Status {
        True,
        False,
        Unknown
    }
}
|
import React, { Component } from 'react';
import Header from "./header/header";
import News from "./news/news"
import './App.css';
import menu from "./images/baseline-menu-24px.svg";
import homeIcon from "./images/round-home-24px.svg";
import savedIcon from "./images/round-favorite-24px.svg";
import saveDataIcon from "./images/round-flash_on-24px.svg";
import notificationIcon from "./images/round-notifications-24px.svg";
import doneIcon from "./images/round-done_outline-24px.svg";
import reloadIcon from "./images/round-autorenew-24px.svg";
import Hammer from "hammerjs";
import NewsDescription from "./newsDescription/newsDescription";
import { Router, Route, Link } from 'react-router-dom'
import history from "./history.js";
import Saved from './Saved/saved';
import gIcon from "./images/g.svg";
import firebase from "firebase";
import 'firebase/auth';
class App extends Component {
constructor(props) {
    super(props);
    // UI + auth state. The flags drive the drawer, the SW-update banner and
    // the notification toggle; `user` holds the firebase user once signed in.
    this.state = {
        menuOpen: false,           // side drawer visibility
        dataSaver: null,           // data-saver mode (mirrors localStorage "dsm")
        notificationEnable: false, // push notifications opt-in (localStorage "ne")
        uptodate: false,           // false while a new service worker is waiting
        isAuth: false,             // firebase auth resolved to a signed-in user
        isDesc: null,              // true when a news-description route is open
        waitAuth: false,           // auth state still being determined
        user: null,
        dn: false,                 // notifications denied (localStorage "ne" === "d")
    }
}
r;
componentDidMount() {
    // Firebase web-app configuration (API key redacted in source).
    var config = {
        apiKey: "<KEY>",
        authDomain: "firetestondiwali.firebaseapp.com",
        databaseURL: "https://firetestondiwali.firebaseio.com",
        projectId: "firetestondiwali",
        storageBucket: "firetestondiwali.appspot.com",
        messagingSenderId: "480687354738"
    };
    firebase.initializeApp(config);
    // Flag "auth pending" until onAuthStateChanged fires the first time.
    this.setState({
        waitAuth: true
    });
    firebase.auth().onAuthStateChanged((user) => {
        if (user) {
            this.setState({user: user, isAuth: true, waitAuth: false});
        } else {
            this.setState({waitAuth: false});
        }
    });
    // Hammer.js pan gestures move the ".explore" sheet vertically; panning
    // it back to the top (center.y == 0) also closes the menu.
    var explore = document.querySelector(".explore");
    var manager = new Hammer(explore);
    manager.on("panup", (e)=> {
        console.log("top" + e.center.y)
        explore.style = `transform: translateY(-${e.center.y});`;
    })
    manager.on("pandown", (e) => {
        console.log(e);
        console.log("bottom" + e.center.y)
        if(e.center.y==0) {
            this.closeMenu();
        }
        else {
            explore.style = `transform: translateY(${e.center.y});`;
            this.closeMenu();
        }
    })
    if ('serviceWorker' in navigator) {
        window.addEventListener('load', () => {
            navigator.serviceWorker.register('/sw.js')
            .then(registration => {
                if (registration.waiting) {
                    // waiting
                    // A new SW is installed but waiting; keep the registration
                    // so handleUpdate() can message it to skipWaiting.
                    this.r = registration;
                    this.setState({uptodate: false});
                } else {
                    this.setState({
                        uptodate: true
                    });
                    console.log(`Service Worker registered! Scope: ${registration.scope}`);
                }
            })
            .catch(err => {
                console.log(`Service Worker registration failed: ${err}`);
            });
        });
        // Restore the persisted notification preference ("ne" key:
        // "true" = enabled, "d" = permission denied).
        // NOTE(review): this restore only runs when serviceWorker is
        // supported — confirm that is intentional.
        if(localStorage.getItem("ne")=="true") {
            this.setState({"notificationEnable": true})
        }
        else if (localStorage.getItem("ne") == "d") {
            this.setState({
                "notificationEnable": false,
                dn: true
            })
        }
    }
    /*
    if (window.screen.orientation.type === "landscape-primary" && window.innerWidth < 612) {
        document.querySelector(".explore").style = "width: 50%; height: 90vh; overflow: scroll";
    }
    else {
        if(window.innerWidth > 612) {
            document.querySelector(".explore").style = "width: 30%; height: auto; overflow: hidden";
        }
        else {
            document.querySelector(".explore").style = "width: 100%; height: auto; overflow: scroll";
        }
    }
    window.screen.orientation.addEventListener('change', (e) => {
        if(window.screen.orientation.type==="landscape-primary" && window.innerWidth < 612) {
            document.querySelector(".explore").style = "width: 50%; height: 90vh; overflow: scroll";
        }
        else {
            if (window.innerWidth > 612) {
                document.querySelector(".explore").style = "width: 30%; height: auto; overflow: hidden";
            } else {
                document.querySelector(".explore").style = "width: 100%; height: auto; overflow: scroll";
            }
        }
    })
    */
    window.addEventListener("scroll", this.handleScroll.bind(this));
    // Restore data-saver preference from localStorage.
    if (localStorage.getItem("dsm") == "true") {
        this.setState({ "dataSaver": true })
    }
    // Track whether the current route is the home feed ("/") or a
    // news-description page.
    if(window.location.pathname == "/") {
        this.setState({
            isDesc: false
        }) }
    else {
        this.setState({
            isDesc: true
        });
    }
}
// Activates a waiting service worker (if one was captured in this.r) and
// reloads the page so the new version takes over immediately.
handleUpdate = () => {
    // registration.waiting.postMessage('skipWaiting');
    if(this.r) {
        this.r.waiting.postMessage('skipWaiting');
    }
    window.location.reload();
}
// Last sampled scroll offset; compared against the live window.scrollY
// to infer scroll direction. Updated on a 250ms delay below.
prevStatus = 0;
// Show/hide the header depending on scroll position and direction.
handleScroll = (e) => {
//...
// Record the current offset after a short delay so later comparisons see
// the position from ~250ms ago.
// NOTE(review): the comparisons below run before this timeout fires, so
// prevStatus always lags the live position — confirm this debounce is
// the intended behaviour rather than an ordering bug.
setTimeout(()=> {
this.prevStatus = window.scrollY;
}, 250)
// Near the top of the page: keep the home header in normal flow.
if (this.prevStatus < 72) {
if(window.location.pathname=="/") {
document.querySelector(".headerMain").style = "position: relative";
}
}
else {
// Scrolling up: slide the sticky header back into view.
if (this.prevStatus > window.scrollY) {
if(window.location.pathname=="/") {
document.querySelector(".headerMain").style = `position: sticky;top:0;animation: scroll 250ms;`;
}
}
else {
// Scrolling down: hide the home header; on detail pages show ".header".
if(window.location.pathname=="/") {
document.querySelector(".headerMain").style = "display: none";
}
else {
document.querySelector(".header").style="display: flex;"
}
}
}
}
// Close the slide-up Explore menu (no-op when already closed) and
// restore page scrolling.
closeMenu = () => {
  if (!this.state.menuOpen) {
    return;
  }
  this.setState({ menuOpen: false });
  document.body.style = "overflow-y: scroll";
}
// Open the Explore menu and lock background scrolling while it is up.
openMenu = () => {
  document.body.style = "overflow: hidden";
  this.setState({ menuOpen: true });
}
// Toggle data-saver mode, persisted under localStorage key "dsm".
// Any stored value other than "true" (including no value at all, i.e.
// first use) turns the mode on; "true" turns it off.
saveData = () => {
  const enable = localStorage.getItem("dsm") !== "true";
  localStorage.setItem("dsm", enable);
  this.setState({ "dataSaver": enable });
}
// Ask the browser for notification permission. Resolves with the
// permission string; any failure rejects with the literal "reject".
askPermission = () => {
  return Notification.requestPermission().then(
    (result) => result,
    () => Promise.reject("reject")
  );
}
// Toggle the push-notification preference, persisted in localStorage
// under "ne": "true" / "false", or "d" when the browser denied permission.
handleNotification = () => {
if(this.state.notificationEnable) {
// send req to server to remove push key.
localStorage.setItem("ne", false);
this.setState({notificationEnable: false});
}
else {
this.askPermission()
.then((r)=>{
if(r==="denied") {
// Browser-level denial: remember it ("d") and show the hint banner (dn).
localStorage.setItem("ne", "d");
this.setState({
notificationEnable: false
});
this.setState({
dn: true
});
}
else {
// NOTE(review): permission was granted here, yet this branch DISABLES
// notifications when "ne" was already "true" — that looks inverted for
// an enable flow; confirm intent before changing.
if(localStorage.getItem("ne")=="true") {
localStorage.setItem("ne", false);
this.setState({ notificationEnable: false });
console.log("ENABLE");
}
else {
localStorage.setItem("ne", true);
this.setState({ notificationEnable: true });
}
}
})
.catch((e)=>{
// Permission request failed: keep notifications off.
localStorage.setItem("ne", false);
})
}
}
// Start Google sign-in via a full-page redirect and show the auth spinner.
// NOTE(review): signInWithRedirect navigates away from the page, so the
// getRedirectResult() chain below likely only completes after the page
// reloads — confirm this flow against the Firebase redirect docs.
letSign() {
this.setState({waitAuth: true});
var provider = new firebase.auth.GoogleAuthProvider();
provider.addScope('https://www.googleapis.com/auth/contacts.readonly');
firebase.auth().useDeviceLanguage();
firebase.auth().signInWithRedirect(provider);
firebase.auth().getRedirectResult().then((result) => {
if (result.credential) {
// This gives you a Google Access Token. You can use it to access the Google API.
var token = result.credential.accessToken;
// ...
}
// The signed-in user info.
var user = result.user;
this.setState({waitAuth: false});
}).catch((error) => {
// Handle Errors here.
var errorCode = error.code;
var errorMessage = error.message;
// The email of the user's account used.
var email = error.email;
// The firebase.auth.AuthCredential type that was used.
var credential = error.credential;
// ...
this.setState({
waitAuth: false
});
});
}
logOut() {
this.setState({
waitAuth: true,
isAuth: true
});
firebase.auth().signOut().then(()=> {
this.setState({
waitAuth: false,
isAuth: false
});
}, ()=> {
this.setState({
waitAuth: false,
isAuth: true
});
});
}
// Render the app shell: routed pages plus the slide-up "Explore" menu
// with profile, navigation and settings toggles.
render() {
// Menu translated fully below the viewport (hidden).
const backMenu = {
transform: "translateY(100%)"
}
// Menu slid into view.
const forwardMenu = {
transform: "translateY(0)"
}
const none = {
display: "none"
}
// Applied to the page body while the menu is open to lock it in place.
const fix = {
position: "absolute",
overflow: "hidden"
}
// Toggle-switch knob / track styles for the "enabled" state.
const headMove = {
position: "relative",
left: "11px",
background: "#1565C0"
}
const pathMove = {
background: "#95baf8"
}
return (
<>
<div className="app" style={this.state.menuOpen ? fix : null}>
<div className="exploreBackground" onClick={()=>this.closeMenu()} style={this.state.menuOpen?null:none}></div>
<Router history={history}>
<div>
<Route path="/" component={News} exact></Route>
<Route path="/:headline/e/:id" component={NewsDescription}></Route>
<Route path="/saved" component={Saved}></Route>
</div>
</Router>
<button className="menuButton" role="button" onClick={() => this.openMenu()} style={this.state.menuOpen ? none : null}><img src={menu} alt="menu icon" role="img"/><span>Explore</span></button>
<div className="explore" style={this.state.menuOpen?forwardMenu:backMenu}>
<div className="exploreInner">
<div className="exploreList">
{
this.state.waitAuth?<div className="progress" style={{right: "8px"}}>
<div className="indeterminate"></div>
</div>
: null
}
<div className="exploreProfile">
{
this.state.isAuth?<div className="signed" role="Link"><div><img className="profileImage" style={{border: "2px solid blue"}} src={this.state.user.photoURL} alt="Profile Picture"></img><div style={{paddingLeft: "12px"}}>{this.state.user.displayName}</div></div><button className="signOut" style={{marginRight: "17px"}} role="Button" onClick={(e)=> this.logOut()}>Logout</button></div>
:
<div className="notSignin" onClick={(e)=>this.letSign()}><img style={{background: "transparent"}} src={gIcon} className="profileImage notSign" alt="Google logo"></img> <div style={{ paddingLeft: "12px" }}>Signin with Google</div></div>
}
</div>
<Router history={history}><Link to="/" className="othermenubutton"><span className="menuIcon" style={{padding: "0 6px"}}><img src={homeIcon} alt="Home icon"></img></span><span style={{ paddingLeft: "16px" }}>Home</span></Link></Router>
<Router history={history}><Link to = "/saved" menu={(e)=>this.closeMenu()} className="othermenubutton" style={{padding: "0 6px"}}><span className="menuIcon"><img src={savedIcon} alt="offline"></img></span><span style={{ paddingLeft: "16px" }}>Saved</span></Link></Router>
<button className="othermenubutton bb notPlusIcon" onClick={() => this.saveData()}><div><span className="menuIcon"><img src={saveDataIcon} alt="Data saving"></img></span><span style={{ paddingLeft: "16px" }}>Data Saving mode</span></div><div className="switch"><div className="switchHead" style={this.state.dataSaver ? headMove : null}></div><div className="switchPath" style={this.state.dataSaver ? pathMove : null}></div></div></button>
<button className="othermenubutton notPlusIcon" onClick={() => this.handleNotification()}><div><span className="menuIcon"><img src={notificationIcon} alt="Notification"></img></span><span style={{ paddingLeft: "16px" }}>Allow Notifications</span></div><div className="switch"><div className="switchHead" style={this.state.notificationEnable ? headMove : null}></div><div className="switchPath" style={this.state.notificationEnable ? pathMove : null}></div></div></button>
{
this.state.dn?<div className="disableNotification">Please unblock notifications from browser settings</div>: null
}
<button className="othermenubutton bb" onClick={()=>this.handleUpdate()}><span className="menuIcon"><img src={this.state.uptodate?doneIcon:reloadIcon} alt="Reaload"></img></span><span style={{ paddingLeft: "16px" }}>{this.state.uptodate ? "Version 2.2.0": "Reload to Update"}</span></button>
<button className="othermenubutton"><span className="menuIcon"></span><span style={{ paddingLeft: "16px" }}>Your credentials will be deleted after 24 hours.</span></button>
</div>
</div>
</div>
</div>
</>
);
}
}
export default App;
|
def cross_two(a, b):
    """Yield the Cartesian product of two iterables as ``(x, y)`` pairs.

    Bug fix: the original yielded the whole iterables ``(a, b)`` on every
    iteration instead of the current pair ``(x, y)``.

    Note: ``b`` is iterated once per element of ``a``, so it must be
    re-iterable (e.g. a list, not a one-shot generator).
    """
    for x in a:
        for y in b:
            yield x, y
|
<filename>src/containers/App.js
import { connect } from 'react-redux';
import App from '../components/App';
import { fetchData } from '../actions/data';
// Expose a `fetchData` prop that dispatches the data-loading action.
const mapDispatchToProps = (dispatch) => ({
  fetchData: () => {
    dispatch(fetchData());
  },
});
// No state mapping needed; only dispatch props are injected.
export default connect(null, mapDispatchToProps)(App);
|
/**
 * Build an HTML <form> string from an array of element descriptors.
 *
 * Each descriptor is { type, label, value? }. The label is slugified
 * (lower-cased, whitespace replaced with dashes) and used as the input id
 * and the label's `for` target. Supported types: "text" and "email"
 * (rendered with their value) and "file" (no value). Unknown types render
 * the label only, inside an otherwise empty form-group.
 *
 * NOTE(review): labels and values are interpolated without HTML escaping —
 * do not feed untrusted input to this function.
 */
function generateForm(formElements) {
  let formHtml = '<form>';
  formElements.forEach((element) => {
    // Compute the slug once instead of re-deriving it for every attribute.
    const id = element.label.toLowerCase().replace(/\s/g, '-');
    formHtml += '<div class="form-group">';
    formHtml += `<label for="${id}">${element.label}</label>`;
    if (element.type === "text" || element.type === "email") {
      formHtml += `<input type="${element.type}" id="${id}"`;
      formHtml += ` class="form-control" value="${element.value}">`;
    } else if (element.type === "file") {
      formHtml += `<input type="file" id="${id}"`;
      formHtml += ` class="form-control">`;
    }
    formHtml += '</div>';
  });
  formHtml += '</form>';
  return formHtml;
}
// Sample descriptors exercising each supported element type.
const formElements = [
{ type: "text", label: "Name", value: "John Doe" },
{ type: "email", label: "Email", value: "johndoe@example.com" },
{ type: "file", label: "Profile Picture" }
];
// Emit the generated markup for manual inspection.
console.log(generateForm(formElements));
|
# Log in to Amazon ECR and push every PetClinic service image.
# Requires AWS_REGION, ECR_REGISTRY and the IMAGE_TAG_* variables to be set.
# Abort immediately if the login or any push fails.
set -euo pipefail

aws ecr get-login-password --region "${AWS_REGION}" | docker login --username AWS --password-stdin "${ECR_REGISTRY}"

# Push each image in turn; `set -e` stops at the first failure.
for image in \
    "${IMAGE_TAG_ADMIN_SERVER}" \
    "${IMAGE_TAG_API_GATEWAY}" \
    "${IMAGE_TAG_CONFIG_SERVER}" \
    "${IMAGE_TAG_CUSTOMERS_SERVICE}" \
    "${IMAGE_TAG_DISCOVERY_SERVER}" \
    "${IMAGE_TAG_HYSTRIX_DASHBOARD}" \
    "${IMAGE_TAG_VETS_SERVICE}" \
    "${IMAGE_TAG_VISITS_SERVICE}" \
    "${IMAGE_TAG_GRAFANA_SERVICE}" \
    "${IMAGE_TAG_PROMETHEUS_SERVICE}"; do
    docker push "${image}"
done
|
package gov.usgs.traveltime.tables;
import gov.usgs.traveltime.AllBrnRef;
import gov.usgs.traveltime.AuxTtRef;
import gov.usgs.traveltime.TauUtil;
import gov.usgs.traveltime.TtStatus;
/**
* Test main program for travel-time table generation.
*
* @author <NAME>
*/
public class ReModel {
  /**
   * Drives travel-time model generation (Java replacement for the Fortran
   * Remodl/Setbrn programs). Builds the model tables, then fills and dumps
   * the branch reference data.
   *
   * @param args Command line arguments (unused)
   * @throws Exception On an illegal integration interval
   */
  public static void main(String[] args) throws Exception {
    final String earthModel = "ak135";

    TablesUtil.deBugLevel = 1;
    final double startTime = System.currentTimeMillis();
    MakeTables tableBuilder = new MakeTables(earthModel);
    TtStatus status =
        tableBuilder.buildModel(
            TauUtil.model("m" + earthModel + ".mod"), TauUtil.model("phases.txt"));

    // Bail out early on a failed model read.
    if (status != TtStatus.SUCCESS) {
      System.out.println("Read status = " + status);
      return;
    }

    // Build the branch reference classes.
    // NOTE assumes default model path for now, need to figure out
    // where to get this path. Cmd line arg?
    AuxTtRef auxTT = new AuxTtRef(true, false, false, null, null);
    AllBrnRef allRef = tableBuilder.fillAllBrnRef(null, auxTT);
    System.out.format(
        "\n***** Table generation time: %5.3f *****\n",
        0.001 * (System.currentTimeMillis() - startTime));
    // allRef.dumpHead();
    // allRef.dumpMod('P', true);
    // allRef.dumpMod('S', true);
    allRef.dumpBrn(false);
    // allRef.dumpUp('P');
    // allRef.dumpUp('S');
  }
}
|
import React, { useState } from "react";
import { Modal, Form } from "react-bootstrap";
import { Link } from "react-router-dom";
import "./modallogin.css";
// Modal login dialog opened by the "Вход" (login) button.
// NOTE(review): `required="email"` is not valid — `required` is a boolean
// attribute; also the submit input below sits outside both <form>
// elements (so it submits nothing), and the empty <form></form> after
// Modal.Body looks like a leftover — confirm intent before cleaning up.
function LoginButton(props) {
const [show, setShow] = useState(false);
const handleClose = () => setShow(false);
const handleShow = () => setShow(true);
return (
<div className="personal">
<button type="button" className="btnlogin" onClick={handleShow}>
Вход
</button>
<Modal centered show={show} onHide={handleClose}>
<Modal.Header closeButton>
<Modal.Title>Авторизация</Modal.Title>
</Modal.Header>
<Modal.Body>
<Form>
<Form.Group controlId="fromBasicEmail">
<Form.Label>Электронная почта</Form.Label>
<Form.Control
type="email"
placeholder="E-mail"
required="email"
/>
</Form.Group>
<Form.Group controlId="fromBasicPassword">
<Form.Label>Пароль</Form.Label>
<Form.Control type="password" placeholder="Password" />
</Form.Group>
</Form>
</Modal.Body>
<form></form>
<div className="centerbtn">
<input
type="submit"
className="btnlogin"
value="Вход"
onClick={handleClose}
/>
<br></br>
<br></br>
<Link to="/forgot_password=yes" onClick={handleClose}>
Восстановить пароль
</Link>
<br></br>
<Link to="/register=yes" onClick={handleClose}>
Регистрация
</Link>
</div>
</Modal>
</div>
);
}
export default LoginButton;
|
<reponame>yasirabd/api-diagnostic<filename>src/api_v1/utils/s3_utils.py
import io
from urllib.parse import urlparse
import numpy as np
from datetime import timedelta, datetime
import boto3
from botocore.exceptions import ClientError
class S3:
    """Thin wrapper around an S3 bucket that stores weekly ``.npy`` state
    matrices.

    Objects are keyed by the Monday of the week containing ``date``, as
    ``<year>/<month>/<day>.npy``.
    """

    def __init__(self, date, bucket_name, access_key, secret_key, session_token, region_name):
        """Parse ``date`` ("%Y-%m-%d %H:%M:%S") and build an S3 client."""
        self.date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S")
        # Derive both parts in a single call (the original recomputed the
        # path twice).
        self.path, self.filepath = self.create_filepath_from_date()
        self.s3_uri = f"s3://{bucket_name}/{self.filepath}"
        self.bucket_name = bucket_name
        self.client = boto3.client("s3",
                                   aws_access_key_id=access_key,
                                   aws_secret_access_key=secret_key,
                                   aws_session_token=session_token,
                                   region_name=region_name)

    def create_filepath_from_date(self):
        """Return ``(directory_path, file_path)`` for the week of ``self.date``.

        The file is named after the Monday of that week.
        """
        # weekday() is already 0..6, so the % 7 is redundant but harmless.
        first_day_of_the_week = self.date - timedelta(days=self.date.weekday() % 7)
        year = first_day_of_the_week.year
        month = first_day_of_the_week.month
        day = first_day_of_the_week.day
        path = f"{year}/{month}/"
        filepath = f"{year}/{month}/{day}.npy"
        return path, filepath

    def check_if_file_exists(self):
        """Return True when this week's object exists in the bucket.

        NOTE(review): any client error other than 404 also yields True,
        which can mask e.g. permission failures — confirm this is intended.
        """
        try:
            self.client.head_object(Bucket=self.bucket_name, Key=self.filepath)
        except ClientError as e:
            return int(e.response['Error']['Code']) != 404
        return True

    def load_state_matrix(self, s3_uri=None):
        """Download an ``.npy`` object (default: this week's) and return the array."""
        if not s3_uri:
            s3_uri = self.s3_uri
        bytes_ = io.BytesIO()
        parsed_s3 = urlparse(s3_uri)
        self.client.download_fileobj(
            Fileobj=bytes_, Bucket=parsed_s3.netloc, Key=parsed_s3.path[1:]
        )
        bytes_.seek(0)
        return np.load(bytes_)

    def create_directory_path(self):
        """Create the (empty) directory marker object for this week's path."""
        self.client.put_object(Bucket=self.bucket_name, Key=self.path)

    def load_previous_state_matrix(self):
        """Ensure this week's directory exists, then load last week's matrix."""
        self.create_directory_path()
        # Monday of the current week, then step back seven days.
        first_day_of_the_week = self.date - timedelta(days=self.date.weekday() % 7)
        last_week = first_day_of_the_week - timedelta(days=7)
        filepath = f"{last_week.year}/{last_week.month}/{last_week.day}.npy"
        s3_uri = f"s3://{self.bucket_name}/{filepath}"
        return self.load_state_matrix(s3_uri)

    def upload_state_matrix(self, state_matrix, s3_uri=None):
        """Serialize ``state_matrix`` to ``.npy`` and upload it; return True."""
        if not s3_uri:
            s3_uri = self.s3_uri
        bytes_ = io.BytesIO()
        np.save(bytes_, state_matrix)
        bytes_.seek(0)
        parsed_s3 = urlparse(s3_uri)
        self.client.upload_fileobj(
            Fileobj=bytes_, Bucket=parsed_s3.netloc, Key=parsed_s3.path[1:]
        )
        return True
|
import { combineReducers } from 'redux';
import {
SELECT_YEAR,
INVALIDATE_YEAR,
REQUEST_MEETS,
RECEIVE_MEETS,
REQUEST_MEET_DETAILS,
RECEIVE_MEET_DETAILS,
REQUEST_LOGIN,
LOGIN_FAILED,
LOGIN_SUCCESS,
SIGNUP_FAILED,
SIGNUP_SUCCESS,
REQUEST_SIGNUP,
USERS_LIST,
ADD_MESSAGES,
ADD_USER,
MESSAGE_RECEIVED,
VALIDATE_TOKEN,
TOKEN_GOOD,
TOKEN_BAD,
TYPING,
TYPERS_LIST
} from './actions';
// Currently selected season year; defaults to '2018'.
function selectedYear(state = '2018', action) {
  return action.type === SELECT_YEAR ? action.year : state;
}
// Fetch-lifecycle state for the meet list of a single year.
function meets(
  state = { isFetching: false, didInvalidate: false, items: [] },
  action
) {
  switch (action.type) {
    case INVALIDATE_YEAR:
      return { ...state, didInvalidate: true };
    case REQUEST_MEETS:
      return { ...state, isFetching: true, didInvalidate: false };
    case RECEIVE_MEETS:
      return {
        ...state,
        isFetching: false,
        didInvalidate: false,
        items: action.meets,
        lastUpdated: action.receivedAt,
      };
    default:
      return state;
  }
}
// Keys per-year meet state by action.year, delegating to the meets reducer.
function meetsByYear(state = {}, action) {
  switch (action.type) {
    case INVALIDATE_YEAR:
    case RECEIVE_MEETS:
    case REQUEST_MEETS:
      return { ...state, [action.year]: meets(state[action.year], action) };
    default:
      return state;
  }
}
// Fetch-lifecycle state for a single meet's detail payload.
function meetDetails(
  state = { isFetching: false, data: {} },
  action
) {
  switch (action.type) {
    case REQUEST_MEET_DETAILS:
      return { ...state, isFetching: true };
    case RECEIVE_MEET_DETAILS:
      return {
        ...state,
        isFetching: false,
        data: action.details,
        lastUpdated: action.receivedAt,
      };
    default:
      return state;
  }
}
// True while a login request is in flight.
function loggingIn(state = false, action) {
  if (action.type === REQUEST_LOGIN) {
    return true;
  }
  if (action.type === LOGIN_FAILED || action.type === LOGIN_SUCCESS) {
    return false;
  }
  return state;
}
// Holds the latest login response plus a user-facing status message.
function loggedIn(
  state = { result: {}, message: '', failed: false },
  action
) {
  switch (action.type) {
    case LOGIN_SUCCESS:
      return {
        ...state,
        result: action.res,
        message: 'Login success, please wait to be redireced.',
        failed: false,
      };
    case LOGIN_FAILED:
      return {
        ...state,
        result: action.res,
        message: 'Please try again.',
        failed: true,
      };
    default:
      return state;
  }
}
// Sign-up request state; the nested `signup` object is replaced wholesale
// on every transition (so REQUEST_SIGNUP intentionally drops any previous
// result).
function signUp(
  state = { signup: { result: {}, signUpLoading: false } },
  action
) {
  switch (action.type) {
    case REQUEST_SIGNUP:
      return { ...state, signup: { loading: true, failed: false } };
    case SIGNUP_SUCCESS:
      return {
        ...state,
        signup: {
          result: action.res,
          loading: false,
          failed: false,
          timestamp: action.timestamp,
        },
      };
    case SIGNUP_FAILED:
      return {
        ...state,
        signup: {
          result: action.res,
          loading: false,
          failed: true,
          timestamp: action.timestamp,
        },
      };
    default:
      return state;
  }
}
// Chat messages, appended in arrival order (local sends and socket
// deliveries are handled identically).
const messages = (state = [], action) => {
  switch (action.type) {
    case ADD_MESSAGES:
    case MESSAGE_RECEIVED: {
      const { message, author, id, timestamp } = action;
      return [...state, { message, author, id, timestamp }];
    }
    default:
      return state;
  }
};
// Connected users: appended one at a time, or replaced by a full roster.
const users = (state = [], action) => {
  switch (action.type) {
    case ADD_USER:
      return [...state, { name: action.name, id: action.id }];
    case USERS_LIST:
      return action.users;
    default:
      return state;
  }
};
// Auth-token validation lifecycle.
const tokenValidation = (state = {
  validating: false,
  validated: false,
  failed: false
}, action) => {
  switch (action.type) {
    case VALIDATE_TOKEN:
      return Object.assign({}, state, {
        validating: true,
        validated: false,
        failed: false
      });
    case TOKEN_GOOD:
      return Object.assign({}, state, {
        validated: true,
        validating: false,
        failed: false,
        decoded: action.decoded
      });
    case TOKEN_BAD:
      // Bug fix: the original wrote a stray `validate` key here, leaving
      // the real `validated` flag stale after a bad token.
      return Object.assign({}, state, {
        validated: false,
        validating: false,
        failed: true
      });
    default:
      return state;
  }
};
// Users currently typing; replaced wholesale by each TYPERS_LIST event.
const typers = (state = [], action) =>
  action.type === TYPERS_LIST ? action.typers : state;
// Root reducer: one slice reducer per top-level key of the store state.
const rootReducer = combineReducers({
meetsByYear,
selectedYear,
meetDetails,
loggingIn,
loggedIn,
typers,
signUp,
messages,
users,
tokenValidation
});
export default rootReducer;
|
<gh_stars>1-10
/* JPEG class wrapper to ijg jpeg library
Copyright (C) 2000-2012 <NAME>.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#ifndef _JPEGCOMPRESSOR_H
#define _JPEGCOMPRESSOR_H
#include <cstdio>
#include <string>
#include "RawTile.h"
extern "C"{
/* Undefine this to prevent compiler warning
*/
#undef HAVE_STDLIB_H
#include <jpeglib.h>
}
/// Expanded data destination object for buffered output used by IJG JPEG library
// Extends the libjpeg destination manager with the output buffer and
// strip bookkeeping needed for in-memory / streamed encoding. The `pub`
// member MUST stay first so a jpeg_destination_mgr* can be cast back.
typedef struct {
struct jpeg_destination_mgr pub; /**< public fields */
size_t size; /**< size of source data */
JOCTET *buffer; /**< working buffer */
unsigned char* source; /**< source data */
unsigned int strip_height; /**< used for stream-based encoding */
} iip_destination_mgr;
// Convenience pointer alias used by the destination-manager callbacks.
typedef iip_destination_mgr * iip_dest_ptr;
/// Wrapper class to the IJG JPEG library
class JPEGCompressor{
private:
/// the width, height and number of channels per sample for the image
unsigned int width, height, channels;
/// The JPEG quality factor
int Q;
/// Buffer for the JPEG header
// NOTE(review): fixed 1024 bytes — confirm long metadata cannot overflow it.
unsigned char header[1024];
/// Buffer for the image data
unsigned char *data;
/// Size of the JPEG header
unsigned int header_size;
/// JPEG library objects
struct jpeg_compress_struct cinfo;
struct jpeg_error_mgr jerr;
iip_destination_mgr dest_mgr;
iip_dest_ptr dest;
public:
/// Constructor
/** @param quality JPEG Quality factor (0-100) */
// NOTE(review): width/height/channels/data/header_size are not
// initialized here — presumably set by InitCompression/Compress before
// use; confirm no accessor reads them earlier. Also note Q is not
// clamped here, unlike setQuality().
JPEGCompressor( int quality ) { Q = quality; };
/// Set the compression quality
/** @param factor Quality factor (0-100); values outside are clamped */
void setQuality( int factor ) {
if( factor < 0 ) Q = 0;
else if( factor > 100 ) Q = 100;
else Q = factor;
};
/// Get the current quality level
int getQuality() { return Q; }
/// Initialise strip based compression
/** If we are doing a strip based encoding, we need to first initialise
with InitCompression, then compress a single strip at a time using
CompressStrip and finally clean up using Finish
@param rawtile tile containing the image to be compressed
@param strip_height pixel height of the strip we want to compress
@return header size
*/
// NOTE(review): dynamic exception specifications (`throw (std::string)`)
// are deprecated since C++11 and removed in C++17 — consider replacing
// with documentation plus noexcept(false) when modernizing.
void InitCompression( const RawTilePtr rawtile, unsigned int strip_height ) throw (std::string);
/// Compress a strip of image data
/** @param s source image data
@param o output buffer
@param tile_height pixel height of the tile we are compressing
*/
unsigned int CompressStrip( unsigned char* s, unsigned char* o, unsigned int tile_height ) throw (std::string);
/// Finish the strip based compression and free memory
/** @param output output buffer
@return size of output generated
*/
unsigned int Finish( unsigned char* output ) throw (std::string);
/// Compress an entire buffer of image data at once in one command
/** @param t tile of image data */
int Compress( RawTilePtr t ) throw (std::string);
/// Add metadata to the JPEG header
/** @param m metadata */
void addMetadata( const std::string& m );
/// Return the JPEG header size
unsigned int getHeaderSize() { return header_size; }
/// Return a pointer to the header itself
inline unsigned char* getHeader() { return header; }
};
#endif
|
# Launch ERM training on the colored-MNIST dataset (3-channel 128x128
# images), training on domains R and G and evaluating on W and RGB.
# TODO(review): confirm match_case/match_flag semantics against
# `python train.py --help` — they are not self-describing.
python train.py \
--dataset colored_mnist \
--method_name erm \
--match_case 0.01 \
--match_flag 1 \
--epochs 100 \
--batch_size 128 \
--pos_metric cos \
--img_c 3 --img_w 128 --img_h 128 \
--train_domains R G \
--test_domains W RGB
|
<filename>packages/app/src/modules/currentAnalyticalObject.js
import {
DIMENSION_ID_ORGUNIT,
layoutGetAxisIdDimensionIdsObject,
} from '@dhis2/analytics'
import { getInverseLayout } from './layout'
// Build the absolute "/parent/.../id" path for an org unit from the
// parent graph map, or undefined when the unit is unknown.
export const getPathForOrgUnit = (orgUnit, parentGraphMap) => {
    const parentPath = parentGraphMap[orgUnit.id]
    if (parentPath === undefined) {
        return undefined
    }
    // The root org unit is stored with an empty string as its parent path.
    return parentPath === ''
        ? `/${orgUnit.id}`
        : `/${parentPath}/${orgUnit.id}`
}
// Decorate every org-unit item on whichever axis carries the org-unit
// dimension with its absolute path; other layouts pass through untouched.
export const appendPathsToOrgUnits = (current, ui) => {
    const dimensionIdsByAxis = layoutGetAxisIdDimensionIdsObject(current)
    const ouAxis = getInverseLayout(dimensionIdsByAxis)[DIMENSION_ID_ORGUNIT]
    if (!ouAxis) {
        return current
    }
    const { parentGraphMap } = ui
    const withPaths = dimension => ({
        ...dimension,
        items: dimension.items.map(item => ({
            ...item,
            path: getPathForOrgUnit(item, parentGraphMap),
        })),
    })
    return { ...current, [ouAxis]: current[ouAxis].map(withPaths) }
}
// Blank out identity fields rather than deleting them, so the returned
// object's key set stays predictable for callers.
export const removeUnnecessaryAttributesFromAnalyticalObject = current => {
    return {
        ...current,
        id: undefined,
        name: undefined,
        displayName: undefined,
    }
}
// Copy display names from the metadata map onto every dimension item in
// columns, filters and rows (name is undefined for unknown ids).
export const appendDimensionItemNamesToAnalyticalObject = (
    current,
    metadata
) => {
    const withNames = dimension => ({
        ...dimension,
        items: dimension.items.map(item => {
            const entry = metadata[item.id]
            return { ...item, name: entry ? entry.name : undefined }
        }),
    })
    return {
        ...current,
        columns: current.columns.map(withNames),
        filters: current.filters.map(withNames),
        rows: current.rows.map(withNames),
    }
}
// Copy dimensionItemType from the metadata map onto every dimension item
// in columns, filters and rows (undefined for unknown ids).
export const appendDimensionItemTypeToAnalyticalObject = (
    current,
    metadata
) => {
    const withTypes = dimension => ({
        ...dimension,
        items: dimension.items.map(item => {
            const entry = metadata[item.id]
            return {
                ...item,
                dimensionItemType: entry ? entry.dimensionItemType : undefined,
            }
        }),
    })
    return {
        ...current,
        columns: current.columns.map(withTypes),
        filters: current.filters.map(withTypes),
        rows: current.rows.map(withTypes),
    }
}
// Merge the UI parent graph map over the analytical object's own map;
// UI entries win on key collisions.
export const appendCompleteParentGraphMap = (current, { parentGraphMap }) => {
    const mergedGraphMap = {
        ...current.parentGraphMap,
        ...parentGraphMap,
    }
    return { ...current, parentGraphMap: mergedGraphMap }
}
// Run the full preparation pipeline over the analytical object:
// strip identity attrs, attach item names and types, add org-unit paths,
// then merge in the complete parent graph map.
export const prepareCurrentAnalyticalObject = (current, metadata, ui) => {
    const withoutAttrs = removeUnnecessaryAttributesFromAnalyticalObject(
        current
    )
    const withItemNames = appendDimensionItemNamesToAnalyticalObject(
        withoutAttrs,
        metadata
    )
    const withItemTypes = appendDimensionItemTypeToAnalyticalObject(
        withItemNames,
        metadata
    )
    const withOuPaths = appendPathsToOrgUnits(withItemTypes, ui)
    return appendCompleteParentGraphMap(withOuPaths, ui)
}
|
#!/bin/bash
# Upload a local file to a MinIO (S3-compatible) server using AWS
# signature-v2 signing over plain HTTP.
# usage: chmod +x ./minio.sh
# usage: ./minio.sh my-bucket my-file.zip
set -euo pipefail

# Fail fast with a usage hint instead of building a broken request.
if [ "$#" -ne 2 ]; then
    echo "usage: $0 <bucket> <file>" >&2
    exit 1
fi

bucket=$1
file=$2
host=localhost:9000
# Default MinIO playground credentials — replace for real deployments.
s3_key='Q3AM3UQ867SPQQA43P2F'
s3_secret='zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG'
base_file=$(basename "${file}")
resource="/${bucket}/${base_file}"
content_type="application/octet-stream"
date=$(date -R)
# String-to-sign for AWS signature v2: VERB, MD5 (empty), content type,
# date and the canonical resource, joined by newlines. Quoting the
# expansion matters: unquoted it would be word-split before echo.
_signature="PUT\n\n${content_type}\n${date}\n${resource}"
signature=$(echo -en "${_signature}" | openssl sha1 -hmac "${s3_secret}" -binary | base64)
curl -v -X PUT -T "${file}" \
     -H "Host: $host" \
     -H "Date: ${date}" \
     -H "Content-Type: ${content_type}" \
     -H "Authorization: AWS ${s3_key}:${signature}" \
     http://$host${resource}
|
// The "//%" lines below look like MakeCode block annotations (toolbox
// color, Font Awesome icon, ordering weight) rather than plain comments —
// TODO(review): confirm this file targets MakeCode; do not reformat them.
/**
 * Sound
 */
//% color=#f44242 icon="\uf130" weight=100
namespace sound {
}
/**
 * Messaging
 */
//% color=#6657b2 icon="\uf003" weight=99
namespace messaging {
}
|
#!/bin/bash
# Resolve this package's directory and launch the literate-TS entry point
# through the custom ESM loader, forwarding all CLI arguments.
# Fix: quote every command substitution and path expansion so the script
# survives installation paths containing spaces.
PKGDIR="$(readlink -f "$(dirname "${BASH_SOURCE[0]}")")"
export TS_CONFIG_PATH="$(readlink -f "${PKGDIR}/../../mk/tsconfig-literate.json")"
node --loader "${PKGDIR}/../../mk/loader.mjs" --experimental-specifier-resolution=node "${PKGDIR}/lib/main_node.js" "$@"
|
import React from 'react';
// Stateless down-arrow SVG icon; size and fill color come from props.
// NOTE(review): there are no defaultProps, so omitting width/height/color
// renders undefined attributes — confirm all callers pass them.
export default class ArrowDown extends React.Component {
render() {
const { width, height, color } = this.props;
return (
<svg width={width} height={height} viewBox="0 0 140 140" version="1.1" >
<g id="Icons" stroke="none" strokeWidth="1" fill="none" fillRule="evenodd">
<g transform="translate(-3736.000000, -1300.000000)" fill={color} fillRule="nonzero" id="Path">
<polygon points="3746.87694 1365.02274 3782.43005 1390.35647 3864.91796 1318 3876 1318 3876 1329.12181 3782.43005 1422.05544 3736 1375.36778 3736 1365.02274"></polygon>
</g>
</g>
</svg>
)
}
}
|
package controllers;
import models.Product;
import models.ProductDetail;
import play.data.DynamicForm;
import play.data.FormFactory;
import play.db.jpa.JPAApi;
import play.db.jpa.Transactional;
import play.mvc.Controller;
import play.mvc.Result;
import javax.inject.Inject;
import java.util.List;
public class ProductController extends Controller
{
// Play form binder and JPA entry point, both injected below.
private FormFactory formFactory;
private JPAApi jpaApi;
/**
 * Injects the form factory and the JPA API used by every action method.
 */
@Inject
public ProductController(FormFactory formFactory, JPAApi jpaApi)
{
this.formFactory = formFactory;
this.jpaApi = jpaApi;
}
/**
 * Lists every product as a ProductDetail projection.
 * NOTE(review): this constructor expression passes 5 arguments while most
 * other queries here pass 8 — confirm ProductDetail declares both
 * constructors, or this query fails at runtime.
 */
@Transactional(readOnly = true)
public Result getProducts()
{
List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size)" +
"FROM Product p", ProductDetail.class)
.getResultList();
return ok(views.html.products.render(products));
}
/**
 * Shows a single product looked up by its id.
 * NOTE(review): getSingleResult() throws NoResultException for an unknown
 * id — consider handling it to return a 404 instead of a 500.
 */
@Transactional(readOnly = true)
public Result getProduct(int productId)
{
Product product = jpaApi.em().createQuery("SELECT p FROM Product p WHERE productId = :productId", Product.class).
setParameter("productId", productId).getSingleResult();
return ok(views.html.product.render(product));
}
/**
 * Lists jams, matched by product-name suffix rather than by category.
 * NOTE(review): other listings join Category; matching "'%jam'" by name is
 * fragile — confirm names are the intended filter here.
 */
@Transactional (readOnly = true)
public Result getJams()
{
List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock)" +
"FROM Product p WHERE productName LIKE '%jam' ORDER BY productName", ProductDetail.class).getResultList();
return ok(views.html.products.render(products));
}
/**
 * Lists jellies, matched by product-name suffix (see note on getJams
 * about name-based vs category-based filtering).
 */
@Transactional (readOnly = true)
public Result getJellies()
{
List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock)" +
"FROM Product p WHERE productName LIKE '%jelly' ORDER BY productName", ProductDetail.class).getResultList();
return ok(views.html.products.render(products));
}
/**
 * Lists butters, matched by product-name suffix (see note on getJams
 * about name-based vs category-based filtering).
 */
@Transactional (readOnly = true)
public Result getButters()
{
List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock)" +
"FROM Product p WHERE productName LIKE '%butter' ORDER BY productName", ProductDetail.class).getResultList();
return ok(views.html.products.render(products));
}
/**
 * Lists products in the 'Sprays' category, ordered by name.
 */
@Transactional (readOnly = true)
public Result getSprays()
{
List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock) " +
"FROM Product p " +
"JOIN Category c ON c.categoryId = p.categoryId " +
"WHERE c.categoryName " +
"LIKE 'Sprays' " +
"ORDER BY productName", ProductDetail.class).getResultList();
return ok(views.html.products.render(products));
}
/**
 * Lists products in the 'Roll-Ons' category, ordered by name.
 */
@Transactional (readOnly = true)
public Result getRollOns()
{
List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock) " +
"FROM Product p " +
"JOIN Category c ON c.categoryId = p.categoryId " +
"WHERE c.categoryName " +
"LIKE 'Roll-Ons' " +
"ORDER BY productName", ProductDetail.class).getResultList();
return ok(views.html.products.render(products));
}
/**
 * Lists jam/jelly/butter products that have stock on hand.
 * The WHERE clause relies on AND binding tighter than OR, pairing each
 * category test with its own quantity check — the precedence works out
 * as intended here.
 */
@Transactional (readOnly = true)
public Result getInStock()
{
List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock) " +
"FROM Product p " +
"JOIN Category c ON c.categoryId = p.categoryId " +
"WHERE c.categoryName LIKE 'Jam' AND p.quantityInStock > 0 " +
"OR c.categoryName LIKE 'Jelly' AND p.quantityInStock > 0 " +
"OR c.categoryName LIKE 'Butter' AND p.quantityInStock > 0 " +
"ORDER BY productName", ProductDetail.class).getResultList();
return ok(views.html.products.render(products));
}
/**
 * Lists all products flagged as vegan, ordered by name.
 */
@Transactional (readOnly = true)
public Result getVegan()
{
List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock) " +
"FROM Product p " +
"WHERE vegan = true " +
"ORDER BY productName", ProductDetail.class).getResultList();
return ok(views.html.products.render(products));
}
/**
 * Lists jam/jelly/butter products that are out of stock (pre-order only).
 * Mirrors getInStock: the AND/OR precedence pairs each category with its
 * own quantity check, which is the intended grouping.
 */
@Transactional (readOnly = true)
public Result getPreOrders()
{
List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock) " +
"FROM Product p " +
"JOIN Category c ON c.categoryId = p.categoryId " +
"WHERE c.categoryName LIKE 'Jam' AND p.quantityInStock = 0 " +
"OR c.categoryName LIKE 'Jelly' AND p.quantityInStock = 0 " +
"OR c.categoryName LIKE 'Butter' AND p.quantityInStock = 0 " +
"ORDER BY productName", ProductDetail.class).getResultList();
return ok(views.html.products.render(products));
}
/**
 * Lists products in the 'Beard Oil' category, ordered by name.
 */
@Transactional (readOnly = true)
public Result getBeardOils()
{
List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock) " +
"FROM Product p " +
"JOIN Category c ON c.categoryId = p.categoryId " +
"WHERE c.categoryName " +
"LIKE 'Beard Oil' " +
"ORDER BY productName", ProductDetail.class).getResultList();
return ok(views.html.products.render(products));
}
/**
 * Lists every product in the 'Beard Balm' category, ordered by product name.
 */
@Transactional (readOnly = true)
public Result getBeardBalms()
{
    final String jpql =
            "SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock) " +
            "FROM Product p " +
            "JOIN Category c ON c.categoryId = p.categoryId " +
            "WHERE c.categoryName " +
            "LIKE 'Beard Balm' " +
            "ORDER BY productName";
    List<ProductDetail> beardBalms = jpaApi.em()
            .createQuery(jpql, ProductDetail.class)
            .getResultList();
    return ok(views.html.products.render(beardBalms));
}
/**
 * Renders the jams/jellies/butters order page, optionally filtered by a
 * search term bound from the request (matched against the category name or
 * the product name).
 */
@Transactional (readOnly = true)
public Result getJamminJellies()
{
    DynamicForm form = formFactory.form().bindFromRequest();
    String search = form.get("search");
    if(search == null)
    {
        search = "";
    }
    // Wrap in wildcards so an empty search matches everything.
    search = "%" + search + "%";
    // BUG FIX: the original WHERE clause relied on implicit precedence
    // (AND binds tighter than OR), so it parsed as
    //   Jam OR Jelly OR (Butter AND category LIKE :search AND name LIKE :search)
    // — the search filter only applied to the 'Butter' branch, and even there
    // required BOTH names to match. Parenthesize the category group and accept
    // a match on either the category name or the product name.
    List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock) " +
    "FROM Product p " +
    "JOIN Category c ON c.categoryId = p.categoryId " +
    "WHERE (c.categoryName LIKE 'Jam' " +
    "OR c.categoryName LIKE 'Jelly' " +
    "OR c.categoryName LIKE 'Butter') " +
    "AND (c.categoryName LIKE :search " +
    "OR p.productName LIKE :search) " +
    "ORDER BY productName", ProductDetail.class)
    .setParameter("search", search)
    .getResultList();
    return ok(views.html.orderjamminjellies.render(products));
}
/**
 * Renders the naturals order page: beard balms, beard oils, sprays and
 * roll-ons, grouped/ordered by category name.
 */
// CONSISTENCY FIX: every other read-only query in this controller is marked
// @Transactional (readOnly = true); this one was missing the flag.
@Transactional (readOnly = true)
public Result getNaturals()
{
    // BUG FIX: the filter used 'Roll-ons' while the category literal used
    // elsewhere in this controller is 'Roll-Ons'; LIKE is case-sensitive on
    // most databases, so the lowercase form would match nothing.
    List<ProductDetail> products = jpaApi.em().createQuery("SELECT NEW ProductDetail (p.productId, p.productName, p.price, p.ingredients, p.size, p.categoryId, p.seasonId, p.quantityInStock) " +
    "FROM Product p " +
    "JOIN Category c ON c.categoryId = p.categoryId " +
    "WHERE c.categoryName LIKE 'Beard Balm' " +
    "OR c.categoryName LIKE 'Beard Oil' " +
    "OR c.categoryName LIKE 'Sprays' " +
    "OR c.categoryName LIKE 'Roll-Ons' " +
    "ORDER BY categoryName", ProductDetail.class).getResultList();
    return ok(views.html.ordernaturals.render(products));
}
/**
 * Serves the stored picture bytes for a product.
 *
 * @param id the product id from the route
 * @return 200 with the JPEG bytes, or 404 when the product does not exist
 *         or has no picture stored
 */
@Transactional(readOnly = true)
public Result getPicture(int id)
{
    // ROBUSTNESS FIX: getSingleResult() throws NoResultException for an
    // unknown id, which surfaces as a 500; fetch a bounded list instead and
    // translate "no such product" into a 404 like the missing-picture case.
    List<Product> matches = jpaApi.em().
    createQuery("SELECT p FROM Product p WHERE productId = :productId", Product.class).
    setParameter("productId", id).
    setMaxResults(1).
    getResultList();
    Result picture;
    if(matches.isEmpty() || matches.get(0).getPicture() == null)
    {
        picture = notFound();
    }
    else
    {
        // "image/jpeg" is the registered MIME type; "image/jpg" is nonstandard.
        picture = ok(matches.get(0).getPicture()).as("image/jpeg");
    }
    return picture;
}
}
|
#!/bin/sh
# Request/renew a Let's Encrypt certificate for every VIRTUAL_HOST found by
# the template engine, then regenerate the web server configuration.
# Requires EMAIL to be set and AGREEMENT=yes (explicit ToS consent).
#
# NOTE: '-o pipefail' was removed — it is not POSIX sh (dash errors on it),
# and this script contains no pipelines, so it had no effect anyway.
set -e -u
if [ -z "${EMAIL:-}" ]; then
# BUG FIX: the original '2>&1' redirected stderr to stdout (a no-op for a
# plain echo); '>&2' sends the diagnostic to stderr as intended.
echo "EMAIL environment variable for Let's Encrypt is not found." >&2
exit 1
fi
if [ "x${AGREEMENT:-}" != "xyes" ]; then
echo "you should agree to Terms of Services on Let's Encrypt." >&2
exit 1
fi
{{ range $host, $container := groupByMulti $ "Env.VIRTUAL_HOST" "," }}
{{ if (ne $host "") }}
[ -d "/opt/data/{{ $host }}" ] || mkdir -p "/opt/data/{{ $host }}"
letsencrypt certonly \
--webroot \
--webroot-path "/opt/data/{{ $host }}" \
--email "$EMAIL" \
--domain "{{ $host }}" \
--agree-tos \
--non-interactive \
--keep \
${EXTRA_ARGS:-} \
&& sh /opt/app/run generate
{{ end }}
{{ end }}
|
<filename>src/views/Bridge.js
import React from 'react';
import Navbar from "./Navbar";
import BridgeData from "../bridgeData";
import '.././App.scss';
class Bridge extends React.Component {
render(){
const { match: { params } } = this.props;
const selectedBridge = BridgeData[params.bridgeId-1];
const logo = selectedBridge.sponsors.map(i=>{
return ( <div key={Math.random()*Math.random()} className="sponsorLogoImg"><img alt="Sponsora logo" src={require("../img/bridges/"+selectedBridge.orderId+"/sponsor"+i.id+".png")}/></div> );
})
let bridgeDesc = <div><span className="bridgeDescription" dangerouslySetInnerHTML={{__html: selectedBridge.description}}></span><br/></div>;
if(selectedBridge.description.length <1) bridgeDesc = "";
return (
<div className="container">
<div className="bridgeItem bridgeItem--no-padding">
<img src={require("../img/bridges/"+params.bridgeId+"/i.jpg")} className="bridgeViewImage" alt="Tilta attēls" />
<div className="paddingContainer">
<span className="bridgeIdLabel">{selectedBridge.orderId}</span>
<h1>{selectedBridge.title}</h1><br/>
{bridgeDesc}
<h3>Pasākumi šajā vietā</h3>
<span className="bridgeActivities" dangerouslySetInnerHTML={{__html: selectedBridge.activities}}></span>
<h3>Atbalstītāji</h3>
<div className="sponsorLogos">
{logo}
</div>
</div>
</div>
<Navbar simple="true" />
</div>
);
}
}
// Dead scaffold from an earlier iteration, kept commented out by the author.
// function Bridges() {
// return (
// );
// }
export default Bridge;
|
#!/bin/sh
# Get parameters from the CLI.
# Parse the CLI: -m "<message>" or -f <json file>, followed by a topic name.
# Populates MESSAGE_FROM_CLI / JSON_MESSAGE_FILE_NAME_FROM_CLI and echoes a
# diagnostic dump of the arguments it received.
getParamsFromCli()
{
USAGE="usage: ${0##*/} [-f <JSON File Name> | -m <\"Message\">] <Topic Name>"
echo
echo "# arguments called with ----> ${@} "
echo "# \$1 ----------------------> $1 "
echo "# \$2 ----------------------> $2 "
echo "# \$3 ----------------------> $3 "
echo "# \$4 ----------------------> $4 "
echo "# \$5 ----------------------> $5 "
echo "# path to me ---------------> ${0} "
echo "# parent path --------------> ${0%/*} "
echo "# my name ------------------> ${0##*/} "
echo
# Initialise so the emptiness checks below are well-defined even when the
# corresponding option was not supplied.
MESSAGE_FROM_CLI=""
JSON_MESSAGE_FILE_NAME_FROM_CLI=""
if [ $# -eq 0 ] ; then
echo "No arguments supplied - $USAGE"
exit 1
fi
if [ $# -ne 3 ] ; then
echo "Incorrect # of arguments - $USAGE"
exit 1
fi
# BUG FIX: the optstring was "m::f::" — POSIX getopts has no optional-arg
# '::' syntax. Use "m:f:" (each option takes an argument) with a leading ':'
# so getopts reports errors silently and the ':' / '\?' cases below can fire.
while getopts ":m:f:" opt "$@"; do
case $opt in
m)
#echo "-m was triggered, Parameter: $OPTARG" >&2
MESSAGE_FROM_CLI=$OPTARG
;;
f)
#echo "-f was triggered, Parameter: $OPTARG" >&2
JSON_MESSAGE_FILE_NAME_FROM_CLI=$OPTARG
;;
\?)
echo "Invalid option: -$OPTARG $USAGE" >&2
exit 1
;;
:)
echo "Option -$OPTARG requires an argument. $USAGE" >&2
exit 1
;;
*)
echo "Unimplemented option: -$OPTARG - $USAGE" >&2
exit 1
;;
esac
done
# BUG FIX: '[[ ... ]]' is a bashism; this script declares #!/bin/sh, so use
# POSIX '[ ... ]' with -n/-z and quoted expansions.
if [ -n "$MESSAGE_FROM_CLI" ] ; then
echo "Text Message from CLI = [$MESSAGE_FROM_CLI]"
fi
if [ -n "$JSON_MESSAGE_FILE_NAME_FROM_CLI" ] ; then
echo "JSON Message file name from CLI = [$JSON_MESSAGE_FILE_NAME_FROM_CLI]"
fi
if [ -z "$MESSAGE_FROM_CLI" ] && [ -z "$JSON_MESSAGE_FILE_NAME_FROM_CLI" ] ; then
echo "No message to publish - [$USAGE]"
# BUG FIX: having nothing to publish is a failure; the original fell
# through and the script continued as if parsing had succeeded.
exit 1
fi
}
# BUG FIX: quote "$@" so arguments containing whitespace survive intact when
# forwarded to the parser (unquoted $@ re-splits them into separate words).
getParamsFromCli "$@"
# Dump the same diagnostics at top level for comparison with the function's view.
echo
echo "# arguments called with ----> ${@} "
echo "# \$1 ----------------------> $1 "
echo "# \$2 ----------------------> $2 "
echo "# \$3 ----------------------> $3 "
echo "# \$4 ----------------------> $4 "
echo "# \$5 ----------------------> $5 "
echo "# path to me ---------------> ${0} "
echo "# parent path --------------> ${0%/*} "
echo "# my name ------------------> ${0##*/} "
echo
exit 0
|
<gh_stars>1-10
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { BaseAction } from './base_action';
import { ACTION_TYPES, ERROR_CODES } from '../../../common/constants';
import { i18n } from '@kbn/i18n';
/**
 * Watcher "pagerduty" action model. Converts between the Kibana (downstream)
 * and Elasticsearch (upstream) JSON representations; the only action-specific
 * field is `description`.
 */
export class PagerDutyAction extends BaseAction {
constructor(props, errors) {
// Force the type tag before delegating, so callers cannot construct a
// PagerDutyAction with a mismatched type.
props.type = ACTION_TYPES.PAGERDUTY;
super(props, errors);
this.description = props.description;
}
// To Kibana: base fields plus the flat `description`.
get downstreamJson() {
const result = super.downstreamJson;
Object.assign(result, {
description: this.description,
});
return result;
}
// From Kibana
static fromDownstreamJson(json) {
const props = super.getPropsFromDownstreamJson(json);
// NOTE(review): validateJson() inspects json.pagerduty, i.e. the *upstream*
// (Elasticsearch) shape, but the downstream JSON produced by downstreamJson
// above carries `description` at the top level — this call looks like it
// always reports ERR_PROP_MISSING. Confirm against the callers before
// relying on `errors` here.
const { errors } = this.validateJson(json);
Object.assign(props, {
description: json.description,
});
const action = new PagerDutyAction(props, errors);
return { action, errors };
}
// To Elasticsearch: nest the description under `pagerduty`, keyed by action id.
get upstreamJson() {
const result = super.upstreamJson;
result[this.id] = {
pagerduty: {
description: this.description,
},
};
return result;
}
// From Elasticsearch: json.actionJson holds the nested `pagerduty` object.
static fromUpstreamJson(json) {
const props = super.getPropsFromUpstreamJson(json);
const { errors } = this.validateJson(json.actionJson);
Object.assign(props, {
description: json.actionJson.pagerduty.description,
});
const action = new PagerDutyAction(props, errors);
return { action, errors };
}
// Validates the upstream (actionJson) shape: requires `pagerduty` and
// `pagerduty.description`. Returns { errors: [...] } or { errors: null }.
static validateJson(json) {
const errors = [];
if (!json.pagerduty) {
errors.push({
code: ERROR_CODES.ERR_PROP_MISSING,
message: i18n.translate(
'xpack.watcher.models.pagerDutyAction.actionJsonPagerDutyPropertyMissingBadRequestMessage',
{
defaultMessage: 'JSON argument must contain an {actionJsonPagerDuty} property',
values: {
actionJsonPagerDuty: 'actionJson.pagerduty',
},
}
),
});
}
if (json.pagerduty && !json.pagerduty.description) {
errors.push({
code: ERROR_CODES.ERR_PROP_MISSING,
message: i18n.translate(
'xpack.watcher.models.pagerDutyAction.actionJsonPagerDutyDescriptionPropertyMissingBadRequestMessage',
{
defaultMessage: 'JSON argument must contain an {actionJsonPagerDutyText} property',
values: {
actionJsonPagerDutyText: 'actionJson.pagerduty.description',
},
}
),
});
}
// Normalise "no errors" to null so callers can truth-test the result.
return { errors: errors.length ? errors : null };
}
}
|
<filename>structBytes/StructToBytes.go
package structBytes
import (
"bytes"
"encoding/binary"
"reflect"
)
// Write resets the output buffer and serialises obj into it, reporting
// whether the whole value was written successfully.
func (o *object) Write(obj interface{}) bool {
	buf := new(bytes.Buffer)
	o.Buffer = buf
	return o.WriteValue(obj, 0)
}
// WriteValue appends the big-endian binary encoding of obj to o.Buffer,
// recursing through pointers, structs, slices and arrays; depth tracks the
// recursion level. It reports whether encoding succeeded.
//
// NOTE(review): `c` is not defined in this chunk — presumably a package-level
// error collector whose Error(...) records the argument and returns false on
// a non-nil error; confirm its contract before relying on these paths.
func (o *object) WriteValue(obj interface{}, depth int) (ok bool) {
v := reflect.ValueOf(obj)
switch v.Kind() {
// NOTE(review): the Interface case is an intentional-looking no-op, so
// interface-typed values are silently skipped (and reported as success) —
// confirm this is the desired behaviour.
case reflect.Interface:
case reflect.Ptr:
// Dereference and encode the pointee.
if !o.WriteValue(v.Elem().Interface(), depth+1) {
return
}
case reflect.Struct:
// Encode each exported-or-not field in declaration order.
l := v.NumField()
for i := 0; i < l; i++ {
if !o.WriteValue(v.Field(i).Interface(), depth+1) {
return
}
}
case reflect.Slice, reflect.Array:
// Encode each element in index order. Note: the element count itself is
// not written, so the reader must know the length in advance.
l := v.Len()
for i := 0; i < l; i++ {
if !o.WriteValue(v.Index(i).Interface(), depth+1) {
return
}
}
case reflect.Int:
// Platform-sized int is narrowed to int32 for a stable wire format;
// reject values that do not round-trip.
i := int32(obj.(int))
if int(i) != obj.(int) {
return c.Error("Int does not fit into int32")
}
if !c.Error(binary.Write(o.Buffer, binary.BigEndian, i)) {
return
}
case reflect.Bool:
// Bools are encoded as a single byte: 1 for true, 0 for false.
b := uint8(0)
if v.Bool() {
b = 1
}
if !c.Error(binary.Write(o.Buffer, binary.BigEndian, b)) {
return
}
default:
// Fixed-size kinds (int8..int64, uints, floats, etc.) are handled
// directly by encoding/binary.
return c.Error(binary.Write(o.Buffer, binary.BigEndian, obj))
}
return true
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.