text stringlengths 1 1.05M |
|---|
#!/bin/bash -xp
##!/bin/bash
###################
# Script: scan_ports.sh
# Autor: rolmedo
# Fecha: 01/02/19
# Función: Leer los ficheros donde tenemos todas las ip públicas de nuestros
# equipos y clientes para poder realizar una serie de escaneos de puertos
# abiertos. Estamos estudiando la mejor manera de escanear puertos ya
# que la suma de estos tanto UDP como TCP supera los 120.000.
#
###################################################################################
# Equipment list files to process; each file holds one public IP per line.
# (Whitespace-separated word list, iterated unquoted by the main loop.)
ArrayFicherosEquipos=" equipos_185.32.28.112_01-02-19.txt
equipos_217.13.124.128_01-02-19.txt
equipos_185.32.28.128_01-02-19.txt
equipos_217.13.124.192_01-02-19.txt
equipos_212.92.39.0_01-02-19.txt
equipos_217.13.124.64_01-02-19.txt
equipos_212.92.55.0_01-02-19.txt
equipos_94.24.114.0_01-02-19.txt
equipos_217.13.118.96_01-02-19.txt
equipos_217.13.119.0_01-02-19.txt
equipos_217.13.124.0_01-02-19.txt"
# Smaller test sets, kept for debugging:
#ArrayFicherosEquipos="equipos_185.32.28.112_01-02-19.txt equipos_185.32.28.128_01-02-19.txt"
#ArrayFicherosEquipos="equipos_185.32.28.112_01-02-19.txt"
# Script start time (HH:MM) for the final summary, and today's date used in
# every per-host log filename.
startTime=`date +%H:%M`
dateToday=`date +%d_%m_%Y`
# Base directory holding the equipment list files. NOTE: the trailing slash is
# required — the scan functions concatenate it directly with the filename.
path_files="/root/scripts/scan_ports/"
# Extract the network-address portion of an equipment-list filename.
# e.g. "equipos_185.32.28.112_01-02-19.txt" -> "185.32.28.112"
# Arguments: $1 - filename of the form equipos_<network>_<date>.txt
# Outputs:   the <network> field on stdout
function parse_name_file_for_directory {
    namefile=$1
    # Field 2 of the "_"-separated name is the network address.
    # $(...) replaces the legacy backticks; quoting prevents word-splitting.
    namedirectory=$(echo "$namefile" | awk -F"_" '{ print $2 }')
    echo "$namedirectory"
}
# Scan the 25 most common ports of every host listed in the given file.
# One log file per host is written inside the topPorts/ sub-directory.
# NOTE: intentionally leaves the shell cd'ed into topPorts/; the main loop
# compensates with `cd ../` after the call.
# TODO(review): the seven scan_machines_* functions differ only in directory
# name and nmap arguments; fold them into one parameterized function.
function scan_machines_topPorts {
    ficheroTXT=$1
    # mkdir -p tolerates an existing directory, so no if/else is needed.
    mkdir -p "topPorts"
    cd "topPorts" || return 1
    while read -r machine;
    do
        outfile="${machine}_${dateToday}.txt"
        echo "Inicio SCAN $machine" > "$outfile"
        # Scan-local timestamp: the original reused the global startTime,
        # clobbering the script start time shown in the final summary.
        scanStart=$(date +%H:%M)
        # BUG FIX: this redirect was '>', which truncated the line above.
        echo "Hora de Inicio Scan: $scanStart" >> "$outfile"
        nmap --top-ports 25 "$machine" >> "$outfile"
        scanFinish=$(date +%H:%M)
        echo "Hora fin Scan: $scanFinish" >> "$outfile"
    done < "$path_files$ficheroTXT"
}
# Scan ports 10000-20000 of every host listed in the given file.
# One log file per host is written inside the 10k-20k/ sub-directory.
# NOTE: intentionally leaves the shell cd'ed into 10k-20k/; the main loop
# compensates with `cd ../` after the call.
function scan_machines_10k-20k {
    ficheroTXT=$1
    mkdir -p "10k-20k"
    cd "10k-20k" || return 1
    while read -r machine;
    do
        outfile="${machine}_${dateToday}.txt"
        echo "Inicio SCAN $machine" > "$outfile"
        # Scan-local timestamp: avoid clobbering the global startTime.
        scanStart=$(date +%H:%M)
        # BUG FIX: this redirect was '>', which truncated the line above.
        echo "Hora de Inicio Scan: $scanStart" >> "$outfile"
        nmap -p 10000-20000 -T5 "$machine" >> "$outfile"
        scanFinish=$(date +%H:%M)
        echo "Hora fin Scan: $scanFinish" >> "$outfile"
    done < "$path_files$ficheroTXT"
}
# Scan ports 20000-30000 of every host listed in the given file.
# One log file per host is written inside the 20k-30k/ sub-directory.
# NOTE: intentionally leaves the shell cd'ed into 20k-30k/; the main loop
# compensates with `cd ../` after the call.
function scan_machines_20k-30k {
    ficheroTXT=$1
    mkdir -p "20k-30k"
    cd "20k-30k" || return 1
    while read -r machine;
    do
        outfile="${machine}_${dateToday}.txt"
        echo "Inicio SCAN $machine" > "$outfile"
        # Scan-local timestamp: avoid clobbering the global startTime.
        scanStart=$(date +%H:%M)
        # BUG FIX: this redirect was '>', which truncated the line above.
        echo "Hora de Inicio Scan: $scanStart" >> "$outfile"
        nmap -p 20000-30000 -T5 "$machine" >> "$outfile"
        scanFinish=$(date +%H:%M)
        echo "Hora fin Scan: $scanFinish" >> "$outfile"
    done < "$path_files$ficheroTXT"
}
# Scan ports 30000-40000 of every host listed in the given file.
# One log file per host is written inside the 30k-40k/ sub-directory.
# NOTE: intentionally leaves the shell cd'ed into 30k-40k/; the main loop
# compensates with `cd ../` after the call.
function scan_machines_30k-40k {
    ficheroTXT=$1
    mkdir -p "30k-40k"
    cd "30k-40k" || return 1
    while read -r machine;
    do
        outfile="${machine}_${dateToday}.txt"
        echo "Inicio SCAN $machine" > "$outfile"
        # Scan-local timestamp: avoid clobbering the global startTime.
        scanStart=$(date +%H:%M)
        # BUG FIX: this redirect was '>', which truncated the line above.
        echo "Hora de Inicio Scan: $scanStart" >> "$outfile"
        nmap -p 30000-40000 -T5 "$machine" >> "$outfile"
        scanFinish=$(date +%H:%M)
        echo "Hora fin Scan: $scanFinish" >> "$outfile"
    done < "$path_files$ficheroTXT"
}
# Scan ports 40000-50000 of every host listed in the given file.
# One log file per host is written inside the 40k-50k/ sub-directory.
# NOTE: intentionally leaves the shell cd'ed into 40k-50k/; the main loop
# compensates with `cd ../` after the call.
function scan_machines_40k-50k {
    ficheroTXT=$1
    mkdir -p "40k-50k"
    cd "40k-50k" || return 1
    while read -r machine;
    do
        outfile="${machine}_${dateToday}.txt"
        echo "Inicio SCAN $machine" > "$outfile"
        # Scan-local timestamp: avoid clobbering the global startTime.
        scanStart=$(date +%H:%M)
        # BUG FIX: this redirect was '>', which truncated the line above.
        echo "Hora de Inicio Scan: $scanStart" >> "$outfile"
        nmap -p 40000-50000 -T5 "$machine" >> "$outfile"
        scanFinish=$(date +%H:%M)
        echo "Hora fin Scan: $scanFinish" >> "$outfile"
    done < "$path_files$ficheroTXT"
}
# Scan ports 50000-60000 of every host listed in the given file.
# One log file per host is written inside the 50k-60k/ sub-directory.
# NOTE: intentionally leaves the shell cd'ed into 50k-60k/; the main loop
# compensates with `cd ../` after the call.
function scan_machines_50k-60k {
    ficheroTXT=$1
    mkdir -p "50k-60k"
    cd "50k-60k" || return 1
    while read -r machine;
    do
        outfile="${machine}_${dateToday}.txt"
        echo "Inicio SCAN $machine" > "$outfile"
        # Scan-local timestamp: avoid clobbering the global startTime.
        scanStart=$(date +%H:%M)
        # BUG FIX: this redirect was '>', which truncated the line above.
        echo "Hora de Inicio Scan: $scanStart" >> "$outfile"
        nmap -p 50000-60000 -T5 "$machine" >> "$outfile"
        scanFinish=$(date +%H:%M)
        echo "Hora fin Scan: $scanFinish" >> "$outfile"
    done < "$path_files$ficheroTXT"
}
# Scan ports 60000-65535 of every host listed in the given file.
# One log file per host is written inside the 60k-65k/ sub-directory.
# NOTE: intentionally leaves the shell cd'ed into 60k-65k/; the main loop
# compensates with `cd ../` after the call.
function scan_machines_60k-65k {
    ficheroTXT=$1
    mkdir -p "60k-65k"
    cd "60k-65k" || return 1
    while read -r machine;
    do
        outfile="${machine}_${dateToday}.txt"
        echo "Inicio SCAN $machine" > "$outfile"
        # Scan-local timestamp: avoid clobbering the global startTime.
        scanStart=$(date +%H:%M)
        # BUG FIX: this redirect was '>', which truncated the line above.
        echo "Hora de Inicio Scan: $scanStart" >> "$outfile"
        nmap -p 60000-65535 -T5 "$machine" >> "$outfile"
        scanFinish=$(date +%H:%M)
        echo "Hora fin Scan: $scanFinish" >> "$outfile"
    done < "$path_files$ficheroTXT"
}
#BUCLE PRINCIPAL
# For each equipment file: enter a per-network working directory (derived
# from the filename), then run every port-range scan. Each scan_machines_*
# function cd's one level down into its own sub-directory, so every call is
# followed by `cd ../` to return to the network directory.
for fichero in $ArrayFicherosEquipos;
do
    directorio=$(parse_name_file_for_directory "$fichero")
    # mkdir -p is a no-op when the directory exists, so the old if/else
    # around it was redundant; abort if we cannot enter the directory.
    mkdir -p "$path_files/$directorio"
    cd "$path_files/$directorio" || exit 1
    #Scan puertos principales
    scan_machines_topPorts "$fichero"
    cd ../
    #Scan rango 10000-20000
    scan_machines_10k-20k "$fichero"
    cd ../
    #Scan rango 20000-30000
    scan_machines_20k-30k "$fichero"
    cd ../
    #Scan rango 30000-40000
    scan_machines_30k-40k "$fichero"
    cd ../
    #Scan rango 40000-50000
    scan_machines_40k-50k "$fichero"
    cd ../
    #Scan rango 50000-60000
    scan_machines_50k-60k "$fichero"
    cd ../
    #Scan rango 60000-65535
    scan_machines_60k-65k "$fichero"
done
# Final summary: total wall-clock window of the whole run.
finishTime=$(date +%H:%M)
echo "Scaneo con fecha de $dateToday"
echo "Tiempo de ejecucción"
echo "Inicio: $startTime"
echo "Fin: $finishTime"
|
# Launch one background parse_text.py job per language code; each job logs
# to log/<lan>_parse_text.log (stdout and stderr combined).
lans=("bn" "de" "es" "en" "fa" "hi" "ko" "nl" "ru" "tr" "zh")
for lan in "${lans[@]}"; do
    python -u parse_text.py --lan "${lan}" &> "log/${lan}_parse_text.log" &
done
|
#!/bin/sh
# Copyright (c) 2014-2016 The Eurodollar Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Walk the first-parent history from a given commit back to a trusted root
# commit, verifying GPG signatures (and, with --tree-checks, Tree-SHA512
# commit footers) along the way. Exits 0 only when an unbroken chain of
# trusted, signed commits reaches the trusted root.

# Resolve the directory this script lives in, even when invoked via a
# relative path.
DIR=$(dirname "$0")
[ "/${DIR#/}" != "$DIR" ] && DIR=$(dirname "$(pwd)/$0")

echo "Using verify-commits data from ${DIR}"

# Trust anchors and policy files shipped alongside the script.
VERIFIED_ROOT=$(cat "${DIR}/trusted-git-root")
VERIFIED_SHA512_ROOT=$(cat "${DIR}/trusted-sha512-root-commit")
REVSIG_ALLOWED=$(cat "${DIR}/allow-revsig-commits")

# Prefer GNU sha512sum; fall back to `shasum -a 512` (e.g. macOS/BSD) below.
HAVE_GNU_SHA512=1
[ ! -x "$(which sha512sum)" ] && HAVE_GNU_SHA512=0

# First argument (optional): the commit to start verifying from.
if [ x"$1" = "x" ]; then
	CURRENT_COMMIT="HEAD"
else
	CURRENT_COMMIT="$1"
fi

# A commit id with spaces would break the unquoted comparisons below.
if [ "${CURRENT_COMMIT#* }" != "$CURRENT_COMMIT" ]; then
	echo "Commit must not contain spaces?" > /dev/stderr
	exit 1
fi

# Second argument (optional): --tree-checks enables Tree-SHA512 validation.
VERIFY_TREE=0
if [ x"$2" = "x--tree-checks" ]; then
	VERIFY_TREE=1
fi

NO_SHA1=1
PREV_COMMIT=""
INITIAL_COMMIT="${CURRENT_COMMIT}"

while true; do
	# Success: we reached the trusted root through signed commits only.
	if [ "$CURRENT_COMMIT" = $VERIFIED_ROOT ]; then
		echo "There is a valid path from \"$INITIAL_COMMIT\" to $VERIFIED_ROOT where all commits are signed!"
		exit 0
	fi

	# Older than this commit, SHA1 signatures are tolerated and tree checks
	# are no longer required.
	if [ "$CURRENT_COMMIT" = $VERIFIED_SHA512_ROOT ]; then
		if [ "$VERIFY_TREE" = "1" ]; then
			echo "All Tree-SHA512s matched up to $VERIFIED_SHA512_ROOT" > /dev/stderr
		fi
		VERIFY_TREE=0
		NO_SHA1=0
	fi

	# These env vars are consumed by the gpg.sh wrapper invoked below.
	if [ "$NO_SHA1" = "1" ]; then
		export BITCOIN_VERIFY_COMMITS_ALLOW_SHA1=0
	else
		export BITCOIN_VERIFY_COMMITS_ALLOW_SHA1=1
	fi

	# Allow revoked-signature commits only when whitelisted.
	# NOTE(review): this is a substring match of the commit id against the
	# whitelist file's contents — relies on full hashes in that file.
	if [ "${REVSIG_ALLOWED#*$CURRENT_COMMIT}" != "$REVSIG_ALLOWED" ]; then
		export BITCOIN_VERIFY_COMMITS_ALLOW_REVSIG=1
	else
		export BITCOIN_VERIFY_COMMITS_ALLOW_REVSIG=0
	fi

	# Verify the GPG signature using the project-pinned gpg wrapper.
	if ! git -c "gpg.program=${DIR}/gpg.sh" verify-commit "$CURRENT_COMMIT" > /dev/null; then
		if [ "$PREV_COMMIT" != "" ]; then
			echo "No parent of $PREV_COMMIT was signed with a trusted key!" > /dev/stderr
			echo "Parents are:" > /dev/stderr
			PARENTS=$(git show -s --format=format:%P $PREV_COMMIT)
			for PARENT in $PARENTS; do
				git show -s $PARENT > /dev/stderr
			done
		else
			echo "$CURRENT_COMMIT was not signed with a trusted key!" > /dev/stderr
		fi
		exit 1
	fi

	# We always verify the top of the tree
	if [ "$VERIFY_TREE" = 1 -o "$PREV_COMMIT" = "" ]; then
		# Split ls-tree output on newlines only (paths may contain spaces).
		IFS_CACHE="$IFS"
		IFS='
'
		# ls-tree mode 120000 marks a symlink; reject them outright.
		for LINE in $(git ls-tree --full-tree -r "$CURRENT_COMMIT"); do
			case "$LINE" in
				"12"*)
					echo "Repo contains symlinks" > /dev/stderr
					IFS="$IFS_CACHE"
					exit 1
					;;
			esac
		done
		IFS="$IFS_CACHE"

		# Hash every blob in the tree (sorted bytewise for reproducibility)
		# into one newline-separated "hash file" list.
		FILE_HASHES=""
		for FILE in $(git ls-tree --full-tree -r --name-only "$CURRENT_COMMIT" | LC_ALL=C sort); do
			if [ "$HAVE_GNU_SHA512" = 1 ]; then
				HASH=$(git cat-file blob "$CURRENT_COMMIT":"$FILE" | sha512sum | { read FIRST _; echo $FIRST; } )
			else
				HASH=$(git cat-file blob "$CURRENT_COMMIT":"$FILE" | shasum -a 512 | { read FIRST _; echo $FIRST; } )
			fi
			[ "$FILE_HASHES" != "" ] && FILE_HASHES="$FILE_HASHES"'
'
			FILE_HASHES="$FILE_HASHES$HASH $FILE"
		done
		# Combine the per-file list into a single tree-wide SHA512.
		if [ "$HAVE_GNU_SHA512" = 1 ]; then
			TREE_HASH="$(echo "$FILE_HASHES" | sha512sum)"
		else
			TREE_HASH="$(echo "$FILE_HASHES" | shasum -a 512)"
		fi
		HASH_MATCHES=0
		# Compare against the Tree-SHA512 footer (last line of the commit
		# message). NOTE(review): the " -" appended to $MSG mirrors the stdin
		# filename marker that sha512sum/shasum leave in $TREE_HASH — confirm
		# the separator spacing matches the tool output on this platform.
		MSG="$(git show -s --format=format:%B "$CURRENT_COMMIT" | tail -n1)"
		case "$MSG -" in
			"Tree-SHA512: $TREE_HASH")
				HASH_MATCHES=1;;
		esac
		if [ "$HASH_MATCHES" = "0" ]; then
			echo "Tree-SHA512 did not match for commit $CURRENT_COMMIT" > /dev/stderr
			exit 1
		fi
	fi

	# Step to the first parent only (first-parent traversal of history).
	PARENTS=$(git show -s --format=format:%P "$CURRENT_COMMIT")
	for PARENT in $PARENTS; do
		PREV_COMMIT="$CURRENT_COMMIT"
		CURRENT_COMMIT="$PARENT"
		break
	done
done
|
# Generate the Kubernetes API-server certificate (kubernetes.pem /
# kubernetes-key.pem) with cfssl, signed by the cluster CA in ../ca.

# Certificate signing request for the API server.
# NOTE(review): "Pыrkanmaa" contains a Cyrillic 'ы' and looks mis-encoded;
# confirm the intended value is the Finnish region "Pirkanmaa".
cat > kubernetes-csr.json <<EOF
{
  "CN": "kubernetes",
  "key": {
    "algo": "rsa",
    "size": 2048
  },
  "names": [
    {
      "C": "FI",
      "L": "Tampere",
      "O": "Kubernetes",
      "OU": "Kubernetes The Hard Way",
      "ST": "Pыrkanmaa"
    }
  ]
}
EOF

# Controller private IPs and the public API DNS name, scraped from the
# `key = value` lines of the infrastructure data dump.
CTRL_1_IP=$(grep controller_1_private_ip ../../infra/data.txt | awk -F ' = ' '{print $2}')
CTRL_2_IP=$(grep controller_2_private_ip ../../infra/data.txt | awk -F ' = ' '{print $2}')
CTRL_3_IP=$(grep controller_3_private_ip ../../infra/data.txt | awk -F ' = ' '{print $2}')
API_PUBLIC_NAME=$(grep api_public_dns_name ../../infra/data.txt | awk -F ' = ' '{print $2}')

# -hostname lists every SAN the API server must answer to: the
# cluster-internal service IP (10.32.0.1), the public endpoint, each
# controller, localhost, and the in-cluster kubernetes service names.
cfssl gencert \
  -ca=../ca/ca.pem \
  -ca-key=../ca/ca-key.pem \
  -config=../ca/ca-config.json \
  -hostname=10.32.0.1,${API_PUBLIC_NAME},${CTRL_1_IP},${CTRL_2_IP},${CTRL_3_IP},127.0.0.1,kubernetes.default,kubernetes.default.svc,kubernetes.default.svc.cluster,kubernetes.default.svc.cluster.local \
  -profile=kubernetes \
  kubernetes-csr.json | cfssljson -bare kubernetes
#!/usr/bin/env bash
# Copyright 2013 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

# race.bash tests the standard library under the race detector.
# http://golang.org/doc/articles/race_detector.html

set -e

# Print the supported-platform message and abort.
function usage {
	echo 'race detector is only supported on linux/amd64, freebsd/amd64 and darwin/amd64' 1>&2
	exit 1
}

# The race detector only works on amd64; bail out early everywhere else.
case $(uname) in
"Darwin")
	# why Apple? why?
	# NOTE(review): grep -qv succeeds when a line WITHOUT EM64T exists; this
	# works only if `sysctl machdep.cpu.extfeatures` emits a single line —
	# confirm on current macOS.
	if sysctl machdep.cpu.extfeatures | grep -qv EM64T; then
		usage
	fi
	;;
"Linux")
	if [ $(uname -m) != "x86_64" ]; then
		usage
	fi
	;;
"FreeBSD")
	if [ $(uname -m) != "amd64" ]; then
		usage
	fi
	;;
*)
	usage
	;;
esac

# Must run from $GOROOT/src so make.bash and the std tree are present.
if [ ! -f make.bash ]; then
	echo 'race.bash must be run from $GOROOT/src' 1>&2
	exit 1
fi

# Rebuild the toolchain, then race-enabled cgo and std, before testing.
. ./make.bash --no-banner
# golang.org/issue/5537 - we must build a race enabled cmd/cgo before trying to use it.
go install -race cmd/cgo
go install -race std

# we must unset GOROOT_FINAL before tests, because runtime/debug requires
# correct access to source code, so if we have GOROOT_FINAL in effect,
# at least runtime/debug test will fail.
unset GOROOT_FINAL

go test -race -short std
go test -race -run=nothingplease -bench=.* -benchtime=.1s -cpu=4 std
|
(function() {
  'use strict';

  angular
    .module('naps')
    .controller('NapsListController', NapsListController);

  NapsListController.$inject = ['NapsService', 'Authentication', '$window', '$state'];

  /**
   * Controller for the naps list view: partitions the queried diplos into
   * NAPs and Assis (by the first element of `typ`), exposes the current
   * user's edit permission, and a confirm-then-remove action.
   */
  function NapsListController(NapsService, Authentication, $window, $state) {
    var vm = this;

    vm.user = Authentication.user;
    vm.diplos = NapsService.query();
    vm.naps = [];
    vm.assis = [];

    vm.diplos.$promise.then(function(result) {
      result.forEach(function(value) {
        if (value.typ[0] === "NAP") {
          vm.naps.push(value);
        }
        if (value.typ[0] === "Assi") {
          vm.assis.push(value);
        }
      });
    });

    // BUG FIX: ("admin" || "leader") always evaluates to "admin", so users
    // with the "leader" role could never edit. Check membership instead.
    vm.canEdit = ['admin', 'leader'].indexOf(vm.user.roles[0]) !== -1;

    vm.remove = remove;

    // Delete a nap after user confirmation, then return to the list state.
    function remove(nap) {
      if ($window.confirm('Are you sure you want to delete?')) {
        nap.$remove($state.go('naps.list'));
      }
    }
  }
}());
|
async function processJishoData(query: string, page: number): Promise<Array<{ jlpt: Record<string, unknown> }>> {
try {
const response = await fetchFromJisho(query, page);
const json = await getJson(response) as JishoJSON;
const extractedData = json.data.map(entry => ({ jlpt: entry.jlpt }));
return extractedData;
} catch (error) {
// Handle and/or log the error
throw new Error('Failed to process Jisho data');
}
} |
from urllib.parse import urlparse
def parse_admin_url(url_prefix):
    """Return the scheme+host root for an absolute URL, or the prefix as-is.

    An input with a network location (e.g. "https://host/admin") yields
    "https://host/"; a relative prefix (e.g. "/admin/") is returned
    unchanged.
    """
    parts = urlparse(url_prefix)
    if not parts.netloc:
        # Relative URL: nothing to normalize.
        return url_prefix
    return f"{parts.scheme}://{parts.netloc}/"
//Program to print all the prime numbers less than a given element
#include<stdio.h>

/* Return 1 if n (assumed >= 2) is prime, 0 otherwise. */
static int is_prime(int n)
{
    for (int i = 2; i <= n / 2; i++) {
        if (n % i == 0)
            return 0; /* found a divisor: composite; stop early */
    }
    return 1;
}

/* BUG FIXES vs. original: implicit-int main (invalid since C99), unchecked
 * scanf, no return value, and the divisor loop kept running after a divisor
 * was found. Output format (tab-separated primes) is unchanged. */
int main(void)
{
    int n, p;

    printf("Enter a number!\n");
    if (scanf("%d", &p) != 1)
        return 1; /* reject non-numeric input instead of using garbage */

    /* Print every prime in [2, p]. */
    for (n = 2; n <= p; n++) {
        if (is_prime(n))
            printf("%d\t", n);
    }
    return 0;
}
|
<reponame>pladdy/distill
package main
import (
"strings"
"testing"
"github.com/pladdy/lumberjack"
)
// rawRecord is one TABLE_DUMP2 MRT/BGP dump record, one column per element,
// in the pipe-separated order the parser under test expects.
// NOTE(review): column meanings (timestamp, peer IP/ASN, prefix, AS path,
// communities, ...) are inferred from the tests below — confirm against the
// parser's index constants (pathIndex, prefixIndex).
var rawRecord []string = []string{
	"TABLE_DUMP2",
	"1474983369",
	"B",
	"172.16.31.10",
	"8758",
	"0.0.0.0/0",
	"8758 6830",
	"IGP",
	"172.16.31.10",
	"0",
	"0",
	"8758:110 8758:300",
	"NAG",
	"",
}
// rebuildRecord is a test helper that joins record fields with the
// package-level joinString, mirroring the on-disk record format.
func rebuildRecord(pieces []string) string {
	return strings.Join(pieces, joinString)
}
// TestExpandASPath verifies that AS_SET groups written as "{a,b}" are
// flattened into plain space-separated ASNs.
func TestExpandASPath(t *testing.T) {
	lumberjack.Hush()

	cases := []struct {
		in   string
		want string
	}{
		{in: "1234 5678 {357,2124}", want: "1234 5678 357 2124"},
		{in: "1234 5678 {357}", want: "1234 5678 357"},
	}

	for _, c := range cases {
		if got := expandASPath(c.in); got != c.want {
			t.Error("Expected:", c.want, "Got:", got)
		}
	}
}
// TestMarshallBGP checks that a raw pipe-separated record marshals into the
// expected JSON document, keyed by the last ASN of the AS-path column.
func TestMarshallBGP(t *testing.T) {
	// The "origin" ASN is the last hop of the AS-path column.
	asn := lastString(strings.Split(rawRecord[pathIndex], " "))

	// generate result
	result, err := marshalBGP(asn, strings.Join(rawRecord, "|"))
	if err != nil {
		t.Error("Failed to create a JSON record")
	}

	resultToString := string(result)
	// NOTE(review): the expected literal's internal whitespace must match
	// marshalBGP's output byte-for-byte.
	expected := `{
"AutonomousSystem": 6830,
"AutonomousSystemPaths": [
{
"ModificationTime": 1474983369,
"FromIP": "172.16.31.10",
"FromASN": 8758,
"Prefix": "0.0.0.0/0",
"AutonomousSystemPath": [
8758,
6830
]
}
],
"Prefixes": [
"0.0.0.0/0"
]
}`
	if resultToString != expected {
		t.Error("Got:", resultToString, "Expected:", expected)
	}
}
// TestSystemPaths verifies that systemPaths() parses a raw record into a
// seenASPath with the expected timestamp, peer, prefix, and AS path.
func TestSystemPaths(t *testing.T) {
	var tests = []struct {
		records  string
		expected []seenASPath
	}{
		{rebuildRecord(rawRecord),
			[]seenASPath{seenASPath{1474983369, "172.16.31.10", 8758, "0.0.0.0/0", []int{8758, 6830}}}},
	}

	for _, test := range tests {
		results := systemPaths(test.records)
		// Compare each parsed path field-by-field against the fixture.
		for i, result := range results {
			expected := test.expected[i]
			if result.ModificationTime != expected.ModificationTime {
				t.Error("Got:", result.ModificationTime, "Expected:", expected.ModificationTime)
			}
			if result.FromIP != expected.FromIP {
				t.Error("Got:", result.FromIP, "Expected:", expected.FromIP)
			}
			if result.FromASN != expected.FromASN {
				t.Error("Got:", result.FromASN, "Expected:", expected.FromASN)
			}
			if result.Prefix != expected.Prefix {
				t.Error("Got:", result.Prefix, "Expected:", expected.Prefix)
			}
			// Element-wise comparison of the AS path slice.
			for i, asn := range result.AutonomousSystemPath {
				if asn != expected.AutonomousSystemPath[i] {
					t.Error("Got:", asn, "Expected:", expected.AutonomousSystemPath[i])
				}
			}
		}
	}
}
// TestUniquePrefixes checks that duplicate prefixes across records are
// deduplicated down to a single CIDR.
func TestUniquePrefixes(t *testing.T) {
	record := strings.Join(rawRecord, "|")
	// Two identical records -> exactly one unique prefix expected.
	testRecords := record + "\n" + record

	uniqueCIDRs := uniquePrefixes(testRecords)

	if len(uniqueCIDRs) > 1 {
		t.Error("Expected length to be 1")
	}
	if uniqueCIDRs[0] != rawRecord[prefixIndex] {
		t.Error("Expected:", rawRecord[prefixIndex], "Got:", uniqueCIDRs[0])
	}
}
|
// https://www.hackerrank.com/challenges/get-the-value-of-the-node-at-a-specific-position-from-the-tail
// Return the value of the node `positionFromTail` nodes before the tail
// (0 = tail itself). PERF FIX: the original recursed and re-walked
// `positionFromTail` links per call (O(n * k)); this two-pointer version
// makes a single O(n) pass.
int GetNode(Node *head,int positionFromTail) {
    Node *lead = head;
    // Advance `lead` so it stays positionFromTail nodes ahead of `trail`.
    for (int i = 0; i < positionFromTail; i++) lead = lead->next;
    Node *trail = head;
    // When `lead` reaches the tail, `trail` is the requested node.
    while (lead->next != NULL) {
        lead = lead->next;
        trail = trail->next;
    }
    return trail->data;
}
|
def iterative_function(n):
    """Return n! computed iteratively (returns 1 for any n < 2)."""
    product = 1
    for factor in range(2, n + 1):
        product *= factor
    return product
package com.wednesday.service;
import java.sql.*;
import java.util.Properties;
public class CSVHandler { // Part 4

    private static final String CSV_JDBC_DRIVER = "org.relique.jdbc.csv.CsvDriver";
    private static final String CSV_JDBC_HEADER = "jdbc:relique:csv:";
    public static final String DEFAULT_DIRECTORY = "/Users/lihangzhou/Documents";

    /**
     * Parse a CSV file located in {@link #DEFAULT_DIRECTORY}.
     *
     * @param csvName CSV filename (no suffix)
     * @return HTML table fragment (thead + tbody) of the file's contents
     */
    public static String parse(final String csvName) throws ClassNotFoundException, SQLException {
        return parse(DEFAULT_DIRECTORY, csvName);
    }

    /**
     * Parse a CSV file via the csvjdbc driver and render it as an HTML
     * table fragment: one thead row of column names and one tbody row per
     * record.
     *
     * @param csvDirectory String CSV file directory
     * @param csvName String CSV filename (no suffix)
     * @return HTML thead/tbody markup for the file's contents
     */
    public static String parse(final String csvDirectory, final String csvName) throws ClassNotFoundException, SQLException {
        Class.forName(CSV_JDBC_DRIVER);

        final Properties props = new Properties();
        props.put("separator", ",");
        props.put("suppressHeaders", "false");
        props.put("fileExtension", ".csv");
        props.put("charset", "UTF-8");

        final StringBuilder str = new StringBuilder();
        // BUG FIX: the original never closed the connection/statement/result
        // set when a SQLException was thrown mid-parse; try-with-resources
        // guarantees release on every path.
        // SECURITY NOTE(review): csvName is concatenated into the query —
        // callers must not pass untrusted input here.
        try (final Connection conn = DriverManager.getConnection(CSV_JDBC_HEADER + csvDirectory, props);
             final Statement stmt = conn.createStatement();
             final ResultSet results = stmt.executeQuery("SELECT * FROM " + csvName)) {
            final ResultSetMetaData mtd = results.getMetaData();
            final int colCount = mtd.getColumnCount();

            str.append("<thead><tr>");
            for (int i = 1; i <= colCount; i++) {
                str.append("<td>").append(mtd.getColumnName(i)).append("</td>");
            }
            str.append("</tr></thead>");

            str.append("<tbody>");
            while (results.next()) {
                str.append("<tr>");
                for (int i = 1; i <= colCount; i++) {
                    str.append("<td>").append(results.getString(i)).append("</td>");
                }
                str.append("</tr>");
            }
            str.append("</tbody>");
        }
        return str.toString();
    }
}
|
import numpy as np

# Create a 3x3 array with every element equal to 0.
array = np.zeros((3, 3))
print(array)
# Expected output:
# [[0. 0. 0.]
#  [0. 0. 0.]
#  [0. 0. 0.]]
# [0. 0. 0.]] |
"""
File: rocket.py
-----------------------
This program should implement a console program
that draws ASCII art - a rocket.
The size of rocket is determined by a constant
defined as SIZE at top of the file.
Output format should match what is shown in the sample
run in the Assignment 2 Handout.
"""
# Requested rocket size; unused by the active code path below.
SIZE = 5


def main():
    """
    the program will print a rocket which the size is determined by a constant"SIZE"
    """
    # NOTE(review): the rocket-drawing draft below was left commented out by
    # the author; only the name-diamond demo after the DO-NOT-EDIT marker
    # actually runs, so the docstring above no longer matches the behavior.
    # head(SIZE)
    # belt(SIZE)
    # upper(SIZE)
    # lower(SIZE)
    # belt(SIZE)
    # head(SIZE)
    #
    # def head(SIZE):
    #     """
    #     :param SIZE: int
    #     :return: print the result
    #     """
    #     for i in range(SIZE):
    #         for j in range(SIZE-i):
    #             print(' ', end='')
    #         for j in range(i+1):
    #             print('/', end='')
    #         for j in range(i+1):
    #             print('\\', end='')
    #         print('')
    #
    #
    # def belt(SIZE):
    #     """
    #     :param SIZE: int
    #     :return: print the result
    #     """
    #     print('+', end='')
    #     for i in range(SIZE*2):
    #         print('=', end='')
    #     print('+', end='')
    #     print(' ')
    #
    #
    # def upper(SIZE):
    #     """
    #     :param SIZE: int
    #     :return: print the result
    #     """
    #     for i in range(SIZE):
    #         print('|', end='')
    #         for j in range(SIZE-i-1):
    #             print('.', end='')
    #         for j in range(i+1):
    #             print('/\\', end='')
    #         for j in range(SIZE-i-1):
    #             print('.', end='')
    #         print('|', end='')
    #         print(' ')
    #
    #
    # def lower(SIZE):
    #     """
    #     :param SIZE: int
    #     :return: print the result
    #     """
    #     for i in range(SIZE):
    #         print('|', end='')
    #         for j in range(i):
    #             print('.', end='')
    #         for j in range(SIZE-i):
    #             print('\\/', end='')
    #         for j in range(i):
    #             print('.', end='')
    #         print('|', end='')
    #         print(' ')
    ###### DO NOT EDIT CODE BELOW THIS LINE ######
    s = input('Please enter: ')
    name_diamond(s)
def name_diamond(s):
    """Print the top half of a 'name diamond' for s.

    First the growing prefixes of s (one per line), then the suffixes of s,
    each shifted one column further to the right.
    """
    # Growing prefixes: s[:1], s[:2], ..., s itself.
    for end in range(1, len(s) + 1):
        print(s[:end])
    # Shrinking suffixes, indented by one extra space per line.
    for start in range(1, len(s)):
        print(' ' * start + s[start:])
# Run the interactive demo only when executed as a script.
if __name__ == "__main__":
    main()
#!/bin/bash

# Release script: commits the lerna version bump, tags it, pushes to master,
# publishes to npm, and attaches a ZIP of the dist files to a GitHub release.

# Extract the version from lerna.json (this was updated by `npm run release:prepare`)
VERSION=$(node --eval "console.log(require('./lerna.json').version);")

# commit the changes from `npm run release:prepare`
git add --all
git commit -am "v$VERSION" --no-verify

# increment the package.json version to the lerna version so gh-release works
npm version "$VERSION" --allow-same-version --no-git-tag-version

# amend the changes from `npm version` to the release commit
git add --all
git commit -am "v$VERSION" --no-verify --amend

# tag this version
git tag "v$VERSION"

# push everything up to this point to master
git push -f https://github.com/Esri/hub.js.git master
git push -f --tags

# publish each package on npm
# BUG FIX: --force-publish=* was unquoted, so the shell could glob-expand `*`
# against files in the cwd instead of passing a literal `*` to lerna.
lerna publish --skip-git --yes --repo-version "$VERSION" --force-publish='*'

# create a ZIP archive of the dist files
TEMP_FOLDER="hub-js-v$VERSION"
mkdir "$TEMP_FOLDER"
cp packages/*/dist/umd/* "$TEMP_FOLDER"
zip -r "$TEMP_FOLDER.zip" "$TEMP_FOLDER"
rm -rf "$TEMP_FOLDER"

# Run gh-release to create a new release with our changelog changes and ZIP archive
gh-release --t "v$VERSION" --repo hub.js --owner Esri -a "$TEMP_FOLDER.zip"

# Delete the ZIP archive
rm "$TEMP_FOLDER.zip"
|
#!/bin/bash
#
# Auto update themer version
main() {
    program="themer"
    script="$program.sh"
    # New version string, e.g. `themer v1.2.3"`.
    # NOTE(review): the trailing \" appears intentional — the grep below
    # matches through the rest of the line (including any closing quote
    # inside $script), so the replacement must restore it. Confirm against
    # the actual contents of themer.sh.
    new_version="$program $(git describe --abbrev=0 --tags)\""
    # Current `themer v...` occurrence in the script (GNU grep -P required).
    current_version=$(grep -oP "$program\sv.*" "$script")
    # In-place substitution; assumes neither version string contains `/`.
    sed -i "s/$current_version/$new_version/" "$script"
}
main "$@"
|
from flask import Flask, request

app = Flask(__name__)


@app.route("/calculate", methods=["POST"])
def calculate():
    """Return the sum, difference, and product of JSON fields "a" and "b".

    Expects a JSON body like {"a": <number>, "b": <number>}; responds with a
    plain-text summary, or 400 if either operand is missing.
    """
    # Get the request data
    data = request.get_json()

    # BUG FIX: the original crashed with a TypeError (HTTP 500) when the body
    # was missing or lacked "a"/"b"; reject such requests explicitly.
    if not data or "a" not in data or "b" not in data:
        return "both 'a' and 'b' are required", 400

    a = data["a"]
    b = data["b"]

    # Calculate the sum, difference, and product.
    # (renamed from `sum` to avoid shadowing the Python builtin)
    total = a + b
    difference = a - b
    product = a * b

    # Return the result
    return f"sum: {total}\ndifference: {difference}\nproduct: {product}"


app.run()
/** Renderer interface: a pluggable markdown view/editor backend. */
export default interface Render {
    /** Abbreviated name of the renderer */
    abbrName: string;
    /** Create the renderer's root DOM node */
    createDom: () => HTMLElement;
    /** Open (activate) the renderer */
    open: () => void;
    /** Close (deactivate) the renderer */
    close: () => void;
    /** Get the current markdown text */
    getMd: () => string;
    /**
     * Set the markdown text
     * @param md markdown text
     */
    setMd: (md: string) => void;
    /** Attach all event listeners */
    attachAllEvent: () => void;
    /** Detach all event listeners */
    detachAllEvent: () => void;
}
<reponame>cdjq/DFRobot_DS323X<filename>Python/RaspberryPi/examples/read_write_SRAM.py
#-*- coding: utf-8 -*-
'''
@file read_write_SRAM.py
@brief Through the example, you can read and write data on DS3232's SRAM
@n Experiment phenomenon: There are 236 bytes of SRAM available for reading and writing
@n The address of SRAM is 0x14~0xFF
@Copyright Copyright (c) 2010 DFRobot Co.Ltd (http://www.dfrobot.com)
@licence The MIT License (MIT)
@author [LuoYufeng](<EMAIL>)
@url https://github.com/DFRobot/DFRobot_DS323X
@version V1.0
@date 2021-3-4
'''
import sys
sys.path.append('../')
import time
from DFRobot_DS323X import *

# DS323X RTC on I2C bus 1.
rtc = DFRobot_DS323X(bus=1)

#begin return True if succeed, otherwise return False
# Retry until the device responds.
while not rtc.begin():
    time.sleep(2)

# Index into data1 while writing consecutive SRAM addresses.
i = 0
data1 = [22,1,15,78,65,49,56,49,25,47,89]

'''
@brief clear the SRAM
'''
# NOTE(review): only addresses 0x14..0x1E (11 bytes, matching len(data1))
# are touched here, although the header says SRAM spans 0x14..0xFF.
for reg in range(0x14,0x1F):
    rtc.clear_SRAM(reg)

'''
@brief write data into the SRAM
@param reg, address of SRAM 0x14~0xFF
@param data
'''
for reg in range(0x14,0x1F):
    rtc.write_SRAM(reg, data1[i])
    i += 1

'''
@brief read data of the SRAM
@param reg, address of SRAM 0x14~0xFF
@return data stored in SRAM
'''
def main():
    """Continuously dump the SRAM bytes written above, one per line."""
    while True:
        for address in range(0x14, 0x1F):
            print(rtc.read_SRAM(address))
        # Small pause between full dumps.
        time.sleep(0.1)


if __name__ == "__main__":
    main()
|
<filename>python_synth/validators.py
from typing import TYPE_CHECKING

from python_synth.constants import ANALOGUE_MIN, ANALOGUE_MAX
from python_synth.exceptions import SynthValidationError

if TYPE_CHECKING:
    from attr import Attribute  # noqa
    # BUG FIX: the original imported `ANY`, which does not exist in typing;
    # the type comments in this module reference `Any`.
    from typing import Any  # noqa
def validate_analogue(instance, attribute, value):
    # type: (Any, Attribute, int) -> None
    """attrs-style validator: ensure an analogue value is within range.

    Raises SynthValidationError when value lies outside
    [ANALOGUE_MIN, ANALOGUE_MAX].
    """
    if value < ANALOGUE_MIN or value > ANALOGUE_MAX:
        # BUG FIX: the original used .format(**locals()), but ANALOGUE_MIN and
        # ANALOGUE_MAX are module-level globals, not locals of this function,
        # so raising the error itself raised a KeyError.
        raise SynthValidationError(
            'analogue values must be between {0} and {1}'
            .format(ANALOGUE_MIN, ANALOGUE_MAX)
        )
def validate_milliseconds(instance, attribute, value):
    # type: (Any, Attribute, int) -> None
    """attrs-style validator: ensure a millisecond value is non-negative."""
    if value < 0:
        raise SynthValidationError('milliseconds must be positive integers')
|
class Point2D:
    """A point in the two-dimensional plane."""

    def __init__(self, x, y):
        # Cartesian coordinates.
        self.x = x
        self.y = y

    def get_distance(self, other):
        """Return the Euclidean distance to another Point2D."""
        delta_x = self.x - other.x
        delta_y = self.y - other.y
        return (delta_x ** 2 + delta_y ** 2) ** 0.5
import connector from './EventInviteDialog.connector'
import component from './EventInviteDialog'
export default connector(component)
|
# This function takes three parameters and returns a list
# of anomalies it has detected
def identify_anomalies(data, mean, stdev):
    """Return the values in data whose |z-score| exceeds 3.

    Args:
        data: iterable of numeric samples.
        mean: mean used to compute each z-score.
        stdev: standard deviation used for scaling (must be non-zero).
    """
    anomalies = []
    # Iterate through the list and find anomalies
    for sample in data:
        z_score = (sample - mean) / stdev
        # BUG FIX: the original called np.abs() without ever importing numpy
        # (NameError at runtime); the builtin abs() handles scalars and
        # removes the dependency.
        if abs(z_score) > 3:
            anomalies.append(sample)
    return anomalies
const { describe, it } = require('eslint/lib/testers/event-generator-tester');
const { before, after } = require('mocha');
const expect = require('expect.js');
const sinon = require('sinon');
const request = require('supertest-as-promised');
const httpStatus = require('http-status');
const EventsService = require('../../../app/services/events.service');
const ConsulService = require('../../../app/services/consul.service');
const PermissionModel = require('../../../app/models/permission.model');
const PROJECTS = require('../../fixtures/projects.json');
const app = require('../../../server').app;
const loginHelpers = require('../../helpers/login');
const USER = require('../../fixtures/user.json');
// Integration tests for the Consul KV controller: authentication, error
// propagation, secret-key rejection, and event publishing on successful PUTs.
describe('ConsulController', () => {
  let token = null;
  let eventStub = null;

  before((done) => {
    // Stub project permissions and mint a JWT for the fixture user.
    sinon.stub(PermissionModel, 'getUserProjects').resolves(PROJECTS);
    loginHelpers.createUser(USER)
      .then(user => loginHelpers.getJWT(user.username))
      .then(jwt => {
        token = jwt;
        done();
      });
  });

  after((done) => {
    PermissionModel.getUserProjects.restore();
    loginHelpers.deleteUser(USER.username)
      .then(() => {
        token = null;
        done();
      });
  });

  beforeEach(() => {
    // Fresh publish stub per test so call counts cannot leak between tests.
    eventStub = sinon.stub(EventsService, 'publish').resolves('published');
  });

  afterEach(() => {
    EventsService.publish.restore();
  });

  describe('set()', () => {
    // Without a token, the request is rejected and no event is published.
    it('should return unauthorized status', (done) => {
      const stub = sinon.stub(ConsulService, 'set').resolves(true);
      request(app)
        .put('/consul/kv/CLOUD/console-server')
        .expect(httpStatus.UNAUTHORIZED)
        .then(() => {
          expect(eventStub.called).to.be(false);
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });

    // NOTE(review): the description says 401, but the expectation is 500
    // (INTERNAL_SERVER_ERROR when ConsulService.set rejects) — confirm
    // which is intended.
    it('should return 401 status', (done) => {
      const stub = sinon.stub(ConsulService, 'set').rejects('error');
      request(app)
        .put('/consul/kv/CLOUD/console-server')
        .set('token', token)
        .expect(httpStatus.INTERNAL_SERVER_ERROR)
        .then(() => {
          expect(eventStub.called).to.be(false);
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });

    // An empty key yields UNAUTHORIZED despite the 401 wording in the name.
    it('should return 401 status if passed the empty key`', (done) => {
      const stub = sinon.stub(ConsulService, 'set').resolves(true);
      request(app)
        .put('/consul/kv/')
        .set('token', token)
        .expect(httpStatus.UNAUTHORIZED)
        .then(() => {
          expect(eventStub.called).to.be(false);
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });

    // Keys with a ".secret" component must be rejected.
    it('should return 500 status if passed secret key', (done) => {
      const stub = sinon.stub(ConsulService, 'set').resolves(true);
      request(app)
        .put('/consul/kv/CLOUD/console-server/.secret')
        .set('token', token)
        .expect(httpStatus.INTERNAL_SERVER_ERROR)
        .then(() => {
          expect(eventStub.called).to.be(false);
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });

    // A trailing-slash (folder) key stores null and publishes an event.
    it('should set value to null if passed a folder key/, and publish event', (done) => {
      const stub = sinon.stub(ConsulService, 'set').resolves(true);
      request(app)
        .put('/consul/kv/CLOUD/console-server/')
        .send({ config: 'Value!' })
        .set('token', token)
        .expect(httpStatus.OK)
        .then(() => {
          expect(stub.getCall(0).args).to.eql(['CLOUD/console-server/', null]);
          expect(eventStub.called).to.be(true);
          expect(eventStub.getCall(0).args.length).to.eql(1);
          expect(eventStub.getCall(0).args[0].where).to.eql('console');
          expect(eventStub.getCall(0).args[0].source).to.eql('PUT /consul/kv/CLOUD/console-server/');
          expect(eventStub.getCall(0).args[0].namespace).to.eql(undefined);
          expect(eventStub.getCall(0).args[0].what).to.eql('config CLOUD/console-server/');
          expect(eventStub.getCall(0).args[0].type).to.eql('updated');
          expect(eventStub.getCall(0).args[0].description).to.eql('<NAME> updated config CLOUD/console-server/');
          expect(eventStub.getCall(0).args[0].who).to.eql({
            name: '<NAME>',
            username: 'markmssd',
            email: '<EMAIL>'
          });
          expect(eventStub.getCall(0).args[0].project).to.eql({
            owner: 'CLOUD',
            repo: 'console-server'
          });
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });

    // Happy path: the config value is forwarded and an event is published.
    it('should return true', (done) => {
      const stub = sinon.stub(ConsulService, 'set').resolves(true);
      request(app)
        .put('/consul/kv/CLOUD/console-server/config.json')
        .send({ config: 'Value!' })
        .set('token', token)
        .expect(httpStatus.OK)
        .then((res) => {
          expect(stub.getCall(0).args).to.eql(['CLOUD/console-server/config.json', 'Value!']);
          expect(res.body.result).to.equal(true);
          expect(res.body.data).to.equal(true);
          expect(eventStub.called).to.be(true);
          expect(eventStub.getCall(0).args.length).to.eql(1);
          expect(eventStub.getCall(0).args[0].where).to.eql('console');
          expect(eventStub.getCall(0).args[0].source).to.eql('PUT /consul/kv/CLOUD/console-server/config.json');
          expect(eventStub.getCall(0).args[0].namespace).to.eql(undefined);
          expect(eventStub.getCall(0).args[0].what).to.eql('config CLOUD/console-server/config.json');
          expect(eventStub.getCall(0).args[0].type).to.eql('updated');
          expect(eventStub.getCall(0).args[0].description).to.eql('<NAME> updated config CLOUD/console-server/config.json');
          expect(eventStub.getCall(0).args[0].who).to.eql({
            name: '<NAME>',
            username: 'markmssd',
            email: '<EMAIL>'
          });
          expect(eventStub.getCall(0).args[0].project).to.eql({
            owner: 'CLOUD',
            repo: 'console-server'
          });
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });
  });
});
|
package sentry
import (
"fmt"
"runtime"
"testing"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestStackTraceGenerator covers the internal stacktrace helpers: extracting
// frames from errors and from the current goroutine, classifying frames as
// in-app, and parsing function/file names.
func TestStackTraceGenerator(t *testing.T) {
	t.Run("getStacktraceFramesForError()", func(t *testing.T) {
		t.Run("StackTraceableError", func(t *testing.T) {
			// github.com/pkg/errors attaches a stack trace at creation time.
			err := errors.New("test error")
			frames := getStacktraceFramesForError(err)
			if assert.NotEmpty(t, frames, "there should be frames from the error") {
				assert.Equal(t, "TestStackTraceGenerator.func1.1", frames[frames.Len()-1].Function, "it should have the right function name as the top-most frame")
			}
		})
		t.Run("error", func(t *testing.T) {
			// fmt.Errorf produces a plain error with no recorded stack.
			err := fmt.Errorf("test error")
			frames := getStacktraceFramesForError(err)
			assert.Empty(t, frames, "there should be no frames from a normal error")
		})
	})
	t.Run("getStacktraceFrames()", func(t *testing.T) {
		t.Run("Skip", func(t *testing.T) {
			// A skip larger than any real stack must yield nothing rather than panic.
			frames := getStacktraceFrames(999999999)
			assert.Empty(t, frames, "with an extreme skip, there should be no frames")
		})
		t.Run("Current Function", func(t *testing.T) {
			frames := getStacktraceFrames(0)
			if assert.NotEmpty(t, frames, "there should be frames from the current function") {
				assert.Equal(t, "TestStackTraceGenerator.func2.2", frames[frames.Len()-1].Function, "it should have the right function name as the top-most frame")
			}
		})
	})
	t.Run("getStackTraceFrame()", func(t *testing.T) {
		// Capture a known PC/file/line to compare against the generated frame.
		pc, file, line, ok := runtime.Caller(0)
		require.True(t, ok, "we should be able to get the current caller")
		frame := getStacktraceFrame(pc)
		require.NotNil(t, frame, "the frame should not be nil")
		assert.Equal(t, file, frame.AbsoluteFilename, "the filename for the frame should match the caller")
		assert.Equal(t, line, frame.Line, "the line from the frame should match the caller")
		assert.Equal(t, "github.com/SierraSoftworks/sentry-go/stacktraceGen_test.go", frame.Filename, "it should have the correct filename")
		assert.Equal(t, "TestStackTraceGenerator.func3", frame.Function, "it should have the correct function name")
		assert.Equal(t, "sentry-go", frame.Module, "it should have the correct module name")
		assert.Equal(t, "github.com/SierraSoftworks/sentry-go", frame.Package, "it should have the correct package name")
	})
	t.Run("stackTraceFrame.ClassifyInternal()", func(t *testing.T) {
		frames := getStacktraceFrames(0)
		require.Greater(t, frames.Len(), 3, "the number of frames should be more than 3")
		for i, frame := range frames {
			assert.False(t, frame.InApp, "all frames should initially be marked as external (frame index = %d)", i)
			frame.ClassifyInternal([]string{"github.com/SierraSoftworks/sentry-go"})
		}
		assert.True(t, frames[frames.Len()-1].InApp, "the top-most frame should be marked as internal (this function)")
		assert.False(t, frames[0].InApp, "the bottom-most frame should be marked as external (the test harness main method)")
	})
	t.Run("formatFuncName()", func(t *testing.T) {
		// Table-driven cases for splitting a fully-qualified symbol into
		// package path, module (last path segment) and function name.
		cases := []struct {
			Name         string
			FullName     string
			Package      string
			Module       string
			FunctionName string
		}{
			{"Full Name", "github.com/SierraSoftworks/sentry-go.Context", "github.com/SierraSoftworks/sentry-go", "sentry-go", "Context"},
			{"Struct Function Name", "github.com/SierraSoftworks/sentry-go.packet.Clone", "github.com/SierraSoftworks/sentry-go", "sentry-go", "packet.Clone"},
			{"No Package", "sentry-go.Context", "sentry-go", "sentry-go", "Context"},
		}
		for _, tc := range cases {
			// Re-bind the loop variable so each subtest closure sees its own case.
			tc := tc
			t.Run(tc.Name, func(t *testing.T) {
				pack, module, name := formatFuncName(tc.FullName)
				assert.Equal(t, tc.Package, pack, "the package name should be correct")
				assert.Equal(t, tc.Module, module, "the module name should be correct")
				assert.Equal(t, tc.FunctionName, name, "the function name should be correct")
			})
		}
	})
	t.Run("shortFilename()", func(t *testing.T) {
		t.Run("GOPATH", func(t *testing.T) {
			GOPATH := "/go/src"
			pkg := "github.com/SierraSoftworks/sentry-go"
			file := "stacktraceGen_test.go"
			filename := fmt.Sprintf("%s/%s/%s", GOPATH, pkg, file)
			assert.Equal(t, filename, shortFilename(filename, ""), "should use the original filename if no package is provided")
			assert.Equal(t, filename, shortFilename(filename, "bitblob.com/bender"), "should use the original filename if the package name doesn't match the path")
			assert.Equal(t, fmt.Sprintf("%s/%s", pkg, file), shortFilename(filename, pkg), "should use the $pkg/$file if the package is provided")
		})
	})
}
|
<reponame>berlioz-the/berlioz
const _ = require('the-lodash');
module.exports.getNaming = function({entity, scope}) {
return [
scope.gcpProjectNumber,
scope.deployment,
entity.clusterName,
scope.shortSourceRegion,
entity.sectorName,
entity.name];
}
module.exports.massageNamingPart = function(x) {
return x.toString().toLowerCase()
}
module.exports.getModelName = function({entity, scope}) {
return 'gcp-storage'
}
// Applies deployment metadata to the storage item: builds the berlioz_*
// labels, lower-cases their values, stores the bucket config on the item,
// and registers the Cloud Storage API as a dependency.
module.exports.setupItem = function({config, entity, item, scope, providerHelper}) {
    var labels = {
        'berlioz_deployment': scope.deployment,
        'berlioz_cluster': entity.clusterName,
        'berlioz_region': scope.shortSourceRegion,
        'berlioz_sector': entity.sectorName,
        'berlioz_database': entity.name
    };
    // GCP label values may not contain upper-case characters, so normalize.
    for(var x of _.keys(labels)) {
        labels[x] = labels[x].toString().toLowerCase();
    }
    // Regional bucket in the scope's region; location must be upper case.
    item.setConfig('config', {
        location: scope.region.toUpperCase(),
        storageClass: 'REGIONAL',
        labels: labels
    });
    // NOTE(review): presumably returns a promise — confirm against providerHelper.
    return providerHelper.registerGcpApiDependency(item, 'storage-api.googleapis.com')
}
<reponame>ljfa-ag/Adventure-Backport
package de.ljfa.advbackport;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLInitializationEvent;
import cpw.mods.fml.common.event.FMLInterModComms;
import cpw.mods.fml.common.event.FMLPostInitializationEvent;
import cpw.mods.fml.relauncher.Side;
import de.ljfa.advbackport.handlers.CanPlaceOnHandler;
import de.ljfa.advbackport.handlers.TooltipHandler;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraftforge.common.MinecraftForge;
/**
 * Main mod entry point for Adventure Backport (Minecraft 1.7.10, Forge/FML).
 * Registers gameplay/client event handlers and a CurseForge version check.
 */
@Mod(modid = Reference.MODID, name = Reference.MODNAME, version = Reference.VERSION,
    dependencies = "required-after:adventure_backport_core", acceptedMinecraftVersions = "1.7.10")
public class AdventureBackport {
    /** Singleton instance populated by FML via the @Mod.Instance annotation. */
    @Mod.Instance(Reference.MODID)
    public static AdventureBackport instance;

    /** Shared logger named after the mod. */
    public static final Logger logger = LogManager.getLogger(Reference.MODNAME);

    /*@Mod.EventHandler
    public void preInit(FMLPreInitializationEvent event) {
    }*/

    /** Init phase: registers handlers based on config and physical side. */
    @Mod.EventHandler
    public void init(FMLInitializationEvent event) {
        if(Config.activateCanPlaceOn)
            MinecraftForge.EVENT_BUS.register(new CanPlaceOnHandler());
        // Tooltip rendering only exists on the client side.
        if(event.getSide() == Side.CLIENT) {
            MinecraftForge.EVENT_BUS.register(new TooltipHandler());
        }
        addVersionChecker();
    }

    /** Post-init phase: builds config-derived sets and listens for config changes. */
    @Mod.EventHandler
    public void postInit(FMLPostInitializationEvent event) {
        Config.createSets();
        FMLCommonHandler.instance().bus().register(new Config.ChangeHandler());
    }

    /**
     * Announces this mod to the Version Checker mod (if present) via IMC,
     * pointing it at the CurseForge project and jar naming pattern.
     */
    public void addVersionChecker() {
        NBTTagCompound tag = new NBTTagCompound();
        tag.setString("curseProjectName", "228312-adventure-backport");
        tag.setString("curseFilenameParser", "adventure_backport-[].jar");
        FMLInterModComms.sendRuntimeMessage(Reference.MODID, "VersionChecker", "addCurseCheck", tag);
    }
}
|
// Express application bootstrap: configures the view engine, common
// middleware, routes, error handlers, and starts the HTTP listener.
var express = require('express');
var path = require('path');
var favicon = require('static-favicon');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var session = require('express-session');

var routes = require('./routes/index');
var users = require('./routes/users');

var app = express();

// middleware ("erros" is the module's actual file name — do not "fix" the path)
var erros = require('./middleware/erros');

// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'jade');

app.use(favicon());
app.use(logger('dev'));
app.use(bodyParser.json());
// NOTE(review): urlencoded() without { extended: ... } is deprecated in
// body-parser >= 1.x — confirm the pinned version before changing.
app.use(bodyParser.urlencoded());
app.use(cookieParser());
// NOTE(review): hard-coded session secret; should come from configuration.
app.use(session({ secret: 'sua-chave-secreta' }));
app.use(express.static(path.join(__dirname, 'public')));

app.use('/', routes);
app.use('/users', users);

// middleware — error handlers must be registered after the routes
app.use(erros.notfound);
app.use(erros.serverError);

app.listen(3000, function() {
  console.log('Express server listening on port 3000');
});
|
<gh_stars>0
// Writes the current date into the #date element as "D/M/YYYY".
function setDate(day, month, year) {
  const div = document.getElementById("date");
  div.textContent = `${day}/${month}/${year}`;
}
// Maps an hour (0-23) onto a 12-hour dial and returns the hand rotation in
// degrees: 30 degrees per hour step.
function get12Rotation(value) {
  const onTwelveHourDial = value > 12 ? value - 12 : value;
  return onTwelveHourDial * (360 / 12);
}
// Returns the hand rotation in degrees for a 60-step dial (minutes or
// seconds): 6 degrees per step.
function get60Rotation(value) {
  const degreesPerStep = 360 / 60;
  return degreesPerStep * value;
}
// Rotates the SVG hour hand to match `hours` (0-23, mapped onto a 12h dial).
function setSvgClockHours(hours) {
  const clockHours = document.getElementById("svg-clock_hours");
  clockHours.style.transform = `rotate(${get12Rotation(hours)}deg)`;
  // Rotate about the clock face centre, not the SVG origin.
  clockHours.style.transformOrigin = "center";
}

// Rotates the SVG minute hand (6 degrees per minute).
function setSvgClockMinutes(minutes) {
  const clockMinutes = document.getElementById("svg-clock_minutes");
  clockMinutes.style.transform = `rotate(${get60Rotation(minutes)}deg)`;
  clockMinutes.style.transformOrigin = "center";
}

// Rotates the SVG second hand (6 degrees per second).
function setSvgClockSeconds(seconds) {
  const clockSeconds = document.getElementById("svg-clock_seconds");
  clockSeconds.style.transform = `rotate(${get60Rotation(seconds)}deg)`;
  clockSeconds.style.transformOrigin = "center";
}

// Writes the hour digits of the digital clock (no zero padding).
function setDigitalClockHours(hours) {
  const clockHours = document.getElementById("digital-clock_hours");
  clockHours.textContent = hours;
}

// Writes the minute digits, zero-padded to two characters.
function setDigitalClockMinutes(minutes) {
  const formatted = minutes < 10 ? `0` + minutes : minutes;
  const clockMinutes = document.getElementById("digital-clock_minutes");
  clockMinutes.textContent = formatted;
}
// Maps an hour (0-23) to its English word on a 12-hour dial.
// NOTE(review): hour % 12 === 0 always yields "noon" (even at midnight) and
// the "twelve" entry is unreachable — confirm this wording is intended.
function getHoursToString(hours) {
  const hourWords = [
    "noon",
    "one",
    "two",
    "three",
    "four",
    "five",
    "six",
    "seven",
    "eight",
    "nine",
    "ten",
    "eleven",
    "twelve",
  ];
  const dialIndex = hours % 12;
  return hourWords[dialIndex];
}
// Converts minutes (0-59) to English words ("five", "fifteen", "twenty five").
// Returns "" for 0 so the caller can render the bare hour.
function getMinutesToString(minutes) {
  const ones = [
    "",
    "one",
    "two",
    "three",
    "four",
    "five",
    "six",
    "seven",
    "eight",
    "nine",
  ];
  const teens = [
    "ten",
    "eleven",
    "twelve",
    "thirteen",
    "fourteen",
    "fifteen",
    "sixteen",
    "seventeen",
    "eighteen",
    "nineteen",
  ];
  const tens = ["", "", "twenty", "thirty", "forty", "fifty"];
  if (minutes < 10) {
    return ones[minutes];
  }
  if (minutes < 20) {
    return teens[minutes - 10];
  }
  const tensWord = tens[Math.floor(minutes / 10)];
  const onesWord = ones[minutes % 10];
  // Bug fix: the original always appended " " + ones digit, producing a
  // trailing space for exact tens (e.g. "thirty "). Only join when the
  // ones digit is non-zero.
  return onesWord ? `${tensWord} ${onesWord}` : tensWord;
}
// Renders the sentence clock, e.g. "It's three twenty five pm.".
// NOTE(review): getHoursToString() labels hour 0/12 "noon" regardless of
// am/pm — confirm the wording for midnight.
function setTextClock(hours, minutes) {
  const div = document.getElementById("text-clock");
  const meridiem = hours < 12 ? "am" : "pm";
  div.textContent = `It's ${getHoursToString(hours)} ${getMinutesToString(
    minutes
  )} ${meridiem}.`;
}

// Reads the current time once and refreshes every clock representation.
function updateAll() {
  const now = new Date();
  const [month, day, year] = [
    now.getMonth() + 1, // getMonth() is zero-based
    now.getDate(),
    now.getFullYear(),
  ];
  const [hours, minutes, seconds] = [
    now.getHours(),
    now.getMinutes(),
    now.getSeconds(),
  ];
  setDate(day, month, year);
  setSvgClockHours(hours);
  setSvgClockMinutes(minutes);
  setSvgClockSeconds(seconds);
  setDigitalClockHours(hours);
  setDigitalClockMinutes(minutes);
  setTextClock(hours, minutes);
}

// Initial paint, then refresh once per second.
updateAll();
setInterval(updateAll, 1000);
|
import { BinaryTree, BinaryTreeNode } from '../'
import { BinaryTree as MBinaryTree } from '../../ts-data-structures'
// Helper so each test group gets a fresh, empty tree.
function getInstance<T>(): BinaryTree<T> {
  return new BinaryTree<T>()
}

describe('BinaryTree', () => {
  // The package re-exports BinaryTree; both entry points must expose the
  // exact same class object.
  test('exports', () => {
    expect(MBinaryTree).toBeDefined()
    expect(MBinaryTree).toBe(BinaryTree)
  })
  test('instantiates', () => {
    const tree = getInstance()
    expect(tree.count()).toEqual(0)
  })
  describe('.add', () => {
    const tree = getInstance<number>()
    const value = 123
    tree.add(value)
    test('adds the value to the tree', () => {
      expect(tree.count()).toEqual(1)
      expect(tree.contains(value)).toEqual(true)
    })
    // Duplicate insertion is an error rather than a silent no-op.
    test('throws if the value exists in the tree already', () => {
      expect(() => {
        tree.add(value)
      }).toThrowError(
        `BinaryTreeNode<number>.add Error: Value(${value}) already exists in BinaryTree.`
      )
    })
  })
  describe('.contains', () => {
    const tree = getInstance<string>()
    const addedValue = 'xyz'
    const nonAddedValue = '123'
    tree.add(addedValue)
    test('returns true if the value is in the tree', () => {
      expect(tree.contains(addedValue)).toEqual(true)
    })
    test('returns false if the value is not in the tree', () => {
      expect(tree.contains(nonAddedValue)).toEqual(false)
    })
  })
  describe('.print', () => {
    // Delegation test: tree.print() should forward to the root node's print.
    test('should call print on the root node', () => {
      const tree = getInstance()
      tree.add(1)
      ;(tree.root as BinaryTreeNode<number>).print = jest.fn()
      tree.print()
      expect((tree.root as BinaryTreeNode<number>).print).toHaveBeenCalled()
    })
  })
})
|
<gh_stars>10-100
// TODO: Utilize ES6 features (like for loops)
import Vector from './Vector';
import CollisionMesh from './CollisionMesh';
// Utility functions:
// Returns the argument with the smallest absolute magnitude, plus that
// magnitude, as { min, abs }. On ties the earliest argument wins.
function minAbs(min, ...vals) {
  let best = min;
  let bestAbs = Math.abs(min);
  for (const candidate of vals) {
    const candidateAbs = Math.abs(candidate);
    if (candidateAbs < bestAbs) {
      best = candidate;
      bestAbs = candidateAbs;
    }
  }
  return {
    min: best,
    abs: bestAbs
  };
}
/**
* A BoundingBox class.
*/
/**
 * A BoundingBox class: an axis-aligned rectangle described by its four edges.
 */
class BoundingBox {
  /**
   * @param {Number} left - The left position of x-axis.
   * @param {Number} top - The top position of y-axis.
   * @param {Number} right - The right position of x-axis.
   * @param {Number} bottom - The bottom position of y-axis.
   */
  constructor(left, top, right, bottom) {
    let obj = left;
    if (obj && obj.constructor !== Number) {
      if (obj.getBoundingBox) { obj = obj.getBoundingBox(); }
      // new BoundingBox(obj)
      this.left = obj.left;
      this.top = obj.top;
      this.right = obj.right;
      this.bottom = obj.bottom;
    } else {
      // new BoundingBox(left, top, right, bottom)
      this.left = left;
      this.top = top;
      this.right = right;
      this.bottom = bottom;
    }
  }

  /**
   * Clone the current boundingbox to a new object.
   * @returns {BoundingBox} A clone of this instance
   */
  clone() {
    return new BoundingBox(this);
  }

  /**
   * Checks if any property on `this` is NaN.
   * @returns {Boolean}
   */
  isNaN() {
    return isNaN(this.left) || isNaN(this.top) || isNaN(this.right) || isNaN(this.bottom);
  }

  /**
   * Returns the width of `this`.
   * @returns {Number} width
   */
  getWidth() {
    return Math.abs(this.right - this.left);
  }

  /**
   * Returns the height of `this`.
   * @returns {Number} height
   */
  getHeight() {
    return Math.abs(this.bottom - this.top);
  }

  /**
   * Returns the size of `this`.
   * @returns {Vector} size
   */
  getSize() {
    return new Vector(this.getWidth(), this.getHeight());
  }

  /**
   * Returns the area of `this`.
   * @returns {Number} area
   */
  getArea() {
    return this.getWidth() * this.getHeight();
  }

  /**
   * Returns the position of `this`.
   * @returns {Vector} position
   */
  getPosition() {
    return new Vector(this.left, this.top);
  }

  /**
   * Resolve this object down to a {@link BoundingBox} instance.
   * Since this instance is already a boundingbox, it returns itself.
   * @returns {BoundingBox} self
   */
  getBoundingBox() {
    // We have this method, so any prototype in this script will return their bounding box,
    // and if they are one it will return itself.
    // This simplifies code, and prevents having to do a ton of checks.
    return this;
  }

  /**
   * Returns a {@link CollisionMesh} instance version of this boundingbox similar to:<br>
   * ```javascript
   * new CollisionMesh(BoundingBox)
   * ```
   * @returns {CollisionMesh}
   */
  getCollisionMesh() {
    return new CollisionMesh(this);
  }

  /**
   * Returns the center position of `this`.
   * @returns {Vector} position
   */
  getCenterPosition() {
    return new Vector(this.left + this.getWidth() / 2, this.top + this.getHeight() / 2);
  }

  /**
   * Returns `this` subtract `other`, edge by edge.
   * @param {BoundingBox}
   * @returns {BoundingBox} per-edge difference (doc fix: this was documented as a Vector)
   */
  difference(other) {
    if (!other) { throw new Error("difference requires argument 'other'"); }
    other = other.getBoundingBox();
    return new BoundingBox(this.left - other.left, this.top - other.top,
      this.right - other.right, this.bottom - other.bottom);
  }

  /**
   * Returns a position, which if `this` is set to, `this` will be centered on `other`.
   * @param {BoundingBox}
   * @returns {Vector} position
   */
  getCenteredOnPosition(other) {
    if (!other) { throw new Error("getCenteredOnPosition requires argument 'other'"); }
    other = other.getBoundingBox();
    return other.getCenterPosition().subtract(this.getCenterPosition().subtract(this.getPosition()));
  }

  /**
   * Returns the intersection between `this` and `other`.
   * This will return a {@link Vector} if they only intersect at a point.
   * This will return a {@link BoundingBox} if they intersect over an area or line.
   * This will return a undefined if they do not intersect.
   * @param {BoundingBox}
   * @returns {Vector|BoundingBox|undefined} intersection object
   */
  getIntersection(other) {
    if (!other) { throw new Error("getIntersection requires argument 'other'"); }
    other = other.getBoundingBox();
    let left = Math.max(this.left, other.left);
    let top = Math.max(this.top, other.top);
    let right = Math.min(this.right, other.right);
    let bottom = Math.min(this.bottom, other.bottom);
    if (left === right && top === bottom) {
      return new Vector(left, top);
    } else if (left <= right && top <= bottom) {
      return new BoundingBox(left, top, right, bottom);
    }
  }

  /**
   * Returns the squared distance between `this` and `other`.
   * @param {Vector} other - a point (anything exposing getVector())
   * @returns {Number} squared distance
   */
  getDistanceSquaredToPoint(other) {
    other = other.getVector();
    // Clamp the point onto this box, then measure point-to-point distance.
    let cLeft = (other.left <= this.left ? this.left : (other.left >= this.right ? this.right : other.left));
    let cTop = (other.top <= this.top ? this.top : (other.top >= this.bottom ? this.bottom : other.top));
    let cPos = new Vector(cLeft, cTop);
    return cPos.distanceSquared(other);
  }

  /**
   * Returns the distance between `this` and `other`.
   * @param {Vector}
   * @returns {Number} distance
   */
  getDistanceToPoint(other) {
    return Math.sqrt(this.getDistanceSquaredToPoint(other));
  }

  /**
   * Sets `this`'s properties to `other`'s properties.
   * @param {BoundingBox}
   * @returns {BoundingBox} self
   */
  set(other) {
    if (!other) { throw new Error("set requires argument 'other'"); }
    other = other.getBoundingBox();
    this.left = other.left;
    this.top = other.top;
    this.right = other.right;
    this.bottom = other.bottom;
    return this;
  }

  /**
   * Move `this` to position at `left` and/or `top`.
   * NOTE(review): a coordinate of 0 is falsy and is therefore ignored —
   * confirm callers never need moveTo(0, …).
   * @param {Number} [left=null]
   * @param {Number} [top=null]
   * @returns {BoundingBox} self
   */
  moveTo(left, top) {
    if (left && left.constructor === Number) {
      this.right = left + (this.right - this.left);
      this.left = left;
    }
    if (top && top.constructor === Number) {
      this.bottom = top + (this.bottom - this.top);
      this.top = top;
    }
    return this;
  }

  /**
   * Move `this` relatively to position by `deltaLeft` and/or `deltaTop`.
   * @param {Number} [deltaLeft=null]
   * @param {Number} [deltaTop=null]
   * @returns {BoundingBox} self
   */
  moveBy(deltaLeft, deltaTop) {
    if (deltaLeft && deltaLeft.constructor === Number) {
      this.left += deltaLeft;
      this.right += deltaLeft;
    }
    if (deltaTop && deltaTop.constructor === Number) {
      this.top += deltaTop;
      this.bottom += deltaTop;
    }
    return this;
  }

  /**
   * Resize `this` to size `width` and/or `height`, anchored at `anchor`.
   * @param {Number} [width=null]
   * @param {Number} [height=null]
   * @param {String} [anchor='top-left'] supports "top-left", "top-right", "bottom-left", or "bottom-right"
   * @returns {BoundingBox} self
   */
  resizeTo(width, height, anchor) {
    // NOTE: anchor supports "top-left", "top-right", "bottom-left", or "bottom-right". By default it is "top-left".
    // NOTE: anchor also supports being passed as a position. Allowing the resize anchor to be anywhere other than
    // the predefined strings.
    // NOTE(review): divides by the current size — a zero-width/height box
    // would produce NaN; confirm that case cannot occur.
    let curSize = this.getSize();
    let newSize = new Vector(width || curSize.left, height || curSize.top);
    anchor = anchor || 'top-left';
    if (typeof anchor === 'string' || anchor instanceof String) {
      let anchorStr = anchor;
      anchor = this.getPosition();
      if (anchorStr.indexOf('right') >= 0) { anchor.left += curSize.left; }
      if (anchorStr.indexOf('bottom') >= 0) { anchor.top += curSize.top; }
    }
    this.left += (anchor.left - this.left) * (curSize.left - newSize.left) / curSize.left;
    this.right += (anchor.left - this.right) * (curSize.left - newSize.left) / curSize.left;
    this.top += (anchor.top - this.top) * (curSize.top - newSize.top) / curSize.top;
    this.bottom += (anchor.top - this.bottom) * (curSize.top - newSize.top) / curSize.top;
    return this;
  }

  /**
   * Determines if `this` encapsulates `other`.
   * @param {BoundingBox}
   * @returns {Boolean}
   */
  isContains(other) {
    if (!other) { throw new Error("isContains requires argument 'other'"); }
    other = other.getBoundingBox();
    return other.left >= this.left && other.right <= this.right &&
      other.top >= this.top && other.bottom <= this.bottom;
  }

  /**
   * Determines if `this` encapsulates at least one of `others`.
   * @param {BoundingBox[]}
   * @returns {Boolean}
   */
  someContains(others) {
    if (!others) { throw new Error("someContains requires argument 'others'"); }
    if (others.constructor !== Array) { throw new Error("someContains requires argument 'others' of type Array"); }
    for (let index = 0; index < others.length; index += 1) {
      if (this.isContains(others[index])) { return true; }
    }
    return false;
  }

  /**
   * Determines if `this` touches an edge of `other`, but does not intersect area.
   * @param {BoundingBox}
   * @returns {Boolean}
   */
  isTouching(other) {
    if (!other) { throw new Error("isTouching requires argument 'other'"); }
    other = other.getBoundingBox();
    return ((this.top <= other.bottom && this.bottom >= other.top) &&
      (this.left === other.right || this.right === other.left)) ||
      ((this.left <= other.right && this.right >= other.left) &&
        (this.top === other.bottom || this.bottom === other.top));
  }

  /**
   * If `this` touches one of `others`, but does not intersect area, then this returns the `this` edge name.
   * @param {BoundingBox[]}
   * @returns {String|undefined} edge name
   */
  getEdgeTouching(others) {
    if (!others) { throw new Error("getEdgeTouching requires argument 'others'"); }
    if (others.constructor !== Array) { others = [others]; }
    for (let index = 0; index < others.length; index += 1) {
      let other = others[index].getBoundingBox();
      if (this.top <= other.bottom && this.bottom >= other.top) {
        if (this.left === other.right) { return 'left'; }
        if (this.right === other.left) { return 'right'; }
      }
      if (this.left <= other.right && this.right >= other.left) {
        if (this.top === other.bottom) { return 'top'; }
        if (this.bottom === other.top) { return 'bottom'; }
      }
    }
  }

  /**
   * If `this` touches one of `others`, but does not intersect area, then this returns the `other` edge name.
   * @param {BoundingBox[]}
   * @returns {String|undefined} edge name
   */
  getOtherEdgeTouching(others) {
    if (!others) { throw new Error("getOtherEdgeTouching requires argument 'others'"); }
    if (others.constructor !== Array) { others = [others]; }
    for (let index = 0; index < others.length; index += 1) {
      let other = others[index].getBoundingBox();
      if (this.top <= other.bottom && this.bottom >= other.top) {
        if (this.left === other.right) { return 'right'; }
        if (this.right === other.left) { return 'left'; }
      }
      if (this.left <= other.right && this.right >= other.left) {
        if (this.top === other.bottom) { return 'bottom'; }
        if (this.bottom === other.top) { return 'top'; }
      }
    }
  }

  /**
   * Determines which edges of `this` is closest to `other`, returns all edges in sorted order by distance.
   * @param {BoundingBox}
   * @returns {String[]} edge names sorted from closest to furthest
   */
  getEdgeClosestOrder(other) {
    if (!other) { throw new Error("getEdgeClosest requires argument 'other'"); }
    other = other.getBoundingBox();
    let centerPos = this.getCenterPosition();
    let dis = [];
    // Bug fix: getDistanceSquaredToPoint() takes a single point object (it
    // calls .getVector() on its argument — see that method), but the original
    // passed two bare numbers, which crashed at runtime. Wrap each edge
    // midpoint in a Vector instead.
    dis.push({
      'edge': 'left',
      dis: other.getDistanceSquaredToPoint(new Vector(this.left, centerPos.top))
    });
    dis.push({
      'edge': 'top',
      dis: other.getDistanceSquaredToPoint(new Vector(centerPos.left, this.top))
    });
    dis.push({
      'edge': 'right',
      dis: other.getDistanceSquaredToPoint(new Vector(this.right, centerPos.top))
    });
    dis.push({
      'edge': 'bottom',
      dis: other.getDistanceSquaredToPoint(new Vector(centerPos.left, this.bottom))
    });
    dis.sort(function (a, b) {
      return a.dis - b.dis;
    });
    return dis.map(function (dis) { return dis.edge; });
  }

  /**
   * Determines which `this` edge is closest to `other`.
   * @param {BoundingBox}
   * @returns {String} edge name
   */
  getEdgeClosest(other) {
    let edges = this.getEdgeClosestOrder(other);
    return edges[0];
  }

  /**
   * Returns a vector representing the delta position to add to `this` to snap to `other`.<br>
   * Note: `snapDelta` may contain `NaN` for `left` or `right`
   * @param {BoundingBox}
   * @param {Number} [snapDistance=5] max distance to move `this`
   * @returns {Vector} snapDelta
   */
  getSnapDelta(other, snapDistance) {
    if (!other) { throw new Error("getSnapDelta requires argument 'other'"); }
    other = other.getBoundingBox();
    snapDistance = snapDistance || 5;
    let snapDelta = new Vector(NaN, NaN);
    if (this.top <= other.bottom && this.bottom >= other.top) {
      // Handle x-snap:
      const leftRightDis = minAbs(other.left - this.right, other.right - this.left);
      if (leftRightDis.abs <= snapDistance) { // this.LeftRightSnapTo(other)
        snapDelta.left = leftRightDis.min;
        // Handle y-subsnap:
        const topBottomDis = minAbs(other.top - this.top, other.bottom - this.bottom);
        if (topBottomDis.abs <= snapDistance) { // this.TopBottomSubSnapTo(other)
          snapDelta.top = topBottomDis.min;
        }
      }
    } else if (this.left <= other.right && this.right >= other.left) {
      // Handle y-snap:
      const topBottomDis = minAbs(other.top - this.bottom, other.bottom - this.top);
      if (topBottomDis.abs <= snapDistance) { // this.TopBottomSnapTo(other)
        snapDelta.top = topBottomDis.min;
        // Handle x-subsnap:
        const leftRightDis = minAbs(other.left - this.left, other.right - this.right);
        if (leftRightDis.abs <= snapDistance) { // this.LeftRightSubSnapTo(other)
          snapDelta.left = leftRightDis.min;
        }
      }
    }
    return snapDelta;
  }

  /**
   * Determines if `this` touches an edge of one of `others`, but does not intersect area.
   * @param {BoundingBox[]}
   * @returns {Boolean}
   */
  someTouching(others) {
    if (!others) { throw new Error("someTouching requires argument 'others'"); }
    if (others.constructor !== Array) { throw new Error("someTouching requires argument 'others' of type Array"); }
    for (let index = 0; index < others.length; index += 1) {
      if (this.isTouching(others[index])) { return true; }
    }
    return false;
  }

  /**
   * Determines if `this` intersects an area of `others`, not an edge.
   * @param {BoundingBox}
   * @returns {Boolean}
   */
  isColliding(other) {
    if (!other) { throw new Error("isColliding requires argument 'other'"); }
    other = other.getBoundingBox();
    return this.left < other.right && this.right > other.left && this.top < other.bottom && this.bottom > other.top;
  }

  /**
   * Determines if `this` intersects an area of one of `others`, not an edge.
   * @param {BoundingBox[]}
   * @returns {Boolean}
   */
  someColliding(others) {
    if (!others) { throw new Error("someColliding requires argument 'others'"); }
    if (others.constructor !== Array) { throw new Error("someColliding requires argument 'others' of type Array"); }
    for (let index = 0; index < others.length; index += 1) {
      if (this.isColliding(others[index])) { return true; }
    }
    return false;
  }

  /**
   * Returns which of `other` that `this` intersects an area of, not an edge.
   * @param {BoundingBox[]}
   * @returns {BoundingBox|undefined}
   */
  getColliding(others) {
    if (!others) { throw new Error("getColliding requires argument 'others'"); }
    if (others.constructor !== Array) { throw new Error("getColliding requires argument 'others' of type Array"); }
    for (let index = 0; index < others.length; index += 1) {
      if (this.isColliding(others[index])) { return others[index]; }
    }
  }
}
export default BoundingBox;
|
#!/bin/bash
#./build <BUILD-STAGE> <LOG-LEVEL> <function Name> <S3 Bucket TO PACKCAGE LAMBDA>
#
# Templates template.yaml in place, packages it with SAM, deploys the stack
# via CloudFormation, and restores the {{PLACEHOLDER}} markers afterwards so
# the template is reusable on the next run.

# --- argument validation ----------------------------------------------------
if [ "$1" != "Test" ] && [ "$1" != "Prod" ] && [ "$1" != "Dev" ]; then
    echo "You must set Test or Prod or Dev as arg."
    exit 1
fi

if [ "$2" != "INFO" ] && [ "$2" != "ERROR" ] && [ "$2" != "WARNING" ] && [ "$2" != "DEBUG" ]; then
    echo "You must set log-level to either INFO, WARNING, ERROR OR DEBUG."
    exit 1
fi

ENV="$1"
LOGLEVEL="$2"
FUNCTION_NAME="$3"

# --- per-environment settings (placeholders to be filled in) ----------------
if [ "$ENV" = "Dev" ]; then
    AWS_DEFAULT_REGION="<>"
    S3_BUCKET="<>"
elif [ "$ENV" = "Test" ] || [ "$ENV" = "Prod" ]; then
    AWS_DEFAULT_REGION="<>"
    S3_BUCKET="<>"
fi

case "$ENV" in
    "Dev")
        ARN="<>"
        ;;
    "Test")
        ARN="<>"
        ;;
    "Prod")
        ARN="<>"
        ;;
    *)
esac

# NOTE: the original script checked $? here and printed "dotnet installation
# failed.", but no dotnet command is ever run in this script, so that check
# could never fire meaningfully; it has been removed as dead code.

restore_template() {
    # Undo the in-place substitutions so template.yaml keeps its
    # {{PLACEHOLDER}} markers. (This reverse-sed is fragile if a value also
    # occurs verbatim elsewhere in the template — behaviour kept from the
    # original script.)
    sed -i "s/$ENV/{{ENV}}/g" template.yaml
    sed -i "s/$LOGLEVEL/{{LOGLEVEL}}/g" template.yaml
    sed -i "s/$ARN/{{ARN}}/g" template.yaml
    sed -i "s/$AWS_DEFAULT_REGION/{{REGION}}/g" template.yaml
}

# Fill in the template placeholders.
sed -i "s/{{ENV}}/$ENV/g" template.yaml
sed -i "s/{{LOGLEVEL}}/$LOGLEVEL/g" template.yaml
sed -i "s/{{ARN}}/$ARN/g" template.yaml
sed -i "s/{{REGION}}/$AWS_DEFAULT_REGION/g" template.yaml

# From here on always restore the template on exit: the original script
# leaked a substituted template.yaml whenever package/deploy failed.
trap restore_template EXIT

# sam local invoke -t template.yaml -d 5890 --profile=<profile> -e event.json "LambdaFunction"
# #deploy
if ! sam package --template-file template.yaml --s3-bucket "$S3_BUCKET" --output-template-file packaged.yaml; then
    echo "sam package failed."
    exit 1
fi

# Dev uses its own deployment role; Test/Prod share another (placeholders).
if [ "$ENV" = "Dev" ]; then
    aws cloudformation deploy --template-file packaged.yaml --stack-name "${FUNCTION_NAME}${ENV}" --capabilities CAPABILITY_IAM \
        --role-arn <role-arn>
else
    aws cloudformation deploy --template-file packaged.yaml --stack-name "${FUNCTION_NAME}${ENV}" --capabilities CAPABILITY_IAM \
        --role-arn <role-arn>
fi

if [ $? -ne 0 ]; then
    echo "cloudformation deploy failed."
    # Surface the most recent stack events to aid debugging.
    aws cloudformation describe-stack-events --stack-name "${FUNCTION_NAME}${ENV}" --max-items 5
    exit 1
fi
<reponame>GreyHatBeard/cli-microsoft365<filename>src/m365/spfx/commands/project/project-upgrade/rules/FN014007_CODE_launch_localWorkbench.ts
import * as path from 'path';
import { Finding, Occurrence } from "../";
import { Project } from "../../model";
import { JsonRule } from './JsonRule';
/**
 * Upgrade rule FN014007: flags .vscode/launch.json configurations that launch
 * the removed local workbench so the user can delete them.
 */
export class FN014007_CODE_launch_localWorkbench extends JsonRule {
  constructor() {
    super();
  }

  get id(): string {
    return 'FN014007';
  }

  get title(): string {
    return 'Local workbench in .vscode/launch.json';
  }

  get description(): string {
    return `In the .vscode/launch.json file, remove the local workbench launch configuration`;
  }

  // No automatic resolution snippet; the finding is advisory only.
  get resolution(): string {
    return ``;
  }

  get resolutionType(): string {
    return 'json';
  }

  get severity(): string {
    return 'Recommended';
  }

  get file(): string {
    return '.vscode/launch.json';
  }

  /**
   * Records one occurrence per launch configuration whose URL points at the
   * local workbench page. Silently returns when the project has no
   * .vscode/launch.json or no configurations.
   */
  visit(project: Project, findings: Finding[]): void {
    if (!project.vsCode ||
      !project.vsCode.launchJson ||
      !project.vsCode.launchJson.configurations) {
      return;
    }

    const occurrences: Occurrence[] = [];
    project.vsCode.launchJson.configurations.forEach((configuration, i) => {
      // '/temp/workbench.html' is the local-workbench URL used by older SPFx versions.
      if (configuration.url &&
        configuration.url.indexOf('/temp/workbench.html') > -1) {
        // Locate the exact JSON AST node so the finding can report a position.
        const node = this.getAstNodeFromFile(project.vsCode!.launchJson!, `configurations[${i}].url`);
        occurrences.push({
          file: path.relative(project.path, this.file),
          resolution: this.resolution,
          position: this.getPositionFromNode(node)
        });
      }
    });

    if (occurrences.length > 0) {
      this.addFindingWithOccurrences(occurrences, findings);
    }
  }
}
<reponame>bilaleren/mui-tabs
export { default as Tab } from './Tab'
export type { TabProps } from './Tab'
export { default as Tabs } from './Tabs'
export type { TabsProps, TabsActionRefAttributes } from './Tabs'
|
package com.netcracker.ncstore.service.data.category;
import com.netcracker.ncstore.exception.CategoryServiceNotFoundException;
import com.netcracker.ncstore.model.Category;
import java.util.List;
/**
 * Interface for all business services that work with the Category entity.
 */
public interface ICategoryDataService {
    /**
     * Returns the real Category entity with the given name.
     *
     * @param name - the name of Category
     * @return Category
     * @throws CategoryServiceNotFoundException - when no category with provided name exists
     */
    Category getCategoryByName(final String name) throws CategoryServiceNotFoundException;

    /**
     * Returns all categories from the database.
     *
     * @return List of all categories
     */
    List<Category> getAllCategories();
}
|
<filename>frontend/src/types/ModelTypes.ts
// type for JWT token
export interface JWTTokenUser {
_id: string,
email: string;
name: string;
surname: string;
organizationId: string;
}
// types for basic database models
export interface IOrganization {
_id: string;
name: string;
archivedIssues: [];
}
export interface IUser {
_id: string;
name: string;
surname: string;
email: string;
password: string;
isAdmin: boolean;
organizationId: string; // ref id
projects: [string]; // ref id
invitations: [string] // ref id
}
export interface IProject {
_id: string;
name: string;
description: string;
dateStart: Date;
dateEnd: Date;
creator: IUser;
organizationId: string;
}
// Kanban board belonging to a project.
export interface IBoard {
    _id: string;
    name: string;
    projectId: string; // ref id
}
// Column within a board.
export interface IColumn {
    _id: string;
    name: string;
    boardId: string; // ref id
}
// Issue (card) that lives in a column.
export interface IIssue {
    _id: string;
    name: string;
    description: string;
    creator: string; // ref id
    columnId: string; // ref id
    isFinished: boolean;
    // Fix: `[X]` is a one-element tuple type; these are variable-length lists.
    attachments: IAttachment[];
    messages: IMessage[];
    steps: IStep[];
    tags: string[]; // ref ids
    contributors: string[]; // ref ids
}
// Label that can be attached to issues; scoped to an organization.
export interface ITag {
    _id: string;
    name: string;
    organizationId: string;
}
// Chat/comment message on an issue; `sender` is a user ref id here.
export interface IMessage {
    _id: string;
    content: string;
    sender: string;
    addTime: Date;
}
// Checklist entry of an issue.
export interface IStep {
    _id: string;
    content: string;
    isCompleted: boolean;
}
// File attached to an issue.
export interface IAttachment {
    _id: string;
    name: string;
}
// types with nested subtypes for call on getNestedBoard
// that fetches all data of board and its children based on
// virtual properties to create nested object
export interface INestedMessage {
    _id: string;
    content: string;
    // Unlike IMessage.sender (a ref id), the sender is populated here.
    sender: INestedUser;
    addTime: Date;
}
// Reduced user projection used inside nested board payloads.
export interface INestedUser {
    _id: string;
    name: string;
    surname: string;
    organizationId: string;
}
// Board with its columns populated.
export interface INestedBoard {
    _id: string;
    name: string;
    projectId: string;
    // Fix: `[X]` is a one-element tuple type; a board has any number of columns.
    columns: INestedColumn[];
}
// Column with its issues populated.
export interface INestedColumn {
    _id: string;
    name: string;
    boardId: string;
    issues: INestedIssue[];
}
// Issue with all of its references populated.
export interface INestedIssue {
    _id: string;
    name: string;
    description: string;
    creator: INestedUser;
    columnId: string;
    isFinished: boolean;
    // Fix: `[X]` is a one-element tuple type; these are variable-length lists.
    attachments: IAttachment[];
    messages: INestedMessage[];
    steps: IStep[];
    tags: ITag[];
    contributors: INestedUser[];
}
// types with nested subtypes for GetBoardsGallery and ProjectInfoBanner
export interface INestedProject {
_id: string;
name: string;
description: string;
dateStart: Date;
dateEnd: Date;
creator: INestedUser;
boards: [{
_id: string;
name: string;
projectId: string;
totalIssues: number;
totalCompleted: number;
}]
} |
<gh_stars>0
/**
 * DTO describing a form instance: its name and the id of the owning user.
 * Parameter properties declare and assign the two public fields in one step,
 * producing exactly the same runtime object shape as explicit assignments.
 */
export class GetFormInstanceDto {
  public constructor(public name: string, public ownerId: string) {}
}
export default GetFormInstanceDto;
|
#!/usr/bin/env bash
#
# This file contains the user's settings. Modified by user_settings.sh.
#
set -e
# Work from the repository root so the relative 'source' paths below resolve.
cd "$(dirname "${BASH_SOURCE[0]}")"/../../ # to the root of the rpi_installer
. rpi_installer/common.sh
. "$RPI_INSTALLER_DIR"/utils.sh
. "$RPI_INSTALLER_DIR"/host_utils.sh
. projects/ble_uart/vars.sh
# Template to copy, scratch file used while asking questions, final output.
SETTINGS_TEMPLATE="$PROJECT_DIR/settings.sh.template"
SETTINGS_TEMPORARY="$GENERATED_DIR/$PROJECT_NAME/settings.sh.temporary"
SETTINGS_SH="$GENERATED_DIR/$PROJECT_NAME/settings.sh"
mkdir -p "$GENERATED_DIR/$PROJECT_NAME/"
change_settigs_main() {
FLAGS_HELP="USAGE: $0 [flags]"
parse_args "$@"
eval set -- "${FLAGS_ARGV}"
# Copy the template to the output file.
cp -f "$SETTINGS_TEMPLATE" "$SETTINGS_TEMPORARY"
# WLAN
echo "- WLAN"
ask_and_replace "What's the WLAN SSID ? " "WLAN0_SSID"
ask_and_replace "What's the WLAN password (empty for open network)? " "WLAN0_PASSWORD"
ask_and_replace "What's the WLAN IP address/mask (empty for DHCP)? " "WLAN0_IPV4_ADDR"
ask_and_replace "What's the WLAN IP gateway (empty for DHCP)? " "WLAN0_IPV4_GW"
# Ethernet
echo "- Ethernet"
ask_and_replace "What's the Ethernet IP address/mask (empty for DHCP)? " "ETH0_IPV4_ADDR"
ask_and_replace "What's the Ethernet IP gateway (empty for DHCP)? " "ETH0_IPV4_GW"
# Timezone
echo "- Timezone"
local def_tz="$(timedatectl |grep "Time zone" | awk '{ print $3}')"
ask_and_replace "What's the timezone? Hints: 'timedatectl list-timezones' (empty to use local) " "TIMEZONE" "$def_tz"
# Prompt the user that the file has been saved.
echo '------------------------------------------------'
mv -f "$SETTINGS_TEMPORARY" "$SETTINGS_SH"
msg_pass "Saved in '$SETTINGS_SH'."
echo '------------------------------------------------'
cat "$SETTINGS_SH"
}
[[ "${BASH_SOURCE[0]}" != "${0}" ]] || change_settigs_main "$@"
|
<reponame>equant/piecewise_linear_fit<filename>piecewise/plotter.py<gh_stars>0
# 3p
import matplotlib.pyplot as plt
# prj
from piecewise.regressor import piecewise
def plot_data_with_regression(t, v, min_stop_frac=0.03):
    """ Fits a piecewise (aka "segmented") regression and creates a scatter plot
    of the data overlaid with the regression segments.

    Params:
        t (listlike of ints or floats): independent/predictor variable values
        v (listlike of ints or floats): dependent/outcome variable values
        min_stop_frac (float between 0 and 1): the fraction of total error that
            a merge must account for to be considered "too big" to keep merging;
            the default is usually adequate, but this may be increased to make
            merging more aggressive (leading to fewer segments in the result)

    Returns:
        None.
    """
    model = piecewise(t, v, min_stop_frac)
    print('Num segments: %s' % len(model.segments))
    # Raw data as a translucent scatter plot.
    plt.plot(t, v, '.', alpha=0.6)
    # Overlay each fitted segment as a straight line between its endpoints.
    for seg in model.segments:
        t_new = [seg.start_t, seg.end_t]
        # Renamed the comprehension variable: the original reused `t`, which
        # shadowed the parameter inside the comprehension and read confusingly.
        v_hat = [seg.predict(t_i) for t_i in t_new]
        plt.plot(t_new, v_hat, 'k-')
    plt.show()
|
# Exercises Python's built-in exception hierarchy: each case raises an
# exception class and catches it either as itself or via an ancestor class,
# printing one confirmation line per case. The table below preserves the
# exact order and output of the original straight-line version.
_CASES = [
    (ArithmeticError, Exception),
    (ArithmeticError, ArithmeticError),
    (AssertionError, Exception),
    (AssertionError, AssertionError),
    (AttributeError, Exception),
    (AttributeError, AttributeError),
    (EOFError, Exception),
    (EOFError, EOFError),
    (Exception, BaseException),
    (Exception, Exception),
    (ImportError, Exception),
    (ImportError, ImportError),
    (IndentationError, SyntaxError),
    (IndentationError, IndentationError),
    (IndexError, LookupError),
    (IndexError, IndexError),
    (KeyError, LookupError),
    (KeyError, KeyError),
    (LookupError, Exception),
    (LookupError, LookupError),
]
for _raised, _caught_as in _CASES:
    try:
        raise _raised
    except _caught_as:
        _via = "" if _raised is _caught_as else " via %s" % _caught_as.__name__
        print("Caught %s%s" % (_raised.__name__, _via))
|
#!/bin/bash
## MyToDoReact version 1.0.
##
## Copyright (c) 2021 Oracle, Inc.
## Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
# Create the Kubernetes service (load balancer) for the frontend in the
# mtdrworkshop namespace. Uses 'create' (not 'apply'), so this fails if the
# service already exists.
echo create frontend LB...
kubectl create -f frontend-helidon/frontend-service.yaml -n mtdrworkshop
|
<reponame>elarivie/atom-editorconfig
// Unique sentinel exported for the "Show-EOL" feature; a Symbol guarantees it
// cannot collide with any other key/value used elsewhere in the package.
module.exports = Symbol('Show-EOL');
|
import React from 'react'
import PropTypes from 'prop-types'
// import classNames from "./index.module.css";
// Append `klass` (preceded by a space) to the element's class attribute and
// return the resulting className string — same contract as the original
// `className += ...` expression, including duplicate entries being allowed.
export const addClass = (element /* : HTMLElement*/, klass /*: string*/) => {
  const next = `${element.className} ${klass}`
  element.className = next
  return next
}
// Remove the FIRST occurrence of ` klass` (with its leading space) from the
// element's class attribute and return the resulting className string —
// identical to the original single-replace behavior.
export const removeClass = (element /*: HTMLElement*/, klass /*: string*/) => {
  const next = element.className.replace(` ${klass}`, '')
  element.className = next
  return next
}
// True when the user-agent looks like WebKit on an iPhone/iPad but is not
// Chrome-on-iOS (which reports "CriOS"). Regex .test() mirrors the original
// !!match(...) boolean coercions exactly.
export const isIOSSafari = window => {
  const ua = window.navigator.userAgent
  const appleMobile = /iPad/i.test(ua) || /iPhone/i.test(ua)
  return appleMobile && /WebKit/i.test(ua) && !/CriOS/i.test(ua)
}
// Wrapper that, while open on iOS Safari, applies the configured "iosFix" CSS
// class to <html> and <body> and preserves/restores the body scroll position.
// On all other browsers it renders its children untouched.
export default class FullScreen extends React.Component {
  // Saved body scrollTop; restored when the fix is removed.
  top = 0
  // Evaluated once per instance; false during SSR (no window available).
  iOSSafari = typeof window !== 'undefined' && isIOSSafari(window)
  static propTypes = {
    classNames: PropTypes.shape({
      iosFix: PropTypes.string,
    }).isRequired,
    children: PropTypes.node.isRequired,
    isOpen: PropTypes.bool.isRequired,
  }
  componentDidMount() {
    if (this.iOSSafari) {
      const body = document.body
      this.top = body.scrollTop
    }
  }
  // Remove the fix class from <html>/<body> and restore the scroll offset.
  hide() {
    const body = document.body
    const html = document.getElementsByTagName('html')[0]
    removeClass(html, this.props.classNames.iosFix)
    removeClass(body, this.props.classNames.iosFix)
    body.scrollTop = this.top
  }
  // Record the current scroll offset and add the fix class to <html>/<body>.
  show() {
    const body = document.body
    const html = document.getElementsByTagName('html')[0]
    this.top = body.scrollTop
    addClass(body, this.props.classNames.iosFix)
    addClass(html, this.props.classNames.iosFix)
  }
  componentWillUnmount() {
    if (this.iOSSafari && this.props.isOpen) {
      this.hide()
    }
  }
  render() {
    // NOTE(review): show()/hide() mutate the DOM during render, which React
    // considers a side effect; kept byte-identical to preserve behavior.
    if (this.iOSSafari) {
      this.props.isOpen ? this.show() : this.hide()
    }
    return this.props.children
  }
}
|
<reponame>amoAHCP/vxms
/*
* Copyright [2018] [<NAME>]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jacpfx.vxms.event;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.vertx.core.Context;
import io.vertx.core.Vertx;
import io.vertx.core.eventbus.Message;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.jacpfx.vxms.common.VxmsShared;
import org.jacpfx.vxms.common.util.ConfigurationUtil;
import org.jacpfx.vxms.common.util.URIUtil;
import org.jacpfx.vxms.event.annotation.Consume;
import org.jacpfx.vxms.event.annotation.OnEventError;
import org.jacpfx.vxms.event.response.EventbusHandler;
import org.jacpfx.vxms.event.util.ReflectionUtil;
/** Created by <NAME> on 09.03.16. */
/**
 * Scans a Vxms service for methods annotated with {@link Consume} and registers
 * a Vert.x event-bus consumer for each, including optional per-address error
 * handlers annotated with {@link OnEventError}.
 */
public class EventInitializer {
  // Suffix used when building the circuit-breaker method id.
  private static final String EVENTBUS = "eventbus";
  /**
   * initialize default Event Bus implementation for vxms
   *
   * @param vxmsShared the vxmsShared instance, containing the Vertx instance and other shared
   *     objects per instance
   * @param service the Vxms service object itself
   */
  public static void initEventbusHandling(VxmsShared vxmsShared, Object service) {
    Stream.of(service.getClass().getDeclaredMethods())
        .filter(m -> m.isAnnotationPresent(Consume.class))
        .forEach(restMethod -> initEventbusMethod(vxmsShared, service, restMethod));
  }
  /**
   * Initialize a specific REST method from Service
   *
   * @param vxmsShared the vxmsShared instance, containing the Vertx instance and other shared
   *     objects per instance
   * @param service The Service itself
   * @param eventBusMethod the event-bus Method
   */
  public static void initEventbusMethod(
      VxmsShared vxmsShared, Object service, Method eventBusMethod) {
    final Consume path = eventBusMethod.getAnnotation(Consume.class);
    Optional.ofNullable(path)
        .ifPresent(
            p -> {
              // Look for an @OnEventError method declared for the same address.
              final Optional<Method> errorMethod =
                  getEventbusMethods(service, p.value())
                      .stream()
                      .filter(method -> method.isAnnotationPresent(OnEventError.class))
                      .findFirst();
              initCallback(vxmsShared, service, eventBusMethod, path, errorMethod);
            });
  }
  /**
   * Builds the event-bus address (context root + cleaned path) and the unique
   * method id, then registers the consumer.
   */
  protected static void initCallback(
      VxmsShared vxmsShared,
      Object service,
      Method eventBusMethod,
      Consume path,
      Optional<Method> errorMethod) {
    final Vertx vertx = vxmsShared.getVertx();
    final String contexRoot =
        URIUtil.getCleanContextRoot(
            ConfigurationUtil.getContextRoot(
                vertx.getOrCreateContext().config(), service.getClass()));
    final String route = contexRoot + URIUtil.cleanPath(path.value());
    final Context context = vertx.getOrCreateContext();
    // Method id combines address, "eventbus" and the circuit-breaker postfix.
    final String methodId =
        path.value() + EVENTBUS + ConfigurationUtil.getCircuitBreakerIDPostfix(context.config());
    registerCallback(methodId, route, vxmsShared, service, eventBusMethod, errorMethod);
  }
  /** Registers the event-bus consumer that dispatches to the annotated method. */
  private static void registerCallback(
      String methodId,
      String route,
      VxmsShared vxmsShared,
      Object service,
      Method eventBusMethod,
      Optional<Method> errorMethod) {
    final Vertx vertx = vxmsShared.getVertx();
    vertx
        .eventBus()
        .consumer(
            route,
            eventbusHandler ->
                handleIncomingEvent(
                    methodId, vxmsShared, service, eventBusMethod, errorMethod, eventbusHandler));
  }
  /**
   * Invokes the annotated service method for one incoming message; any Throwable
   * is routed to the error path (error method or request failure).
   */
  private static void handleIncomingEvent(
      String methodId,
      VxmsShared vxmsShared,
      Object service,
      Method restMethod,
      Optional<Method> onErrorMethod,
      Message<Object> eventbusHandler) {
    try {
      final Object[] parameters =
          getInvocationParameters(
              methodId, vxmsShared, service, restMethod, onErrorMethod, eventbusHandler);
      ReflectionUtil.genericMethodInvocation(restMethod, () -> parameters, service);
    } catch (Throwable throwable) {
      handleEventBusError(
          methodId + "ERROR", vxmsShared, service, onErrorMethod, eventbusHandler, throwable);
    }
  }
  /** All declared methods annotated for the given event-bus address. */
  private static List<Method> getEventbusMethods(Object service, String sName) {
    final String methodName = sName;
    final Method[] declaredMethods = service.getClass().getDeclaredMethods();
    return Stream.of(declaredMethods)
        .filter(method -> filterEventbusMethods(method, methodName))
        .collect(Collectors.toList());
  }
  /** True when the method's @Consume or @OnEventError value matches the address. */
  private static boolean filterEventbusMethods(final Method method, final String methodName) {
    return method.isAnnotationPresent(Consume.class)
        && method.getAnnotation(Consume.class).value().equalsIgnoreCase(methodName)
        || method.isAnnotationPresent(OnEventError.class)
        && method.getAnnotation(OnEventError.class).value().equalsIgnoreCase(methodName);
  }
  /** Resolves the arguments to pass to the annotated method via reflection. */
  private static Object[] getInvocationParameters(
      String methodId,
      VxmsShared vxmsShared,
      Object service,
      Method restMethod,
      Optional<Method> onErrorMethod,
      Message<Object> eventbusHandler) {
    final Consumer<Throwable> throwableConsumer =
        throwable ->
            handleEventBusError(
                methodId + "ERROR", vxmsShared, service, onErrorMethod, eventbusHandler, throwable);
    return ReflectionUtil.invokeParameters(
        restMethod,
        null,
        new EventbusHandler(methodId, vxmsShared, null, throwableConsumer, eventbusHandler));
  }
  /** Dispatches a failure to the @OnEventError method when present, else fails the message. */
  private static void handleEventBusError(
      String methodId,
      VxmsShared vxmsShared,
      Object service,
      Optional<Method> onErrorMethod,
      Message<Object> eventbusHandler,
      Throwable throwable) {
    if (onErrorMethod.isPresent()) {
      invokeOnErrorMethod(methodId, vxmsShared, service, onErrorMethod, eventbusHandler, throwable);
    } else {
      failRequest(eventbusHandler, throwable);
    }
  }
  /** Invokes the error method; if that itself throws, the message is failed. */
  private static void invokeOnErrorMethod(
      String methodId,
      VxmsShared vxmsShared,
      Object service,
      Optional<Method> onErrorMethod,
      Message<Object> eventbusHandler,
      Throwable throwable) {
    onErrorMethod.ifPresent(
        errorMethod -> {
          try {
            ReflectionUtil.genericMethodInvocation(
                errorMethod,
                () ->
                    ReflectionUtil.invokeParameters(
                        errorMethod,
                        throwable,
                        new EventbusHandler(
                            methodId, vxmsShared, throwable, null, eventbusHandler)),
                service);
          } catch (Throwable t) {
            failRequest(eventbusHandler, t);
          }
        });
  }
  /** Fails the event-bus message with HTTP 500 semantics and logs the stack trace. */
  private static void failRequest(Message<Object> eventbusHandler, Throwable throwable) {
    eventbusHandler.fail(HttpResponseStatus.INTERNAL_SERVER_ERROR.code(), throwable.getMessage());
    throwable.printStackTrace();
  }
}
|
package com.example.ReplyKafka.config;
import java.util.concurrent.Executor;
import javax.annotation.PostConstruct;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import com.example.ReplyKafka.ReplyKafkaApplication;
@EnableAutoConfiguration
@EnableKafka
@Configuration
@EnableAsync
@Profile("thread")
public class MyConfigurationThreadConfig extends KafkaConfigUtils {
  /**
   * Executor backing @Async processing under the "thread" profile:
   * 75 core / 100 max threads with a bounded queue of 75 tasks.
   */
  @Bean(name = "threadPoolTaskExecutor")
  public Executor threadPoolTaskExecutor() {
    ThreadPoolTaskExecutor thread = new ThreadPoolTaskExecutor();
    thread.setMaxPoolSize(100);
    thread.setCorePoolSize(75);
    thread.setQueueCapacity(75);
    thread.initialize();
    return thread;
  }
  /** Consumer factory built from the application-level Kafka settings. */
  @Bean
  public ConsumerFactory<String, String> initConsumerFactory() throws Exception {
    return consumerFactory(ReplyKafkaApplication.serverPostKafka, ReplyKafkaApplication.groupIdTopicReq, ReplyKafkaApplication.clientId, 5, false, true);
  }
  /** Listener container factory with a single consumer thread. */
  @Bean("kafkaListenerContainerFactory")
  public ConcurrentKafkaListenerContainerFactory<String, String> initkafkaListenerContainerFactory(ConsumerFactory<String, String> consumerFactory) throws Exception {
    return kafkaListenerContainerFactory(consumerFactory, 1);
  }
  /** Producer factory for the reply topic. */
  @Bean
  public ProducerFactory<String, String> initProducerFactory() throws Exception {
    return producerFactory(ReplyKafkaApplication.serverPostKafka, ReplyKafkaApplication.clientId);
  }
  /** KafkaTemplate wrapping the producer factory above. */
  @Bean
  public KafkaTemplate<String, String> initReplyingTemplate(ProducerFactory<String, String> producerFactory) throws Exception {
    return kafkaTemplate(producerFactory);
  }
  // Logs which configuration variant was activated (profile "thread").
  @PostConstruct
  public void print() {
    System.out.println("Config By MyConfigurationThreadConfig");
  }
}
|
<reponame>in1tiate/OoT3D_Randomizer<gh_stars>100-1000
#ifndef _NABOORU_H_
#define _NABOORU_H_
#include "z3D/z3D.h"
/* Replacement draw handler for the Nabooru actor (EnNb); the "r" prefix marks
 * it as the randomizer's override of the vanilla draw function. */
void EnNb_rDraw(Actor* thisx, GlobalContext* globalCtx);
#endif //_NABOORU_H_
|
<gh_stars>0
from sqlalchemy import create_engine
from sqlalchemy.engine.url import URL
from models import DeclarativeBase
from scrapers import settings
# Performs database connection using database settings from settings.py
# Variable type of engine: sqlalchemy engine
engine = create_engine(URL(**settings.DATABASE), echo=True)
def create_quotes_table(engine):
    """Create all tables registered on DeclarativeBase's metadata.

    Args:
        engine: SQLAlchemy engine to issue the DDL against (this parameter
            shadows the module-level ``engine``; callers typically pass it).
    """
    DeclarativeBase.metadata.create_all(engine)
if __name__ == "__main__":
    # Allow running this module directly to (re)create the tables.
    create_quotes_table(engine)
|
<filename>src/main/java/com/gzwl/demo/handler/ExceptionHandler.java
package com.gzwl.demo.handler;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.shiro.authc.IncorrectCredentialsException;
import org.apache.shiro.authc.LockedAccountException;
import org.apache.shiro.authc.UnknownAccountException;
import org.apache.shiro.authz.UnauthorizedException;
import org.springframework.web.servlet.HandlerExceptionResolver;
import org.springframework.web.servlet.ModelAndView;
import com.alibaba.fastjson.support.spring.FastJsonJsonView;
/**
 * Global Spring MVC exception resolver that maps Shiro security exceptions to
 * a JSON body of the form {"code": ..., "msg": ...}, rendered via fastjson.
 */
public class ExceptionHandler implements HandlerExceptionResolver {
    @Override
    public ModelAndView resolveException(HttpServletRequest request,
            HttpServletResponse response, Object handler, Exception ex) {
        ModelAndView mv = new ModelAndView();
        FastJsonJsonView view = new FastJsonJsonView();
        Map<String, Object> attributes = new HashMap<>();
        if (ex instanceof UnauthorizedException) {
            // Authenticated but lacking the required permission.
            attributes.put("code", "1000001");
            attributes.put("msg", "用户无权限");
        } else if (ex instanceof UnknownAccountException
                || ex instanceof IncorrectCredentialsException) {
            // Merged the two previously duplicated branches: both produce the
            // same code/message so callers cannot tell whether the account or
            // the password was wrong.
            attributes.put("code", "1000002");
            attributes.put("msg", "用户名密码有误");
        } else if (ex instanceof LockedAccountException) {
            attributes.put("code", "1000003");
            attributes.put("msg", "账号已被锁定");
        } else {
            // Fallback: generic code with the raw exception message.
            attributes.put("code", "1000004");
            attributes.put("msg", ex.getMessage());
        }
        view.setAttributesMap(attributes);
        mv.setView(view);
        return mv;
    }
}
|
<reponame>ooooo-youwillsee/leetcode
/**
* @author ooooo
* @date 2020/9/25 13:43
*/
#ifndef CPP_0012__SOLUTION1_H_
#define CPP_0012__SOLUTION1_H_
#include <iostream>
#include <unordered_map>
#include <vector>
using namespace std;
class Solution {
 public:
  // Converts num (expected 1..3999) to its Roman numeral representation.
  //
  // BUG FIX: the original digit-by-digit version had three defects:
  //   * the tens branch tested `a >= 50` (the units digit) instead of `b`;
  //   * `string(b, 'X')` emitted b repetitions of 'X' instead of b/10
  //     (e.g. 30 produced thirty X's), and analogously `string(c, 'C')`.
  // Rewritten with the canonical greedy value/symbol table, which handles the
  // subtractive forms (CM, CD, XC, XL, IX, IV) uniformly.
  std::string intToRoman(int num) {
    static const std::pair<int, const char*> kSteps[] = {
        {1000, "M"}, {900, "CM"}, {500, "D"}, {400, "CD"},
        {100, "C"},  {90, "XC"},  {50, "L"},  {40, "XL"},
        {10, "X"},   {9, "IX"},   {5, "V"},   {4, "IV"},
        {1, "I"}};
    std::string result;
    // Greedily subtract the largest representable value until num is 0.
    for (const auto& step : kSteps) {
      while (num >= step.first) {
        result += step.second;
        num -= step.first;
      }
    }
    return result;
  }
};
#endif //CPP_0012__SOLUTION1_H_
|
<reponame>bossirreplaceable/Studying_View
package com.yobo.studying_view.lsn05_recler;
import android.content.Context;
import android.support.annotation.Nullable;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.View;
import java.util.ArrayList;
/**
* Created by YoBo on 2018/4/23.
* 自定义recyclerView
*/
public class C_WrapRecyclerView extends RecyclerView {
    // Header/footer views merged into the adapter by HeadListViewAdapter.
    private ArrayList<View> mHeaderViewList = new ArrayList<>();
    private ArrayList<View> mFooterViewList = new ArrayList<>();
    private Adapter mAdapter;
    public C_WrapRecyclerView(Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
    }
    /** Registers a header view; wraps the current adapter if one is set. */
    public void addHeadView(View view) {
        mHeaderViewList.add(view);
        wrapAdapterIfNeeded();
    }
    /** Registers a footer view; wraps the current adapter if one is set. */
    public void addFootView(View v) {
        mFooterViewList.add(v);
        wrapAdapterIfNeeded();
    }
    /**
     * Shared logic previously duplicated in addHeadView/addFootView: once an
     * adapter exists, make sure it is a HeadListViewAdapter that knows about
     * the header/footer lists.
     * NOTE(review): like the original, this only swaps the local mAdapter
     * reference and never calls super.setAdapter(...) again, so views added
     * after setAdapter() are not shown until the adapter is set again —
     * confirm whether that is intended.
     */
    private void wrapAdapterIfNeeded() {
        if (mAdapter != null && !(mAdapter instanceof HeadListViewAdapter)) {
            mAdapter = new HeadListViewAdapter(mHeaderViewList, mFooterViewList, mAdapter);
        }
    }
    @Override
    public void setAdapter(Adapter adapter) {
        // Wrap only when headers/footers were registered before the adapter.
        if (mHeaderViewList.size() > 0 || mFooterViewList.size() > 0) {
            mAdapter = new HeadListViewAdapter(mHeaderViewList, mFooterViewList, adapter);
        } else {
            mAdapter = adapter;
        }
        super.setAdapter(mAdapter);
    }
}
|
<gh_stars>100-1000
#include <stdint.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <assert.h>
#include <openssl/rsa.h>
#include <openssl/err.h>
#include "inc.h"
#include "tcpcrypt_ctl.h"
#include "tcpcrypt.h"
#include "tcpcryptd.h"
#include "crypto.h"
#include "profile.h"
/* RSA key size in bits and the corresponding modulus length in bytes. */
#define KEYLEN 4096
#define LENM (KEYLEN / 8)
#define RSA_EXPONENT 3
/* One RSA key pair plus a cached big-endian binary copy of its modulus. */
struct key {
	RSA *k_rsa;   /* the OpenSSL key */
	int k_len;    /* key length in bits (KEYLEN) */
	int k_blen;   /* byte length of k_bin */
	void *k_bin;  /* modulus n as raw big-endian bytes */
};
/* Process-wide singleton holding the daemon's RSA key. */
static struct state {
	struct key s_key;
} _state;
/* Per-connection crypt state: our key plus the peer's public key. */
struct rsa_priv {
	struct key *r_key; /* local key (set on decrypt side) */
	RSA *r_rsa;        /* peer public key (set via rsa_set_key) */
};
/* Generate a fresh RSA key pair of the given bit size with exponent 3.
 * NOTE(review): RSA_generate_key() and direct struct access below are the
 * pre-OpenSSL-1.1 API; this file will not build against OpenSSL >= 1.1. */
static RSA* generate_key(int bits)
{
	RSA* r;
	xprintf(XP_DEFAULT, "Generating RSA key: %d bits\n", bits);
	r = RSA_generate_key(bits, RSA_EXPONENT, NULL, NULL);
	if (!r)
		errssl(1, "RSA_generate_key()");
	return r;
}
/* (Re)generate the process-wide key and cache its modulus in binary form,
 * freeing any previously generated key first. */
static void generate_keys(void)
{
	struct key *k = &_state.s_key;
	xprintf(XP_DEFAULT, "Generating RSA key\n");
	if (k->k_rsa) {
		RSA_free(k->k_rsa);
		free(k->k_bin);
	}
	k->k_len = KEYLEN;
	k->k_rsa = generate_key(k->k_len);
	k->k_blen = BN_num_bytes(k->k_rsa->n);
	k->k_bin = xmalloc(k->k_blen);
	BN_bn2bin(k->k_rsa->n, k->k_bin);
	xprintf(XP_DEFAULT, "Done generating RSA key\n");
}
/* Accessor for the process-wide key singleton. */
static struct key *get_key(void)
{
	return &_state.s_key;
}
/* Free a crypt instance. r_rsa->e is shared with the singleton key (see
 * rsa_set_key), so it is nulled out before RSA_free to avoid a double free. */
static void rsa_destroy(struct crypt *c)
{
	struct rsa_priv *tp = crypt_priv(c);
	if (!tp)
		return;
	if (tp->r_rsa) {
		tp->r_rsa->e = NULL;
		RSA_free(tp->r_rsa);
	}
	free(tp);
	free(c);
}
/* Encrypt len bytes in-place with the peer's public key (OAEP padding).
 * Returns the ciphertext size. NOTE(review): the result (RSA_size bytes) is
 * copied back into `data`, so the caller's buffer must be at least
 * RSA_size(r_rsa) bytes — not enforced here. */
static int rsa_encrypt(struct crypt *c, void *iv, void *data, int len)
{
	struct rsa_priv *tp = crypt_priv(c);
	int sz = RSA_size(tp->r_rsa);
	void *out = alloca(sz);
	profile_add(1, "pre pkey encrypt");
	if (RSA_public_encrypt(len, data, out, tp->r_rsa,
			       RSA_PKCS1_OAEP_PADDING) == -1)
		errssl(1, "RSA_public_encrypt()");
	profile_add(1, "post pkey encrypt");
	memcpy(data, out, sz);
	return sz;
}
/* Decrypt len bytes in-place with our private key; returns plaintext size. */
static int rsa_decrypt(struct crypt *c, void *iv, void *data, int len)
{
	struct rsa_priv *tp = crypt_priv(c);
	void *out = alloca(len);
	int rc;
	if (_conf.cf_rsa_client_hack)
		assert(!"not implemented");
	profile_add(1, "pre pkey decrypt");
	rc = RSA_private_decrypt(len, data, out, tp->r_key->k_rsa,
				 RSA_PKCS1_OAEP_PADDING);
	if (rc == -1)
		errssl(1, "RSA_private_decrypt()");
	profile_add(1, "post pkey decrypt");
	memcpy(data, out, rc);
	return rc;
}
/* Export our public key: points *out at the cached binary modulus and
 * remembers the local key for later decryption. Returns the length in bytes. */
static int rsa_get_key(struct crypt *c, void **out)
{
	struct rsa_priv *tp = crypt_priv(c);
	struct key *k;
	k = tp->r_key = get_key();
	*out = k->k_bin;
	return k->k_blen;
}
/* Import the peer's public key from its raw big-endian modulus. The public
 * exponent is borrowed from our own key (same fixed exponent); rsa_destroy
 * therefore must not free it. Returns 0 on success, -1 on failure.
 * NOTE(review): the `plen % LENM` check rejects moduli whose bit length is
 * not a multiple of LENM (=KEYLEN/8) — confirm that is the intended test. */
static int rsa_set_key(struct crypt *c, void *key, int len)
{
	struct rsa_priv *tp = crypt_priv(c);
	BIGNUM *pub;
	int plen;
	RSA* r;
	tp->r_rsa = r = RSA_new();
	if (!r)
		return -1;
	r->n = pub = BN_bin2bn(key, len, NULL);
	if (!pub)
		return -1;
	plen = BN_num_bits(pub);
	if (plen % LENM)
		return -1;
	r->e = get_key()->k_rsa->e;
	return 0;
}
/* Allocate an RSA crypt instance and wire up its operation table. The
 * process-wide key is generated lazily on first use.
 * NOTE(review): the static `init` flag is not thread-safe. */
struct crypt *crypt_RSA_new(void)
{
	struct rsa_priv *r;
	struct crypt *c;
	static int init = 0;
	c = crypt_init(sizeof(*r));
	c->c_destroy = rsa_destroy;
	c->c_set_key = rsa_set_key;
	c->c_get_key = rsa_get_key;
	c->c_encrypt = rsa_encrypt;
	c->c_decrypt = rsa_decrypt;
	r = crypt_priv(c);
	/* XXX have tcpcrypt call this and renew keys */
	if (!init) {
		generate_keys();
		init = 1;
	}
	return c;
}
|
<filename>src/main/java/org/olat/modules/curriculum/ui/member/CurriculumMemberListTableModel.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.curriculum.ui.member;
import java.util.List;
import java.util.Locale;
import org.olat.core.commons.persistence.SortKey;
import org.olat.core.gui.components.form.flexible.impl.elements.table.DefaultFlexiTableDataModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiSortableColumnDef;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.SortableFlexiTableDataModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.SortableFlexiTableModelDelegate;
import org.olat.user.propertyhandlers.UserPropertyHandler;
/**
*
* Initial date: 19 oct. 2020<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Sortable table model for curriculum member rows: fixed member columns below
 * USER_PROPS_OFFSET, dynamic user-property columns above it.
 */
public class CurriculumMemberListTableModel extends DefaultFlexiTableDataModel<CurriculumMemberRow> implements SortableFlexiTableDataModel<CurriculumMemberRow> {
	private static final MemberCols[] COLS = MemberCols.values();
	private static final Double ZERO = Double.valueOf(0.0d);
	private final Locale locale;
	private final List<UserPropertyHandler> userPropertyHandlers;
	public CurriculumMemberListTableModel(FlexiTableColumnModel columnModel, List<UserPropertyHandler> userPropertyHandlers, Locale locale) {
		super(columnModel);
		this.locale = locale;
		this.userPropertyHandlers = userPropertyHandlers;
	}
	@Override
	public void sort(SortKey orderBy) {
		if(orderBy != null) {
			List<CurriculumMemberRow> views = new SortableFlexiTableModelDelegate<>(orderBy, this, locale).sort();
			super.setObjects(views);
		}
	}
	@Override
	public Object getValueAt(int row, int col) {
		CurriculumMemberRow member = getObject(row);
		return getValueAt(member, col);
	}
	@Override
	public Object getValueAt(CurriculumMemberRow row, int col) {
		// Columns below the offset are fixed member columns; columns at or
		// above it map onto the configured user property handlers.
		if(col < CurriculumMemberListController.USER_PROPS_OFFSET) {
			switch(COLS[col]) {
				case id: return row.getIdentity().getKey();
				case progression: return row.getAverageCompletion();
				case firstTime: return row.getFirstTime();
				case role: return row.getMembership();
				case tools: return row.getToolsLink();
				default: return "ERROR";
			}
		}
		int propPos = col - CurriculumMemberListController.USER_PROPS_OFFSET;
		return userPropertyHandlers.get(propPos).getUserProperty(row.getIdentity().getUser(), locale);
	}
	/**
	 * Average completion for a participant row, defaulting to 0.0 when unknown;
	 * null for rows whose membership is not a participant.
	 */
	public Double getAverageCompletion(CurriculumMemberRow row) {
		if(!row.getMembership().isParticipant()) return null;
		Double completion = row.getAverageCompletion();
		if(completion == null) {
			completion = ZERO;
		}
		return completion;
	}
	/**
	 * @deprecated historical misspelling kept for source compatibility;
	 *             use {@link #getAverageCompletion(CurriculumMemberRow)}.
	 */
	@Deprecated
	public Double getAvergaeCompletion(CurriculumMemberRow row) {
		return getAverageCompletion(row);
	}
	/**
	 * Index of the row matching the given identity key, or -1 if absent.
	 * NOTE(review): compares row.getKey() while getValueAt exposes
	 * row.getIdentity().getKey() — confirm both refer to the same identifier.
	 */
	public int indexOf(Long identityKey) {
		List<CurriculumMemberRow> rows = getObjects();
		for(int i=rows.size(); i-->0; ) {
			CurriculumMemberRow row = rows.get(i);
			if(row != null && row.getKey().equals(identityKey)) {
				return i;
			}
		}
		return -1;
	}
	/** Fixed columns of the member table with their i18n header keys. */
	public enum MemberCols implements FlexiSortableColumnDef {
		id("table.header.id"),
		progression("table.header.progress"),
		firstTime("table.header.firstTime"),
		role("table.header.role"),
		tools("table.header.tools");
		private final String i18nKey;
		private MemberCols(String i18nKey) {
			this.i18nKey = i18nKey;
		}
		@Override
		public String i18nHeaderKey() {
			return i18nKey;
		}
		@Override
		public boolean sortable() {
			return true;
		}
		@Override
		public String sortKey() {
			return name();
		}
	}
}
|
import subprocess
import os
import click
# Click command group collecting the network diagnostic subcommands.
# (Deliberately no docstring: click would surface it as CLI help text.)
@click.group()
def network():
    pass
@click.command()
def ping(host='127.0.0.1'):
    # Ping `host` once and print 'Found' or 'Unreachable'. Click passes no
    # arguments to this command, so the default host is always used unless
    # the function is invoked directly. Exits with status 1 on failure.
    # Output of ping is discarded; only its exit status matters.
    response = subprocess.call(['ping', '-c 1', host], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    if response != 0:
        print('Unreachable')
        # BUG FIX: the original called os.exit(1), which does not exist and
        # raised AttributeError; SystemExit produces the intended exit code.
        raise SystemExit(1)
    print('Found')
network.add_command(ping)
|
// Type-level tests (run with `tsd`): every escape/unescape form — plain call
// and tagged-template usage — must be typed as returning a plain string.
import {expectType} from 'tsd';
import {htmlEscape, htmlUnescape} from './index.js';
expectType<string>(htmlEscape('🦄 & 🐐'));
expectType<string>(htmlUnescape('🦄 & 🐐'));
expectType<string>(htmlEscape('Hello <em>World</em>'));
const url = 'https://sindresorhus.com?x="🦄"';
expectType<string>(htmlEscape`<a href="${url}">Unicorn</a>`);
const escaped = '🦄 & 🐐';
expectType<string>(htmlUnescape`unicorn and goat: ${escaped}`);
|
<reponame>ch1huizong/learning
#! /usr/bin/env python3
# -*-coding:utf-8 -*-
# @Time : 2019/06/15 19:04:25
# @Author : che
# @Email : <EMAIL>
import os
class Delete(object):
    """Deletes a single file, with interactive and dry-run variants."""

    def __init__(self, file):
        # Path of the file this instance manages.
        self.file = file

    def interactive(self):
        """Ask for confirmation; delete only on an explicit yes."""
        choice = input("Do you really want to delete %s [N]/Y? " % self.file)
        # BUG FIX: the original tested `if choice.upper():`, which is truthy
        # for ANY non-empty answer (including "n"), so nearly every reply
        # deleted the file. Only an explicit Y/YES may delete.
        if choice.strip().upper() in ("Y", "YES"):
            print("DELETING: %s" % self.file)
            os.remove(self.file)
        else:
            print("Skipping: %s" % self.file)

    def dryrun(self):
        """Report what would be deleted without touching the filesystem."""
        print("Dry Run: %s [NOT DELETED]" % self.file)

    def delete(self):
        """Delete the file, printing (not raising) any failure."""
        print("DELETING: %s" % self.file)
        try:
            os.remove(self.file)
        except Exception as e:
            # BUG FIX: the original printed the undefined name `err`, which
            # raised NameError and hid the real failure.
            print(e)
if __name__ == "__main__":
    # Ad-hoc manual run: delete duplicate files found under ./tmp.
    # Depends on the sibling `dupe` module.
    from dupe import find_dupes
    dupes = find_dupes("tmp")
    for d in dupes:
        delete = Delete(d)
        #delete.dryrun()
        #delete.interactive()
        delete.delete()
|
<gh_stars>0
# FixMatch semi-supervised config for CIFAR-100 with 400 labeled images.
_base_ = [
    './pipelines/fixmatch_pipeline.py'
]
# {{_base_.*}} placeholders are mmcv config-file templating, substituted when
# the config is parsed (this file is not plain importable Python).
__train_pipeline = {{_base_.train_pipeline}}
__train_pipeline_strong = {{_base_.train_pipeline_strong}}
__test_pipeline = {{_base_.test_pipeline}}
# Fixed seed so labeled/unlabeled splits are reproducible across runs.
seed = 1234
data = dict(
    samples_per_gpu=64,
    workers_per_gpu=4,
    num_classes=100,
    train=[
        # Labeled Dataset
        dict(
            type="TVDatasetSplit",
            base="CIFAR100",
            num_classes=100,
            train=True,
            data_prefix="data/torchvision/cifar100",
            num_images=400,
            pipeline=__train_pipeline,
            samples_per_gpu=16,
            workers_per_gpu=2,
            seed=seed,
            download=True,
        ),
        # Unlabeled Dataset
        dict(
            type="TVDatasetSplit",
            base="CIFAR100",
            num_classes=100,
            train=True,
            data_prefix="data/torchvision/cifar100",
            # -1 = use every available training image.
            num_images=-1,
            # FixMatch consistency training: weak view for pseudo-labels,
            # strong view as the prediction target.
            pipeline=dict(
                weak=__train_pipeline,
                strong=__train_pipeline_strong
            ),
            samples_per_gpu=112,
            workers_per_gpu=2,
            seed=seed,
            download=True,
            use_labels=False
        )
    ],
    val=dict(
        type="TVDatasetSplit",
        base="CIFAR100",
        num_classes=100,
        train=True,
        data_prefix="data/torchvision/cifar100",
        num_images=10000,
        samples_per_gpu=128,
        workers_per_gpu=4,
        seed=seed,
        pipeline=__test_pipeline,
        download=True,
    ),
    test=dict(
        type="TVDatasetSplit",
        base="CIFAR100",
        num_classes=100,
        train=False,
        num_images=-1,
        data_prefix="data/torchvision/cifar100",
        samples_per_gpu=128,
        workers_per_gpu=4,
        seed=seed,
        pipeline=__test_pipeline,
        download=True,
    )
)
|
#! /bin/bash
# Load the shared CARLA build environment (paths, helper functions).
source $(dirname "$0")/Environment.sh
export CC=clang-8
export CXX=clang++-8
# ==============================================================================
# -- Parse arguments -----------------------------------------------------------
# ==============================================================================
DOC_STRING="Build and package CARLA Python API."
USAGE_STRING="Usage: $0 [-h|--help] [--rebuild] [--clean] [--python-version=VERSION]"
REMOVE_INTERMEDIATE=false
BUILD_RSS_VARIANT=false
BUILD_PYTHONAPI=true
# GNU getopt: -h plus the long options; a parse error makes it return non-zero.
OPTS=`getopt -o h --long help,rebuild,clean,rss,python-version:,packages:,clean-intermediate,all,xml, -n 'parse-options' -- "$@"`
if [ $? != 0 ] ; then echo "$USAGE_STRING" ; exit 2 ; fi
eval set -- "$OPTS"
PY_VERSION=3
while [[ $# -gt 0 ]]; do
  case "$1" in
    --rebuild )
      # Clean intermediates first, then build.
      REMOVE_INTERMEDIATE=true;
      BUILD_PYTHONAPI=true;
      shift ;;
    --python-version )
      PY_VERSION="$2"
      shift 2 ;;
    --rss )
      BUILD_RSS_VARIANT=true;
      shift ;;
    --clean )
      # Clean only; skip the build.
      REMOVE_INTERMEDIATE=true;
      BUILD_PYTHONAPI=false;
      shift ;;
    -h | --help )
      echo "$DOC_STRING"
      echo "$USAGE_STRING"
      exit 1
      ;;
    * )
      # Options recognized by getopt but unused here are silently skipped.
      shift ;;
  esac
done
# Nothing to do if neither cleaning nor building was requested.
if ! { ${REMOVE_INTERMEDIATE} || ${BUILD_PYTHONAPI} ; }; then
  fatal_error "Nothing selected to be done."
fi
pushd "${CARLA_PYTHONAPI_SOURCE_FOLDER}" >/dev/null
# ==============================================================================
# -- Clean intermediate files --------------------------------------------------
# ==============================================================================
if ${REMOVE_INTERMEDIATE} ; then
  log "Cleaning intermediate files and folders."
  rm -Rf build dist carla.egg-info source/carla.egg-info
  find source -name "*.so" -delete
  find source -name "__pycache__" -type d -exec rm -r "{}" \;
fi
# ==============================================================================
# -- Build API -----------------------------------------------------------------
# ==============================================================================
if ${BUILD_RSS_VARIANT} ; then
export BUILD_RSS_VARIANT=${BUILD_RSS_VARIANT}
fi
if ${BUILD_PYTHONAPI} ; then
log "Building Python API for Python 3."
/usr/bin/env python${PY_VERSION} setup.py bdist_egg
fi
# ==============================================================================
# -- ...and we are done --------------------------------------------------------
# ==============================================================================
popd >/dev/null
log "Success!"
|
import { useEffect, useState } from 'react'
import { useHistory } from 'react-router-dom'
import Card from '../Card'
import gameService from '../../services/gameService'
import genreService from '../../services/genreService'
import devService from '../../services/devService'
import './List.css'
const List = () => {
const history = useHistory()
const type = history.location.pathname.split('/')[1]
const [search, setSearch] = useState('')
const [items, setItems] = useState([])
useEffect(() => {
const fetchItems = async () => {
let res
if (type === 'games') res = await gameService.getAll()
if (type === 'genres') res = await genreService.getAll()
if (type === 'devs') res = await devService.getAll()
if (res) setItems(res)
}
fetchItems()
}, [type])
const onSearchSubmitHandler = (e) => {
e.preventDefault()
const fetchData = async () => {
let res
if (type === 'games') res = await gameService.getAll(search)
if (type === 'genres') res = await genreService.getAll(search)
if (type === 'devs') res = await devService.getAll(search)
if (res) setItems(res)
}
fetchData()
}
const onSearchChangeHandler = (e) => setSearch(e.target.value)
return (
<section>
<header>
<h1>Search {type}:</h1>
<form onSubmit={onSearchSubmitHandler}>
<input
placeholder={'Search ' + type}
name="searchQuery"
type="text"
value={search}
onChange={onSearchChangeHandler}
/>
</form>
</header>
<article>
{items.map(item => <Card
key={item._id}
id={item._id}
title={item.title}
image={item.image}
type={type}
/>)}
</article>
</section>
)
}
export default List |
<reponame>manueme/manueme-blog<filename>src/app/components/shared/include-header/include-header.component.ts
import { Component, Input, OnInit } from '@angular/core';
/**
 * Thin presentational component for a shared header include.
 * `includedFile` names the file/fragment to render — presumably consumed
 * by include-header.component.html; confirm against the template.
 */
@Component({
    selector: 'app-include-header',
    templateUrl: './include-header.component.html',
    styleUrls: ['./include-header.component.scss']
})
export class IncludeHeaderComponent implements OnInit {
    /** Name/path of the header fragment to include. */
    @Input() includedFile: string;
    constructor() { }
    ngOnInit() {
    }
}
|
/*
* Copyright (c) 2017 dmfs GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.smoothsync.smoothsetup.model;
import android.os.Parcel;
import android.os.Parcelable;
import com.smoothsync.api.model.Provider;
/**
* A basic implementation of an {@link Account}.
*
* @author <NAME>
*/
public final class BasicAccount implements Account
{
    /**
     * Parcelable support: materializes BasicAccount instances from a Parcel.
     */
    public final static Creator<BasicAccount> CREATOR = new Creator<BasicAccount>()
    {
        @Override
        public BasicAccount createFromParcel(Parcel source)
        {
            return new BasicAccount(source.readString(), (Provider) source.readParcelable(getClass().getClassLoader()));
        }

        @Override
        public BasicAccount[] newArray(int size)
        {
            // Bug fix: the original returned `new BasicAccount[0]` regardless
            // of the requested size, which breaks Parcelable array
            // unmarshalling (e.g. Parcel.createTypedArray).
            return new BasicAccount[size];
        }
    };

    private final String mAccountId;
    private final Provider mProvider;

    public BasicAccount(String accountId, Provider provider)
    {
        mAccountId = accountId;
        mProvider = provider;
    }

    @Override
    public String accountId()
    {
        return mAccountId;
    }

    @Override
    public Provider provider()
    {
        return mProvider;
    }

    @Override
    public int describeContents()
    {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags)
    {
        dest.writeString(mAccountId);
        // Providers that are not themselves Parcelable are wrapped so they
        // can still cross the Parcel boundary.
        if (mProvider instanceof Parcelable)
        {
            dest.writeParcelable((Parcelable) mProvider, flags);
        }
        else
        {
            dest.writeParcelable(new ParcelableProvider(mProvider), flags);
        }
    }
}
|
<filename>lib/controller/handler.go
// Copyright (C) The Arvados Authors. All rights reserved.
//
// SPDX-License-Identifier: AGPL-3.0
package controller
import (
"context"
"errors"
"fmt"
"net/http"
"net/url"
"strings"
"sync"
"time"
"git.arvados.org/arvados.git/lib/controller/api"
"git.arvados.org/arvados.git/lib/controller/federation"
"git.arvados.org/arvados.git/lib/controller/localdb"
"git.arvados.org/arvados.git/lib/controller/railsproxy"
"git.arvados.org/arvados.git/lib/controller/router"
"git.arvados.org/arvados.git/lib/ctrlctx"
"git.arvados.org/arvados.git/sdk/go/arvados"
"git.arvados.org/arvados.git/sdk/go/ctxlog"
"git.arvados.org/arvados.git/sdk/go/health"
"git.arvados.org/arvados.git/sdk/go/httpserver"
"github.com/jmoiron/sqlx"
_ "github.com/lib/pq"
)
// Handler is the controller's top-level http.Handler. Initialization is
// deferred to the first request (or CheckHealth call) via setupOnce.
type Handler struct {
	Cluster *arvados.Cluster
	setupOnce      sync.Once
	handlerStack   http.Handler // mux assembled by setup()
	proxy          *proxy
	secureClient   *http.Client // TLS-verifying backend client
	insecureClient *http.Client // used when the Rails API is marked TLS-insecure
	pgdb           *sqlx.DB   // lazily opened PostgreSQL handle, see db()
	pgdbMtx        sync.Mutex // guards pgdb
}
// ServeHTTP lazily builds the handler stack, normalizes double slashes on
// non-GET/HEAD requests, applies the configured request deadline, and
// delegates to the mux assembled in setup().
func (h *Handler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
	h.setupOnce.Do(h.setup)
	if req.Method != "GET" && req.Method != "HEAD" {
		// http.ServeMux returns 301 with a cleaned path if
		// the incoming request has a double slash. Some
		// clients (including the Go standard library) change
		// the request method to GET when following a 301
		// redirect if the original method was not HEAD
		// (RFC7231 6.4.2 specifically allows this in the case
		// of POST). Thus "POST //foo" gets misdirected to
		// "GET /foo". To avoid this, eliminate double slashes
		// before passing the request to ServeMux.
		for strings.Contains(req.URL.Path, "//") {
			req.URL.Path = strings.Replace(req.URL.Path, "//", "/", -1)
		}
	}
	if h.Cluster.API.RequestTimeout > 0 {
		// RequestTimeout is converted via time.Duration — assumes it is an
		// arvados.Duration (nanosecond-compatible); TODO confirm.
		ctx, cancel := context.WithDeadline(req.Context(), time.Now().Add(time.Duration(h.Cluster.API.RequestTimeout)))
		req = req.WithContext(ctx)
		defer cancel()
	}
	h.handlerStack.ServeHTTP(w, req)
}
// CheckHealth reports readiness: the database must be reachable and a
// RailsAPI endpoint must be resolvable from the cluster config.
func (h *Handler) CheckHealth() error {
	h.setupOnce.Do(h.setup)
	_, err := h.db(context.TODO())
	if err != nil {
		return err
	}
	_, _, err = railsproxy.FindRailsAPI(h.Cluster)
	return err
}
// Done returns a nil channel: receiving from it blocks forever, i.e. this
// handler never signals shutdown on its own.
func (h *Handler) Done() <-chan struct{} {
	return nil
}
// neverRedirect is an http.Client.CheckRedirect callback that disables
// redirect-following, so upstream 3xx responses are passed through verbatim.
func neverRedirect(*http.Request, []*http.Request) error { return http.ErrUseLastResponse }
// setup assembles the request-handling stack (run exactly once, via
// setupOnce): health endpoints, the federation router for supported v1
// endpoints, and a fall-through chain that proxies everything else to the
// Rails API. Also prepares redirect-free HTTP clients for proxying.
func (h *Handler) setup() {
	mux := http.NewServeMux()
	mux.Handle("/_health/", &health.Handler{
		Token:  h.Cluster.ManagementToken,
		Prefix: "/_health/",
		Routes: health.Routes{"ping": func() error { _, err := h.db(context.TODO()); return err }},
	})
	oidcAuthorizer := localdb.OIDCAccessTokenAuthorizer(h.Cluster, h.db)
	rtr := router.New(federation.New(h.Cluster), api.ComposeWrappers(ctrlctx.WrapCallsInTransactions(h.db), oidcAuthorizer.WrapCalls))
	mux.Handle("/arvados/v1/config", rtr)
	mux.Handle("/"+arvados.EndpointUserAuthenticate.Path, rtr)
	// ForceLegacyAPI14 keeps these endpoints on the old Rails implementation.
	if !h.Cluster.ForceLegacyAPI14 {
		mux.Handle("/arvados/v1/collections", rtr)
		mux.Handle("/arvados/v1/collections/", rtr)
		mux.Handle("/arvados/v1/users", rtr)
		mux.Handle("/arvados/v1/users/", rtr)
		mux.Handle("/login", rtr)
		mux.Handle("/logout", rtr)
	}
	// Fallback chain (outermost first): OIDC middleware -> remote-cluster
	// proxy -> local Rails API proxy -> 404.
	hs := http.NotFoundHandler()
	hs = prepend(hs, h.proxyRailsAPI)
	hs = h.setupProxyRemoteCluster(hs)
	hs = prepend(hs, oidcAuthorizer.Middleware)
	mux.Handle("/", hs)
	h.handlerStack = mux
	// Copy the shared clients so disabling redirects here does not mutate
	// the package-level defaults.
	sc := *arvados.DefaultSecureClient
	sc.CheckRedirect = neverRedirect
	h.secureClient = &sc
	ic := *arvados.InsecureHTTPClient
	ic.CheckRedirect = neverRedirect
	h.insecureClient = &ic
	h.proxy = &proxy{
		Name: "arvados-controller",
	}
}
// errDBConnection is returned in place of driver-level errors so callers
// (and clients) never see connection details.
var errDBConnection = errors.New("database connection error")

// db lazily opens, pings and caches a shared PostgreSQL handle.
// Safe for concurrent use; the first successful open wins.
func (h *Handler) db(ctx context.Context) (*sqlx.DB, error) {
	h.pgdbMtx.Lock()
	defer h.pgdbMtx.Unlock()
	if h.pgdb != nil {
		return h.pgdb, nil
	}
	db, err := sqlx.Open("postgres", h.Cluster.PostgreSQL.Connection.String())
	if err != nil {
		ctxlog.FromContext(ctx).WithError(err).Error("postgresql connect failed")
		return nil, errDBConnection
	}
	// Cap the pool only when a positive limit is configured.
	if p := h.Cluster.PostgreSQL.ConnectionPool; p > 0 {
		db.SetMaxOpenConns(p)
	}
	// sqlx.Open does not dial; Ping verifies the server is actually reachable.
	if err := db.Ping(); err != nil {
		ctxlog.FromContext(ctx).WithError(err).Error("postgresql connect succeeded but ping failed")
		return nil, errDBConnection
	}
	h.pgdb = db
	return db, nil
}
type middlewareFunc func(http.ResponseWriter, *http.Request, http.Handler)
func prepend(next http.Handler, middleware middlewareFunc) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
middleware(w, req, next)
})
}
// localClusterRequest forwards req to this cluster's Rails API, preserving
// the original path and query but swapping in the Rails scheme/host.
// The insecure client is used when the Rails endpoint is marked TLS-insecure.
func (h *Handler) localClusterRequest(req *http.Request) (*http.Response, error) {
	urlOut, insecure, err := railsproxy.FindRailsAPI(h.Cluster)
	if err != nil {
		return nil, err
	}
	urlOut = &url.URL{
		Scheme:   urlOut.Scheme,
		Host:     urlOut.Host,
		Path:     req.URL.Path,
		RawPath:  req.URL.RawPath,
		RawQuery: req.URL.RawQuery,
	}
	client := h.secureClient
	if insecure {
		client = h.insecureClient
	}
	return h.proxy.Do(req, urlOut, client)
}
// proxyRailsAPI is the fall-through middleware stage that relays a request
// to the local Rails API and copies the response back to the client.
// Note: the error from localClusterRequest is handed to ForwardResponse
// (which is presumably responsible for rendering it) rather than checked
// here — confirm against proxy.ForwardResponse's contract.
func (h *Handler) proxyRailsAPI(w http.ResponseWriter, req *http.Request, next http.Handler) {
	resp, err := h.localClusterRequest(req)
	n, err := h.proxy.ForwardResponse(w, resp, err)
	if err != nil {
		httpserver.Logger(req).WithError(err).WithField("bytesCopied", n).Error("error copying response body")
	}
}
// Use a localhost entry from Services.RailsAPI.InternalURLs if one is
// present, otherwise choose an arbitrary entry.
//
// NOTE(review): the exported railsproxy.FindRailsAPI is what the methods
// above call; this lowercase variant looks unused here — confirm before
// removing.
func findRailsAPI(cluster *arvados.Cluster) (*url.URL, bool, error) {
	var best *url.URL
	// Map iteration order is random; the loop keeps the last candidate seen
	// unless a localhost-style entry appears, which wins immediately.
	for target := range cluster.Services.RailsAPI.InternalURLs {
		target := url.URL(target)
		best = &target
		if strings.HasPrefix(target.Host, "localhost:") || strings.HasPrefix(target.Host, "127.0.0.1:") || strings.HasPrefix(target.Host, "[::1]:") {
			break
		}
	}
	if best == nil {
		return nil, false, fmt.Errorf("Services.RailsAPI.InternalURLs is empty")
	}
	return best, cluster.TLS.Insecure, nil
}
|
#!/bin/bash
#
# Projeto: Smart-Infra é uma coleção de scripts em Shell Script para rodar em
# Bash facilitando a implantação de infraestrutura de rede em linux.
# Hospedado: https://github.com/sandrodias-sysadmin/smart-infra
# Autor: Sandro Dias
# E-mail: sandrodias.oficiall@gmail.com
#
# Caminho Absoluto: /smart-infra/backup-custom.sh
# Função: Criar backups com o conteúdo de diretórios ignorando seus subdiretórios
# Atualizado em: 07/05/2022
# Versao: 0.7
#
##############################
#
# Rascunho das Fases
# Fase 0 - Cria os parâmetros usados no script
# Fase 1 - Verifica se os parâmetros informados estão corretos para o processo de backup.
# Fase 2 - Verifica a existência de algum backup anterior que não foi retirado do diretório de destino e indica a liberação de espaço em disco.
# Fase 3 - Calcula o espaço bruto dos diretórios antes do backup e testa se há espaço suficiente para realizá-lo.
# Fase 4 - Cria a lista de diretórios(e suas exceções se houver) que serão copiados no processo de backup.
# Fase 5 - Cria o backup e registra as ocorrências em Log para possíveis consultas posteriores.
# Fase 6 - Envia email(s) para comunicar o status final do backup com os logs anexados.
# Fase 7 - Apenas Calcula o tempo de execução do script de backup
# Fase 8 -
# Fase 9 -
# Fase 10 -
#
##############################
#
# Fase 0 - Cria os parâmetros usados no script
#
# Phase 0 - Script parameters and option parsing.
StartTime=$(date +%s)
today=$(date '+%A')
# ANSI escapes: blinking inverse cyan-on-black, and reset.
InverteCorPiscando="\e[5;36;40m"
EndCollor="\e[0m"
# No arguments at all -> show a usage hint and abort.
if [ ! $# -gt 0 ]; then
  clear
  echo -e "\n ERRO: Falta de parâmetros.\n Acrescente o -h para ajuda básica do script.\n\n EXEMPLO:\n "\$\:\>" ${InverteCorPiscando}$0 -h${EndCollor}"
  exit 1
else
  # -h help | -i incremental | -s source dir | -d destination dir | -e excluded dir
  while getopts his:d:e: option; do
    case "${option}" in
      h)
        clear
        echo -e "\n DICAS DE USO DO $0."
        echo -e "\n Você precisa informar os parâmetros obrigatórios."
        echo -e " Para Origem : -s + "/diretorio_origem""
        echo -e " Para Destino : -d + "/diretorio_destino""
        echo -e " Para Exclusão : -e + "/diretorio_excluido""
        echo -e "\n Já os parâmetros opcionais são usados de acordo com o cenário."
        echo -e " Para Backup Incremental : -i, sem o -i o Backup será Completo."
        echo -e "\nEXEMPLO:\n "\$\:\>" sudo $0 -s /diretorio_origem -d /diretorio_destino -e /diretorio_excluido\n"
        exit 0
        ;;
      i)
        # Incremental mode archives files changed in the last day,
        # stored under yesterday's weekday name.
        Incremental="1"
        yesterday=$(date '+%A' -d '-1 day')
        ;;
      s)
        source=${OPTARG}
        # Work files created inside the source tree.
        listtemp="${source}/temp.txt"
        listdir="${source}/diretorios.txt"
        ;;
      d)
        destiny=${OPTARG}
        ;;
      e)
        exclusion=${OPTARG}
        ;;
    esac
  done
fi
#
##############################
#
# Variable declarations (disk-usage figures used by the report phases).
#
UncompressedSize=$(du -sh "${source}" | awk '{print $1}')
AvailableSpace=$(df -h "${destiny}" | awk '{print $4}' | sed 1d)
PartitionSpace=$(df -h "${destiny}" | awk '{print $1}' | sed 1d)
SpaceOldBackup=$(du -sh "${destiny}" | awk '{print $1}')
#
##############################
#
# Per-phase status flags (reserved for the report/email phases; unset here).
CheckCRON_CompletedStep=
CheckCRON_FailureStep=
CheckCRON_RunningStep=
CheckBackupOLD_CompletedStep=
CheckBackupOLD_FailureStep=
CheckBackupOLD_RunningStep=
CheckSpace_CompletedStep=
CheckSpace_FailureStep=
CheckSpace_RunningStep=
CreateListDestiny_CompletedStep=
CreateListDestiny_FailureStep=
CreateListDestiny_RunningStep=
CreateBackup_CompletedStep=
CreateBackup_FailureStep=
CreateBackup_RunningStep=
SendEmail_CompletedStep=
SendEmail_FailureStep=
SendEmail_RunningStep=
#
##############################
#
# Funções usadas nas Fases
# Clears the screen and prints the company banner plus the script title.
function_HeaderDefault() {
  clear
  echo -e "${InverteCorPiscando}\n\n +-----------------------------------------------------------------+\n │░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░│\n │░░░░░░░ A L T A --- = --- S P O R T S ░░░░░░░░│\n │░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░│\n +-----------------------------------------------------------------+\n S M A R T I N F R A"
  echo -e " SCRIPT DE BACKUP\n" # Script title line
}
# Phase 1 - Validate that -s/-d point at existing directories; exit 1 otherwise.
# Reads globals: source, destiny.
function_CheckParam() {
  # +-----------------------------------------------------------------+
  local CheckParam_Source="Você precisa informar a opção -s seguido de um diretório de origem válido."
  local CheckParam_Destiny="Você precisa informar a opção -d seguido de um diretório de destino válido."
  # Quote the whole argument: the original's  " "${var}""  quoting only
  # worked by accident and would glob/word-split the message.
  if [[ ! -d "${source}" ]]; then
    echo " ${CheckParam_Source}"
    exit 1
  elif [[ ! -d "${destiny}" ]]; then
    echo " ${CheckParam_Destiny}"
    exit 1
  fi
  # (dropped the original's no-op `else echo > /dev/null` branch)
}
# Phase 3 (labelled "Fase 2" in the draft) - Report raw source size versus
# free space on the destination partition. Reads globals: source, destiny,
# UncompressedSize, PartitionSpace, AvailableSpace. Purely informational —
# it does not actually abort when space is insufficient (TODO confirm intent).
function_CheckSpace() {
  echo -e " AVALIAÇÃO DO ESPAÇO EM DISCO"
  echo -e " +-----------------------------------------------------------------+"
  sleep 0.1
  echo -e " Dir.Origem : "${source}""
  sleep 0.1
  echo -e " Esp. Bruto : "${UncompressedSize}""
  sleep 0.1
  echo -e " Dir.Destin : "${destiny}""
  sleep 0.1
  echo -e " Disc.Parti : "${PartitionSpace}""
  sleep 0.1
  echo -e " Esp.Dispon : "${AvailableSpace}"\n\n"
}
# Phase 2 - Warn when leftovers from a previous backup still occupy the
# destination directory and report how much space removing them would free.
# Reads globals: destiny, SpaceOldBackup.
function_CheckBackupOLD() {
  # +-----------------------------------------------------------------+
  local leftover_count
  # Count directory entries without parsing `ls` output (safe for odd names).
  leftover_count=$(find "${destiny}" -mindepth 1 -maxdepth 1 | wc -l)
  if [[ "${leftover_count}" -ne 0 ]]; then
    sleep 0.1
    echo -e " DICA DE OTIMIZAÇÃO DO ESPAÇO EM DISCO"
    echo -e " +-----------------------------------------------------------------+"
    sleep 0.1
    echo -e " Percebi que ainda existem arquivos em disco do Backup anterior."
    echo -e " Mova ou Apague esses arquivos para liberar até ${SpaceOldBackup} de espaço.\n\n"
  fi
  # (dropped the original's no-op `else echo > /dev/null` branch)
}
# Phase 4 - Build the list of directories to back up ($listdir), minus the
# excluded subtree. Reads globals: source, listtemp, listdir, exclusion.
function_CreateListDestiny() {
  # +-----------------------------------------------------------------+
  sleep 0.1
  echo -e " ANÁLISE DO DIRETÓRIO DE ORIGEM DO BACKUP"
  echo -e " +-----------------------------------------------------------------+"
  sleep 0.1
  cd $source
  # du prints one line per (sub)directory; spaces in names are temporarily
  # replaced by '§' so awk can treat each path as a single field.
  du -h >$listtemp
  sed -i 's/\ /§/g' $listtemp
  # Reverse with tac so parents come before children; strip leading "./".
  tac $listtemp | awk '{print $2}' | sed 's/\.\///g' > $listdir
  # Drop the "." entry left on the first line.
  sed -i 1d $listdir
  sed -i 's/§/\ /g' $listdir
  # Remove the excluded subtree; ${exclusion:=Omitido} also assigns the
  # default so later phases print "Omitido" when -e was not given.
  sed -i '/^'${exclusion:=Omitido}'/d' $listdir
  rm -rf $listtemp
  echo -e " Criada a lista de diretórios que serão incluídos no backup.\n\n"
}
# Phase 5 - Create one .tar.gz per listed directory and report elapsed
# time and resulting size.
# Reads globals: source, destiny, exclusion, listdir, today, yesterday,
# Incremental, StartTime.  Consumes (and removes) $listdir.
function_CreateBackup() {
  # +-----------------------------------------------------------------+
  sleep 0.1
  echo -e " PREPARAÇÃO DO DIRETÓRIO DE DESTINO DO BACKUP"
  echo -e " +-----------------------------------------------------------------+"
  sleep 0.1
  echo -e " Criando Backup...\n Diretório de Origem : ${source}\n Diretório de Destino : ${destiny}\n Diretório Excluído : ${exclusion:=Omitido}"
  date_backup=$(date +-%d%h%y)
  # Bug fix: the original tested `[ ! $Incremental -eq 1 ]`; with Incremental
  # unset that is a malformed test whose failure sent execution into the
  # INCREMENTAL branch by mistake. Default to 0 (full backup).
  if [ "${Incremental:-0}" -ne 1 ]; then
    # ---- Full backup: archive every file of each listed directory ----
    while IFS= read -r diretorios || [ -n "${diretorios}" ]; do
      mkdir -p "${destiny}/${today^}/${diretorios}"
      chmod 777 "${destiny}/${today^}/${diretorios}"
      # Last path component; replaces the original pwd.txt temp-file +
      # sed + awk dance (which also mangled names containing spaces).
      name_backupC="${diretorios##*/}"
      # Guard the cd: without it a bad entry would tar the WRONG directory.
      cd "${source}/${diretorios}" || continue
      # NOTE: `*.*` only matches names containing a dot, as in the original.
      find *.* -type f -print0 | xargs -0 tar -czf "${source}/${diretorios}/${name_backupC}"-Completo-"${date_backup}".tar.gz --no-recursion
      mv "${source}/${diretorios}/${name_backupC}"-Completo-"${date_backup}".tar.gz "${destiny}/${today^}/${diretorios}"
    done <"$listdir"
  else
    # ---- Incremental backup: only files modified within the last day ----
    while IFS= read -r diretorios || [ -n "$diretorios" ]; do
      mkdir -p "${destiny}/${yesterday^}/${diretorios}"
      chmod 777 "${destiny}/${yesterday^}/${diretorios}"
      name_backupI="${diretorios##*/}"
      cd "${source}/${diretorios}" || continue
      # Bug fixes: original had `-ls f` (typo for `-type f`, making find
      # fail) and `"{$date_backup}"` (brace on the wrong side, producing a
      # literal "{...}" in the archive name).
      find *.* -mtime -1 -type f -print0 | xargs -0 tar -czf "${destiny}/${yesterday^}/${diretorios}/${name_backupI}"-Incremental-"${date_backup}".tar.gz --no-recursion
    done <"$listdir"
  fi
  rm -rf "$listdir"
  SizeBackup=$(du -sh "${destiny}" | awk '{print $1}')
  EndTime=$(date +%s)
  # +10800s (3h) offset presumably compensates a UTC-3 timezone so the
  # HH:MM:SS rendering below starts at 00:00:00 — TODO confirm intent.
  CalcTime=$((EndTime - StartTime))
  ResultTime=$((10800 + CalcTime))
  TotalTime=$(date -d @"$ResultTime" +%H"Hrs "%M"Min "%S"Seg")
  echo -e " Backup concluído após $TotalTime gerando $SizeBackup de dados.\n\n" | sed 's/00Hrs //;s/00Min //'
}
# Phase 6 - Email the final backup status (requires the `sendemail` tool).
# Reads globals: source, destiny.
function_SendEmail() {
  # +-----------------------------------------------------------------+
  sleep 0.1
  echo -e " RELATÓRIO DE TODO O PROCESSO DE BACKUP"
  echo -e "+-----------------------------------------------------------------+"
  echo -e " Enviando email com os LOGs do Backup em anexo...\n"
  # SECURITY: the SMTP password is hard-coded below and now exposed in
  # version control — move it to a root-only credentials file or an
  # environment variable and rotate the leaked credential.
  # Bug fix: the original dropped the backslash after `-o tls=yes`, so
  # `-o message-charset=UTF-8` executed as a separate (failing) command and
  # the charset option never reached sendemail.
  sendemail -f infra-ti@altasports.com.br \
    -t ti3@altasports.com.br \
    -s email-ssl.com.br:587 \
    -u "Alerta de Backup" \
    -m "O $0 realizou backup de ${source} para ${destiny} e precisa ser retirado do servidor para liberar espaco em disco!" \
    -xu ti3@altasports.com.br \
    -xp '!Q2w#E4r' \
    -o tls=yes \
    -o message-charset=UTF-8
  #sleep 3
}
# Fase 7 -
# Fase 8 -
# Fase 9 -
# Fase 10 -
#
##############################
#
# Entry point: only the banner is active; the remaining phases are left
# commented out — presumably still under test, confirm before enabling.
function_HeaderDefault
#function_CheckParam
#function_CheckSpace
#function_CheckBackupOLD
#function_CreateListDestiny
#function_CreateBackup 2>/dev/null
#function_SendEmail | sed 's/^/\ \ \ \ \ /g'
|
# ANSI-C quoting ($'...') builds a three-line string whose first line is empty.
a=$'
test0
test1'
# Appends the string to a file literally named "1" in the current directory
# (the "1" is a filename here, not a file descriptor).
echo "$a" >> 1
|
#!/bin/bash
# Vagrant provisioner for an Ubuntu guest: resets the shared log file,
# refreshes the package index and installs lsb-core.
cat - <<FIN
#
# ▄▀▀▄ ▄▀▀▄
# █ █ █
# ▐ █ █
# █ █ buntu
# ▀▄▄▄▄▀
#
FIN
# Start with new file log
# Bug fix: if the shared folder is not mounted the original kept running in
# whatever directory it was in, and the following `rm *` deleted unrelated
# files — abort instead of wiping the wrong directory.
cd /vagrant/log || exit 1
rm -f -- * >> /vagrant/log/ubuntu.log 2>&1
# Updating packages
apt-get update >> /vagrant/log/ubuntu.log 2>&1
sudo apt-get install lsb-core -y >> /vagrant/log/ubuntu.log 2>&1
echo -e "$(lsb_release -a)"
cat - <<INFO
#
Ubuntu updated
lsb-core installed
#
INFO
<gh_stars>0
import java.util.ArrayList;
/**
* Compressor, like Talker, is a class that helps the user edit their file.
* Compressor specifically edits .cmp files. It inherits Processor due to their
* many shared fields and methods. Compressor first determines the command and
* does the next step accordingly.
*
* Test: ">g" creates an empty file called Untitled.cmp Untitled.cmp 0 bytes
* Test: largerThanBefore.cmp 246 bytes tests how a compressed file can be larger than a txt file
* @author <NAME> ssf2130 COMS1007
*
*/
public class Compressor extends Processor {
	// True until the working file contains at least one line; guards the
	// dictionary/compression bootstrap paths below.
	// NOTE(review): fields such as input, fileName, lineList, compressedList,
	// decompressedList, valueList and counter are inherited from Processor
	// (not visible here) — confirm their exact semantics there.
	private Boolean isEmpty = true;
	/**
	 * After the user enters input, this method acts depending on the input.
	 * The first character selects the command; anything after "x " is the
	 * command's text argument (defaults to "Untitled").
	 */
	public void followCommand() {
		char command = input.charAt(0);
		String text = "Untitled";
		if (input.length() > 1)
			text = input.substring(2, input.length());
		if (command == 'g') {
			// g: load <text>.cmp, rebuild dictionary and compressed body
			fileName = text + ".cmp";
			getFileFromDirectory(fileName);
			if (lineList.size() > 0)
				isEmpty = false;
			recordDictionaryKeyAndValue();
			createCompressedList();
		} else if (command == 'p') {
			// p: print whole (decompressed) file
			decompressList();
			printEntireFile();
		} else if (command == 'c') {
			// c: print current line
			decompressList();
			printCurrentLine();
		} else if (command == 'i')
			insertAfterLine(text);
		else if (command == 'd')
			if (counter > -1)
				deleteCurrentLine();
			else
				System.out.println("Out of lines to delete!");
		else if (command == 'r')
			replaceCurrentLine(text);
		else if (command == 't')
			goToTopLine();
		else if (command == 'v')
			goDownOneLine();
		else if (command == 's') {
			// s: save as <text>.cmp — first line is the space-joined
			// dictionary, followed by the compressed lines.
			fileName = text + ".cmp";
			lineList = new ArrayList<String>();
			String key = "";
			for (String word : valueList) {
				key += word;
				key += " ";
			}
			lineList.add(key);
			lineList.addAll(compressedList);
			setFileToDirectory();
		} else if (command == 'q') {
			System.out.println("Editor stopped");
			System.exit(0);
		} else if (command == 'w') {
			replaceWord(text);
		}
	}
	/**
	 * Creates a list of compressed lines (without the key)
	 */
	private void createCompressedList() {
		compressedList = new ArrayList<String>();
		for (String line : lineList)
			compressedList.add(line);
		// The first line of a .cmp file is the dictionary, not content.
		compressedList.remove(0);
	}
	/**
	 * Decompresses the compressed list by using the key and substituting all
	 * numbers to words
	 */
	private void decompressList() {
		decompressedList = new ArrayList<String>();
		// NOTE(review): replace(i.toString(), ...) substitutes every
		// occurrence of the digit string, so once the dictionary has more
		// than 10 entries (e.g. "1" inside "12") substitutions can corrupt
		// each other — confirm the intended token encoding.
		for (String line : compressedList) {
			for (Integer i = 0; i < valueList.size(); i++)
				line = line.replace(i.toString(), valueList.get(i));
			decompressedList.add(line);
		}
	}
	/**
	 * Records a dictionary by taking in the first line of the compressed file. If
	 * the file is empty, then adds empty objects to the file to prevent errors.
	 */
	private void recordDictionaryKeyAndValue() {
		if (!isEmpty) {
			String key = lineList.get(0);
			String[] wordList = key.split(" ");
			valueList = new ArrayList<String>();
			for (int i = 0; i < wordList.length; i++) {
				valueList.add(wordList[i]);
			}
		} else {
			// Placeholders so later get(0)/remove(0) calls do not throw.
			lineList.add(null);
			valueList.add(null);
		}
	}
	/**
	 * Prints every line in the decompressed list
	 */
	public void printEntireFile() {
		for (String line : decompressedList) {
			System.out.println(line);
		}
	}
	/**
	 * Print the current line
	 */
	public void printCurrentLine() {
		System.out.println(decompressedList.get(counter));
	}
	/**
	 * Inserts the user's text into compressedList at the current line after
	 * replacing text with numbers. Any new words are added to the dictionary. If
	 * the file is empty then there is precautionary measures taken to prevent
	 * errors.
	 *
	 * @param text
	 *            is what the user wants to insert
	 */
	public void insertAfterLine(String text) {
		String[] textWordList = text.split(" ");
		String valueLine = lineList.get(0);
		String line = text;
		if (isEmpty) {
			lineList.set(0, text);
			valueList.remove(0);
		}
		// Grow the dictionary with any words not already present.
		for (int i = 0; i < textWordList.length; i++)
			if (isEmpty || valueLine.indexOf(textWordList[i]) == -1) { // if the word does not exist in the
				// original dictionary
				valueList.add(textWordList[i]);
			}
		// Encode the new line: each dictionary word becomes its index.
		// NOTE(review): same substring-collision hazard as decompressList.
		for (Integer i = 0; i < valueList.size(); i++) {
			line = line.replace(valueList.get(i), i.toString());
		}
		if (compressedList.size() == (counter))
			compressedList.add(line);
		else
			compressedList.add(counter + 1, line);
		isEmpty = false;
	}
	/**
	 * Deletes the current line. If there are no lines it marks isEmpty true.
	 */
	public void deleteCurrentLine() {
		compressedList.remove(counter);
		if (counter == compressedList.size())
			counter--;
		if (compressedList.size() == 0)
			isEmpty = true;
	}
	/**
	 * Replaces the current line with text provided by the user
	 *
	 * @param text
	 *            is what the user wants to replace with
	 */
	public void replaceCurrentLine(String text) {
		compressedList.remove(counter);
		insertAfterLine(text);
	}
	/**
	 * Increases the counter, unless it is at the end of the file. Then it tells the
	 * user.
	 */
	public void goDownOneLine() {
		if (counter + 1 < decompressedList.size()) {
			counter++;
		} else
			System.out.println("You are at the end of the file");
	}
	/**
	 * Replaces one word with either another or blank space. If the new word does
	 * not exist it is added to the dictionary. The compressor's replaceWord is
	 * harder to code than that of the Talker's due to the interaction with the
	 * dictionary and compressed format in general. When the user
	 * replaces the word with nothing, there is extra space around where the word
	 * used to be.
	 *
	 * @param text
	 *            is the original word plus the replacement word with space in
	 *            between, or it is only the original word
	 */
	public void replaceWord(String text) {
		String originalWord;
		String replacementWord;
		String[] wordList = text.split(" ");
		originalWord = wordList[0];
		if (wordList.length == 2)
			replacementWord = wordList[1];
		else
			replacementWord = "";
		for (int i = 0; i < compressedList.size(); i++) {
			String line = compressedList.get(i);
			int originalWordValue = valueList.indexOf(originalWord);
			int replacementWordValue = valueList.indexOf(replacementWord);
			if (replacementWordValue == -1) {
				valueList.add(replacementWord);
				replacementWordValue = valueList.size() - 1;
			}
			// NOTE(review): replaceAll on bare digit strings shares the
			// multi-digit collision hazard noted in decompressList.
			line = line.replaceAll(Integer.toString(originalWordValue), Integer.toString(replacementWordValue));
			compressedList.set(i, line);
		}
	}
}
|
// Minimal ambient typings for the `xlsx` (SheetJS) package — only the
// members this project actually uses are declared.
declare interface Utils {
    // Converts a worksheet object into an array of row objects.
    sheet_to_json: (input: any) => any
}
//declare class ReactPivot extends React.Component<any, any> {}
declare module 'xlsx' {
    // Parse workbook data already in memory.
    export function read(data: string, read_opts?: any): any;
    // Parse a workbook from disk (Node environments).
    export function readFile(filename: string, read_opts?: any): any;
    export const utils: Utils;
    //export = any;
    //export default ReactPivot;
}
|
package main
import (
"context"
"net/http"
"os"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
restclient "k8s.io/client-go/rest"
v1alpha1 "sigs.k8s.io/mcs-api/pkg/apis/v1alpha1"
mcsClientset "sigs.k8s.io/mcs-api/pkg/client/clientset/versioned"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
"github.com/labstack/gommon/log"
)
// prepareClient builds an MCS clientset from the pod's in-cluster
// service-account configuration (so this only works when running inside a
// Kubernetes cluster).
func prepareClient() (mcsClientset.Interface, error) {
	config, err := restclient.InClusterConfig()
	if err != nil {
		return nil, err
	}
	return mcsClientset.NewForConfig(config)
}
// getAllImportedServicesIn lists ServiceImports in the given namespace
// (empty string = all namespaces).
// NOTE(review): a fresh clientset is constructed on every call — consider
// building it once at startup and reusing it.
func getAllImportedServicesIn(namespace string) (*v1alpha1.ServiceImportList, error) {
	clientSet, err := prepareClient()
	if err != nil {
		return nil, err
	}
	return clientSet.MulticlusterV1alpha1().ServiceImports(namespace).List(context.TODO(), metav1.ListOptions{})
}
// main serves the cluster's imported multi-cluster services over HTTP:
//
//	GET /            -> ServiceImports across all namespaces
//	GET /:namespace  -> ServiceImports in one namespace
//
// SERVER_PORT must be set; otherwise e.Start fails and Logger.Fatal exits.
func main() {
	e := echo.New()
	e.Use(middleware.Logger())
	e.Use(middleware.Recover())

	// Both routes share identical fetch/respond logic (the original
	// duplicated it verbatim); only the namespace selection differs, so the
	// handler is built from a namespace-extractor function.
	serveImports := func(namespaceOf func(echo.Context) string) echo.HandlerFunc {
		return func(c echo.Context) error {
			log.Info("1")
			imports, err := getAllImportedServicesIn(namespaceOf(c))
			log.Info("2")
			if err != nil {
				log.Info("3")
				return echo.NewHTTPError(http.StatusInternalServerError, err)
			}
			log.Info("4")
			return c.JSON(http.StatusOK, imports)
		}
	}

	e.GET("/", serveImports(func(echo.Context) string { return metav1.NamespaceAll }))
	e.GET("/:namespace", serveImports(func(c echo.Context) string { return c.Param("namespace") }))

	httpPort := os.Getenv("SERVER_PORT")
	e.Logger.Fatal(e.Start(":" + httpPort))
}
|
package io.github.ibuildthecloud.gdapi.request.handler;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import java.io.IOException;
import javax.servlet.ServletException;
/**
 * One stage in the API request-processing pipeline.
 */
public interface ApiRequestHandler {
    /** Processes the given request (contract defined by the pipeline runner). */
    void handle(ApiRequest request) throws IOException;
    /**
     * Optional hook to handle an exception raised while processing.
     * The default implementation declines, returning false — presumably
     * "true" means the exception is consumed and processing stops; confirm
     * against the pipeline runner.
     */
    default boolean handleException(ApiRequest request, Throwable e) throws IOException, ServletException {
        return false;
    }
}
|
# Minimal TensorFlow eager-mode demo of tf.sigmoid.
import tensorflow as tf
# create a constant array with 3 values
x = tf.constant([1.0, -2.0, 3.0])
# apply the sigmoid function elementwise: 1 / (1 + exp(-x))
sigmoid = tf.sigmoid(x)
# print the result as a NumPy array
print(sigmoid.numpy()) # [0.7310586, 0.11920292, 0.95257413]
<reponame>ch1huizong/learning
#! /usr/bin/env python3
# -*-coding:UTF-8 -*-
# @Time : 2019/01/05 11:31:20
# @Author : che
# @Email : <EMAIL>
import time
class Timer(object):
    """Accumulating stopwatch.

    Measures elapsed time using a caller-supplied clock (``time.perf_counter``
    by default).  Repeated start/stop cycles add up in ``elapsed``;
    ``reset`` clears the total.  Also usable as a context manager.
    """

    def __init__(self, func=time.perf_counter):
        self.elapsed = 0.0   # accumulated seconds across all runs
        self._func = func    # clock callable returning a float
        self._start = None   # clock reading at start; None while stopped

    def start(self):
        """Begin a timing interval; error if one is already open."""
        if self._start is not None:
            raise RuntimeError('Already started')
        self._start = self._func()

    def stop(self):
        """Close the open interval and add it to ``elapsed``."""
        if self._start is None:
            raise RuntimeError('Not started')
        self.elapsed += self._func() - self._start
        self._start = None

    def reset(self):
        """Zero the accumulated total."""
        self.elapsed = 0.0

    @property
    def running(self):
        """True while between start() and stop()."""
        return self._start is not None

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, *args):
        self.stop()
def countdown(n):
    """Spin n down to zero — a pure CPU-burn workload for timing demos."""
    remaining = n
    while remaining > 0:
        remaining -= 1
if __name__ == '__main__':
    # Time a 100-million-iteration busy loop (takes a few seconds of CPU).
    with Timer() as t2:
        countdown(100000000)
    print(t2.elapsed)
|
<reponame>TIWG/imce.oti.uml.magicdraw.dynamicscripts
/*
* Copyright 2016 California Institute of Technology ("Caltech").
* U.S. Government sponsorship acknowledged.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* License Terms
*/
package gov.nasa.jpl.imce.oti.magicdraw.dynamicScripts.ui
import java.lang.IllegalArgumentException
import gov.nasa.jpl.dynamicScripts.DynamicScriptsTypes
import gov.nasa.jpl.dynamicScripts.magicdraw.ui.nodes._
import gov.nasa.jpl.dynamicScripts.magicdraw.ui.tables._
import gov.nasa.jpl.dynamicScripts.magicdraw.utils._
import org.omg.oti.magicdraw.uml.read._
import org.omg.oti.json.common.OTIPrimitiveTypes._
import org.omg.oti.uml.read.UMLStereotypeTagValue
import org.omg.oti.uml.read.api._
import scala.Predef.ArrowAssoc
import scala.collection.immutable._
import scala.reflect.ClassTag
import scala.util.{Failure, Success, Try}
import scala.{None,Some,StringContext}
import scala.Predef.{???,require,String}
/**
 * UI helper for MagicDraw dynamic scripts: presents the stereotype tag
 * values applied to a UML element as a 5-column group-table widget
 * (extended element, applied stereotype, tag property, tag type, tag value).
 */
object AppliedStereotypeWidgetHelper {

  /**
   * Derives the tree/table column specification from a computed-derived
   * widget description, carrying over its name, icon, context, access and
   * refresh settings. All five columns use string-typed designations.
   */
  def makeComputedDerivedTreeForAppliedStereotype(derived: DynamicScriptsTypes.ComputedDerivedWidget): DynamicScriptsTypes.ComputedDerivedTree =
    DynamicScriptsTypes.ComputedDerivedTree(
      derived.name, derived.icon, derived.context, derived.access,
      derived.className, derived.methodName, derived.refresh,
      columnValueTypes = Some(Seq(
        DynamicScriptsTypes.DerivedFeatureValueType(
          key = DynamicScriptsTypes.SName("extended element"),
          typeName = DynamicScriptsTypes.HName("Element"),
          typeInfo = DynamicScriptsTypes.StringTypeDesignation()),
        DynamicScriptsTypes.DerivedFeatureValueType(
          key = DynamicScriptsTypes.SName("applied stereotype"),
          typeName = DynamicScriptsTypes.HName("Stereotype"),
          typeInfo = DynamicScriptsTypes.StringTypeDesignation()),
        DynamicScriptsTypes.DerivedFeatureValueType(
          key = DynamicScriptsTypes.SName("tag property"),
          typeName = DynamicScriptsTypes.HName("Property"),
          typeInfo = DynamicScriptsTypes.StringTypeDesignation()),
        DynamicScriptsTypes.DerivedFeatureValueType(
          key = DynamicScriptsTypes.SName("tag type"),
          typeName = DynamicScriptsTypes.HName("Type"),
          typeInfo = DynamicScriptsTypes.StringTypeDesignation()),
        DynamicScriptsTypes.DerivedFeatureValueType(
          key = DynamicScriptsTypes.SName("tag value"),
          typeName = DynamicScriptsTypes.HName("tag value"),
          typeInfo = DynamicScriptsTypes.StringTypeDesignation()))))

  /**
   * Maps one stereotype tag value onto a table row (column name -> node).
   *
   * Named elements are labelled by qualified name when it is defined;
   * otherwise the row falls back to the element's tool-specific id.
   *
   * NOTE(review): the `.get` calls on `name`/`qualifiedName` Options and the
   * `.head` on `tagPropertyValueElementReferences` throw when the value is
   * absent/empty — confirm those invariants hold for every applied profile.
   * NOTE(review): the later cases label a named element with `ne.name.get`
   * even though the guard tests `ne.qualifiedName.isDefined`, unlike the
   * first case which uses `qualifiedName.get` — verify the asymmetry is
   * intentional.
   */
  def createRowForTagValue
  (tagValue: UMLStereotypeTagValue[MagicDrawUML])
  (implicit umlUtil: MagicDrawUMLUtil)
  : Map[String, AbstractTreeNodeInfo] = {
    tagValue match {
      // Tag is a metaclass-extension property reference: both the "extended
      // element" and "tag value" columns show the extended element itself.
      case tv: MagicDrawUMLStereotypeTagExtendedMetaclassPropertyElementReference =>
        Map(
          "extended element" ->
            (tv.extendedElement match {
              case ne: MagicDrawUMLNamedElement if ne.qualifiedName.isDefined =>
                ReferenceNodeInfo(ne.qualifiedName.get, ne.getMagicDrawElement)
              case _ =>
                ReferenceNodeInfo(
                  TOOL_SPECIFIC_ID.unwrap(tv.extendedElement.toolSpecific_id),
                  tv.extendedElement.getMagicDrawElement)
            }),
          "applied stereotype" ->
            ReferenceNodeInfo(tv.appliedStereotype.name.get, tv.appliedStereotype.getMagicDrawElement),
          "tag property" ->
            ReferenceNodeInfo(tv.stereotypeTagProperty.name.get, tv.stereotypeTagProperty.getMagicDrawElement),
          "tag type" ->
            ReferenceNodeInfo(tv.stereotypeTagPropertyType.name.get, tv.stereotypeTagPropertyType.getMagicDrawElement),
          "tag value" ->
            (tv.extendedElement match {
              case ne: MagicDrawUMLNamedElement if ne.qualifiedName.isDefined =>
                ReferenceNodeInfo(ne.qualifiedName.get, ne.getMagicDrawElement)
              case _ =>
                ReferenceNodeInfo(
                  TOOL_SPECIFIC_ID.unwrap(tv.extendedElement.toolSpecific_id),
                  tv.extendedElement.getMagicDrawElement)
            })
        )
      // Tag is a metaclass element reference: "tag value" shows the first
      // referenced element only (see .head note above).
      case tv: MagicDrawUMLStereotypeTagPropertyMetaclassElementReference =>
        Map(
          "extended element" ->
            (tv.extendedElement match {
              case ne: MagicDrawUMLNamedElement if ne.qualifiedName.isDefined =>
                ReferenceNodeInfo(ne.name.get, ne.getMagicDrawElement)
              case _ =>
                ReferenceNodeInfo(
                  TOOL_SPECIFIC_ID.unwrap(tv.extendedElement.toolSpecific_id),
                  tv.extendedElement.getMagicDrawElement)
            }),
          "applied stereotype" ->
            ReferenceNodeInfo(tv.appliedStereotype.name.get, tv.appliedStereotype.getMagicDrawElement),
          "tag property" ->
            ReferenceNodeInfo(tv.stereotypeTagProperty.name.get, tv.stereotypeTagProperty.getMagicDrawElement),
          "tag type" ->
            ReferenceNodeInfo(tv.stereotypeTagPropertyType.name.get, tv.stereotypeTagPropertyType.getMagicDrawElement),
          "tag value" ->
            (tv.tagPropertyValueElementReferences.head match {
              case ne: MagicDrawUMLNamedElement if ne.qualifiedName.isDefined =>
                ReferenceNodeInfo(ne.qualifiedName.get, ne.getMagicDrawElement)
              case e =>
                ReferenceNodeInfo(
                  TOOL_SPECIFIC_ID.unwrap(e.toolSpecific_id),
                  e.getMagicDrawElement)
            })
          //          TreeNodeInfo(
          //            identifier = "values",
          //            nested = tv.tagPropertyValueElementReferences.toSeq map { e => e match {
          //              case ne: MagicDrawUMLNamedElement if ne.qualifiedName.isDefined =>
          //                Tuple2(ReferenceNodeInfo(ne.qualifiedName.get, e.getMagicDrawElement), Map[String, AbstractTreeNodeInfo]())
          //              case _ =>
          //                Tuple2(ReferenceNodeInfo(e.xmiID.head, e.getMagicDrawElement), Map[String, AbstractTreeNodeInfo]())
          //            }
          //            })
        )
      // Tag is a stereotype-instance value: same layout, but falls back to a
      // shared tool-specific-id node helper.
      case tv: MagicDrawUMLStereotypeTagStereotypeInstanceValue =>
        Map(
          "extended element" ->
            (tv.extendedElement match {
              case ne: MagicDrawUMLNamedElement if ne.qualifiedName.isDefined =>
                ReferenceNodeInfo(ne.name.get, ne.getMagicDrawElement)
              case _ =>
                toToolSpecificIDReferenceNodeInfo(tv.extendedElement)
            }),
          "applied stereotype" ->
            ReferenceNodeInfo(tv.appliedStereotype.name.get, tv.appliedStereotype.getMagicDrawElement),
          "tag property" ->
            ReferenceNodeInfo(tv.stereotypeTagProperty.name.get, tv.stereotypeTagProperty.getMagicDrawElement),
          "tag type" ->
            ReferenceNodeInfo(tv.stereotypeTagPropertyType.name.get, tv.stereotypeTagPropertyType.getMagicDrawElement),
          "tag value" ->
            (tv.tagPropertyValueElementReferences.head match {
              case ne: MagicDrawUMLNamedElement if ne.qualifiedName.isDefined =>
                ReferenceNodeInfo(ne.qualifiedName.get, ne.getMagicDrawElement)
              case e =>
                toToolSpecificIDReferenceNodeInfo(e)
            })
          //          TreeNodeInfo(
          //            identifier = "values",
          //            nested = tv.tagPropertyValueElementReferences.toSeq map { e => e match {
          //              case ne: MagicDrawUMLNamedElement if ne.qualifiedName.isDefined =>
          //                Tuple2(ReferenceNodeInfo(ne.qualifiedName.get, e.getMagicDrawElement), Map[String, AbstractTreeNodeInfo]())
          //              case _ =>
          //                Tuple2(ReferenceNodeInfo(e.xmiID.head, e.getMagicDrawElement), Map[String, AbstractTreeNodeInfo]())
          //            }
          //            })
        )
      // Tag is a classifier (datatype) value: render the first value as a
      // reference (enum literal / instance spec) or as a plain label for
      // primitive values; `???` marks an unhandled value kind and will throw
      // scala.NotImplementedError if ever reached.
      case tv: MagicDrawUMLStereotypeTagPropertyClassifierValue =>
        Map(
          "extended element" ->
            (tv.extendedElement match {
              case ne: MagicDrawUMLNamedElement if ne.qualifiedName.isDefined =>
                ReferenceNodeInfo(ne.name.get, ne.getMagicDrawElement)
              case _ =>
                toToolSpecificIDReferenceNodeInfo(tv.extendedElement)
            }),
          "applied stereotype" ->
            ReferenceNodeInfo(tv.appliedStereotype.name.get, tv.appliedStereotype.getMagicDrawElement),
          "tag property" ->
            ReferenceNodeInfo(tv.stereotypeTagProperty.name.get, tv.stereotypeTagProperty.getMagicDrawElement),
          "tag type" ->
            ReferenceNodeInfo(tv.stereotypeTagPropertyType.name.get, tv.stereotypeTagPropertyType.getMagicDrawElement),
          "tag value" ->
            (tv.values.headOption match {
              case None =>
                LabelNodeInfo("<no value>")
              case Some(tagValue) =>
                tagValue match {
                  case _v: MagicDrawTagPropertyEnumerationLiteralValue =>
                    ReferenceNodeInfo(_v.value.name.get, _v.value.getMagicDrawEnumerationLiteral)
                  case _v: MagicDrawTagPropertyInstanceSpecificationValue =>
                    ReferenceNodeInfo(_v.value.name.get, _v.value.getMagicDrawInstanceSpecification)
                  case _v: MagicDrawTagPropertyBooleanValue =>
                    LabelNodeInfo(_v.value.toString)
                  case _v: MagicDrawTagPropertyIntegerValue =>
                    LabelNodeInfo(_v.value.toString)
                  case _v: MagicDrawTagPropertyUnlimitedNaturalValue =>
                    LabelNodeInfo(_v.value.toString)
                  case _v: MagicDrawTagPropertyRealValue =>
                    LabelNodeInfo(_v.value.toString)
                  case _v: MagicDrawTagPropertyStringValue =>
                    LabelNodeInfo(_v.value.toString)
                  case _ =>
                    ???
                }
            })
          //          TreeNodeInfo(
          //            identifier = "literals",
          //            nested = tv.tagPropertyValueElementReferences.toSeq map { lit =>
          //              Tuple2(ReferenceNodeInfo(lit.qualifiedName.get, lit.getMagicDrawElement), Map[String, AbstractTreeNodeInfo]())
          //            })
        )
      // Unknown tag value kind: render an empty row rather than failing.
      case _ =>
        Map()
    }
  }

  /**
   * Renders the given tag values as a group-table panel and collects every
   * validation annotation reachable from the row nodes.
   */
  def createGroupTableUIPanelForElements
  (derived: DynamicScriptsTypes.ComputedDerivedWidget,
   tagValues: Seq[UMLStereotypeTagValue[MagicDrawUML]])
  (implicit util: MagicDrawUMLUtil)
  : Try[(java.awt.Component, Seq[ValidationAnnotation])] = {
    val rows: Seq[Map[String, AbstractTreeNodeInfo]] = tagValues.map( createRowForTagValue )
    val ui = GroupTableNodeUI(
      makeComputedDerivedTreeForAppliedStereotype(derived),
      rows,
      Seq("extended element", "applied stereotype", "tag property", "tag type", "tag value"))
    //ui._table.addMouseListener( DoubleClickMouseListener.createAbstractTreeNodeInfoDoubleClickMouseListener( ui._table ) )
    HyperlinkTableCellValueEditorRenderer.addRenderer4AbstractTreeNodeInfo(ui._table)
    val validationAnnotations = rows flatMap
      (_.values) flatMap
      AbstractTreeNodeInfo.collectAnnotationsRecursively
    Success((ui.panel, validationAnnotations))
  }

  /**
   * Type-checked entry point: adapts the MagicDraw element, verifies it is a
   * kind of U (via the implicit ClassTag), and builds the applied-stereotype
   * table for its tag values; otherwise fails with IllegalArgumentException.
   */
  def appliedStereotypeInstanceWidget[U <: UMLElement[MagicDrawUML]]
  (derived: DynamicScriptsTypes.ComputedDerivedWidget,
   mdE: MagicDrawUML#Element,
   util: MagicDrawUMLUtil)
  (implicit uTag: ClassTag[U])
  : Try[(java.awt.Component, Seq[ValidationAnnotation])]
  = {
    val e = util.umlElement(mdE)
    val uClass = uTag.runtimeClass
    require(uClass != null)
    if (uClass.isInstance(e))
      createGroupTableUIPanelForElements(derived, e.tagValues.getOrElse(Seq()))(util)
    else
      Failure(new IllegalArgumentException(s"${mdE.getHumanType}: ${mdE.getID} is not a kind of ${uClass.getName}"))
  }
}
def handler(event, context):
    """Lambda-style entry point: dispatch on the presence of an 'op' key.

    Args:
        event: dict expected to contain event['data']['payload'] (a mapping).
        context: runtime context object (unused).

    Returns:
        None; the handler only logs what it decided.
    """
    # Original used a Python 2 print statement here ("print \"...\"") mixed
    # with Python 3 print() calls below — a SyntaxError under Python 3.
    print("==> Function ready to listen events...")
    # Guard against a missing or oddly-shaped payload explicitly; the
    # original bare `except:` also swallowed KeyboardInterrupt/SystemExit.
    try:
        has_op = 'op' in event['data']['payload']
    except (KeyError, TypeError):
        has_op = False
    if has_op:
        # CREATE operation
        print("CREATE operation executed")
        # Handle other types of operations here
    else:
        print("Unknown operation")
#!/usr/bin/env bash
set -e

# Build the web client with webpack, injecting deployment settings from .env.
# Required .env vars: XILUTION_ENVIRONMENT, XILUTION_SUB_ORGANIZATION_ID,
# XILUTION_WEB_CLIENT_ID, API_BASE_URL.

# Fail with a clear message if .env is missing (plain `source .env` under
# set -e would abort with an unhelpful error).
if [[ ! -f .env ]]; then
  echo ".env file not found" >&2
  exit 1
fi
source .env

# DRY replacement for the original four copy-pasted if-blocks.
# ${!var} is bash indirect expansion: the value of the variable named $var.
for var in XILUTION_ENVIRONMENT XILUTION_SUB_ORGANIZATION_ID XILUTION_WEB_CLIENT_ID API_BASE_URL; do
  if [[ -z "${!var}" ]]; then
    echo "${var} not found in .env" >&2
    exit 1
  fi
done

webpack-cli \
  --env.XILUTION_ENVIRONMENT="${XILUTION_ENVIRONMENT}" \
  --env.XILUTION_SUB_ORGANIZATION_ID="${XILUTION_SUB_ORGANIZATION_ID}" \
  --env.XILUTION_WEB_CLIENT_ID="${XILUTION_WEB_CLIENT_ID}" \
  --env.API_BASE_URL="${API_BASE_URL}" \
  --mode production \
  --config webpack.config.js
|
#!/bin/bash
set -e

# Cross-compile stage of an LFS-style build: configure for the target
# triplet, then build and install into the $LFS sysroot.
# ${VAR:?msg} aborts with msg when VAR is unset or empty — this prevents an
# accidental `make DESTDIR= install` into the live root filesystem.
./configure --prefix=/usr \
            --host="${LFS_TGT:?LFS_TGT must be set}" \
            --bindir=/bin
make
make DESTDIR="${LFS:?LFS must be set}" install
#!/usr/bin/env bash
#
# RGQE (generated-question entailment) ranking pipeline for the EPIC-QA
# expert collection. Starting from per-answer expanded questions it:
#   1. runs self-entailment over each answer's generated questions,
#   2. filters generated questions by entailment with the original query,
#   3. clusters the top-k answers' question sets,
#   4. scores all answers against those sets, and
#   5. formats the final run file and evaluates it.
#
# NOTE(review): stages are chained with `; \` (not `&&`) and there is no
# `set -e`, so a failed stage does not stop the pipeline — confirm that this
# best-effort behavior is intentional.
export RUN_NAME=HLTRI_RERANK_RQE_25
export EXP_RUN_NAME=HLTRI_RERANK_RQE_21
export SEARCH_RUN=passage-large
export RERANK_RUN_NAME=HLTRI_RERANK_15
export DATASET=expert
export COLLECTION=epic_qa_prelim
export RGQE_THRESHOLD=0.8
export RQE_THRESHOLD=0.005
export RERANK_MODEL_NAME=rerank-expert-${SEARCH_RUN}-${RERANK_RUN_NAME}
export EXP_MODEL_NAME=docT5query-base
export RQE_MODEL_NAME=quora-seq-nboost-pt-bert-base-uncased-msmarco
export EXP_PRE_MODEL_NAME=models/docT5query-base/model.ckpt-1004000
export QUERY_PATH=data/${COLLECTION}/${DATASET}/questions.json
export LABEL_PATH=data/${COLLECTION}/${DATASET}/split/val.json
export COLLECTION_PATH=data/${COLLECTION}/${DATASET}/data
export SEARCH_PATH=models/${RERANK_MODEL_NAME}/${RERANK_RUN_NAME}.txt
export ANSWERS_PATH=models/${RERANK_MODEL_NAME}/${RERANK_RUN_NAME}.answers
export EXP_PATH=models/${EXP_MODEL_NAME}/${EXP_RUN_NAME}.exp
export RGQE_SELF_PATH=models/${RQE_MODEL_NAME}/${RUN_NAME}.rgqe_self
export RGQE_CC_PATH=models/${RQE_MODEL_NAME}/${RUN_NAME}.rgqe_cc
export RGQE_QUESTION_PATH=models/${RQE_MODEL_NAME}/${RUN_NAME}.rgqe_question
export RGQE_TOP_PATH=models/${RQE_MODEL_NAME}/${RUN_NAME}.rgqe_top
export RGQE_TOP_CC_PATH=models/${RQE_MODEL_NAME}/${RUN_NAME}.rgqe_top_cc
export RGQE_ALL_PATH=models/${RQE_MODEL_NAME}/${RUN_NAME}.rgqe_all
export RGQE_ALL_CC_SCORED_PATH=models/${RQE_MODEL_NAME}/${RUN_NAME}.rgqe_all_cc_scored
export RUN_PATH=models/${RQE_MODEL_NAME}/${RUN_NAME}.txt
export EVAL_PATH=models/${RQE_MODEL_NAME}/${RUN_NAME}.eval
# Answer extraction and query expansion were run previously; their outputs
# (${ANSWERS_PATH}, ${EXP_PATH}) are reused here.
#python -m rerank.extract_answers \
#  --search_path ${SEARCH_PATH} \
#  --collection_path ${COLLECTION_PATH} \
#  --output_path ${ANSWERS_PATH}

# create expanded questions for every answer
#python expand_query./expand.py \
#  --input_path ${SEARCH_PATH} \
#  --collection_path ${COLLECTION_PATH} \
#  --pre_model_name ${EXP_PRE_MODEL_NAME} \
#  --model_name ${EXP_MODEL_NAME} \
#  --top_k 20 \
#  --num_samples 20 \
#  --batch_size 16 \
#; \
#python -m expand_query.format_expand \
#  --model_path models/${EXP_MODEL_NAME} \
#  --output_path ${EXP_PATH}

# self entailment: dedupe each answer's generated questions into connected
# components at RGQE_THRESHOLD
python -m rgqe.rgqe \
  --input_path ${EXP_PATH} \
  --model_name models/${RQE_MODEL_NAME} \
  --max_seq_len 64 \
  --mode self \
; \
python -m rgqe.format_rgqe_self \
  --model_path models/${RQE_MODEL_NAME} \
  --output_path ${RGQE_SELF_PATH} \
; \
python -m rgqe.rgqe_self_components \
  --input_path ${RGQE_SELF_PATH} \
  --expand_path ${EXP_PATH} \
  --output_path ${RGQE_CC_PATH} \
  --threshold ${RGQE_THRESHOLD}
# query-question entailment to filter out bad generated questions
python -m rgqe.rgqe \
  --input_path ${RGQE_CC_PATH} \
  --search_path ${SEARCH_PATH} \
  --query_path ${QUERY_PATH} \
  --label_path ${LABEL_PATH} \
  --model_name models/${RQE_MODEL_NAME} \
  --max_seq_len 64 \
  --mode question \
; \
python -m rgqe.format_rgqe_question \
  --model_path models/${RQE_MODEL_NAME} \
  --output_path ${RGQE_QUESTION_PATH}
# top_k set entailment
python -m rgqe.rgqe \
  --input_path ${RGQE_CC_PATH} \
  --qe_path ${RGQE_QUESTION_PATH} \
  --search_path ${SEARCH_PATH} \
  --model_name models/${RQE_MODEL_NAME} \
  --max_seq_len 64 \
  --mode top \
  --top_k 100 \
  --threshold ${RQE_THRESHOLD} \
; \
python -m rgqe.format_rgqe_top \
  --model_path models/${RQE_MODEL_NAME} \
  --output_path ${RGQE_TOP_PATH} \
; \
python -m rgqe.rgqe_top_components \
  --input_path ${RGQE_TOP_PATH} \
  --cc_path ${RGQE_CC_PATH} \
  --output_path ${RGQE_TOP_CC_PATH} \
  --threshold ${RGQE_THRESHOLD}
# all entailment against sets
python -m rgqe.rgqe \
  --input_path ${RGQE_TOP_CC_PATH} \
  --cc_path ${RGQE_CC_PATH} \
  --qe_path ${RGQE_QUESTION_PATH} \
  --model_name models/${RQE_MODEL_NAME} \
  --max_seq_len 64 \
  --mode all \
; \
python -m rgqe.format_rgqe_all \
  --model_path models/${RQE_MODEL_NAME} \
  --output_path ${RGQE_ALL_PATH}
# score answers against the clustered question sets, write the run file and
# evaluate it; the awk program transposes the evaluator's last 3 summary
# rows into columns and keeps the final two lines as the eval report
python -m rgqe.rgqe_all_components \
  --input_path ${RGQE_ALL_PATH} \
  --cc_path ${RGQE_TOP_CC_PATH} \
  --answers_path ${ANSWERS_PATH} \
  --queries_path ${QUERY_PATH} \
  --output_path ${RGQE_ALL_CC_SCORED_PATH} \
  --threshold ${RGQE_THRESHOLD} \
  --ratio 0.8 \
; \
python -m rgqe.format_eval \
  --results_path ${RGQE_ALL_CC_SCORED_PATH} \
  --output_path ${RUN_PATH} \
; \
python rerank/epic_eval.py \
  ${LABEL_PATH} \
  ${RUN_PATH} \
  rerank/.${DATASET}_ideal_ranking_scores.tsv \
  --task ${DATASET} \
  | tail -n 3 \
  | awk \
  '{ for (i=1; i<=NF; i++)  RtoC[i]= (RtoC[i]? RtoC[i] FS $i: $i) }
    END{ for (i in RtoC) print RtoC[i] }' \
  | tail -n 2 > ${EVAL_PATH} \
; \
cat ${EVAL_PATH}
|
#!/bin/sh

#
# basic map-reduce test
#

# Exercises the mrmaster/mrworker pair against the sequential reference
# implementation: word-count, indexer, map/reduce parallelism timing, and
# worker-crash recovery. Each app is built as a Go plugin and each process
# is wrapped in `timeout` so a hung run cannot stall the suite.

RACE=

# uncomment this to run the tests with the Go race detector.
#RACE=-race

# run the test in a fresh sub-directory.
rm -rf mr-tmp
mkdir mr-tmp || exit 1
cd mr-tmp || exit 1
rm -f mr-*

# make sure software is freshly built.
(cd ../../mrapps && go build $RACE -buildmode=plugin wc.go) || exit 1
(cd ../../mrapps && go build $RACE -buildmode=plugin indexer.go) || exit 1
(cd ../../mrapps && go build $RACE -buildmode=plugin mtiming.go) || exit 1
(cd ../../mrapps && go build $RACE -buildmode=plugin rtiming.go) || exit 1
(cd ../../mrapps && go build $RACE -buildmode=plugin crash.go) || exit 1
(cd ../../mrapps && go build $RACE -buildmode=plugin nocrash.go) || exit 1
(cd .. && go build $RACE mrmaster.go) || exit 1
(cd .. && go build $RACE mrworker.go) || exit 1
(cd .. && go build $RACE mrsequential.go) || exit 1

failed_any=0

# first word-count

# generate the correct output
../mrsequential ../../mrapps/wc.so ../pg*txt || exit 1
sort mr-out-0 > mr-correct-wc.txt
rm -f mr-out*

echo '***' Starting wc test.

timeout -k 2s 180s ../mrmaster ../pg*txt &

# give the master time to create the sockets.
sleep 1

# start multiple workers.
timeout -k 2s 180s ../mrworker ../../mrapps/wc.so &
timeout -k 2s 180s ../mrworker ../../mrapps/wc.so &
timeout -k 2s 180s ../mrworker ../../mrapps/wc.so &

# wait for one of the processes to exit.
# under bash, this waits for all processes,
# including the master.
wait

# the master or a worker has exited. since workers are required
# to exit when a job is completely finished, and not before,
# that means the job has finished.

sort mr-out* | grep . > mr-wc-all
if cmp mr-wc-all mr-correct-wc.txt
then
  echo '---' wc test: PASS
else
  echo '---' wc output is not the same as mr-correct-wc.txt
  echo '---' wc test: FAIL
  failed_any=1
fi

# wait for remaining workers and master to exit.
wait ; wait ; wait

# now indexer
rm -f mr-*

# generate the correct output
../mrsequential ../../mrapps/indexer.so ../pg*txt || exit 1
sort mr-out-0 > mr-correct-indexer.txt
rm -f mr-out*

echo '***' Starting indexer test.

timeout -k 2s 180s ../mrmaster ../pg*txt &
sleep 1

# start multiple workers; the second runs in the foreground, so the sort
# below only happens after it has exited (i.e. the job is done).
timeout -k 2s 180s ../mrworker ../../mrapps/indexer.so &
timeout -k 2s 180s ../mrworker ../../mrapps/indexer.so

sort mr-out* | grep . > mr-indexer-all
if cmp mr-indexer-all mr-correct-indexer.txt
then
  echo '---' indexer test: PASS
else
  echo '---' indexer output is not the same as mr-correct-indexer.txt
  echo '---' indexer test: FAIL
  failed_any=1
fi

wait ; wait

echo '***' Starting map parallelism test.

rm -f mr-out* mr-worker*

timeout -k 2s 180s ../mrmaster ../pg*txt &
sleep 1

timeout -k 2s 180s ../mrworker ../../mrapps/mtiming.so &
timeout -k 2s 180s ../mrworker ../../mrapps/mtiming.so

# the mtiming app emits one 'times-' line per worker; expect exactly 2.
NT=`cat mr-out* | grep '^times-' | wc -l | sed 's/ //g'`
if [ "$NT" != "2" ]
then
  echo '---' saw "$NT" workers rather than 2
  echo '---' map parallelism test: FAIL
  failed_any=1
fi

if cat mr-out* | grep '^parallel.* 2' > /dev/null
then
  echo '---' map parallelism test: PASS
else
  echo '---' map workers did not run in parallel
  echo '---' map parallelism test: FAIL
  failed_any=1
fi

wait ; wait


echo '***' Starting reduce parallelism test.

rm -f mr-out* mr-worker*

timeout -k 2s 180s ../mrmaster ../pg*txt &
sleep 1

timeout -k 2s 180s ../mrworker ../../mrapps/rtiming.so &
timeout -k 2s 180s ../mrworker ../../mrapps/rtiming.so

NT=`cat mr-out* | grep '^[a-z] 2' | wc -l | sed 's/ //g'`
if [ "$NT" -lt "2" ]
then
  echo '---' too few parallel reduces.
  echo '---' reduce parallelism test: FAIL
  failed_any=1
else
  echo '---' reduce parallelism test: PASS
fi

wait ; wait

# generate the correct output
../mrsequential ../../mrapps/nocrash.so ../pg*txt || exit 1
sort mr-out-0 > mr-correct-crash.txt
rm -f mr-out*

echo '***' Starting crash test.

# mr-done is touched when the master finishes; the worker-respawn loops
# below use it (plus the master's socket) to know when to stop.
rm -f mr-done
(timeout -k 2s 180s ../mrmaster ../pg*txt ; touch mr-done ) &
sleep 1

# start multiple workers
timeout -k 2s 180s ../mrworker ../../mrapps/crash.so &

# mimic rpc.go's masterSock()
SOCKNAME=/var/tmp/824-mr-`id -u`

# keep replacing crashed workers for as long as the master is alive.
( while [ -e $SOCKNAME -a ! -f mr-done ]
  do
    timeout -k 2s 180s ../mrworker ../../mrapps/crash.so
    sleep 1
  done ) &

( while [ -e $SOCKNAME -a ! -f mr-done ]
  do
    timeout -k 2s 180s ../mrworker ../../mrapps/crash.so
    sleep 1
  done ) &

while [ -e $SOCKNAME -a ! -f mr-done ]
do
  timeout -k 2s 180s ../mrworker ../../mrapps/crash.so
  sleep 1
done

wait
wait
wait

rm $SOCKNAME
sort mr-out* | grep . > mr-crash-all
if cmp mr-crash-all mr-correct-crash.txt
then
  echo '---' crash test: PASS
else
  echo '---' crash output is not the same as mr-correct-crash.txt
  echo '---' crash test: FAIL
  failed_any=1
fi

if [ $failed_any -eq 0 ]; then
    echo '***' PASSED ALL TESTS
else
    echo '***' FAILED SOME TESTS
    exit 1
fi
|
#!/bin/bash
# SLURM batch job: run one sequence-tagging configuration (leaky ReLU with
# slope 0.30) of the arg_min experiment on a single core.
#SBATCH -J Act_lrelu030_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1     # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00     # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins

#module load intel python/3.5

# Positional args appear to be: activation name, a numeric id/seed, the
# optimizer, and further hyper-parameters (learning rate, dropout, ...) —
# presumably consumed in this order by PE-my.py; confirm against its
# argument parser before editing.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py lrelu030 286 Adamax 1 0.1559366888955135 0.0023651389575647533 id 0.3
|
<filename>src/utils/index.ts
// Barrel file: expose the utility singletons from a single import point.
export { default as env } from "./env";
export { default as store } from "./store";
|
<gh_stars>0
'use strict';
/**
 * A search field with clear and submit buttons
 *
 * @memberof HashBrown.Entity.View.Widget
 */
class Search extends HashBrown.Entity.View.Widget.WidgetBase {
    /**
     * Constructor
     *
     * @param {Object} params Widget parameters, forwarded to the base class
     */
    constructor(params) {
        super(params);

        this.template = require('template/widget/search');
    }

    /**
     * Event: Click search
     *
     * Forwards the current input value to the model's onsearch callback,
     * when one is provided.
     */
    onClickSearch() {
        if(typeof this.model.onsearch !== 'function') { return; }

        this.model.onsearch(this.namedElements.input.value);
    }

    /**
     * Event: Click clear
     *
     * Empties the input field first, then notifies the model's onclear
     * callback, when one is provided.
     */
    onClickClear() {
        this.namedElements.input.value = '';

        if(typeof this.model.onclear !== 'function') { return; }

        this.model.onclear();
    }
}

module.exports = Search;
|
import { createContext, useState, createElement, useContext } from 'react';
// Fallback context used before any provider is mounted: English-only
// defaults, a no-op language setter, and an empty string dictionary.
const defContext = {
  langs: ['en', 'es'],
  setLang: () => {},
  mainLang: 'en',
  langCode: 'en',
  strings: {}
};
// Shared React context carrying the portray i18n state.
const portrayContext = createContext(defContext);
// HOC: wraps `Component` in a portray context provider that owns the current
// language selection. `strings` is the translation dictionary; `settings`
// may override mainLang and the available langs.
function withPortray(Component, strings, settings) {
  const PortrayWrappedComponent = props => {
    const defaultLang = settings?.mainLang || 'en';
    const [langCode, setLang] = useState(defaultLang);
    const contextValue = {
      langCode,
      setLang,
      mainLang: defaultLang,
      strings,
      langs: settings?.langs || ['en', 'es']
    };
    return createElement(
      portrayContext.Provider,
      { value: contextValue },
      createElement(Component, Object.assign({}, props))
    );
  };
  return PortrayWrappedComponent;
}
// Resolves a tagged-template key against the context's string dictionary.
// `key` is the template-literal strings array; only its first chunk is used.
// Returns the translation for ctx.langCode, or the key itself when the entry
// is missing or marked '$' (main-language placeholder).
// Fix: the original built a `tmpStrings` copy with a stub entry on every
// cache miss and then discarded it — dead work that also threw when
// ctx.langs was absent; it has been removed.
function getTextFromDict(key, ctx) {
  const trimmedKey = key[0].trim();
  if (!(trimmedKey in ctx.strings)) {
    return trimmedKey;
  }
  const text = ctx.strings[trimmedKey][ctx.langCode];
  return text === '$' ? trimmedKey : text;
}
// HOC: injects the portray context into `Component`, plus a `$` tagged-
// template translator bound to that context.
function withStrings(Component) {
  const WithStringsComponent = props => {
    const ctx = useContext(portrayContext);
    const translate = key => getTextFromDict(key, ctx);
    return createElement(
      Component,
      Object.assign({}, props, ctx, { "$": translate })
    );
  };
  return WithStringsComponent;
}
export { withPortray, withStrings };
//# sourceMappingURL=index.modern.js.map
|
<reponame>AbeeraTariq02787/HU_Carpool
// Shape of the payload returned by the login endpoint.
export interface LoginResultModel {
  // Auth/session token issued on success — presumably a JWT; confirm
  // against the backend contract.
  token: string;
  // Error message populated when the login attempt fails.
  error: string;
}
package com.vaadin.fusion.parser.core;
import java.util.Objects;
import javax.annotation.Nonnull;
import io.github.classgraph.AnnotationInfo;
/**
 * Relative wrapper around a ClassGraph {@link AnnotationInfo}, tying an
 * annotation occurrence to the parsed element it was found on.
 */
public final class RelativeAnnotationInfo
        extends AbstractRelative<AnnotationInfo, Relative<?>> {
    /**
     * @param origin the underlying ClassGraph annotation info
     * @param parent the relative element this annotation is attached to;
     *               null-checked eagerly (the base class presumably also
     *               checks {@code origin} — confirm in AbstractRelative)
     */
    public RelativeAnnotationInfo(@Nonnull AnnotationInfo origin,
            @Nonnull Relative<?> parent) {
        super(origin, Objects.requireNonNull(parent));
    }
}
|
<reponame>sajadweb/msm-cli
// Singleton controller exposing CRUD-style handlers for blog routes.
// NOTE(review): every handler body and catch block is an empty stub —
// requests receive no response and errors are silently swallowed; implement
// (or at minimum log `e` in) each handler before wiring into the router.
const BlogCtrl = new (class BlogController {
  // List blogs (stub).
  async getBlog(req, res) {
    try {
    } catch (e) {}
  }

  // Show a single blog (stub).
  async showBlog(req, res) {
    try {
    } catch (e) {}
  }

  // Create a blog (stub).
  async insertBlog(req, res) {
    try {
    } catch (e) {}
  }

  // Update a blog (stub).
  async updateBlog(req, res) {
    try {
    } catch (e) {}
  }

  // Delete a blog (stub).
  async destroyBlog(req, res) {
    try {
    } catch (e) {}
  }
})();

export default BlogCtrl;
|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# CI test phase: run the unit, functional, and fuzz suites inside the build
# container, skipping functional tests when the job has already used up its
# time budget.
# NOTE(review): BEGIN_FOLD/END_FOLD and DOCKER_EXEC are presumably shell
# functions sourced by the surrounding CI scripts — confirm before running
# this file standalone.
export LC_ALL=C.UTF-8

# NOTE(review): the `exit 1` runs in a subshell, so a failed cd does NOT
# stop the script (and there is no `set -e`) — the remaining commands would
# run from the wrong directory.
cd "build/vircle-$HOST" || (echo "could not enter distdir build/vircle-$HOST"; exit 1)

if [ "$RUN_UNIT_TESTS" = "true" ]; then
  BEGIN_FOLD unit-tests
  DOCKER_EXEC LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib make $MAKEJOBS check VERBOSE=1
  END_FOLD
fi

# Skip the (long) functional suite if elapsed seconds exceed the budget.
if [ $((`date +%s`-$START_TIME)) -gt $RUN_TESTS_TIMEOUT ]; then
  RUN_FUNCTIONAL_TESTS=false;
fi
echo $((`date +%s`-$START_TIME))
echo $RUN_TESTS_TIMEOUT
echo "$RUN_FUNCTIONAL_TESTS"

if [ "$RUN_FUNCTIONAL_TESTS" = "true" ]; then
  BEGIN_FOLD functional-tests
  DOCKER_EXEC test/functional/test_runner.py --ci --combinedlogslen=4000 --coverage --quiet --failfast --vircle --insight --bitcoin
  END_FOLD
fi

if [ "$RUN_FUZZ_TESTS" = "true" ]; then
  BEGIN_FOLD fuzz-tests
  DOCKER_EXEC test/fuzz/test_runner.py -l DEBUG ${DIR_FUZZ_IN}
  END_FOLD
fi
|
def remove_item(items, target):
    """Return a new list with every occurrence of ``target`` removed."""
    return list(filter(lambda item: item != target, items))
require 'spec_helper'

# Smoke spec: the gem must define a non-nil VERSION constant.
describe Rex::RopBuilder do
  it 'has a version number' do
    version = Rex::RopBuilder::VERSION
    expect(version).not_to be_nil
  end
end
|
<gh_stars>1-10
import React, { useRef, useState } from "react"
import { graphql } from "gatsby"
import Layout from "../components/shared/layout"
import CTA from "../components/shared/cta"
import { OutboundLink } from "gatsby-plugin-google-analytics"
import Check from "../../static/images/utility_check.svg"
import Close from "../../static/images/utility_close.svg"
import Question from "../../static/images/utility_question.svg"
import NA from "../../static/images/utility_na.svg"
import Yes from "../../static/images/utility_yes.svg"
import No from "../../static/images/utility_no.svg"
import Fade from "react-reveal/Fade"
import Flip from "react-reveal/Flip"
import style from "../styles/templates/post_template.module.scss"
// Basic post component
export default function Template({
data
}) {
const { directusPrivacyGuide } = data
const { slug, name, created_on, score, top_choice, avoid, type, organization, tags, link_image, link_entity, link_privacy_policy, content_summary, content_risk, content_pros, content_cons, content_share, content_conclusion, breach_breached_in_the_past, breach_date_of_last_breach, breach_description } = directusPrivacyGuide
let { question_collect_biometrics, question_delete_data, question_cookies_telemetry_tracking, question_friendly_privacy_policy, question_parental_controls, question_encryption, question_security_updates, question_strong_password, question_vulnerabilities, question_has_privacy_policy, snoop_camera, snoop_microphone, snoop_location, snoop_email, snoop_phone, snoop_address, payment_credit_card, payment_cryptocurrency, payment_paypal, payment_cash_or_gift_card } = directusPrivacyGuide
const score_class_array = [style.score_class_0, style.score_class_1, style.score_class_2, style.score_class_3, style.score_class_4, style.score_class_5, style.score_class_6, style.score_class_7, style.score_class_8, style.score_class_9, style.score_class_10]
const score_class = score_class_array[score]
let top_choice_or_avoid_wrapper = style.top_choice_or_avoid_wrapper
if (!top_choice && !avoid) {
top_choice_or_avoid_wrapper = style.top_choice_or_avoid_wrapper_hidden
}
let top_choice_or_avoid_text = "FreePN Top Choice "
let top_choice_or_avoid_emoji = "🏆"
let top_choice_or_avoid_class = style.top_choice
if (avoid) {
top_choice_or_avoid_text = "Avoid If Possible "
top_choice_or_avoid_emoji = "☠️"
top_choice_or_avoid_class = style.avoid
}
const [copySuccess, setCopySuccess] = useState("Copy & Share Link 🔗");
const textAreaRef = useRef(null);
function copyToClipboard(e) {
textAreaRef.current.select();
document.execCommand('copy');
e.target.focus(); // prefer to not show the whole text area selected.
setCopySuccess("Link Copied! 🔗");
setTimeout(() => { setCopySuccess("Copy & Share Link 🔗"); }, 2000); // wait 3s then revert to original message
};
const copy_value = `https://www.freepn.org/privacy-guide/${slug}`
if (question_collect_biometrics === "yes") {
question_collect_biometrics = Check
} else if (question_collect_biometrics === "no") {
question_collect_biometrics = Close
} else if (question_collect_biometrics === "unclear") {
question_collect_biometrics = Question
} else {
question_collect_biometrics = NA
}
if (question_delete_data === "yes") {
question_delete_data = Check
} else if (question_delete_data === "no") {
question_delete_data = Close
} else if (question_delete_data === "unclear") {
question_delete_data = Question
} else {
question_delete_data = NA
}
if (question_cookies_telemetry_tracking === "yes") {
question_cookies_telemetry_tracking = Check
} else if (question_cookies_telemetry_tracking === "no") {
question_cookies_telemetry_tracking = Close
} else if (question_cookies_telemetry_tracking === "unclear") {
question_cookies_telemetry_tracking = Question
} else {
question_cookies_telemetry_tracking = NA
}
if (question_friendly_privacy_policy === "yes") {
question_friendly_privacy_policy = Check
} else if (question_friendly_privacy_policy === "no") {
question_friendly_privacy_policy = Close
} else if (question_friendly_privacy_policy === "unclear") {
question_friendly_privacy_policy = Question
} else {
question_friendly_privacy_policy = NA
}
if (question_parental_controls === "yes") {
question_parental_controls = Check
} else if (question_parental_controls === "no") {
question_parental_controls = Close
} else if (question_parental_controls === "unclear") {
question_parental_controls = Question
} else {
question_parental_controls = NA
}
if (question_encryption === "yes") {
question_encryption = Check
} else if (question_encryption === "no") {
question_encryption = Close
} else if (question_encryption === "unclear") {
question_encryption = Question
} else {
question_encryption = NA
}
if (question_security_updates === "yes") {
question_security_updates = Check
} else if (question_security_updates === "no") {
question_security_updates = Close
} else if (question_security_updates === "unclear") {
question_security_updates = Question
} else {
question_security_updates = NA
}
if (question_strong_password === "yes") {
question_strong_password = Check
} else if (question_strong_password === "no") {
question_strong_password = Close
} else if (question_strong_password === "<PASSWORD>") {
question_strong_password = Question
} else {
question_strong_password = NA
}
if (question_vulnerabilities === "yes") {
question_vulnerabilities = Check
} else if (question_vulnerabilities === "no") {
question_vulnerabilities = Close
} else if (question_vulnerabilities === "unclear") {
question_vulnerabilities = Question
} else {
question_vulnerabilities = NA
}
if (question_has_privacy_policy === "yes") {
question_has_privacy_policy = Check
} else if (question_has_privacy_policy === "no") {
question_has_privacy_policy = Close
} else if (question_has_privacy_policy === "unclear") {
question_has_privacy_policy = Question
} else {
question_has_privacy_policy = NA
}
if (snoop_camera === "yes") {
snoop_camera = Check
} else if (snoop_camera === "no") {
snoop_camera = Close
} else if (snoop_camera === "unclear") {
snoop_camera = Question
} else {
snoop_camera = NA
}
if (snoop_microphone === "yes") {
snoop_microphone = Check
} else if (snoop_microphone === "no") {
snoop_microphone = Close
} else if (snoop_microphone === "unclear") {
snoop_microphone = Question
} else {
snoop_microphone = NA
}
if (snoop_location === "yes") {
snoop_location = Check
} else if (snoop_location === "no") {
snoop_location = Close
} else if (snoop_location === "unclear") {
snoop_location = Question
} else {
snoop_location = NA
}
if (snoop_email === "yes") {
snoop_email = Check
} else if (snoop_email === "no") {
snoop_email = Close
} else if (snoop_email === "unclear") {
snoop_email = Question
} else {
snoop_email = NA
}
if (snoop_phone === "yes") {
snoop_phone = Check
} else if (snoop_phone === "no") {
snoop_phone = Close
} else if (snoop_phone === "unclear") {
snoop_phone = Question
} else {
snoop_phone = NA
}
if (snoop_address === "yes") {
snoop_address = Check
} else if (snoop_address === "no") {
snoop_address = Close
} else if (snoop_address === "unclear") {
snoop_address = Question
} else {
snoop_address = NA
}
if (payment_credit_card === "yes") {
payment_credit_card = Check
} else if (payment_credit_card === "no") {
payment_credit_card = Close
} else if (payment_credit_card === "unclear") {
payment_credit_card = Question
} else {
payment_credit_card = NA
}
if (payment_cryptocurrency === "yes") {
payment_cryptocurrency = Check
} else if (payment_cryptocurrency === "no") {
payment_cryptocurrency = Close
} else if (payment_cryptocurrency === "unclear") {
payment_cryptocurrency = Question
} else {
payment_cryptocurrency = NA
}
if (payment_paypal === "yes") {
payment_paypal = Check
} else if (payment_paypal === "no") {
payment_paypal = Close
} else if (payment_paypal === "unclear") {
payment_paypal = Question
} else {
payment_paypal = NA
}
if (payment_cash_or_gift_card === "yes") {
payment_cash_or_gift_card = Check
} else if (payment_cash_or_gift_card === "no") {
payment_cash_or_gift_card = Close
} else if (payment_cash_or_gift_card === "unclear") {
payment_cash_or_gift_card = Question
} else {
payment_cash_or_gift_card = NA
}
return (
<Layout title={`${name} | Privacy Guide`} description={`FreePN privacy review of ${name} (${organization}). Read to see the review, rating, risks, and recommendations.`}>
<div className={style.wrapper}>
<div className={style.wrapper_created_on_and_type}>
<Flip top>
<div className={style.wrapper_created_on}><span className={style.created_on}>{`Reviewed ${created_on}`}</span></div>
</Flip>
<Flip top>
<div className={style.wrapper_type}><span className={style.type}>{type}</span></div>
</Flip>
</div>
<div className={style.wrapper_link_image}>
<Flip top>
<div className={style.wrapper_link_image_inner}>
<img className={style.link_image} src={link_image} alt={"privacy guide"} />
</div>
</Flip>
<Flip top>
<div className={`${top_choice_or_avoid_wrapper} ${top_choice_or_avoid_class}`}>
<div>{top_choice_or_avoid_text}<span className={style.top_choice_or_avoid_emoji}>{top_choice_or_avoid_emoji}</span></div>
</div>
</Flip>
</div>
<Fade bottom>
<div className={style.content}>
<div className={style.content_inner}>
<div className={style.wrapper_share_links}>
<OutboundLink className={style.link_entity} href={link_entity} target={"_blank"}>{`${organization} 🔗`}</OutboundLink>
<div className={style.link_copy_and_share} onClick={copyToClipboard} onKeyDown={copyToClipboard} role={"button"} tabIndex={0}>{copySuccess}</div>
<form className={style.link_copy_and_share_value_hidden}>
<textarea ref={textAreaRef} value={copy_value} />
</form>
</div>
<hr />
<div className={style.wrapper_header_info}>
<div>
<h1 className={style.name}>{name}</h1>
<div><span className={style.organization}>{organization}</span></div>
</div>
<div className={`${style.score} ${score_class}`}>
<span className={style.wrapper_inner_score}><sup>{score}</sup><span className={style.fraction_slash}>{"/"}</span><sub>{"10"}</sub></span>
</div>
</div>
<hr />
<h3>Summary</h3>
<div className={style.section_subtitle}>{""}</div>
<p className={style.content_summary}>{content_summary}</p>
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.content_secondary}>
<div className={style.content_inner}>
<h2 className={style.section_title_no_subtitle}>{"Breach History"}</h2>
{/* <div className={style.section_subtitle}>{"Does it meet our minimum privacy standards?"}</div> */}
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Has there ever been a breach or data leak?"}</p>
<img className={style.answer} src={breach_breached_in_the_past ? Yes : No} alt={"answer"} />
</div>
<hr />
{
breach_breached_in_the_past ?
<div>
<div className={style.wrapper_question}>
<p className={style.question}>{"Date of last breach"}</p>
<p className={style.answer_text}>{breach_date_of_last_breach}</p>
</div>
<hr />
<p className={style.content_summary}>{breach_description}</p>
</div>
:
<div className={style.wrapper_question}>
<p className={style.question}>{"No known hacks or data breaches!"}</p>
<p className={style.answer_text}>{"🎉"}</p>
</div>
}
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.content_secondary}>
<div className={style.content_inner}>
<h2>{"Privacy Checklist"}</h2>
<div className={style.section_subtitle}>{"Does it meet our minimum privacy standards?"}</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Is any kind of biometric data collected (health, heart, voice, etc.)?"}</p>
<img className={style.answer} src={question_collect_biometrics} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Are you able to delete your data / the data collected about you?"}</p>
<img className={style.answer} src={question_delete_data} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it use cookies, telemetry, or third-party tracking scripts?"}</p>
<img className={style.answer} src={question_cookies_telemetry_tracking} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Is the privacy policy user-friendly (easy to understand)?"}</p>
<img className={style.answer} src={question_friendly_privacy_policy} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it have parental controls?"}</p>
<img className={style.answer} src={question_parental_controls} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it use encryption to protect your data both in-transit and at-rest?"}</p>
<img className={style.answer} src={question_encryption} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it get regular software and / or firmware updates?"}</p>
<img className={style.answer} src={question_security_updates} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it require you to create a strong password?"}</p>
<img className={style.answer} src={question_strong_password} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Is there a system in place for managing security vulnerabilities?"}</p>
<img className={style.answer} src={question_vulnerabilities} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it have a privacy policy?"}</p>
<img className={style.answer} src={question_has_privacy_policy} alt={"answer"} />
</div>
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.content_secondary}>
<div className={style.content_inner}>
<h2>{"How Can It Snoop?"}</h2>
<div className={style.section_subtitle}>{"What sensitive hardware access requirements does it have?"}</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it have a camera or request / require access to a camera?"}</p>
<img className={style.answer} src={snoop_camera} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it have a microphone or request / require access to a microphone?"}</p>
<img className={style.answer} src={snoop_microphone} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it track your location or request / require access to location data?"}</p>
<img className={style.answer} src={snoop_location} alt={"answer"} />
</div>
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.content_secondary}>
<div className={style.content_inner}>
<h2>{"Account Requirements"}</h2>
<div className={style.section_subtitle}>{"What information is required to create an account or use it?"}</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it require an email address to use?"}</p>
<img className={style.answer} src={snoop_email} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it require a phone number to use?"}</p>
<img className={style.answer} src={snoop_phone} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Does it require an address (home or office) to use?"}</p>
<img className={style.answer} src={snoop_address} alt={"answer"} />
</div>
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.content_secondary}>
<div className={style.content_inner}>
<h2>{"Payment Options"}</h2>
<div className={style.section_subtitle}>{"What payment methods are available?"}</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Are you able to use a credit card as a payment option?"}</p>
<img className={style.answer} src={payment_credit_card} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Are you able to use a cryptocurrency as a payment option?"}</p>
<img className={style.answer} src={payment_cryptocurrency} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Are you able to use PayPal as a payment option?"}</p>
<img className={style.answer} src={payment_paypal} alt={"answer"} />
</div>
<hr />
<div className={style.wrapper_question}>
<p className={style.question}>{"Are you able to use cash or gift cards as a payment option?"}</p>
<img className={style.answer} src={payment_cash_or_gift_card} alt={"answer"} />
</div>
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.content_secondary}>
<div className={style.content_inner}>
<h2>{"What Could Go Wrong?"}</h2>
<div className={style.section_subtitle}>{"What's the worst case scenario for your data?"}</div>
<hr />
<p className={style.question_text_long}>{content_risk}</p>
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.content_secondary}>
<div className={style.content_inner}>
<h2>{"How Does It Share Your Data?"}</h2>
<div className={style.section_subtitle}>{"Where is your data going? Who has access to it?"}</div>
<hr />
<p className={style.question_text_long}>{content_share}</p>
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.content_secondary}>
<div className={style.content_inner}>
<div className={style.wrapper_pro_con}>
<div className={style.wrapper_inner_pro_con}>
<div className={`${style.content_pro_con} ${style.pro}`}>
<h2>{"Good Things"}</h2>
<div className={style.section_subtitle}>{"Why should you use it?"}</div>
<hr />
<div className={style.pro_con} dangerouslySetInnerHTML={{ __html: content_pros }} />
</div>
<hr className={style.mobile_divider_pro_con} />
<div className={`${style.content_pro_con} ${style.con}`}>
<h2>{"Bad Things"}</h2>
<div className={style.section_subtitle}>{"What are the downsides?"}</div>
<hr />
<div className={style.pro_con} dangerouslySetInnerHTML={{ __html: content_cons }} />
</div>
</div>
</div>
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.content_secondary}>
<div className={style.content_inner}>
<h2 className={style.section_title_no_subtitle}>{"Final Thoughts"}</h2>
<hr />
<p className={style.question_text_long}>{content_conclusion}</p>
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.content_bottom}>
<div className={style.content_inner}>
<div className={style.wrapper_policy_and_tags}>
<OutboundLink className={style.link_privacy_policy} href={link_privacy_policy} target={"_blank"}>{"Privacy Policy 🔗"}</OutboundLink>
<div className={style.wrapper_tags}>
{tags.map((tag, index) => {
return (
<div className={style.tag} key={`tag_${index}`}>{`#${tag}`}</div>
)
})}
</div>
</div>
</div>
</div>
</Fade>
<Fade bottom>
<div className={style.wrapper_issue_contact}>
<a className={style.issue_contact} href={`mailto:<EMAIL>?subject=Issue with ${name} Privacy Guide Post`}>{"See something incorrect? Let us know."}</a>
</div>
</Fade>
</div>
<CTA newsletterOnly backgroundColor={"#FB8C00"} source={`CTA_Privacy_Guide_Privacy_Guide_Item_Page_${slug}`} />
</Layout>
);
};
// Page query, run at build time; Gatsby passes the resulting JSON to the
// page component as the 'data' prop (one privacy-guide entry, selected by slug).
export const query = graphql`
  query($slug: String!) {
    directusPrivacyGuide(slug: { eq: $slug }) {
      slug
      name
      created_on(formatString: "MMMM Do, YYYY")
      score
      top_choice
      avoid
      type
      organization
      tags
      link_image
      link_entity
      link_privacy_policy
      content_summary
      content_risk
      content_pros
      content_cons
      content_share
      content_conclusion
      question_collect_biometrics
      question_delete_data
      question_cookies_telemetry_tracking
      question_friendly_privacy_policy
      question_parental_controls
      question_encryption
      question_security_updates
      question_strong_password
      question_vulnerabilities
      question_has_privacy_policy
      snoop_camera
      snoop_microphone
      snoop_location
      snoop_email
      snoop_phone
      snoop_address
      payment_credit_card
      payment_cryptocurrency
      payment_paypal
      payment_cash_or_gift_card
      breach_breached_in_the_past
      breach_date_of_last_breach(formatString: "MMMM YYYY")
      breach_description
    }
  }
`
|
#!/bin/bash
# Locate a file by name inside the TRUST source tree.
#
# Usage: <script> <filename> [-copy]
#   Prints every matching path found under the directories listed in
#   $TRUST_ENV/rep.TRUST (each relative to $TRUST_ROOT). With -copy,
#   each match is also copied into the current directory.
#
# Requires the TRUST environment to be initialised ($TRUST_ROOT set).
nom=$1

# Abort early when the TRUST environment has not been sourced.
[ -z "$TRUST_ROOT" ] && echo "Initialisez l'environnement TRUST." && exit

# NOTE(review): a permanently-disabled block (`if [ 1 -eq 2 ]`) that extracted
# "File"/"Directory" header fields from $nom with awk used to live here; it was
# dead code and has been removed without any behavioural change.

nomfichier=nontrouve
if [ ! -f "$nomfichier" ]
then
  # Search every repository listed in rep.TRUST for the requested name.
  # (Word-splitting on the file content is intentional, as in the original.)
  for dir in $(cat "$TRUST_ENV/rep.TRUST")
  do
    rep=$TRUST_ROOT/$dir
    if [ -f "$rep/$nom" ]
    then
      nomfichier=$rep/$nom
      echo "$rep/$nom"
      [ "$2" = -copy ] && cp -f "$rep/$nom" .
    fi
  done
else
  # Quirk preserved from the original: if a file literally named "nontrouve"
  # exists in the current directory, it is reported instead of searching.
  echo "$nomfichier"
  [ "$2" = -copy ] && cp -f "$nomfichier" .
fi
|
def knapsack(weights, values, capacity):
    """Solve the 0/1 knapsack problem with bottom-up dynamic programming.

    Args:
        weights: list of item weights (positive integers).
        values: list of item values, parallel to `weights`.
        capacity: maximum total weight the knapsack can carry.

    Returns:
        The maximum total value achievable without exceeding `capacity`.
    """
    item_count = len(weights)
    # table[i][c] = best value using the first i items with capacity c.
    # Row 0 and column 0 stay zero (no items / no capacity).
    table = [[0] * (capacity + 1) for _ in range(item_count + 1)]
    for item in range(1, item_count + 1):
        weight = weights[item - 1]
        value = values[item - 1]
        for cap in range(1, capacity + 1):
            if weight <= cap:
                # Either take the item (value plus best of remaining space)
                # or leave it — keep whichever is larger.
                table[item][cap] = max(
                    value + table[item - 1][cap - weight],
                    table[item - 1][cap],
                )
            else:
                # Item does not fit: best value is the same as without it.
                table[item][cap] = table[item - 1][cap]
    # Bottom-right cell holds the answer for all items at full capacity.
    return table[item_count][capacity]
weights = [1, 2, 2, 3, 4]
values = [5, 6, 8, 10, 11]
capacity = 5
# Items 0, 1 and 2 fit exactly (1 + 2 + 2 = 5) for value 5 + 6 + 8 = 19.
print(knapsack(weights, values, capacity))
# Expected output: 19  (the previous comment claimed 18, which is incorrect)
<gh_stars>0
// Default HTTP headers shared by JSON requests.
export const defaultHeaders = { 'Content-Type': 'application/json' };
|
<gh_stars>0
package com.atjl.office.util;
import com.atjl.util.file.FileUtil;
import org.junit.*;
import org.junit.rules.ExpectedException;
/**
 * Unit tests for {@code Html2WordUtil}.
 *
 * NOTE(review): {@link #testHtmlToWord()} reads and writes hard-coded
 * Windows paths (E:\test.html, e:\1.doc), so it only passes on a machine
 * prepared with those files — consider temporary files instead. The other
 * test bodies are commented-out reflection scaffolding and assert nothing.
 */
public class Html2WordUtilTest {
    @Test
    public void testHtmlToWord() throws Exception {
        // Read an HTML file from disk and convert it to a Word document.
        String content = FileUtil.cat("E:\\test.html");
        Html2WordUtil.htmlToWord(content, "e:\\1.doc");
    }

    @Test
    public void testInputStreamToWord() throws Exception {
        // Placeholder: reflection-based invocation scaffolding, not yet enabled.
        /*
        try {
            Method method = Html2WordUtil.getClass().getMethod("inputStreamToWord", InputStream.class, OutputStream.class);
            method.setAccessible(true);
            method.invoke(<Object>, <Parameters>);
        } catch(NoSuchMethodException e) {
        } catch(IllegalAccessException e) {
        } catch(InvocationTargetException e) {
        }
        */
    }

    @Test
    public void testGetContent() throws Exception {
        // Placeholder: reflection-based invocation scaffolding, not yet enabled.
        /*
        try {
            Method method = Html2WordUtil.getClass().getMethod("getContent", InputStream....class);
            method.setAccessible(true);
            method.invoke(<Object>, <Parameters>);
        } catch(NoSuchMethodException e) {
        } catch(IllegalAccessException e) {
        } catch(InvocationTargetException e) {
        }
        */
    }

    // Per-test and per-class lifecycle hooks; currently empty.
    @Before
    public void before() throws Exception {
    }

    @After
    public void after() throws Exception {
    }

    @BeforeClass
    public static void beforeClass() throws Exception {
    }

    @Rule
    public final ExpectedException expectedException = ExpectedException.none();
}
|
<gh_stars>0
/*************************************************************************
***** *****
***** 使用教程/readme : *****
***** https://cloud.tencent.com/document/product/583/32996 *****
***** *****
**************************************************************************/
const fs = require('fs');
const path = require('path');
exports.main_handler = async (event, context, callback) => {
let html = fs.readFileSync(path.resolve(__dirname, './index.html'), {
encoding: 'utf-8',
});
return {
isBase64Encoded: false,
statusCode: 200,
headers: { 'Content-Type': 'text/html' },
body: html,
};
};
|
<filename>verification/verify.py
# -*- coding: utf-8 -*-
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
import itertools
from multiprocessing import cpu_count
import os
import shutil
from subprocess import Popen, PIPE
from typing import Union
from urllib.request import urlopen, Request
import numpy as np
from pexpect import spawn
from tqdm import tqdm
from typeguard import typechecked
import zarr
from pyCRGI.pure import (
get_syn as pure_get_syn,
get_value as pure_get_value,
)
from pyCRGI.jited import (
get_syn as jited_get_syn,
get_value as jited_get_value,
)
# Name of the compiled Fortran reference binary (built from igrf13.f).
CMD = "igrf13"
# On-disk zarr store holding the reference field values.
DATA = "data.zarr"
# Storage dtype for all generated arrays (float32).
DTYPE = 'f4'
# Directory containing this script; all artefacts live next to it.
FLD = os.path.dirname(__file__)
# Upstream location of the official IGRF-13 Fortran source.
URL = "https://www.ngdc.noaa.gov/IAGA/vmod/igrf13.f"
@typechecked
def _compute(
    year: float,
    alt: float,
    lat: float,
    lon: float,
    itype: int = 1,
) -> dict[str, float]:
    """Run the reference Fortran `igrf13` binary interactively (pexpect)
    for one date/position and return the parsed field values.

    Args:
        year: Decimal year, 1900.0 .. 2030.0.
        alt: Altitude in km (itype 1) or radial distance in km (itype 2).
        lat: Latitude in decimal degrees, -90 .. 90.
        lon: Longitude in decimal degrees, 0 <= lon < 360.
        itype: 1 = above sea level, 2 = geocentric (from centre of Earth).

    Returns:
        Mapping of component names (D, I, H, X, Y, Z, F and their `*_SV`
        secular-variation counterparts) to floats, as parsed by _parse_reply.

    Raises:
        ValueError: when the binary's output cannot be matched/parsed.
    """
    assert itype in (1, 2)
    assert 1900.0 <= year <= 2030.0
    assert -90.0 <= lat <= 90.0
    assert 0.0 <= lon < 360.0
    cmd_fn = os.path.join(FLD, CMD)
    proc = spawn(cmd_fn)
    # Walk the binary's interactive menu, answering each prompt in turn.
    proc.expect(' or press "Return" for output to screen')
    proc.sendline('') # file or stdout -> stdout
    proc.expect(r' 2 - geocentric \(shape of Earth is approximated by a sphere\)')
    proc.sendline(f'{itype:d}') # coordinate system
    proc.expect(r' 3 - values on a latitude\/longitude grid at one date')
    proc.sendline('1') # values at one or more locations & dates
    proc.expect(' 2 - in decimal degrees')
    proc.sendline('2') # decimal degrees
    proc.expect(' Enter date in years A.D.')
    proc.sendline(f'{year:0.03f}')
    if itype == 1:
        proc.expect(' Enter altitude in km')
    else:
        proc.expect(r' Enter radial distance in km \(>3485 km\)')
    proc.sendline(f'{alt:0.03f}')
    proc.expect(' Enter latitude & longitude in decimal degrees')
    proc.sendline(f'{lat:0.03f}')
    proc.sendline(f'{lon:0.03f}')
    proc.expect(r' Enter place name \(20 characters maximum\)')
    proc.sendline('')
    try:
        proc.expect(r' Do you want values for another date \& position\? \(y/n\)')
        # Everything printed before this prompt is the result table.
        reply = _parse_reply(proc.before.decode('utf-8'))
        proc.sendline('n')
    except Exception as e:
        # Surface the raw program output to ease debugging of parse failures.
        print(proc.before.decode('utf-8'))
        raise ValueError(year, alt, lat, lon, itype) from e
    proc.wait()
    return reply
@typechecked
def _compute_arrays(
    data_fn: str,
    year_step: float = 0.5, # 0.5 ... 2.0
    lat_step: float = 7.5, # 7.5 ... 90.0
    lon_step: float = 7.5, # 7.5 ... 90.0
    alt_step: float = 49.5, # 49.5 ... 100.0
    parallel: bool = True,
):
    """Create the zarr reference store and fill it with Fortran-computed
    field values over a (year, lat, lon, alt, itype, column) grid.

    Args:
        data_fn: Path of the zarr store to (over)write.
        *_step: Grid step sizes for the respective axes.
        parallel: Fan out one process per year via ProcessPoolExecutor.
    """
    years = np.arange(1900.0, 2030.0 + year_step, year_step, dtype = DTYPE)
    lats = np.arange(-90.0, 90.0 + lat_step, lat_step, dtype = DTYPE)
    lons = np.arange(0.0, 360.0, lon_step, dtype = DTYPE)
    alts = np.arange(-100.0, 400.0 + alt_step, alt_step, dtype = DTYPE)
    itypes = (1, 2) # above sea level, from centre of Earth
    radius = 6371.2 # km
    # Seven components plus their secular-variation counterparts.
    columns = ('D', 'I', 'H', 'X', 'Y', 'Z', 'F')
    columns = columns + tuple(f'{column}_SV' for column in columns)
    data = zarr.open(data_fn, mode = 'w')
    # One chunk per year so worker processes can write independently.
    field = data.create_dataset(
        name = 'field',
        shape = (years.shape[0], lats.shape[0], lons.shape[0], alts.shape[0], len(itypes), len(columns)),
        chunks = (1, lats.shape[0], lons.shape[0], alts.shape[0], len(itypes), len(columns)),
        dtype = DTYPE,
    )
    field.attrs['dims'] = ['years', 'lats', 'lons', 'alts', 'itypes', 'columns']
    field.attrs['columns'] = list(columns)
    field.attrs['radius'] = radius
    # Store the axis vectors alongside the field for later verification.
    data.create_dataset('years', data = years)
    data.create_dataset('lats', data = lats)
    data.create_dataset('lons', data = lons)
    data.create_dataset('alts', data = alts)
    data.create_dataset('itypes', data = np.array(itypes, dtype = 'u4'))
    if parallel:
        with ProcessPoolExecutor(max_workers = cpu_count()) as p:
            tasks = [
                p.submit(
                    _compute_year_array,
                    data_fn = data_fn,
                    year_idx = year_idx,
                    year = float(year),
                    lats = lats,
                    lons = lons,
                    alts = alts,
                    itypes = itypes,
                    columns = columns,
                    radius = radius,
                )
                for year_idx, year in enumerate(years)
            ]
            for task in tqdm(tasks):
                _ = task.result()
    else:
        for year_idx, year in enumerate(tqdm(years)):
            _ = _compute_year_array(
                data_fn = data_fn,
                year_idx = year_idx,
                year = float(year),
                lats = lats,
                lons = lons,
                alts = alts,
                itypes = itypes,
                columns = columns,
                radius = radius,
            )
@typechecked
def _compute_year_array(
    data_fn: str,
    year_idx: int,
    year: float,
    lats: np.array,
    lons: np.array,
    alts: np.array,
    itypes: tuple[int, int],
    columns: tuple[str, ...],
    radius: float,
) -> bool:
    """Compute one year's full (lat, lon, alt, itype, column) chunk via the
    Fortran binary and write it into the zarr store at `year_idx`.

    Returns True so callers can drain futures uniformly.
    """
    chunk = np.zeros(
        (lats.shape[0], lons.shape[0], alts.shape[0], len(itypes), len(columns)),
        dtype = DTYPE,
    )
    # Threads (not processes) here: each task mostly waits on the spawned
    # Fortran binary, and all tasks write into the shared `chunk` in place.
    with ThreadPoolExecutor(max_workers = 30) as p:
        tasks = [
            p.submit(
                _compute_llai_value,
                lat_idx,
                lat,
                lon_idx,
                lon,
                alt_idx,
                alt,
                itype_idx,
                itype,
                year,
                radius,
                columns,
                chunk,
            )
            for (lat_idx, lat), (lon_idx, lon), (alt_idx, alt), (itype_idx, itype) in itertools.product(
                enumerate(lats), enumerate(lons), enumerate(alts), enumerate(itypes),
            )
        ]
        for task in tasks:
            _ = task.result()
    data = zarr.open(data_fn, mode = 'a')
    data['field'][year_idx, ...] = chunk
    return True
def _compute_llai_value(
    lat_idx,
    lat,
    lon_idx,
    lon,
    alt_idx,
    alt,
    itype_idx,
    itype,
    year,
    radius,
    columns,
    chunk,
) -> bool:
    """Compute a single (lat, lon, alt, itype) grid cell with the Fortran
    binary and write its field components into `chunk` in place.

    Returns True so callers can drain futures uniformly.
    """
    # itype 2 measures distance from the centre of the Earth, so the stored
    # altitude offset must be shifted by the Earth's radius.
    elevation = 0.0 if itype == 1 else radius
    field = _compute(
        year = year,
        lat = float(lat),
        lon = float(lon),
        alt = float(alt) + elevation,
        itype = itype,
    )
    for column_idx, column in enumerate(columns):
        chunk[
            lat_idx,
            lon_idx,
            alt_idx,
            itype_idx,
            column_idx,
        ] = field[column]
    return True
@typechecked
def _verify_arrays(
    data_fn: str,
    parallel: bool = True,
):
    """Check the pyCRGI implementations against the stored Fortran reference.

    Reads the grid axes and metadata back from the zarr store written by
    _compute_arrays, then verifies year-by-year (optionally in parallel).
    """
    data = zarr.open(data_fn, mode = 'r')
    years = data['years'][...]
    lats = data['lats'][...]
    lons = data['lons'][...]
    alts = data['alts'][...]
    itypes = tuple(int(number) for number in data['itypes'][...]) # above sea level, from centre of Earth
    radius = data['field'].attrs['radius'] # km
    columns = tuple(data['field'].attrs['columns'])
    if parallel:
        with ProcessPoolExecutor(max_workers = cpu_count()) as p:
            tasks = [
                p.submit(
                    _verify_year_array,
                    data_fn = data_fn,
                    year_idx = year_idx,
                    year = float(year),
                    lats = lats,
                    lons = lons,
                    alts = alts,
                    itypes = itypes,
                    columns = columns,
                    radius = radius,
                )
                for year_idx, year in enumerate(years)
            ]
            for task in tqdm(tasks):
                _ = task.result()
    else:
        for year_idx, year in enumerate(tqdm(years)):
            _ = _verify_year_array(
                data_fn = data_fn,
                year_idx = year_idx,
                year = float(year),
                lats = lats,
                lons = lons,
                alts = alts,
                itypes = itypes,
                columns = columns,
                radius = radius,
            )
@typechecked
def _verify_year_array(
    data_fn: str,
    year_idx: int,
    year: float,
    lats: np.array,
    lons: np.array,
    alts: np.array,
    itypes: tuple[int, int],
    columns: tuple[str, ...],
    radius: float,
    atol: float = 0.7, # 0.7 ... 2.0 # nT
) -> bool:
    """Verify one year's chunk: compare pure & jited pyCRGI `get_syn` and
    `get_value` against the stored Fortran reference, within `atol`.

    Raises:
        ValueError: with a formatted diff when any comparison fails.

    Returns True so callers can drain futures uniformly.
    """
    data = zarr.open(data_fn, mode = 'r')
    chunk = data['field'][year_idx, ...]
    # Column indices within the stored reference chunk.
    d_idx = columns.index('D')
    i_idx = columns.index('I')
    h_idx = columns.index('H')
    x_idx = columns.index('X')
    y_idx = columns.index('Y')
    z_idx = columns.index('Z')
    f_idx = columns.index('F')
    # dsv_idx = columns.index('D_SV')
    # isv_idx = columns.index('I_SV')
    # hsv_idx = columns.index('H_SV')
    # xsv_idx = columns.index('X_SV')
    # ysv_idx = columns.index('Y_SV')
    # zsv_idx = columns.index('Z_SV')
    # fsv_idx = columns.index('F_SV')
    for (lat_idx, lat), (lon_idx, lon), (alt_idx, alt), (itype_idx, itype) in itertools.product(
        enumerate(lats), enumerate(lons), enumerate(alts), enumerate(itypes),
    ):
        # itype 2 measures from the centre of the Earth: shift by its radius.
        offset = 0.0 if itype == 1 else radius
        # --- get_syn (X, Y, Z, F), pure implementation ---
        expected = chunk[lat_idx, lon_idx, alt_idx, itype_idx, [x_idx, y_idx, z_idx, f_idx]]
        computed = np.array(pure_get_syn(
            year = year,
            lat = float(lat),
            elong = float(lon),
            alt = float(alt) + offset,
            itype = itype,
        ), dtype = chunk.dtype)
        if not np.allclose(expected, computed, atol = atol):
            raise ValueError((
                f"SYN year={year:.02f} lat={lat:.02f} lon={lon:.02f} alt={alt:.02f} itype={itype:d} atol={atol:.02f}\n"
                f"          {_columns_to_str(['X', 'Y', 'Z', 'F']):s}\n"
                f"fortran = {_array_to_str(expected):s}\n"
                f"pure    = {_array_to_str(computed):s}\n"
                f"diff    = {_array_to_str(np.abs(computed-expected)):s}"
            ))
        # --- get_syn, jit-compiled implementation ---
        computed = jited_get_syn(
            year = year,
            lat = float(lat),
            elong = float(lon),
            alt = float(alt) + offset,
            itype = itype,
        )
        if not np.allclose(expected, computed, atol = atol):
            raise ValueError((
                f"SYN year={year:.02f} lat={lat:.02f} lon={lon:.02f} alt={alt:.02f} itype={itype:d} atol={atol:.02f}\n"
                f"          {_columns_to_str(['X', 'Y', 'Z', 'F']):s}\n"
                f"fortran = {_array_to_str(expected):s}\n"
                f"jited   = {_array_to_str(computed):s}\n"
                f"diff    = {_array_to_str(np.abs(computed-expected)):s}"
            ))
        # get_value is only defined for geodetic coordinates (itype 1).
        if itype != 1:
            continue
        # --- get_value (D, I, H, X, Y, Z, F), pure implementation ---
        expected = chunk[lat_idx, lon_idx, alt_idx, itype_idx, [d_idx, i_idx, h_idx, x_idx, y_idx, z_idx, f_idx]]
        computed = np.array(pure_get_value(
            lat = float(lat),
            lon = float(lon),
            alt = float(alt),
            year = year,
        ), dtype = chunk.dtype)
        if not np.allclose(expected, computed, atol = atol):
            raise ValueError((
                f"VALUE year={year:.02f} lat={lat:.02f} lon={lon:.02f} alt={alt:.02f} itype={itype:d} atol={atol:.02f}\n"
                f"          {_columns_to_str(['D', 'I', 'H', 'X', 'Y', 'Z', 'F']):s}\n"
                f"fortran = {_array_to_str(expected):s}\n"
                f"pure    = {_array_to_str(computed):s}\n"
                f"diff    = {_array_to_str(np.abs(computed-expected)):s}"
            ))
        # --- get_value, jit-compiled implementation ---
        computed = jited_get_value(
            lat = float(lat),
            lon = float(lon),
            alt = float(alt),
            year = year,
        )
        if not np.allclose(expected, computed, atol = atol):
            raise ValueError((
                f"VALUE year={year:.02f} lat={lat:.02f} lon={lon:.02f} alt={alt:.02f} itype={itype:d} atol={atol:.02f}\n"
                f"          {_columns_to_str(['D', 'I', 'H', 'X', 'Y', 'Z', 'F']):s}\n"
                f"fortran = {_array_to_str(expected):s}\n"
                f"jited   = {_array_to_str(computed):s}\n"
                f"diff    = {_array_to_str(np.abs(computed-expected)):s}"
            ))
        # TODO variation implementation differs between pyIGRF and Fortran (`t` and `tc` variables)
        # if year < 1901.0 or year > 2029.0:
        #     continue
        #
        # dd, di, dh, dx, dy, dz, df = get_variation(
        #     lat = float(lat),
        #     lon = float(lon),
        #     alt = float(alt),
        #     year = year,
        # )
        # expected = chunk[lat_idx, lon_idx, alt_idx, itype_idx, [dsv_idx, isv_idx, hsv_idx, xsv_idx, ysv_idx, zsv_idx, fsv_idx]]
        # computed = np.array((dd, di, dh, dx, dy, dz, df), dtype = chunk.dtype)
        # if not np.allclose(expected, computed, atol = atol):
        #     raise ValueError((
        #         f"VARIATION year={year:.02f} lat={lat:.02f} lon={lon:.02f} alt={alt:.02f} itype={itype:d} atol={atol:.02f}\n"
        #         f"          {_columns_to_str(['D', 'I', 'H', 'X', 'Y', 'Z', 'F']):s}\n"
        #         f"fortran = {_array_to_str(expected):s}\n"
        #         f"python  = {_array_to_str(computed):s}\n"
        #         f"diff    = {_array_to_str(np.abs(computed-expected)):s}"
        #     ))
    return True
@typechecked
def _array_to_str(data: np.ndarray) -> str:
    """Format a 1D numeric array as '[  a.aaa   b.bbb ...]' (width 10, 3 dp)."""
    cells = ' '.join(f'{value:10.03f}' for value in data)
    return f'[{cells}]'
@typechecked
def _columns_to_str(columns: list[str]) -> str:
    """Right-align column labels to width 10 and wrap them in brackets."""
    padded = [label.rjust(10) for label in columns]
    return '[' + ' '.join(padded) + ']'
@typechecked
def _parse_reply(reply: str) -> dict[str, float]:
    """Parse the result table printed by the Fortran binary.

    Keeps only `NAME = value ... SV ...` lines, returning a dict mapping
    each component name to its value and `NAME_SV` to its secular variation.
    """
    # Drop empty lines, then banner/echo lines and purely numeric lines.
    lines = [
        line.strip()
        for line in reply.split('\n')
        if len(line.strip()) > 0
    ]
    lines = [
        line
        for line in lines
        if not line.startswith('This version') and not line.startswith('values for') and not line[0].isnumeric()
    ]
    reply = {}
    for line in lines:
        # Each line looks like: "NAME = <value> [unit] SV = <svvalue> ..."
        name, fragment = line.split('=', 1)
        name = name.strip()
        fragment = fragment.strip()
        value, svvalue = fragment.split('SV')
        value = value.strip()
        if ' ' in value:
            # Strip a trailing unit token after the numeric value.
            value, _ = value.split(' ')
        value = float(value)
        svvalue = float(svvalue.split()[1])
        if name in ('D', 'I'):
            svvalue = svvalue / 60 # minutes to degree
        reply[name] = value
        reply[f'{name:s}_SV'] = svvalue
    return reply
@typechecked
def _build(in_fn: str, out_fn: str):
    # Compile the Fortran reference source with gfortran.
    # in_fn: path to the .f source file; out_fn: path for the executable.
    proc = Popen(['gfortran', in_fn, '-o', out_fn])
    proc.wait()
    # NOTE(review): assert is stripped under `python -O`; an explicit check
    # raising an exception would be more robust — left as-is to preserve behavior.
    assert proc.returncode == 0
@typechecked
def _patch(src_fn: str):
    # Apply the sibling '<src_fn>.patch' file to src_fn using the `patch` tool.
    with open(f'{src_fn:s}.patch', mode = 'rb') as f:
        patch = f.read()
    # Feed the patch via stdin so no temporary file is needed.
    proc = Popen(['patch', src_fn], stdin = PIPE)
    proc.communicate(input = patch)
    # NOTE(review): assert is stripped under `python -O` — see _build.
    assert proc.returncode == 0
@typechecked
def _download(down_url: str, mode: str = "binary") -> Union[str, bytes]:
    """Fetch a URL and return its payload.

    mode 'binary' (default) returns raw bytes; mode 'text' decodes as UTF-8.
    Fails on any HTTP status other than 200.
    """
    assert mode in ("text", "binary")
    assert isinstance(down_url, str)
    request = Request(down_url)
    with urlopen(request) as response:
        assert response.status == 200
        payload = response.read()
    return payload.decode('utf-8') if mode == 'text' else payload
@typechecked
def main(clean: bool = False, parallel: bool = True):
    """Drive the end-to-end reference run: download, patch, build, compute, verify.

    clean:    when True, delete each cached artifact first so it is rebuilt.
    parallel: forwarded to the array computation/verification helpers.
    """
    # FLD, CMD, URL and DATA are module-level constants defined earlier in the
    # file (not visible in this chunk).
    # Fortran source: download and patch once, reuse afterwards.
    src_fn = os.path.join(FLD, f'{CMD:s}.f')
    if clean and os.path.exists(src_fn):
        os.unlink(src_fn)
    if not os.path.exists(src_fn):
        raw = _download(URL)
        with open(src_fn, mode = 'wb') as f:
            f.write(raw)
        _patch(src_fn)
    # Reference executable: rebuild only when missing.
    cmd_fn = os.path.join(FLD, CMD)
    if clean and os.path.exists(cmd_fn):
        os.unlink(cmd_fn)
    if not os.path.exists(cmd_fn):
        _build(src_fn, cmd_fn)
    # Reference data: DATA appears to be a directory (removed with rmtree) — TODO confirm.
    data_fn = os.path.join(FLD, DATA)
    if clean and os.path.exists(data_fn):
        shutil.rmtree(data_fn)
    if not os.path.exists(data_fn):
        _compute_arrays(data_fn, parallel = parallel)
    # Always verify, even when cached data was reused.
    _verify_arrays(data_fn, parallel = parallel)
# Script entry point: build/refresh the reference data and verify it.
if __name__ == '__main__':
    main()
|
# Convert an integer to its binary-string representation.
def to_binary(n)
  n.to_s(2)
end
# Prompt the user for an integer on stdin and print its binary form.
puts "Enter a number:"
number = gets.to_i
puts "Equivalent binary number: #{to_binary(number)}"
class BackgroundTaskManager:
    """FIFO queue of background tasks.

    Tasks are opaque to the queue itself, except for the `delay` property,
    which assumes each task is a mapping with a 'delay' key — TODO confirm
    callers always enqueue dicts shaped like {'delay': ...}.
    """

    def __init__(self):
        # Pending tasks, oldest first.
        # Renamed from `__task_queue__`: identifiers of the form __name__ are
        # reserved for the interpreter per the Python language reference;
        # a single leading underscore marks it as internal instead.
        self._task_queue = []

    def add_task(self, task):
        # Append a task to the end of the queue.
        self._task_queue.append(task)

    def get_next_task(self):
        # Remove and return the oldest task, or None if the queue is empty.
        # NOTE: list.pop(0) is O(n); switch to collections.deque if the
        # queue ever grows large.
        if self._task_queue:
            return self._task_queue.pop(0)
        return None

    @property
    def delay(self):
        # Delay of the next task in the queue, or None if the queue is empty.
        if self._task_queue:
            return self._task_queue[0]['delay']
        return None
package bd.edu.daffodilvarsity.classmanager.notification;
import android.content.DialogInterface;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.Observer;
import androidx.lifecycle.ViewModelProviders;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.google.android.material.floatingactionbutton.FloatingActionButton;
import com.google.android.material.switchmaterial.SwitchMaterial;
import java.util.List;
import bd.edu.daffodilvarsity.classmanager.R;
import bd.edu.daffodilvarsity.classmanager.otherclasses.SharedPreferencesHelper;
/**
 * Fragment that shows a student's received notifications in a list and lets
 * the student toggle notifications on/off and clear the list.
 */
public class NotificationStudent extends Fragment implements View.OnClickListener {

    // Tappable row wrapping the notification on/off switch.
    // NOTE(review): shares its name with the toggleNotification() method below —
    // legal Java, but confusing; consider renaming one of them.
    private LinearLayout toggleNotification;
    private NotificationStudentViewModel mViewModel;
    // Display-only switch (setClickable(false)); taps go through the row above.
    private SwitchMaterial notificationSwitch;
    private RecyclerView mRecyclerView;
    // NOTE(review): adapter class name is misspelled ("Notificatin") in its own declaration.
    private NotificatinStudentRecyclerViewAdapter mAdapter;
    // Clears all notifications when pressed.
    private FloatingActionButton fab;
    // Placeholder shown when the notification list is empty.
    private TextView noNotification;

    public NotificationStudent() {
        // Required empty public constructor
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        View view = inflater.inflate(R.layout.fragment_notification_student, container, false);
        initializeVariables(view);
        setCurrentNotificationStatus();
        initializeRecyclerView();
        return view;
    }

    // Look up all views and wire up click listeners.
    private void initializeVariables(View view) {
        fab = view.findViewById(R.id.clear_notifications);
        fab.setOnClickListener(this);
        toggleNotification = view.findViewById(R.id.toggle_notification);
        toggleNotification.setOnClickListener(this);
        notificationSwitch = view.findViewById(R.id.notification_switch);
        // The switch itself is not clickable; the enclosing row handles taps.
        notificationSwitch.setClickable(false);
        mRecyclerView = view.findViewById(R.id.notification_recycler_view);
        mAdapter = new NotificatinStudentRecyclerViewAdapter();
        noNotification = view.findViewById(R.id.no_notification);
    }

    private void initializeRecyclerView() {
        mRecyclerView.setLayoutManager(new LinearLayoutManager(getContext()));
        mRecyclerView.setAdapter(mAdapter);
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // NOTE(review): ViewModelProviders is deprecated in current AndroidX
        // (ViewModelProvider is the replacement) — left as-is.
        mViewModel = ViewModelProviders.of(this).get(NotificationStudentViewModel.class);
        mViewModel.loadNotifications();
        // Keep the list and the empty-state placeholder in sync with the data.
        mViewModel.getNotifications().observe(getViewLifecycleOwner(), new Observer<List<NotificationObjStudent>>() {
            @Override
            public void onChanged(List<NotificationObjStudent> notificationObjStudents) {
                if (mAdapter != null) {
                    mAdapter.updateRecyclerView(notificationObjStudents);
                    if (mAdapter.getItemCount() == 0) {
                        displayNoNotificationAvailable(true);
                    } else {
                        displayNoNotificationAvailable(false);
                    }
                }
            }
        });
    }

    // Show or hide the "no notifications" placeholder text.
    private void displayNoNotificationAvailable(boolean visible) {
        if (visible) {
            noNotification.setVisibility(View.VISIBLE);
        } else {
            noNotification.setVisibility(View.GONE);
        }
    }

    // Reflect the persisted notification preference in the switch.
    private void setCurrentNotificationStatus() {
        if (SharedPreferencesHelper.getStudentNotificatinStatus(getContext())) {
            notificationSwitch.setChecked(true);
        } else {
            notificationSwitch.setChecked(false);
        }
    }

    @Override
    public void onClick(View view) {
        if (view.getId() == R.id.toggle_notification) {
            toggleNotification();
        } else if (view.getId() == R.id.clear_notifications) {
            deleteAllNotifications();
        }
    }

    // Ask for confirmation before irreversibly deleting every notification.
    private void deleteAllNotifications() {
        // NOTE(review): the message text is missing a space after the comma
        // ("them,there") — fixing it would change a user-visible string, so it
        // is only flagged here.
        AlertDialog dialog = new AlertDialog.Builder(getContext())
                .setTitle("Are you sure to clear all notifications?")
                .setMessage("Once you clear them,there is no way to get them back.")
                .setPositiveButton("proceed", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialogInterface, int i) {
                        mViewModel.deleteAllNotifications();
                    }
                })
                .setNegativeButton("No",null)
                .create();
        dialog.show();
    }

    // Flip the persisted preference and the switch state, confirming via toast.
    private void toggleNotification() {
        if(notificationSwitch.isChecked()) {
            SharedPreferencesHelper.enableStudentNotification(getContext(),false);
            notificationSwitch.setChecked(false);
            makeToast("Notification disabled");
        } else {
            SharedPreferencesHelper.enableStudentNotification(getContext(),true);
            notificationSwitch.setChecked(true);
            makeToast("Notification enabled");
        }
    }

    // Toast helper; guards against a null context after the fragment detaches.
    private void makeToast(String text) {
        if (getContext() != null) {
            Toast.makeText(getContext(), text, Toast.LENGTH_SHORT).show();
        }
    }
}
|
# Bootstrap Composer and install project dependencies.
# Fix: detect an installed composer with the portable `command -v` idiom
# instead of running `composer -v` and reading $? two lines later.
if ! command -v composer >/dev/null 2>&1; then
  curl -sS https://getcomposer.org/installer | php
fi
# NOTE(review): pre-existing flaw kept for behavior compatibility — when
# composer is already installed globally, composer.phar was never downloaded
# and this invocation will fail; consider `composer install` in that case.
php composer.phar install
./app/console --help
|
#!/bin/bash
# Sia host bandwidth price pinning script
#
# First: set siac path
# To use: run script with ./set-bandwidth-price.sh PRICE
#
sia_path=""

# Abort until the operator edits sia_path above.
# Fix: quote the expansion, and escape the inner quotes (the original nested
# double quotes were silently dropped from the message).
if [ -z "${sia_path}" ]; then
  echo "Set your sia path variable, example \"/opt/sia\""
  exit 1
fi

# Exactly one argument: the USD price per TB to pin.
if [ "$#" -ne 1 ]; then
  echo "Missing price argument, run with \"./set-bandwidth-price.sh PRICE\""
  exit 1
fi

# jq is required to parse the Kraken API response.
if ! command -v jq >/dev/null 2>&1; then
  echo "jq needs to be installed for script to function"
  exit 1
fi

input_usd=$1

# Get price from Kraken ('a[0]' is the best ask for the SC/USD pair).
current_kraken_price_raw=$(curl -q -s "https://api.kraken.com/0/public/Ticker?pair=SCUSD" | jq '.result.SCUSD.a[0]')
# Remove the surrounding JSON quotes.
current_kraken_price=$(echo "$current_kraken_price_raw" | tr -d '"')

# USD to SC conversion.
# NOTE(review): bc defaults to scale=0, so the result is truncated to a whole
# number of SC; use `bc -l` if fractional precision is wanted — left as-is.
sc_tb=$(echo "$input_usd / $current_kraken_price" | bc)

# Set host configuration (expansions quoted to survive paths with spaces).
"${sia_path}/siac" host config mindownloadbandwidthprice "${sc_tb}SC"
import React from 'react';
import { Icon } from 'algae-ui';
export default () => (
<div className="icon-list">
<Icon type="alert" />
<Icon type="github" />
<Icon type="gift" />
<Icon type="apple" rotate={180} />
<Icon type="camera" style={{ fill: '#506dfe' }} />
</div>
);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.