text stringlengths 1 1.05M |
|---|
<reponame>nycaozhongshe/admin-template<filename>src/api/banner.js
import request from '@/utils/request';
import { baseUrl } from './config';
/**
 * Create a new carousel (banner) entry.
 * The first argument (an empty destructuring, e.g. an unused store context) is ignored.
 * @param params payload forwarded to the backend
 * @returns the request promise
 */
export function createdBanner({}, params) {
  const url = `${baseUrl}/carouselFigure/addCarouselFigure`;
  return request.post(url, params);
}
/**
 * Delete a carousel (banner) entry by id.
 * The first argument (an empty destructuring, e.g. an unused store context) is ignored.
 * @param params payload forwarded to the backend
 * @returns the request promise
 */
export function delBanner({}, params) {
  const url = `${baseUrl}/carouselFigure/deletePictureById`;
  return request.post(url, params);
}
/**
 * Update an existing carousel (banner) entry by id.
 * The first argument (an empty destructuring, e.g. an unused store context) is ignored.
 * @param params payload forwarded to the backend
 * @returns the request promise
 */
export function updateBanner({}, params) {
  const url = `${baseUrl}/carouselFigure/updatePictureById`;
  return request.post(url, params);
}
/**
 * Fetch the carousel (banner) entries.
 * The first argument (an empty destructuring, e.g. an unused store context) is ignored.
 * @param params payload forwarded to the backend
 * @returns the request promise
 */
export function getBanner({}, params) {
  const url = `${baseUrl}/carouselFigure/getCarouselFigure`;
  return request.post(url, params);
}
|
#!/usr/bin/env bash
# Copyright 2018-2022 the Kubeapps contributors.
# SPDX-License-Identifier: Apache-2.0
# CI end-to-end driver: installs Kubeapps into a local cluster and runs the
# Helm, browser and (optionally) operator integration test suites.
set -o errexit
set -o nounset
set -o pipefail
# Constants
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." >/dev/null && pwd)"
# Positional parameters (only the dev tag is required):
USE_MULTICLUSTER_OIDC_ENV=${1:-false} # "true" to exercise the multicluster OIDC setup
OLM_VERSION=${2:-"v0.18.2"} # OLM release installed for the operators test
DEV_TAG=${3:?missing dev tag} # image tag under test (required)
IMG_MODIFIER=${4:-""} # suffix appended to every image repository name
DEX_IP=${5:-"172.18.0.2"} # IP where the Dex OIDC provider listens
ADDITIONAL_CLUSTER_IP=${6:-"172.18.0.3"} # API server IP of the additional cluster
# TODO(andresmgot): While we work with beta releases, the Bitnami pipeline
# removes the pre-release part of the tag
if [[ -n "${TEST_LATEST_RELEASE:-}" ]]; then
DEV_TAG=${DEV_TAG/-beta.*/}
fi
# Load Generic Libraries (logging helpers, k8s wait/retry helpers)
# shellcheck disable=SC1090
. "${ROOT_DIR}/script/lib/libtest.sh"
# shellcheck disable=SC1090
. "${ROOT_DIR}/script/lib/liblog.sh"
# shellcheck disable=SC1090
. "${ROOT_DIR}/script/lib/libutil.sh"
# Auxiliary functions
########################
# Run the Helm test suite against the kubeapps-ci release
# Globals:
#   HELM_*
# Arguments: None
# Returns: None (non-zero exit status if the tests fail)
#########################
testHelm() {
  info "Running Helm tests..."
  helm test --namespace kubeapps kubeapps-ci
}
########################
# Check that the pod populating the OperatorHub catalog is running AND
# that the catalog already contains packages (uses "prometheus" as a probe).
# Globals: None
# Arguments: None
# Returns: None (exit status signals readiness; intended for retry_while)
#########################
isOperatorHubCatalogRunning() {
  kubectl get pod --namespace olm -l olm.catalogSource=operatorhubio-catalog -o 'jsonpath={.items[0].status.phase}' | grep Running
  # A running pod is not enough: also wait for the catalog to be populated.
  kubectl get packagemanifests.packages.operators.coreos.com | grep prometheus
}
########################
# Install OLM (Operator Lifecycle Manager) and wait until it is usable.
# Globals: None
# Arguments:
#   $1: Version of OLM
# Returns: None (exits the script if the packageserver CSV never succeeds)
#########################
installOLM() {
  local release=$1
  # Declare these as locals: the original leaked url/namespace/retries/csv_phase
  # into the global scope.
  local url="https://github.com/operator-framework/operator-lifecycle-manager/releases/download/${release}"
  local namespace=olm
  local retries=30
  local csv_phase="" new_csv_phase
  info "Installing OLM ${release} ..."
  kubectl apply -f "${url}/crds.yaml"
  kubectl wait --for=condition=Established -f "${url}/crds.yaml"
  kubectl apply -f "${url}/olm.yaml"
  # wait for deployments to be ready
  kubectl rollout status -w deployment/olm-operator --namespace="${namespace}"
  kubectl rollout status -w deployment/catalog-operator --namespace="${namespace}"
  # Poll the packageserver CSV until it reaches the "Succeeded" phase,
  # logging every phase transition (up to 30 * 10s = 5 minutes).
  until [[ ${retries} -eq 0 ]]; do
    new_csv_phase=$(kubectl get csv -n "${namespace}" packageserver -o jsonpath='{.status.phase}' 2>/dev/null || echo "Waiting for CSV to appear")
    if [[ "${new_csv_phase}" != "${csv_phase}" ]]; then
      csv_phase=${new_csv_phase}
      echo "CSV \"packageserver\" phase: $csv_phase"
    fi
    if [[ "$new_csv_phase" == "Succeeded" ]]; then
      break
    fi
    sleep 10
    retries=$((retries - 1))
  done
  # [[ ... -eq ... ]] replaces the non-POSIX, unquoted `[ $retries == 0 ]`.
  if [[ ${retries} -eq 0 ]]; then
    echo "CSV \"packageserver\" failed to reach phase succeeded"
    exit 1
  fi
  kubectl rollout status -w deployment/packageserver --namespace="${namespace}"
}
########################
# Install ChartMuseum into the kubeapps namespace and wait for it to roll out.
# Globals: None
# Arguments:
#   $1: Username
#   $2: Password
# Returns: None
#########################
installChartmuseum() {
  local user=$1
  local password=$2
  info "Installing ChartMuseum ..."
  # Quote the credential expansions: the original passed $user/$password
  # unquoted, which would break on spaces or glob characters.
  helm install chartmuseum --namespace kubeapps https://github.com/chartmuseum/charts/releases/download/chartmuseum-2.14.2/chartmuseum-2.14.2.tgz \
    --set env.open.DISABLE_API=false \
    --set persistence.enabled=true \
    --set secret.AUTH_USER="${user}" \
    --set secret.AUTH_PASS="${password}"
  kubectl rollout status -w deployment/chartmuseum-chartmuseum --namespace=kubeapps
}
########################
# Download a Bitnami chart, rename it with a "kubeapps-" prefix and push it
# to the in-cluster ChartMuseum through a temporary port-forward.
# Globals: None
# Arguments:
#   $1: chart
#   $2: version
#   $3: chartmuseum username
#   $4: chartmuseum password
# Returns: None
#########################
pushChart() {
  local chart=$1
  local version=$2
  local user=$3
  local password=$4
  local prefix="kubeapps-"
  local description="foo ${chart} chart for CI"
  local pod_name
  info "Adding ${chart}-${version} to ChartMuseum ..."
  curl -LO "https://charts.bitnami.com/bitnami/${chart}-${version}.tgz"
  # Mutate the chart name and description, then re-package the tarball
  # For instance, the apache's Chart.yaml file becomes modified to:
  # name: kubeapps-apache
  # description: foo apache chart for CI
  # consequently, the new packaged chart is "${prefix}${chart}-${version}.tgz"
  # This workaround should mitigate https://github.com/kubeapps/kubeapps/issues/3339
  # `mkdir -p` (instead of `mkdir`) keeps re-runs from aborting under errexit,
  # and every expansion is quoted against spaces/globs.
  mkdir -p "./${chart}-${version}"
  tar zxf "${chart}-${version}.tgz" -C "./${chart}-${version}"
  sed -i "s/name: ${chart}/name: ${prefix}${chart}/" "./${chart}-${version}/${chart}/Chart.yaml"
  sed -i "0,/^\([[:space:]]*description: *\).*/s//\1${description}/" "./${chart}-${version}/${chart}/Chart.yaml"
  helm package "./${chart}-${version}/${chart}" -d .
  # Declaration and assignment are split so a kubectl failure is not masked
  # by the exit status of `local`.
  pod_name=$(kubectl get pods --namespace kubeapps -l "app=chartmuseum" -l "release=chartmuseum" -o jsonpath="{.items[0].metadata.name}")
  # Background a port-forward long enough to upload the re-packaged chart.
  /bin/sh -c "kubectl port-forward ${pod_name} 8080:8080 --namespace kubeapps &"
  sleep 2
  curl -u "${user}:${password}" --data-binary "@${prefix}${chart}-${version}.tgz" http://localhost:8080/api/charts
  pkill -f "kubectl port-forward ${pod_name} 8080:8080 --namespace kubeapps"
}
########################
# Install Kubeapps or upgrades it if it's already installed
# Globals (read):
#   img_flags: array of --set flags pinning the component images
#   multiclusterFlags: optional array of multicluster/OIDC flags (may be unset)
# Arguments:
# $1: chart source
# Any further arguments are forwarded verbatim to "helm upgrade".
# Returns: None
#########################
installOrUpgradeKubeapps() {
local chartSource=$1
# Install Kubeapps
info "Installing Kubeapps from ${chartSource}..."
# Remove a possibly stale TLS secret from a previous run; ignore "not found".
kubectl -n kubeapps delete secret localhost-tls || true
# See https://stackoverflow.com/a/36296000 for "${arr[@]+"${arr[@]}"}" notation.
# (It expands to nothing when the array is unset, keeping `set -o nounset` happy.)
cmd=(helm upgrade --install kubeapps-ci --namespace kubeapps "${chartSource}"
"${img_flags[@]}"
"${@:2}"
"${multiclusterFlags[@]+"${multiclusterFlags[@]}"}"
--set frontend.replicaCount=2
--set kubeops.replicaCount=2
--set dashboard.replicaCount=2
--set kubeappsapis.replicaCount=2
--set kubeops.enabled=true
--set postgresql.replication.enabled=false
--set postgresql.postgresqlPassword=password
--set redis.auth.password=password
--set apprepository.initialRepos[0].name=bitnami
--set apprepository.initialRepos[0].url=http://chartmuseum-chartmuseum.kubeapps:8080
--set apprepository.initialRepos[0].basicAuth.user=admin
--set apprepository.initialRepos[0].basicAuth.password=password
--set globalReposNamespaceSuffix=-repos-global
--wait)
# Echo the fully-expanded command before running it, for CI log debugging.
echo "${cmd[@]}"
"${cmd[@]}"
}
info "IMAGE TAG TO BE TESTED: $DEV_TAG"
info "IMAGE_REPO_SUFFIX: $IMG_MODIFIER"
info "Cluster Version: $(kubectl version -o json | jq -r '.serverVersion.gitVersion')"
info "Kubectl Version: $(kubectl version -o json | jq -r '.clientVersion.gitVersion')"
# Use dev images or Bitnami if testing the latest release
image_prefix="kubeapps/"
kubeapps_apis_image="kubeapps-apis"
[[ -n "${TEST_LATEST_RELEASE:-}" ]] && image_prefix="bitnami/kubeapps-" && kubeapps_apis_image="apis"
# Image name stems; the prefix and suffix are applied to each entry below.
images=(
"apprepository-controller"
"asset-syncer"
"assetsvc"
"dashboard"
"kubeops"
"pinniped-proxy"
"${kubeapps_apis_image}"
)
# Prepend the registry prefix (/#/) and append the repo suffix (/%/) to each name.
images=("${images[@]/#/${image_prefix}}")
images=("${images[@]/%/${IMG_MODIFIER}}")
# Helm --set flags pinning every Kubeapps component to the image/tag under test.
# NOTE: the index into `images` must match the order of the array above.
img_flags=(
"--set" "apprepository.image.tag=${DEV_TAG}"
"--set" "apprepository.image.repository=${images[0]}"
"--set" "apprepository.syncImage.tag=${DEV_TAG}"
"--set" "apprepository.syncImage.repository=${images[1]}"
"--set" "assetsvc.image.tag=${DEV_TAG}"
"--set" "assetsvc.image.repository=${images[2]}"
"--set" "dashboard.image.tag=${DEV_TAG}"
"--set" "dashboard.image.repository=${images[3]}"
"--set" "kubeops.image.tag=${DEV_TAG}"
"--set" "kubeops.image.repository=${images[4]}"
"--set" "pinnipedProxy.image.tag=${DEV_TAG}"
"--set" "pinnipedProxy.image.repository=${images[5]}"
"--set" "kubeappsapis.image.tag=${DEV_TAG}"
"--set" "kubeappsapis.image.repository=${images[6]}"
)
# When requested, enable ingress + the OIDC auth proxy and register the
# additional clusters used by the multicluster test suite.
if [ "$USE_MULTICLUSTER_OIDC_ENV" = true ]; then
multiclusterFlags=(
"--set" "ingress.enabled=true"
"--set" "ingress.hostname=localhost"
"--set" "ingress.tls=true"
"--set" "ingress.selfSigned=true"
"--set" "authProxy.enabled=true"
"--set" "authProxy.provider=oidc"
"--set" "authProxy.clientID=default"
"--set" "authProxy.clientSecret=ZXhhbXBsZS1hcHAtc2VjcmV0"
"--set" "authProxy.cookieSecret=bm90LWdvb2Qtc2VjcmV0Cg=="
"--set" "authProxy.additionalFlags[0]=\"--oidc-issuer-url=https://${DEX_IP}:32000\""
"--set" "authProxy.additionalFlags[1]=\"--scope=openid email groups audience:server:client_id:second-cluster audience:server:client_id:third-cluster\""
"--set" "authProxy.additionalFlags[2]=\"--ssl-insecure-skip-verify=true\""
"--set" "authProxy.additionalFlags[3]=\"--redirect-url=http://kubeapps-ci.kubeapps/oauth2/callback\""
"--set" "authProxy.additionalFlags[4]=\"--cookie-secure=false\""
"--set" "authProxy.additionalFlags[5]=\"--cookie-domain=kubeapps-ci.kubeapps\""
"--set" "authProxy.additionalFlags[6]=\"--whitelist-domain=kubeapps-ci.kubeapps\""
"--set" "authProxy.additionalFlags[7]=\"--set-authorization-header=true\""
"--set" "clusters[0].name=default"
"--set" "clusters[1].name=second-cluster"
"--set" "clusters[1].apiServiceURL=https://${ADDITIONAL_CLUSTER_IP}:6443"
"--set" "clusters[1].insecure=true"
"--set" "clusters[1].serviceToken=ZXlKaGJHY2lPaUpTVXpJMU5pSXNJbXRwWkNJNklsbHpiSEp5TlZwM1QwaG9WSE5PYkhVdE5GQkRablY2TW0wd05rUmtMVmxFWVV4MlZEazNaeTEyUmxFaWZRLmV5SnBjM01pT2lKcmRXSmxjbTVsZEdWekwzTmxjblpwWTJWaFkyTnZkVzUwSWl3aWEzVmlaWEp1WlhSbGN5NXBieTl6WlhKMmFXTmxZV05qYjNWdWRDOXVZVzFsYzNCaFkyVWlPaUprWldaaGRXeDBJaXdpYTNWaVpYSnVaWFJsY3k1cGJ5OXpaWEoyYVdObFlXTmpiM1Z1ZEM5elpXTnlaWFF1Ym1GdFpTSTZJbXQxWW1WaGNIQnpMVzVoYldWemNHRmpaUzFrYVhOamIzWmxjbmt0ZEc5clpXNHRjV295Ym1naUxDSnJkV0psY201bGRHVnpMbWx2TDNObGNuWnBZMlZoWTJOdmRXNTBMM05sY25acFkyVXRZV05qYjNWdWRDNXVZVzFsSWpvaWEzVmlaV0Z3Y0hNdGJtRnRaWE53WVdObExXUnBjMk52ZG1WeWVTSXNJbXQxWW1WeWJtVjBaWE11YVc4dmMyVnlkbWxqWldGalkyOTFiblF2YzJWeWRtbGpaUzFoWTJOdmRXNTBMblZwWkNJNkltVXhaakE1WmpSakxUTTRNemt0TkRJME15MWhZbUptTFRKaU5HWm1OREZrWW1RMllTSXNJbk4xWWlJNkluTjVjM1JsYlRwelpYSjJhV05sWVdOamIzVnVkRHBrWldaaGRXeDBPbXQxWW1WaGNIQnpMVzVoYldWemNHRmpaUzFrYVhOamIzWmxjbmtpZlEuTnh6V2dsUGlrVWpROVQ1NkpWM2xJN1VWTUVSR3J2bklPSHJENkh4dUVwR0luLWFUUzV5Q0pDa3Z0cTF6S3Z3b05sc2MyX0YxaTdFOUxWRGFwbC1UQlhleUN5Rl92S1B1TDF4dTdqZFBMZ1dKT1pQX3JMcXppaDV4ZlkxalFoOHNhdTRZclFJLUtqb3U1UkRRZ0tOQS1BaS1lRlFOZVh2bmlUNlBKYWVkc184V0t3dHRMMC1wdHpYRnBnOFl5dkx6N0U1UWdTR2tjNWpDVXlsS0RvZVRUaVRSOEc2RHFHYkFQQUYwREt0b3MybU9Geno4SlJYNHhoQmdvaUcxVTVmR1g4Z3hnTU1SV0VHRE9kaGMyeXRvcFdRUkRpYmhvaldNS3VDZlNua09zMDRGYTBkYmEwQ0NTbld2a29LZ3Z4QVR5aVVrWm9wV3VpZ1JJNFd5dDkzbXhR"
)
fi
helm repo add bitnami https://charts.bitnami.com/bitnami
helm dep up "${ROOT_DIR}/chart/kubeapps"
kubectl create ns kubeapps
GLOBAL_REPOS_NS=kubeapps
if [[ -n "${TEST_UPGRADE:-}" ]]; then
# To test the upgrade, first install the latest version published
info "Installing latest Kubeapps chart available"
installOrUpgradeKubeapps bitnami/kubeapps \
"--set" "apprepository.initialRepos={}"
info "Waiting for Kubeapps components to be ready (bitnami chart)..."
k8s_wait_for_deployment kubeapps kubeapps-ci
fi
# Install (or upgrade over the published release) the local chart under test.
installOrUpgradeKubeapps "${ROOT_DIR}/chart/kubeapps"
info "Waiting for Kubeapps components to be ready (local chart)..."
k8s_wait_for_deployment kubeapps kubeapps-ci
# Serve two versions of a renamed chart so the UI upgrade flow can be tested.
installChartmuseum admin password
pushChart apache 8.6.2 admin password
pushChart apache 8.6.3 admin password
# Ensure that we are testing the correct image
info ""
k8s_ensure_image kubeapps kubeapps-ci-internal-apprepository-controller "$DEV_TAG"
k8s_ensure_image kubeapps kubeapps-ci-internal-dashboard "$DEV_TAG"
k8s_ensure_image kubeapps kubeapps-ci-internal-kubeappsapis "$DEV_TAG"
# Wait for Kubeapps Pods
info "Waiting for Kubeapps components to be ready..."
deployments=(
"kubeapps-ci"
"kubeapps-ci-internal-apprepository-controller"
"kubeapps-ci-internal-dashboard"
"kubeapps-ci-internal-kubeappsapis"
)
for dep in "${deployments[@]}"; do
k8s_wait_for_deployment kubeapps "$dep"
info "Deployment ${dep} ready"
done
# Wait for Kubeapps Jobs
# Clean up existing jobs
kubectl delete jobs -n kubeapps --all
# Trigger update of the bitnami repository
kubectl patch apprepositories.kubeapps.com -n ${GLOBAL_REPOS_NS} bitnami -p='[{"op": "replace", "path": "/spec/resyncRequests", "value":1}]' --type=json
k8s_wait_for_job_completed kubeapps apprepositories.kubeapps.com/repo-name=bitnami
info "Job apprepositories.kubeapps.com/repo-name=bitnami ready"
info "All deployments ready. PODs:"
kubectl get pods -n kubeapps -o wide
# Wait for all the endpoints to be ready
kubectl get ep --namespace=kubeapps
svcs=(
"kubeapps-ci"
"kubeapps-ci-internal-dashboard"
"kubeapps-ci-internal-kubeappsapis"
)
for svc in "${svcs[@]}"; do
k8s_wait_for_endpoints kubeapps "$svc" 1
info "Endpoints for ${svc} available"
done
# Deactivate helm tests unless we are testing the latest release until
# we have released the code with per-namespace tests (since the helm
# tests for assetsvc needs to test the namespaced repo).
if [[ -z "${TEST_LATEST_RELEASE:-}" ]]; then
# Run helm tests
# Retry once if tests fail to avoid temporary issue
if ! retry_while testHelm "2" "1"; then
warn "PODS status on failure"
kubectl get pods -n kubeapps
# Dump the logs of every pod of the release; auth-proxy/nginx sidecars
# only exist on the non-internal (frontend) pods.
for pod in $(kubectl get po -l='app.kubernetes.io/managed-by=Helm,app.kubernetes.io/instance=kubeapps-ci' -oname -n kubeapps); do
warn "LOGS for pod $pod ------------"
if [[ "$pod" =~ .*internal.* ]]; then
kubectl logs -n kubeapps "$pod"
else
kubectl logs -n kubeapps "$pod" nginx
kubectl logs -n kubeapps "$pod" auth-proxy
fi
done
echo
warn "LOGS for dashboard tests --------"
kubectl logs kubeapps-ci-dashboard-test --namespace kubeapps
exit 1
fi
info "Helm tests succeeded!"
fi
# Browser tests: run the Playwright suites inside the in-cluster "integration" pod.
cd "${ROOT_DIR}/integration"
kubectl apply -f manifests/executor.yaml
k8s_wait_for_deployment default integration
pod=$(kubectl get po -l run=integration -o jsonpath="{.items[0].metadata.name}")
## Copy config and latest tests
for f in *.js; do
kubectl cp "./${f}" "${pod}:/app/"
done
# Set tests to be run
# Playwright does not allow to ignore tests on command line, only in config file
testsToRun=("tests/main/")
# Skip the multicluster scenario for GKE
if [[ -z "${GKE_BRANCH-}" ]]; then
testsToRun+=("tests/multicluster/")
fi
# Join the selected suites into a single space-separated argument string.
testsArgs="$(printf "%s " "${testsToRun[@]}")"
kubectl cp ./tests "${pod}:/app/"
info "Copied tests to integration pod ${pod}"
## Create admin user
kubectl create serviceaccount kubeapps-operator -n kubeapps
kubectl create clusterrolebinding kubeapps-operator-admin --clusterrole=cluster-admin --serviceaccount kubeapps:kubeapps-operator
kubectl create clusterrolebinding kubeapps-repositories-write --clusterrole kubeapps:kubeapps:apprepositories-write --serviceaccount kubeapps:kubeapps-operator
## Create view user
kubectl create serviceaccount kubeapps-view -n kubeapps
kubectl create role view-secrets --verb=get,list,watch --resource=secrets
kubectl create rolebinding kubeapps-view-secret --role view-secrets --serviceaccount kubeapps:kubeapps-view
kubectl create clusterrolebinding kubeapps-view --clusterrole=view --serviceaccount kubeapps:kubeapps-view
## Create edit user
kubectl create serviceaccount kubeapps-edit -n kubeapps
kubectl create rolebinding kubeapps-edit -n kubeapps --clusterrole=edit --serviceaccount kubeapps:kubeapps-edit
kubectl create rolebinding kubeapps-edit -n default --clusterrole=edit --serviceaccount kubeapps:kubeapps-edit
kubectl create rolebinding kubeapps-repositories-read -n kubeapps --clusterrole kubeapps:kubeapps:apprepositories-read --serviceaccount kubeapps:kubeapps-edit
## Give the cluster some time to avoid issues like
## https://circleci.com/gh/kubeapps/kubeapps/16102
retry_while "kubectl get -n kubeapps serviceaccount kubeapps-operator -o name" "5" "1"
retry_while "kubectl get -n kubeapps serviceaccount kubeapps-view -o name" "5" "1"
retry_while "kubectl get -n kubeapps serviceaccount kubeapps-edit -o name" "5" "1"
## Retrieve tokens
# NOTE(review): this reads the auto-created serviceaccount token secret, which
# only exists on Kubernetes < 1.24 — confirm against the cluster version used in CI.
admin_token="$(kubectl get -n kubeapps secret "$(kubectl get -n kubeapps serviceaccount kubeapps-operator -o jsonpath='{.secrets[].name}')" -o go-template='{{.data.token | base64decode}}' && echo)"
view_token="$(kubectl get -n kubeapps secret "$(kubectl get -n kubeapps serviceaccount kubeapps-view -o jsonpath='{.secrets[].name}')" -o go-template='{{.data.token | base64decode}}' && echo)"
edit_token="$(kubectl get -n kubeapps secret "$(kubectl get -n kubeapps serviceaccount kubeapps-edit -o jsonpath='{.secrets[].name}')" -o go-template='{{.data.token | base64decode}}' && echo)"
info "Running main Integration tests without k8s API access..."
if ! kubectl exec -it "$pod" -- /bin/sh -c "CI_TIMEOUT=40 INTEGRATION_ENTRYPOINT=http://kubeapps-ci.kubeapps USE_MULTICLUSTER_OIDC_ENV=${USE_MULTICLUSTER_OIDC_ENV} ADMIN_TOKEN=${admin_token} VIEW_TOKEN=${view_token} EDIT_TOKEN=${edit_token} yarn test ${testsArgs}"; then
## Integration tests failed, get report screenshot
warn "PODS status on failure"
kubectl cp "${pod}:/app/reports" ./reports
exit 1
fi
info "Main integration tests succeeded!!"
## Upgrade and run operator test
# Operators are not supported in GKE 1.14 and flaky in 1.15, skipping test
if [[ -z "${GKE_BRANCH-}" ]] && [[ -n "${TEST_OPERATORS-}" ]]; then
installOLM "${OLM_VERSION}"
# Update Kubeapps settings to enable operators and hence proxying
# to k8s API server.
info "Installing latest Kubeapps chart available"
installOrUpgradeKubeapps "${ROOT_DIR}/chart/kubeapps" \
"--set" "featureFlags.operators=true"
info "Waiting for Kubeapps components to be ready (bitnami chart)..."
k8s_wait_for_deployment kubeapps kubeapps-ci
## Wait for the Operator catalog to be populated
info "Waiting for the OperatorHub Catalog to be ready ..."
retry_while isOperatorHubCatalogRunning 24
info "Running operator integration test with k8s API access..."
if ! kubectl exec -it "$pod" -- /bin/sh -c "CI_TIMEOUT=20 INTEGRATION_ENTRYPOINT=http://kubeapps-ci.kubeapps USE_MULTICLUSTER_OIDC_ENV=${USE_MULTICLUSTER_OIDC_ENV} ADMIN_TOKEN=${admin_token} VIEW_TOKEN=${view_token} EDIT_TOKEN=${edit_token} yarn test \"tests/operators/\""; then
## Integration tests failed, get report screenshot
warn "PODS status on failure"
kubectl cp "${pod}:/app/reports" ./reports
exit 1
fi
info "Operator integration tests (with k8s API access) succeeded!!"
fi
info "Integration tests succeeded!"
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase: copies vendored frameworks into
# the app bundle, strips invalid architectures and re-signs if required.
# NOTE(review): arrays and [[ ]] below are bash features although the shebang
# is /bin/sh; this works where /bin/sh is bash (e.g. macOS) — confirm elsewhere.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the platform being built.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: path (or basename) of the .framework to embed into the app bundle.
install_framework()
{
# Resolve the framework source: built products dir first, then by basename,
# finally the literal path.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow a symlinked framework to its real location before copying.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
# Locate the framework's executable; fall back to a bare dylib layout.
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
# $1: path to the .framework.dSYM bundle to copy alongside the app's symbols.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
# STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 = stripped/usable.
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Signs a framework with the provided identity
# $1: path of the binary/framework to sign.
code_sign_if_enabled() {
  # Skip when no identity is configured or signing is disabled.
  # [[ ... && ... ]] replaces the deprecated/undefined `[ ... -a ... ]` form
  # (and matches the [[ ]] style already used elsewhere in this script).
  if [[ -n "${EXPANDED_CODE_SIGN_IDENTITY}" && "${CODE_SIGNING_REQUIRED}" != "NO" && "${CODE_SIGNING_ALLOWED}" != "NO" ]]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
    if [[ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]]; then
      # Background the codesign invocation so multiple frameworks sign in
      # parallel; the `wait` at the end of the script joins them.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1: path of the Mach-O binary to strip in place.
# Sets STRIP_BINARY_RETVAL: 1 if the binary is usable, 0 if no arch matched.
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
# Remove every architecture that is not part of the current build.
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Embed the vendored frameworks for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/MXBaseUtils/MXBaseUtils.framework"
install_framework "${BUILT_PRODUCTS_DIR}/MXModuleConnector/MXModuleConnector.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Masonry/Masonry.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/MXBaseUtils/MXBaseUtils.framework"
install_framework "${BUILT_PRODUCTS_DIR}/MXModuleConnector/MXModuleConnector.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Masonry/Masonry.framework"
fi
# If parallel signing was enabled, codesign jobs were backgrounded; join them.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
from torch.utils.data import Dataset
import torch
import numpy as np
from skimage import exposure
from ..sequences import (WholeVolumeToSurfaceSequence, HCPRegressionSequence, get_metric_data,
WholeVolumeAutoEncoderSequence, WholeVolumeSegmentationSequence, WindowedAutoEncoderSequence,
SubjectPredictionSequence, fetch_data_for_point, WholeVolumeCiftiSupervisedRegressionSequence,
WholeVolumeSupervisedRegressionSequence)
from ..utils import nib_load_files
class WholeBrainCIFTI2DenseScalarDataset(WholeVolumeToSurfaceSequence, Dataset):
    """Torch Dataset adapter around ``WholeVolumeToSurfaceSequence``.

    Each item is a ``(volume, surface_metrics)`` pair of float tensors; the
    volume is returned channels-first.
    """

    def __init__(self, *args, batch_size=1, shuffle=False, **kwargs):
        super().__init__(*args, batch_size=batch_size, shuffle=shuffle, **kwargs)

    def __len__(self):
        return len(self.epoch_filenames)

    def __getitem__(self, idx):
        feature_filename, surface_filenames, metric_filenames, subject_id = self.epoch_filenames[idx]
        metric_images = nib_load_files(metric_filenames)
        volume = self.resample_input(feature_filename)
        target = self.get_metric_data(metric_images, subject_id)
        # channels-last (x, y, z, c) -> channels-first (c, x, y, z)
        x_tensor = torch.from_numpy(volume).float().permute(3, 0, 1, 2)
        y_tensor = torch.from_numpy(target).float()
        return x_tensor, y_tensor

    def get_metric_data(self, metrics, subject_id):
        # Flatten the (surface, metric) table into a single 1D target vector.
        return get_metric_data(metrics, self.metric_names, self.surface_names, subject_id).T.ravel()
class HCPRegressionDataset(HCPRegressionSequence, Dataset):
    """Torch Dataset adapter around ``HCPRegressionSequence``.

    One item corresponds to one subject; ``points_per_subject`` maps onto the
    underlying sequence's ``batch_size``.
    """

    def __init__(self, *args, points_per_subject=1, **kwargs):
        super().__init__(*args, batch_size=points_per_subject, **kwargs)

    def __len__(self):
        return len(self.epoch_filenames)

    def __getitem__(self, idx):
        features, targets = self.fetch_hcp_subject_batch(*self.epoch_filenames[idx])
        # move the channel axis from last to second: (b, ..., c) -> (b, c, ...)
        x_tensor = torch.from_numpy(np.moveaxis(np.asarray(features), -1, 1)).float()
        y_tensor = torch.from_numpy(np.asarray(targets)).float()
        return x_tensor, y_tensor
class HCPSubjectDataset(SubjectPredictionSequence):
    """Per-vertex prediction dataset for a single HCP subject.

    The batch size is always 1; a caller-supplied ``batch_size`` is ignored
    with a printed notice.
    """

    def __init__(self, *args, batch_size=None, **kwargs):
        if batch_size is not None:
            print("Ignoring the set batch_size")
        super().__init__(*args, batch_size=1, **kwargs)

    def __getitem__(self, idx):
        window_data = self.fetch_data_for_index(idx)
        # channels-last -> channels-first
        return torch.from_numpy(np.moveaxis(np.asarray(window_data), -1, 0)).float()

    def __len__(self):
        return len(self.vertices)

    def fetch_data_for_index(self, idx):
        # Extract the image window centred on the idx-th surface vertex.
        return fetch_data_for_point(self.vertices[idx], self.feature_image,
                                    window=self.window, flip=self.flip,
                                    spacing=self.spacing)
class AEDataset(WholeVolumeAutoEncoderSequence, Dataset):
    """Torch Dataset adapter around ``WholeVolumeAutoEncoderSequence``."""

    def __init__(self, *args, batch_size=1, shuffle=False, metric_names=None, **kwargs):
        super().__init__(*args, batch_size=batch_size, shuffle=shuffle,
                         metric_names=metric_names, **kwargs)

    def __len__(self):
        return len(self.epoch_filenames)

    def __getitem__(self, idx):
        inputs, targets = self.resample_input(self.epoch_filenames[idx])
        # channels-last (x, y, z, c) -> channels-first (c, x, y, z) for both
        x_tensor = torch.from_numpy(np.moveaxis(np.asarray(inputs), -1, 0)).float()
        y_tensor = torch.from_numpy(np.moveaxis(np.asarray(targets), -1, 0)).float()
        return x_tensor, y_tensor
class WholeVolumeSegmentationDataset(WholeVolumeSegmentationSequence, Dataset):
    """Torch Dataset adapter for whole-volume segmentation.

    Appends a histogram-equalized copy of every input channel, doubling the
    channel count (e.g. 4 modalities -> 8 input channels).
    """

    def __init__(self, *args, batch_size=1, shuffle=False, metric_names=None, **kwargs):
        super().__init__(*args, batch_size=batch_size, shuffle=shuffle,
                         metric_names=metric_names, **kwargs)

    def __len__(self):
        return len(self.epoch_filenames)

    def __getitem__(self, idx):
        # item is a 5-element list, e.g. for BraTS:
        # ([t1, t1ce, flair, t2 filenames], None, seg filename, None, subject id)
        item = self.epoch_filenames[idx]
        # channels-last arrays, e.g. x: (80, 96, 64, C), y: (80, 96, 64, L)
        x, y = self.resample_input(item)
        # Histogram-equalize each input channel and append the equalized copies
        # as extra channels. Works for any channel count (the original
        # hard-coded exactly 4 modalities and crashed otherwise).
        equalized = np.stack([exposure.equalize_hist(x[..., channel])
                              for channel in range(x.shape[-1])], axis=-1)
        x = np.concatenate([x, equalized], axis=-1)
        return (torch.from_numpy(np.moveaxis(np.copy(x), -1, 0)).float(),
                torch.from_numpy(np.moveaxis(np.copy(y), -1, 0)).byte())
class WholeVolumeSupervisedRegressionDataset(WholeVolumeSupervisedRegressionSequence, Dataset):
    """Torch Dataset adapter for whole-volume supervised regression."""

    def __init__(self, *args, batch_size=1, shuffle=False, **kwargs):
        super().__init__(*args, batch_size=batch_size, shuffle=shuffle, **kwargs)

    def __len__(self):
        return len(self.epoch_filenames)

    def __getitem__(self, idx):
        inputs, targets = self.resample_input(self.epoch_filenames[idx])

        def _to_tensor(array):
            # channels-last -> channels-first, as a float tensor
            return torch.from_numpy(np.moveaxis(np.asarray(array), -1, 0)).float()

        return _to_tensor(inputs), _to_tensor(targets)
class WholeVolumeCiftiSupervisedRegressionDataset(WholeVolumeCiftiSupervisedRegressionSequence,
                                                  WholeVolumeSupervisedRegressionDataset):
    # Combines the CIFTI supervised-regression sequence with the Dataset
    # adapter behavior; no additional methods are needed.
    pass
class WindowedAEDataset(WindowedAutoEncoderSequence, Dataset):
    """Torch Dataset adapter around ``WindowedAutoEncoderSequence``.

    ``points_per_subject`` maps onto the underlying sequence's ``batch_size``.
    """

    def __init__(self, *args, points_per_subject=1, **kwargs):
        super().__init__(*args, batch_size=points_per_subject, **kwargs)

    def __len__(self):
        return len(self.epoch_filenames)

    def __getitem__(self, idx):
        windows, targets = self.fetch_hcp_subject_batch(*self.epoch_filenames[idx])

        def _to_tensor(array):
            # move the channel axis from last to second: (b, ..., c) -> (b, c, ...)
            return torch.from_numpy(np.moveaxis(np.asarray(array), -1, 1)).float()

        return _to_tensor(windows), _to_tensor(targets)
|
import hashString from '@emotion/hash';
import unitless from '@emotion/unitless';
import memoize from '@emotion/memoize';
// Uppercase letters (and a leading "ms") that need a hyphen inserted when
// converting camelCase property names to kebab-case.
var hyphenateRegex = /[A-Z]|^ms/g;
// Matches serialized keyframes markers of the form _EMO_<name>_<styles>_EMO_.
var animationRegex = /_EMO_([^_]+?)_([^]*?)_EMO_/g;
// Memoized camelCase -> kebab-case conversion
// (e.g. "backgroundColor" -> "background-color").
var processStyleName = memoize(function (styleName) {
  return styleName.replace(hyphenateRegex, '-$&').toLowerCase();
});
// Normalizes a single CSS declaration value:
// - null/boolean values become '' (the declaration is dropped);
// - for animation/animationName strings, serialized keyframes embedded as
//   _EMO_<name>_<styles>_EMO_ are pushed onto the module-level `cursor`
//   list (declared outside this chunk) and only the name is kept;
// - bare non-zero numbers get 'px' appended unless the property is unitless
//   or a custom property.
var processStyleValue = function processStyleValue(key, value) {
  if (value == null || typeof value === 'boolean') {
    return '';
  }
  switch (key) {
    case 'animation':
    case 'animationName':
      {
        if (typeof value === 'string') {
          value = value.replace(animationRegex, function (match, p1, p2) {
            // Queue the keyframes styles for later insertion and replace the
            // marker with just the animation name.
            cursor = {
              name: p1,
              styles: p2,
              next: cursor
            };
            return p1;
          });
        }
      }
  }
  // key.charCodeAt(1) === 45 means the second char is '-', i.e. a custom
  // property like '--foo', which must never get a unit appended.
  if (unitless[key] !== 1 && key.charCodeAt(1) !== 45 && // custom properties
  typeof value === 'number' && value !== 0) {
    return value + 'px';
  }
  return value;
};
// In development builds, wrap processStyleValue with extra diagnostics:
// warn about unquoted `content` values and about kebab-case property names
// used in object styles (each key is warned about at most once).
if (process.env.NODE_ENV !== 'production') {
  var contentValuePattern = /(attr|calc|counters?|url)\(/;
  var contentValues = ['normal', 'none', 'counter', 'open-quote', 'close-quote', 'no-open-quote', 'no-close-quote', 'initial', 'inherit', 'unset'];
  var oldProcessStyleValue = processStyleValue;
  var msPattern = /^-ms-/;
  var hyphenPattern = /-(.)/g;
  // Remembers which kebab-case keys have already triggered a warning.
  var hyphenatedCache = {};
  processStyleValue = function processStyleValue(key, value) {
    if (key === 'content') {
      // `content` must be a keyword, a functional value (url(...), attr(...),
      // etc.) or a consistently-quoted string.
      if (typeof value !== 'string' || contentValues.indexOf(value) === -1 && !contentValuePattern.test(value) && (value.charAt(0) !== value.charAt(value.length - 1) || value.charAt(0) !== '"' && value.charAt(0) !== "'")) {
        console.error("You seem to be using a value for 'content' without quotes, try replacing it with `content: '\"" + value + "\"'`");
      }
    }
    var processed = oldProcessStyleValue(key, value);
    // charCodeAt(1) === 45: second char is '-', i.e. a css variable ('--x'),
    // which legitimately contains hyphens and must not be warned about.
    var isCssVariable = key.charCodeAt(1) === 45;
    if (processed !== '' && !isCssVariable && key.indexOf('-') !== -1 && hyphenatedCache[key] === undefined) {
      hyphenatedCache[key] = true;
      console.error("Using kebab-case for css properties in objects is not supported. Did you mean " + key.replace(msPattern, 'ms-').replace(hyphenPattern, function (str, char) {
        return char.toUpperCase();
      }) + "?");
    }
    return processed;
  };
}
var shouldWarnAboutInterpolatingClassNameFromCss = true;

// Flattens one interpolation (string, number, style object, array, function,
// SerializedStyles, keyframes, styled component, ...) into a CSS string
// fragment. Side effect: keyframes encountered along the way are prepended
// to the module-level `cursor` linked list.
function handleInterpolation(mergedProps, registered, interpolation, couldBeSelectorInterpolation) {
  if (interpolation == null) {
    return '';
  }

  // Styled components carry __emotion_styles; using one as a selector only
  // works with babel-plugin-emotion, so it is rejected in development.
  if (interpolation.__emotion_styles !== undefined) {
    if (process.env.NODE_ENV !== 'production' && interpolation.toString() === 'NO_COMPONENT_SELECTOR') {
      throw new Error('Component selectors can only be used in conjunction with babel-plugin-emotion.');
    }

    return interpolation;
  }

  switch (typeof interpolation) {
    case 'boolean':
      {
        return '';
      }

    case 'object':
      {
        // Keyframes object: register it on the cursor list, emit its name.
        if (interpolation.anim === 1) {
          cursor = {
            name: interpolation.name,
            styles: interpolation.styles,
            next: cursor
          };
          return interpolation.name;
        }

        // SerializedStyles object: splice its own keyframes list into ours,
        // then reuse its pre-serialized styles string.
        if (interpolation.styles !== undefined) {
          var next = interpolation.next;

          if (next !== undefined) {
            // not the most efficient thing ever but this is a pretty rare case
            // and there will be very few iterations of this generally
            while (next !== undefined) {
              cursor = {
                name: next.name,
                styles: next.styles,
                next: cursor
              };
              next = next.next;
            }
          }

          var styles = interpolation.styles;

          // In development a source-map comment may be appended; it is
          // stripped out again (and kept separately) in serializeStyles.
          if (process.env.NODE_ENV !== 'production' && interpolation.map !== undefined) {
            styles += interpolation.map;
          }

          return styles;
        }

        // Plain object/array literal of styles.
        return createStringFromObject(mergedProps, registered, interpolation);
      }

    case 'function':
      {
        // Function interpolations are only evaluated when props are known.
        // cursor is restored after the call; keyframes produced by the
        // result are re-registered when the result itself is handled.
        if (mergedProps !== undefined) {
          var previousCursor = cursor;
          var result = interpolation(mergedProps);
          cursor = previousCursor;
          return handleInterpolation(mergedProps, registered, result, couldBeSelectorInterpolation);
        } else if (process.env.NODE_ENV !== 'production') {
          console.error('Functions that are interpolated in css calls will be stringified.\n' + 'If you want to have a css call based on props, create a function that returns a css call like this\n' + 'let dynamicStyle = (props) => css`color: ${props.color}`\n' + 'It can be called directly with props or interpolated in a styled call like this\n' + "let SomeComponent = styled('div')`${dynamicStyle}`");
        }
      }
    // eslint-disable-next-line no-fallthrough

    default:
      {
        // Strings and numbers: look up registered class names so composed
        // css`` styles are inlined rather than referenced by class name.
        if (registered == null) {
          return interpolation;
        }

        var cached = registered[interpolation];

        if (process.env.NODE_ENV !== 'production' && couldBeSelectorInterpolation && shouldWarnAboutInterpolatingClassNameFromCss && cached !== undefined) {
          console.error('Interpolating a className from css`` is not recommended and will cause problems with composition.\n' + 'Interpolating a className from css`` will be completely unsupported in a future major version of Emotion');
          shouldWarnAboutInterpolatingClassNameFromCss = false;
        }

        return cached !== undefined && !couldBeSelectorInterpolation ? cached : interpolation;
      }
  }
}
// Serializes a plain style object (or array of interpolations) to CSS text.
// Values may be nested objects (selectors / at-rules), arrays of fallback
// values, or registered class names that get expanded in place.
function createStringFromObject(mergedProps, registered, obj) {
  var string = '';

  if (Array.isArray(obj)) {
    // Arrays are flattened by serializing every element in order.
    for (var i = 0; i < obj.length; i++) {
      string += handleInterpolation(mergedProps, registered, obj[i], false);
    }
  } else {
    for (var _key in obj) {
      var value = obj[_key];

      if (typeof value !== 'object') {
        // Simple declaration. A registered value means the "value" is a
        // class name produced by css``, so inline its styles under _key.
        if (registered != null && registered[value] !== undefined) {
          string += _key + "{" + registered[value] + "}";
        } else {
          string += processStyleName(_key) + ":" + processStyleValue(_key, value) + ";";
        }
      } else {
        if (_key === 'NO_COMPONENT_SELECTOR' && process.env.NODE_ENV !== 'production') {
          throw new Error('Component selectors can only be used in conjunction with babel-plugin-emotion.');
        }

        // Array of primitive fallback values for one property,
        // e.g. { display: ['flex', '-webkit-flex'] }.
        if (Array.isArray(value) && typeof value[0] === 'string' && (registered == null || registered[value[0]] === undefined)) {
          for (var _i = 0; _i < value.length; _i++) {
            string += processStyleName(_key) + ":" + processStyleValue(_key, value[_i]) + ";";
          }
        } else {
          // Nested rule: _key is a selector / at-rule wrapping more styles.
          string += _key + "{" + handleInterpolation(mergedProps, registered, value, false) + "}";
        }
      }
    }
  }

  return string;
}
// Extracts `label:name;` declarations so the label becomes part of the
// hash-based class name (helpful for debugging).
var labelPattern = /label:\s*([^\s;\n{]+)\s*;/g;
var sourceMapPattern;

if (process.env.NODE_ENV !== 'production') {
  sourceMapPattern = /\/\*#\ssourceMappingURL=data:application\/json;\S+\s+\*\//;
} // this is the cursor for keyframes
// keyframes are stored on the SerializedStyles object as a linked list

var cursor;

// Serializes a css`` / css({...}) call into a SerializedStyles object:
// { name, styles, next } (plus `map` in development, holding any source-map
// comment stripped out of `styles`).
//   args        - template strings array + interpolations, or plain values
//   registered  - cache mapping class names to their styles (may be null)
//   mergedProps - props made available to function interpolations
var serializeStyles = function serializeStyles(args, registered, mergedProps) {
  // Fast path: a single already-serialized argument is returned as-is.
  if (args.length === 1 && typeof args[0] === 'object' && args[0] !== null && args[0].styles !== undefined) {
    return args[0];
  }

  var stringMode = true;
  var styles = '';
  cursor = undefined;
  var strings = args[0];

  // Tagged template calls have a `raw` property on args[0]; otherwise the
  // arguments are plain values to interpolate.
  if (strings == null || strings.raw === undefined) {
    stringMode = false;
    styles += handleInterpolation(mergedProps, registered, strings, false);
  } else {
    styles += strings[0];
  } // we start at 1 since we've already handled the first arg

  for (var i = 1; i < args.length; i++) {
    // charCodeAt(...) === 46 checks for a trailing '.', which suggests the
    // interpolation may be being used as a class selector.
    styles += handleInterpolation(mergedProps, registered, args[i], styles.charCodeAt(styles.length - 1) === 46);

    if (stringMode) {
      styles += strings[i];
    }
  }

  var sourceMap;

  if (process.env.NODE_ENV !== 'production') {
    // Pull the source-map comment out of the styles and keep it separately.
    styles = styles.replace(sourceMapPattern, function (match) {
      sourceMap = match;
      return '';
    });
  } // using a global regex with .exec is stateful so lastIndex has to be reset each time

  labelPattern.lastIndex = 0;
  var identifierName = '';
  var match; // https://esbench.com/bench/5b809c2cf2949800a0f61fb5

  while ((match = labelPattern.exec(styles)) !== null) {
    identifierName += '-' + // $FlowFixMe we know it's not null
    match[1];
  }

  var name = hashString(styles) + identifierName;

  if (process.env.NODE_ENV !== 'production') {
    return {
      name: name,
      styles: styles,
      map: sourceMap,
      next: cursor
    };
  }

  return {
    name: name,
    styles: styles,
    next: cursor
  };
};

export { serializeStyles };
|
//给定两个字符串 a 和 b,寻找重复叠加字符串 a 的最小次数,使得字符串 b 成为叠加后的字符串 a 的子串,如果不存在则返回 -1。
//
// 注意:字符串 "abc" 重复叠加 0 次是 "",重复叠加 1 次是 "abc",重复叠加 2 次是 "abcabc"。
//
//
//
// 示例 1:
//
// 输入:a = "abcd", b = "cdabcdab"
//输出:3
//解释:a 重复叠加三遍后为 "abcdabcdabcd", 此时 b 是其子串。
//
//
// 示例 2:
//
// 输入:a = "a", b = "aa"
//输出:2
//
//
// 示例 3:
//
// 输入:a = "a", b = "a"
//输出:1
//
//
// 示例 4:
//
// 输入:a = "abc", b = "wxyz"
//输出:-1
//
//
//
//
// 提示:
//
//
// 1 <= a.length <= 10⁴
// 1 <= b.length <= 10⁴
// a 和 b 由小写英文字母组成
//
// Related Topics 字符串 字符串匹配 👍 194 👎 0
package algorithm_600
import (
"math"
"math/rand"
"strings"
"time"
)
// repeatedStringMatch returns the minimum number of times a must be
// repeated so that b is a substring of the repetition, or -1 when b
// never occurs.
func repeatedStringMatch(a string, b string) int {
	lenA, lenB := len(a), len(b)
	// Locate b inside a conceptually infinite repetition of a.
	start := strStr(a, b)
	switch {
	case start == -1:
		return -1
	case lenA-start >= lenB:
		// b fits entirely within a single copy of a.
		return 1
	default:
		// Copies needed to cover start+lenB characters, rounded up:
		// one copy for the prefix, plus ceil((lenB+start-lenA)/lenA) more.
		return (lenB+start-lenA-1)/lenA + 2
	}
}
// strStr returns the first index at which b occurs inside a conceptually
// infinite repetition of a, or -1 if it never occurs. Implemented as a
// single-pass Rabin-Karp rolling hash with randomized base and modulus.
//
// NOTE(review): a hash match is returned without a direct character
// comparison, so a collision could in principle produce a wrong index;
// the per-call randomized parameters make this unlikely — confirm the
// risk is acceptable for callers.
func strStr(a string, b string) int {
	n, m := len(a), len(b)
	if m == 0 {
		return 0
	}
	// kMod1 acts as the modulus, kMod2 as the base; both are drawn
	// randomly per call from [k1, 2*k1) and [k2, 2*k2) respectively.
	var k1 int = 1000000000 + 7
	var k2 int = 1337
	rand.Seed(time.Now().Unix())
	var kMod1 int64 = int64(rand.Intn(k1)) + int64(k1)
	var kMod2 int64 = int64(rand.Intn(k2)) + int64(k2)

	// Hash of the pattern b.
	var hash_b int64 = 0
	for i := 0; i < m; i++ {
		hash_b = (hash_b*kMod2 + int64(b[i])) % kMod1
	}

	// Pre-load the first m-1 characters of the repeated text and compute
	// extra = kMod2^(m-1) mod kMod1, the weight of the outgoing character.
	var hash_a int64 = 0
	var extra int64 = 1
	for i := 0; i < m-1; i++ {
		hash_a = (hash_a*kMod2 + int64(a[i%n])) % kMod1
		extra = (extra * kMod2) % kMod1
	}

	// Slide a window of length m over the repeated text; only the first n
	// starting positions are distinct, so stop once i-m+1 reaches n.
	for i := m - 1; (i - m + 1) < n; i++ {
		hash_a = (hash_a*kMod2 + int64(a[i%n])) % kMod1
		if hash_a == hash_b {
			return i - m + 1
		}
		// Remove the outgoing character and re-normalize into [0, kMod1).
		hash_a = (hash_a - extra*int64(a[(i-m+1)%n])) % kMod1
		hash_a = (hash_a + kMod1) % kMod1
	}
	return -1
}
// repeatedStringMatch0 is the brute-force variant: repeat a just enough
// times for the repetition to be at least as long as b, then check that
// repetition and the repetition plus one extra copy.
func repeatedStringMatch0(a string, b string) int {
	count := 1
	if len(a) < len(b) {
		count = int(math.Ceil(float64(len(b)) / float64(len(a))))
	}
	repeated := strings.Repeat(a, count)
	if strings.Contains(repeated, b) {
		return count
	}
	// One extra copy covers matches that straddle the final boundary.
	if strings.Contains(repeated+a, b) {
		return count + 1
	}
	return -1
}
// Using Java
/**
 * Utility methods for sorting an int array in place.
 * Replaces the previous hand-rolled O(n^2) bubble sorts with the JDK's
 * tuned dual-pivot quicksort (O(n log n)); results are identical.
 */
class SortData {

    /**
     * Sorts the given array in ascending order, in place.
     *
     * @param dataArray array to sort; throws NullPointerException if null
     *                  (same as the previous implementation, which read
     *                  dataArray.length)
     */
    public static void sortAscending(int[] dataArray) {
        java.util.Arrays.sort(dataArray);
    }

    /**
     * Sorts the given array in descending order, in place.
     *
     * @param dataArray array to sort; throws NullPointerException if null
     */
    public static void sortDescending(int[] dataArray) {
        // The JDK has no descending sort for primitive arrays; sorting
        // ascending and reversing in place avoids boxing to Integer.
        java.util.Arrays.sort(dataArray);
        for (int left = 0, right = dataArray.length - 1; left < right; left++, right--) {
            int tmp = dataArray[left];
            dataArray[left] = dataArray[right];
            dataArray[right] = tmp;
        }
    }
}
const jsondiffpatch = require('jsondiffpatch')
const chalk = require('chalk')
// jsondiffpatch instance configured for the change summaries printed below.
const differ = jsondiffpatch.create({
  objectHash: function (obj, index) {
    // try to find an id property, otherwise just use the index in the array
    return obj.$$diffId || obj.__id || obj.id || obj.name || '$$index:' + index
  },
  textDiff: {
    // default 60, minimum string length (left and right sides) to use text diff algorythm: google-diff-match-patch
    minLength: 200
  },
  propertyFilter: function (name, context) {
    // Ignore bookkeeping fields so they never show up as changes.
    return !['$$diffId', 'created', 'updated'].includes(name)
  },
  cloneDiffValues: true,
  arrays: {
    // default true, detect items moved inside the array (otherwise they will be registered as remove+add)
    detectMove: true,
    // default false, the value of items moved is not included in deltas
    includeValueOnMove: false
  }
})
// Walks a dotted property path into a jsondiffpatch delta, unwrapping
// "changed value" arrays ([oldValue, newValue]) along the way. Returns
// undefined once the path falls off the structure.
function getDeepWithDiff (object, property) {
  const segments = property.split('.')
  let current = object
  for (let i = 0; i < segments.length; i++) {
    if (typeof current !== 'object') {
      // Mirror the original reduce's implicit undefined and keep iterating
      // so the overall result is identical.
      current = undefined
      continue
    }
    const next = current[segments[i]]
    const isLast = i === segments.length - 1
    if (Array.isArray(next)) {
      if (isLast) {
        // [previous value, new array value] -> return the new array
        current = Array.isArray(next[1]) ? next[1] : next
      } else {
        // Intermediate change pair: prefer the new value, fall back to old
        current = next[1] || next[0]
      }
    } else {
      current = next
    }
  }
  return current
}
// Tallies created/updated/deleted entries for one property path of a
// jsondiffpatch delta.
function getDiffSummary (delta, property) {
  const summary = {
    created: 0,
    updated: 0,
    deleted: 0
  }
  const diff = getDeepWithDiff(delta, property)
  // A plain array means the whole collection is new: all items created.
  if (Array.isArray(diff)) {
    summary.created = diff.length
    return summary
  }
  for (const [key, entry] of Object.entries(diff)) {
    if (key === '_t') {
      continue // jsondiffpatch array-mode marker, not a change
    }
    if (key.startsWith('_')) {
      // "_<index>" entries describe removals/moves; [old, 0, 0] is a removal.
      if (Array.isArray(entry) && entry.length === 3 && entry[2] === 0) {
        summary.deleted++
      }
    } else if (Array.isArray(entry)) {
      // [newValue] (length 1) is an addition; other array shapes are
      // counted as neither, matching the original behavior.
      if (entry.length === 1) {
        summary.created++
      }
    } else {
      // A nested delta object means the item was modified in place.
      summary.updated++
    }
  }
  return summary
}
// Diffs two values and, when they differ, returns an object with two
// printers; returns undefined when there is no difference.
//   printOverview(property) - logs a colored created/updated/deleted
//                             summary for one dotted property path
//   printDiff()             - logs the full jsondiffpatch console diff
function printDiff (before, after) {
  const delta = differ.diff(before, after)
  if (delta !== undefined) {
    return {
      printOverview (property) {
        const changes = getDiffSummary(delta, property)
        const split = property.split('.')
        const propertyName = split[split.length - 1]
        console.log(`Summary of ${propertyName}:`)
        // Colorize each counter only when it is non-zero.
        let createdMessage = `Created: ${changes.created}`
        if (changes.created) {
          createdMessage = chalk.green(createdMessage)
        }
        console.log(createdMessage)
        let updatedMessage = `Updated: ${changes.updated}`
        if (changes.updated) {
          updatedMessage = chalk.yellow(updatedMessage)
        }
        console.log(updatedMessage)
        let deletedMessage = `Deleted: ${changes.deleted}`
        if (changes.deleted) {
          deletedMessage = chalk.red(deletedMessage)
        }
        console.log(deletedMessage)
        console.log()
      },
      printDiff () {
        jsondiffpatch.console.log(delta)
      }
    }
  }
}

module.exports = printDiff
|
<filename>src/api/services/AccountService.ts
import { Service } from 'typedi';
import { OrmRepository } from 'typeorm-typedi-extensions';
import { Account } from '../models/Account';
import { Logger, LoggerInterface } from '../../decorators/Logger';
import { AccountRepository } from '../repositories/AccountRepository';
/**
 * Application service for reading {@link Account} entities.
 * Thin wrapper over AccountRepository that adds logging; registered with
 * the typedi container via `@Service()`.
 */
@Service()
export class AccountService {

    constructor(
        @OrmRepository() private accountRepository: AccountRepository,
        @Logger(__filename) private log: LoggerInterface
    ) {}

    /** Returns all accounts. */
    public find(): Promise<Account[]> {
        this.log.info('Find all accounts');
        return this.accountRepository.find();
    }

    /**
     * Returns the account with the given id, or undefined when none exists.
     *
     * NOTE(review): passes `{ id }` directly to findOne — this is the
     * TypeORM 0.2.x signature; 0.3+ requires `{ where: { id } }`. Confirm
     * the project's TypeORM version.
     */
    public findOne(id: string): Promise<Account | undefined> {
        this.log.info('Find one account');
        return this.accountRepository.findOne({ id });
    }

}
|
#!/usr/bin/env bash
#
# Copyright (c) 2020 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Path to this script and derived repository locations.
SOURCE=${BASH_SOURCE[0]}
SOURCE_DIR=$(cd "$(dirname "$SOURCE")" >/dev/null 2>&1 && pwd)
REPO_DIR=$SOURCE_DIR/../../
TEST_DIR=$REPO_DIR/src/test_driver/linux-cirque

# Per-test log destination; defaults to a fresh temporary directory.
LOG_DIR=${LOG_DIR:-$(mktemp -d)}
GITHUB_ACTION_RUN=${GITHUB_ACTION_RUN:-"0"}

# The image build will clone its own ot-br-posix checkout due to limitations of git submodule.
# Using the same ot-br-posix version as chip
OPENTHREAD=$REPO_DIR/third_party/openthread/repo
# Commit hash of the openthread submodule; used as a cache key below.
OPENTHREAD_CHECKOUT=$(cd "$REPO_DIR" && git rev-parse :third_party/openthread/repo)

# Cached openthread simulation build tarball, and the stamp file recording
# which submodule revision it was built from (for invalidation).
CIRQUE_CACHE_PATH=${GITHUB_CACHE_PATH:-"/tmp/cirque-cache/"}
OT_SIMULATION_CACHE="$CIRQUE_CACHE_PATH/ot-simulation-cmake.tgz"
OT_SIMULATION_CACHE_STAMP_FILE="$CIRQUE_CACHE_PATH/ot-simulation.commit"

# Append test name here to add more tests for run_all_tests
CIRQUE_TESTS=(
    "EchoTest"
    "EchoOverTcpTest"
    "MobileDeviceTest"
    "InteractionModelTest"
)

# ANSI escape sequences used for colored PASS/FAIL/INFO output.
BOLD_GREEN_TEXT="\033[1;32m"
BOLD_YELLOW_TEXT="\033[1;33m"
BOLD_RED_TEXT="\033[1;31m"
RESET_COLOR="\033[0m"
# Starts the Cirque Flask REST service in the background; its log goes to
# the current test's log directory and its PID is kept in FLASK_PID.
function __cirquetest_start_flask() {
    echo 'Start Flask'
    cd "$REPO_DIR"/third_party/cirque/repo
    # When running the ManualTests, if Ctrl-C is send to the shell, it will stop flask as well.
    # This is not expected. Start a new session to prevent it from receiving signals
    setsid bash -c 'FLASK_APP=cirque/restservice/service.py \
        PATH="'"$PATH"'":"'"$REPO_DIR"'"/third_party/openthread/repo/build/simulation/examples/apps/ncp/ \
        python3 -m flask run >"'"$LOG_DIR"'"/"'"$CURRENT_TEST"'"/flask.log 2>&1' &
    FLASK_PID=$!
    echo "Flask running in backgroud with pid $FLASK_PID"
}
# Stops the background flask service started by __cirquetest_start_flask.
# The negative PID targets the whole process group created by setsid, so
# flask's children are terminated as well.
function __cirquetest_clean_flask() {
    echo "Cleanup Flask pid $FLASK_PID"
    kill -SIGTERM -"$FLASK_PID"
}
# Builds the openthread simulation apps from scratch and refreshes both the
# cache tarball and the commit stamp used for cache invalidation.
function __cirquetest_build_ot() {
    echo -e "[$BOLD_YELLOW_TEXT""INFO""$RESET_COLOR] Cache miss, build openthread simulation."
    script/cmake-build simulation -DOT_THREAD_VERSION=1.2 -DOT_MTD=OFF -DOT_FTD=OFF
    tar czf "$OT_SIMULATION_CACHE" build
    echo "$OPENTHREAD_CHECKOUT" >"$OT_SIMULATION_CACHE_STAMP_FILE"
}
# Restores the openthread simulation build from cache when the stamp file
# matches the current submodule commit; otherwise rebuilds from scratch.
function __cirquetest_build_ot_lazy() {
    pushd .
    cd "$REPO_DIR"/third_party/openthread/repo
    ([[ -f "$OT_SIMULATION_CACHE_STAMP_FILE" ]] &&
        [[ "$(cat "$OT_SIMULATION_CACHE_STAMP_FILE")" = "$OPENTHREAD_CHECKOUT" ]] &&
        [[ -f "$OT_SIMULATION_CACHE" ]] &&
        tar zxf "$OT_SIMULATION_CACHE") ||
        __cirquetest_build_ot
    popd
}
# Hashes this script file itself; folded into the cache key so that any
# change to the test driver invalidates cached artifacts.
function __cirquetest_self_hash() {
    shasum "$SOURCE" | cut -d ' ' -f 1
}
# Prints the composite cache key: the docker base image's own key, the
# openthread submodule commit, and a hash of this script.
function cirquetest_cachekey() {
    echo "$("$REPO_DIR"/integrations/docker/ci-only-images/chip-cirque-device-base/cachekey.sh).openthread.$OPENTHREAD_CHECKOUT.cirque_test.$(__cirquetest_self_hash)"
}
# Prints a fixed-length digest of the cache key, suitable for use as a CI
# cache identifier.
function cirquetest_cachekeyhash() {
    cirquetest_cachekey | shasum | awk '{ print $1 }'
}
# One-time environment setup: installs cirque and its requirements, builds
# the device base image, and restores/builds the openthread simulation.
function cirquetest_bootstrap() {
    set -ex
    cd "$REPO_DIR"/third_party/cirque/repo
    pip3 install pycodestyle==2.5.0 wheel
    make NO_GRPC=1 install -j
    "$REPO_DIR"/integrations/docker/ci-only-images/chip-cirque-device-base/build.sh
    __cirquetest_build_ot_lazy
    pip3 install -r requirements_nogrpc.txt
}
# Runs a single cirque test.
#   $1 - test name (executes $TEST_DIR/<name>.py); remaining args forwarded.
# Starts flask, runs the test, tears flask down, optionally prunes docker
# containers/networks on CI, and returns the test's exit code (also left
# in the global `exitcode`).
function cirquetest_run_test() {
    # Start Cirque flash server
    export CURRENT_TEST="$1"
    export DEVICE_LOG_DIR="$LOG_DIR/$CURRENT_TEST"/device_logs
    shift
    mkdir -p "$DEVICE_LOG_DIR"
    __cirquetest_start_flask
    # Give flask a moment to come up before the test talks to it.
    sleep 5
    "$TEST_DIR/$CURRENT_TEST.py" "$@"
    exitcode=$?
    __cirquetest_clean_flask

    # TODO: Do docker system prune, we cannot filter which container
    # is created by cirque now. This will be implemented later. Currently, only do this on CI
    # After test finished, the container is perserved and networks will not be deleted
    # This is useful when running tests on local workstation, but not for CI.
    if [[ "x$CLEANUP_DOCKER_FOR_CI" = "x1" ]]; then
        echo "Do docker container and network prune"
        # TODO: Filter cirque containers ?
        # Skip the blanket "stop everything" when a docker.sock bind mount is
        # visible (i.e. we are inside a container sharing the host daemon).
        if ! grep docker.sock /proc/1/mountinfo; then
            docker ps -aq | xargs docker stop >/dev/null 2>&1
        fi
        docker container prune -f >/dev/null 2>&1
        docker network prune -f >/dev/null 2>&1
    fi
    echo "Test log can be found at $DEVICE_LOG_DIR"
    return "$exitcode"
}
# Runs every test in CIRQUE_TESTS, logging each to $LOG_DIR/<name>.log.
# Returns 0 only if all tests pass. The $exitcode printed on failure is
# the global set by cirquetest_run_test (which runs in this shell).
function cirquetest_run_all_tests() {
    # shellharden requires quotes around variables, which will break for-each loops
    # This is the workaround
    echo "Logs will be stored at $LOG_DIR"
    test_pass=1
    mkdir -p "$LOG_DIR"
    for test_name in "${CIRQUE_TESTS[@]}"; do
        echo "[ RUN] $test_name"
        if cirquetest_run_test "$test_name" >"$LOG_DIR/$test_name.log" 2>&1; then
            echo -e "[$BOLD_GREEN_TEXT""PASS""$RESET_COLOR] $test_name"
        else
            echo -e "[$BOLD_RED_TEXT""FAIL""$RESET_COLOR] $test_name (Exitcode: $exitcode)"
            test_pass=0
        fi
    done

    if [[ "x$GITHUB_ACTION_RUN" = "x1" ]]; then
        echo -e "[$BOLD_YELLOW_TEXT""INFO""$RESET_COLOR] Logs will be uploaded to artifacts."
    fi

    if ((test_pass)); then
        echo -e "[$BOLD_GREEN_TEXT""PASS""$RESET_COLOR] Test finished, test log can be found at $LOG_DIR"
        return 0
    else
        echo -e "[$BOLD_RED_TEXT""FAIL""$RESET_COLOR] Test failed, test log can be found at $LOG_DIR"
        return 1
    fi
}
# Dispatch: the first argument selects a cirquetest_* function; remaining
# arguments are forwarded to it. Bash exits 127 for an undefined function,
# which is reported here as an unknown command.
subcommand=$1
shift

case $subcommand in
    *)
        cirquetest_"$subcommand" "$@"
        exitcode=$?
        if ((exitcode == 127)); then
            echo "Unknown command: $subcommand" >&2
        fi
        exit "$exitcode"
        ;;
esac
|
/*
Copyright (c) 2012-2017 <NAME> <<EMAIL>>
This file is part of SimpleScreenRecorder.
SimpleScreenRecorder is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SimpleScreenRecorder is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SimpleScreenRecorder. If not, see <http://www.gnu.org/licenses/>.
*/
#include "PageOutput.h"
#include "Dialogs.h"
#include "EnumStrings.h"
#include "HiddenScrollArea.h"
#include "Icons.h"
#include "Logger.h"
#include "Main.h"
#include "MainWindow.h"
#include "PageInput.h"
#include "AVWrapper.h"
#include "VideoEncoder.h"
#include "AudioEncoder.h"
// String forms of the container/codec/preset enums, used by the
// ENUMSTRINGS mechanism (EnumStrings.h) when (de)serializing settings
// and output profiles.
ENUMSTRINGS(PageOutput::enum_container) = {
	{PageOutput::CONTAINER_MKV, "mkv"},
	{PageOutput::CONTAINER_MP4, "mp4"},
	{PageOutput::CONTAINER_WEBM, "webm"},
	{PageOutput::CONTAINER_OGG, "ogg"},
	{PageOutput::CONTAINER_OTHER, "other"},
};

ENUMSTRINGS(PageOutput::enum_video_codec) = {
	{PageOutput::VIDEO_CODEC_H264, "h264"},
	{PageOutput::VIDEO_CODEC_VP8, "vp8"},
	{PageOutput::VIDEO_CODEC_THEORA, "theora"},
	{PageOutput::VIDEO_CODEC_OTHER, "other"},
};

ENUMSTRINGS(PageOutput::enum_audio_codec) = {
	{PageOutput::AUDIO_CODEC_VORBIS, "vorbis"},
	{PageOutput::AUDIO_CODEC_MP3, "mp3"},
	{PageOutput::AUDIO_CODEC_AAC, "aac"},
	{PageOutput::AUDIO_CODEC_UNCOMPRESSED, "uncompressed"},
	{PageOutput::AUDIO_CODEC_OTHER, "other"},
};

// Preset names match libx264's own preset identifiers.
ENUMSTRINGS(PageOutput::enum_h264_preset) = {
	{PageOutput::H264_PRESET_ULTRAFAST, "ultrafast"},
	{PageOutput::H264_PRESET_SUPERFAST, "superfast"},
	{PageOutput::H264_PRESET_VERYFAST, "veryfast"},
	{PageOutput::H264_PRESET_FASTER, "faster"},
	{PageOutput::H264_PRESET_FAST, "fast"},
	{PageOutput::H264_PRESET_MEDIUM, "medium"},
	{PageOutput::H264_PRESET_SLOW, "slow"},
	{PageOutput::H264_PRESET_SLOWER, "slower"},
	{PageOutput::H264_PRESET_VERYSLOW, "veryslow"},
	{PageOutput::H264_PRESET_PLACEBO, "placebo"},
};
// Returns true when 'suffix' matches a container's suffix list: either
// both are empty, or the list contains the suffix (case-insensitive).
static bool MatchSuffix(const QString& suffix, const QStringList& suffixes) {
	if(suffix.isEmpty() && suffixes.isEmpty())
		return true;
	return suffixes.contains(suffix, Qt::CaseInsensitive);
}
PageOutput::PageOutput(MainWindow* main_window)
: QWidget(main_window->centralWidget()) {
m_main_window = main_window;
m_old_container = (enum_container) 0;
m_old_container_av = 0;
// main codecs
// (initializer lists should use explicit types for Clang)
m_containers = {
ContainerData({"Matroska (MKV)", "matroska", QStringList({"mkv"}), tr("%1 files", "This appears in the file dialog, e.g. 'MP4 files'").arg("Matroska") + " (*.mkv)",
{VIDEO_CODEC_H264, VIDEO_CODEC_VP8, VIDEO_CODEC_THEORA},
{AUDIO_CODEC_VORBIS, AUDIO_CODEC_MP3, AUDIO_CODEC_AAC, AUDIO_CODEC_UNCOMPRESSED}}),
ContainerData({"MP4", "mp4", QStringList({"mp4"}), tr("%1 files", "This appears in the file dialog, e.g. 'MP4 files'").arg("MP4") + " (*.mp4)",
{VIDEO_CODEC_H264},
{AUDIO_CODEC_VORBIS, AUDIO_CODEC_MP3, AUDIO_CODEC_AAC}}),
ContainerData({"WebM", "webm", QStringList({"webm"}), tr("%1 files", "This appears in the file dialog, e.g. 'MP4 files'").arg("WebM") + " (*.webm)",
{VIDEO_CODEC_VP8},
{AUDIO_CODEC_VORBIS}}),
ContainerData({"OGG", "ogg", QStringList({"ogg"}), tr("%1 files", "This appears in the file dialog, e.g. 'MP4 files'").arg("OGG") + " (*.ogg)",
{VIDEO_CODEC_THEORA},
{AUDIO_CODEC_VORBIS}}),
ContainerData({tr("Other..."), "other", QStringList(), "", std::set<enum_video_codec>({}), std::set<enum_audio_codec>({})}),
};
m_video_codecs = {
{"H.264" , "libx264" },
{"VP8" , "libvpx" },
{"Theora" , "libtheora"},
{tr("Other..."), "other" },
};
m_audio_codecs = {
{"Vorbis" , "libvorbis" },
{"MP3" , "libmp3lame" },
{"AAC" , "libvo_aacenc"},
{tr("Uncompressed"), "pcm_s16le" },
{tr("Other...") , "other" },
};
// alternative aac codec
if(!AVCodecIsInstalled(m_audio_codecs[AUDIO_CODEC_AAC].avname)) {
m_audio_codecs[AUDIO_CODEC_AAC].avname = "aac";
}
// load AV container list
m_containers_av.clear();
for(AVOutputFormat *format = av_oformat_next(NULL); format != NULL; format = av_oformat_next(format)) {
if(format->video_codec == AV_CODEC_ID_NONE)
continue;
ContainerData c;
c.name = format->long_name;
c.avname = format->name;
c.suffixes = QString(format->extensions).split(',', QString::SkipEmptyParts);
if(c.suffixes.isEmpty()) {
c.filter = "";
} else {
c.filter = tr("%1 files", "This appears in the file dialog, e.g. 'MP4 files'").arg(c.avname) + " (*." + c.suffixes[0];
for(int i = 1; i < c.suffixes.size(); ++i) {
c.suffixes[i] = c.suffixes[i].trimmed(); // needed because libav/ffmpeg isn't very consistent when they say 'comma-separated'
c.filter += " *." + c.suffixes[i];
}
c.filter += ")";
}
m_containers_av.push_back(c);
}
std::sort(m_containers_av.begin(), m_containers_av.end());
// load AV codec list
m_video_codecs_av.clear();
m_audio_codecs_av.clear();
for(AVCodec *codec = av_codec_next(NULL); codec != NULL; codec = av_codec_next(codec)) {
if(!av_codec_is_encoder(codec))
continue;
if(codec->type == AVMEDIA_TYPE_VIDEO && VideoEncoder::AVCodecIsSupported(codec->name)) {
VideoCodecData c;
c.name = codec->long_name;
c.avname = codec->name;
m_video_codecs_av.push_back(c);
}
if(codec->type == AVMEDIA_TYPE_AUDIO && AudioEncoder::AVCodecIsSupported(codec->name)) {
AudioCodecData c;
c.name = codec->long_name;
c.avname = codec->name;
m_audio_codecs_av.push_back(c);
}
}
std::sort(m_video_codecs_av.begin(), m_video_codecs_av.end());
std::sort(m_audio_codecs_av.begin(), m_audio_codecs_av.end());
if(m_containers_av.empty()) {
Logger::LogError("[PageOutput::PageOutput] " + tr("Error: Could not find any suitable container in libavformat!"));
throw LibavException();
}
if(m_video_codecs_av.empty()) {
Logger::LogError("[PageOutput::PageOutput] " + tr("Error: Could not find any suitable video codec in libavcodec!"));
throw LibavException();
}
if(m_audio_codecs_av.empty()) {
Logger::LogError("[PageOutput::PageOutput] " + tr("Error: Could not find any suitable audio codec in libavcodec!"));
throw LibavException();
}
HiddenScrollArea *scrollarea = new HiddenScrollArea(this);
QWidget *scrollarea_contents = new QWidget(scrollarea);
scrollarea->setWidget(scrollarea_contents);
{
m_profile_box = new ProfileBox(tr("Output profile"), scrollarea_contents, "output-profiles", &LoadProfileSettingsCallback, &SaveProfileSettingsCallback, this);
QGroupBox *groupbox_file = new QGroupBox(tr("File"), scrollarea_contents);
{
QLabel *label_file = new QLabel(tr("Save as:"), groupbox_file);
m_lineedit_file = new QLineEdit(groupbox_file);
m_lineedit_file->setToolTip(tr("The recording will be saved to this location."));
QPushButton *button_browse = new QPushButton(tr("Browse..."), groupbox_file);
m_checkbox_separate_files = new QCheckBox(tr("Separate file per segment"), groupbox_file);
m_checkbox_separate_files->setToolTip(tr("If checked, a separate video file will be created every time you pause and resume the recording."
"If unchecked, all recorded segments will be combined into a single video file."));
m_checkbox_add_timestamp = new QCheckBox(tr("Add timestamp"), groupbox_file);
m_checkbox_add_timestamp->setToolTip(tr("If checked, the current date and time will be appended to the file name automatically.\n"
"If the original file name is 'test.mkv', the video will be saved as 'test-YYYY-MM-DD_HH.MM.SS.mkv'."));
QLabel *label_container = new QLabel(tr("Container:"), groupbox_file);
m_combobox_container = new QComboBox(groupbox_file);
for(unsigned int i = 0; i < CONTAINER_COUNT; ++i) {
QString name = "\u200e" + m_containers[i].name + "\u200e";
if(i != CONTAINER_OTHER && !AVFormatIsInstalled(m_containers[i].avname))
name += " \u200e" + tr("(not installed)") + "\u200e";
m_combobox_container->addItem(name);
}
m_combobox_container->setToolTip(tr("The container (file format) that will be used to save the recording.\n"
"Note that not all codecs are supported by all containers, and that not all media players can read all file formats.\n"
"- Matroska (MKV) supports all the codecs, but is less well-known.\n"
"- MP4 is the most well-known format and will play on almost any modern media player, but supports only H.264 video\n"
" (and many media players only support AAC audio).\n"
"- WebM is intended for embedding video into websites (with the HTML5 <video> tag). The format was created by Google.\n"
" WebM is supported by default in Firefox, Chrome and Opera, and plugins are available for Internet Explorer and Safari.\n"
" It supports only VP8 and Vorbis.\n"
"- OGG supports only Theora and Vorbis."));
m_label_container_av = new QLabel(tr("Container name:"), groupbox_file);
m_combobox_container_av = new QComboBox(groupbox_file);
for(unsigned int i = 0; i < m_containers_av.size(); ++i) {
ContainerData &c = m_containers_av[i];
m_combobox_container_av->addItem(c.avname);
}
m_combobox_container_av->setToolTip(tr("For advanced users. You can use any libav/ffmpeg format, but many of them are not useful or may not work."));
connect(m_combobox_container, SIGNAL(activated(int)), this, SLOT(OnUpdateSuffixAndContainerFields()));
connect(m_combobox_container_av, SIGNAL(activated(int)), this, SLOT(OnUpdateSuffixAndContainerFields()));
connect(button_browse, SIGNAL(clicked()), this, SLOT(OnBrowse()));
QGridLayout *layout = new QGridLayout(groupbox_file);
layout->addWidget(label_file, 0, 0);
layout->addWidget(m_lineedit_file, 0, 1);
layout->addWidget(button_browse, 0, 2);
{
QHBoxLayout *layout2 = new QHBoxLayout();
layout->addLayout(layout2, 1, 0, 1, 3);
layout2->addWidget(m_checkbox_separate_files);
layout2->addWidget(m_checkbox_add_timestamp);
}
layout->addWidget(label_container, 2, 0);
layout->addWidget(m_combobox_container, 2, 1, 1, 2);
layout->addWidget(m_label_container_av, 3, 0);
layout->addWidget(m_combobox_container_av, 3, 1, 1, 2);
}
QGroupBox *groupbox_video = new QGroupBox(tr("Video"), scrollarea_contents);
{
QLabel *label_video_codec = new QLabel(tr("Codec:"), groupbox_video);
m_combobox_video_codec = new QComboBox(groupbox_video);
for(unsigned int i = 0; i < VIDEO_CODEC_COUNT; ++i) {
m_combobox_video_codec->addItem(m_video_codecs[i].name);
}
m_combobox_video_codec->setToolTip(tr("The codec that will be used to compress the video stream.\n"
"- H.264 (libx264) is by far the best codec - high quality and very fast.\n"
"- VP8 (libvpx) is quite good but also quite slow.\n"
"- Theora (libtheora) isn't really recommended because the quality isn't very good."));
m_label_video_codec_av = new QLabel(tr("Codec name:"), groupbox_video);
m_combobox_video_codec_av = new QComboBox(groupbox_video);
for(unsigned int i = 0; i < m_video_codecs_av.size(); ++i) {
VideoCodecData &c = m_video_codecs_av[i];
m_combobox_video_codec_av->addItem(c.avname);
}
m_combobox_video_codec_av->setToolTip(tr("For advanced users. You can use any libav/ffmpeg video codec, but many of them are not useful or may not work."));
m_label_video_kbit_rate = new QLabel(tr("Bit rate (in kbit/s):"), groupbox_video);
m_lineedit_video_kbit_rate = new QLineEdit(groupbox_video);
m_lineedit_video_kbit_rate->setToolTip(tr("The video bit rate (in kilobit per second). A higher value means a higher quality."
"\nIf you have no idea where to start, try 5000 and change it if needed."));
m_label_h264_crf = new QLabel(tr("Constant rate factor:", "libx264 setting: don't translate this unless you can come up with something sensible"), groupbox_video);
m_slider_h264_crf = new QSlider(Qt::Horizontal, groupbox_video);
m_slider_h264_crf->setRange(0, 51);
m_slider_h264_crf->setSingleStep(1);
m_slider_h264_crf->setPageStep(5);
m_slider_h264_crf->setToolTip(tr("This setting changes the video quality. A lower value means a higher quality.\n"
"The allowed range is 0-51 (0 means lossless, the default is 23)."));
m_label_h264_crf_value = new QLabel(groupbox_video);
m_label_h264_crf_value->setNum(m_slider_h264_crf->value());
m_label_h264_crf_value->setAlignment(Qt::AlignRight | Qt::AlignVCenter);
m_label_h264_crf_value->setMinimumWidth(QFontMetrics(m_label_h264_crf_value->font()).width("99") + 2);
m_label_h264_preset = new QLabel(tr("Preset:", "libx264 setting: don't translate this unless you can come up with something sensible"), groupbox_video);
m_combobox_h264_preset = new QComboBox(groupbox_video);
for(unsigned int i = 0; i < H264_PRESET_COUNT; ++i) {
m_combobox_h264_preset->addItem(EnumToString((enum_h264_preset) i));
}
m_combobox_h264_preset->setToolTip(tr("The encoding speed. A higher speed uses less CPU (making higher recording frame rates possible),\n"
"but results in larger files. The quality shouldn't be affected too much."));
m_label_vp8_cpu_used = new QLabel(tr("CPU used:", "libvpx setting: don't translate this unless you can come up with something sensible"), groupbox_video);
m_combobox_vp8_cpu_used = new QComboBox(groupbox_video);
m_combobox_vp8_cpu_used->addItem("5 (" + tr("fastest") + ")");
m_combobox_vp8_cpu_used->addItem("4");
m_combobox_vp8_cpu_used->addItem("3");
m_combobox_vp8_cpu_used->addItem("2");
m_combobox_vp8_cpu_used->addItem("1");
m_combobox_vp8_cpu_used->addItem("0 (" + tr("slowest") + ")");
m_combobox_vp8_cpu_used->setToolTip(tr("The encoding speed. A higher value uses *less* CPU time. (I didn't choose the name, this is the name\n"
"used by the VP8 encoder). Higher values result in lower quality video, unless you increase the bit rate too."));
m_label_video_options = new QLabel(tr("Custom options:"), groupbox_video);
m_lineedit_video_options = new QLineEdit(groupbox_video);
m_lineedit_video_options->setToolTip(tr("Custom codec options separated by commas (e.g. option1=value1,option2=value2,option3=value3)"));
m_checkbox_video_allow_frame_skipping = new QCheckBox(tr("Allow frame skipping"), groupbox_video);
m_checkbox_video_allow_frame_skipping->setToolTip(tr("If checked, the video encoder will be allowed to skip frames if the input frame rate is\n"
"lower than the output frame rate. If not checked, input frames will be duplicated to fill the holes.\n"
"This increases the file size and CPU usage, but reduces the latency for live streams in some cases.\n"
"It shouldn't affect the appearance of the video."));
connect(m_combobox_video_codec, SIGNAL(activated(int)), this, SLOT(OnUpdateVideoCodecFields()));
connect(m_slider_h264_crf, SIGNAL(valueChanged(int)), m_label_h264_crf_value, SLOT(setNum(int)));
QGridLayout *layout = new QGridLayout(groupbox_video);
layout->addWidget(label_video_codec, 0, 0);
layout->addWidget(m_combobox_video_codec, 0, 1, 1, 2);
layout->addWidget(m_label_video_codec_av, 1, 0);
layout->addWidget(m_combobox_video_codec_av, 1, 1, 1, 2);
layout->addWidget(m_label_video_kbit_rate, 2, 0);
layout->addWidget(m_lineedit_video_kbit_rate, 2, 1, 1, 2);
layout->addWidget(m_label_h264_crf, 3, 0);
layout->addWidget(m_slider_h264_crf, 3, 1);
layout->addWidget(m_label_h264_crf_value, 3, 2);
layout->addWidget(m_label_h264_preset, 4, 0);
layout->addWidget(m_combobox_h264_preset, 4, 1, 1, 2);
layout->addWidget(m_label_vp8_cpu_used, 5, 0);
layout->addWidget(m_combobox_vp8_cpu_used, 5, 1, 1, 2);
layout->addWidget(m_label_video_options, 6, 0);
layout->addWidget(m_lineedit_video_options, 6, 1, 1, 2);
layout->addWidget(m_checkbox_video_allow_frame_skipping, 7, 0, 1, 3);
}
m_groupbox_audio = new QGroupBox(tr("Audio"), scrollarea_contents);
{
QLabel *label_audio_codec = new QLabel(tr("Codec:"), m_groupbox_audio);
m_combobox_audio_codec = new QComboBox(m_groupbox_audio);
for(unsigned int i = 0; i < AUDIO_CODEC_COUNT; ++i) {
m_combobox_audio_codec->addItem(m_audio_codecs[i].name);
}
m_combobox_audio_codec->setToolTip(tr("The codec that will be used to compress the audio stream. You shouldn't worry too much about\n"
"this, because the size of the audio data is usually negligible compared to the size of the video data.\n"
"And if you're only recording your own voice (i.e. no music), the quality won't matter that much anyway.\n"
"- Vorbis (libvorbis) is great, this is the recommended codec.\n"
"- MP3 (libmp3lame) is reasonably good.\n"
"- AAC is a good codec, but the implementations used here (libvo_aacenc or the experimental ffmpeg aac encoder)\n"
" are pretty bad. Only use it if you have no other choice.\n"
"- Uncompressed will simply store the sound data without compressing it. The file will be quite large, but it's very fast."));
m_label_audio_codec_av = new QLabel(tr("Codec name:"), m_groupbox_audio);
m_combobox_audio_codec_av = new QComboBox(m_groupbox_audio);
for(unsigned int i = 0; i < m_audio_codecs_av.size(); ++i) {
AudioCodecData &c = m_audio_codecs_av[i];
m_combobox_audio_codec_av->addItem(c.avname);
}
m_combobox_audio_codec_av->setToolTip(tr("For advanced users. You can use any libav/ffmpeg audio codec, but many of them are not useful or may not work."));
m_label_audio_kbit_rate = new QLabel(tr("Bit rate (in kbit/s):"), m_groupbox_audio);
m_lineedit_audio_kbit_rate = new QLineEdit(m_groupbox_audio);
m_lineedit_audio_kbit_rate->setToolTip(tr("The audio bit rate (in kilobit per second). A higher value means a higher quality. The typical value is 128."));
m_label_audio_options = new QLabel(tr("Custom options:"), m_groupbox_audio);
m_lineedit_audio_options = new QLineEdit(m_groupbox_audio);
m_lineedit_audio_options->setToolTip(tr("Custom codec options separated by commas (e.g. option1=value1,option2=value2,option3=value3)"));
connect(m_combobox_audio_codec, SIGNAL(activated(int)), this, SLOT(OnUpdateAudioCodecFields()));
QGridLayout *layout = new QGridLayout(m_groupbox_audio);
layout->addWidget(label_audio_codec, 0, 0);
layout->addWidget(m_combobox_audio_codec, 0, 1);
layout->addWidget(m_label_audio_codec_av, 1, 0);
layout->addWidget(m_combobox_audio_codec_av, 1, 1);
layout->addWidget(m_label_audio_kbit_rate, 2, 0);
layout->addWidget(m_lineedit_audio_kbit_rate, 2, 1);
layout->addWidget(m_label_audio_options, 3, 0);
layout->addWidget(m_lineedit_audio_options, 3, 1);
}
QVBoxLayout *layout = new QVBoxLayout(scrollarea_contents);
layout->addWidget(m_profile_box);
layout->addWidget(groupbox_file);
layout->addWidget(groupbox_video);
layout->addWidget(m_groupbox_audio);
layout->addStretch();
}
QPushButton *button_back = new QPushButton(g_icon_go_previous, tr("Back"), this);
QPushButton *button_continue = new QPushButton(g_icon_go_next, tr("Continue"), this);
connect(button_back, SIGNAL(clicked()), m_main_window, SLOT(GoPageInput()));
connect(button_continue, SIGNAL(clicked()), this, SLOT(OnContinue()));
QVBoxLayout *layout = new QVBoxLayout(this);
layout->setContentsMargins(0, 0, 0, 0);
layout->addWidget(scrollarea);
{
QHBoxLayout *layout2 = new QHBoxLayout();
layout->addLayout(layout2);
layout2->addSpacing(style()->pixelMetric(QStyle::PM_LayoutLeftMargin));
layout2->addWidget(button_back);
layout2->addWidget(button_continue);
layout2->addSpacing(style()->pixelMetric(QStyle::PM_LayoutRightMargin));
}
layout->addSpacing(style()->pixelMetric(QStyle::PM_LayoutBottomMargin));
// temporary settings to calculate the worst-case size
SetContainer(CONTAINER_OTHER);
SetVideoCodec(VIDEO_CODEC_OTHER);
SetAudioCodec(AUDIO_CODEC_OTHER);
OnUpdateContainerFields();
OnUpdateVideoCodecFields();
OnUpdateAudioCodecFields();
}
// Restores the output page from saved settings: selects the last-used
// profile, then loads the per-profile output settings on top of it.
void PageOutput::LoadSettings(QSettings* settings) {
	SetProfile(m_profile_box->FindProfile(settings->value("output/profile", QString()).toString()));
	LoadProfileSettings(settings);
}
// Persists the currently selected profile name, then the per-profile
// output settings.
void PageOutput::SaveSettings(QSettings* settings) {
	settings->setValue("output/profile", m_profile_box->GetProfileName());
	SaveProfileSettings(settings);
}
// Static trampoline used by the profile box: recovers the PageOutput
// instance from 'userdata' and forwards to LoadProfileSettings.
void PageOutput::LoadProfileSettingsCallback(QSettings* settings, void* userdata) {
	static_cast<PageOutput*>(userdata)->LoadProfileSettings(settings);
}
// Static trampoline used by the profile box: recovers the PageOutput
// instance from 'userdata' and forwards to SaveProfileSettings.
void PageOutput::SaveProfileSettingsCallback(QSettings* settings, void* userdata) {
	static_cast<PageOutput*>(userdata)->SaveProfileSettings(settings);
}
// Loads all per-profile output settings. Missing keys fall back to computed
// defaults: the first installed container, the first installed codecs that
// the default container supports, and an output file placed in the user's
// videos (or documents, or home) directory.
void PageOutput::LoadProfileSettings(QSettings* settings) {

	// choose default container: first installed format
	enum_container default_container = (enum_container) 0;
	for(unsigned int i = 0; i < CONTAINER_OTHER; ++i) {
		if(AVFormatIsInstalled(m_containers[i].avname)) {
			default_container = (enum_container) i;
			break;
		}
	}

	// choose default video codec: first installed codec that the default container supports
	enum_video_codec default_video_codec = (enum_video_codec) 0;
	for(unsigned int i = 0; i < VIDEO_CODEC_OTHER; ++i) {
		if(AVCodecIsInstalled(m_video_codecs[i].avname) && m_containers[default_container].supported_video_codecs.count((enum_video_codec) i)) {
			default_video_codec = (enum_video_codec) i;
			break;
		}
	}

	// choose default audio codec: first installed codec that the default container supports
	// BUG FIX: the loop bound was VIDEO_CODEC_OTHER (copy-pasted from the video
	// loop above); it must be AUDIO_CODEC_OTHER since it indexes m_audio_codecs.
	enum_audio_codec default_audio_codec = (enum_audio_codec) 0;
	for(unsigned int i = 0; i < AUDIO_CODEC_OTHER; ++i) {
		if(AVCodecIsInstalled(m_audio_codecs[i].avname) && m_containers[default_container].supported_audio_codecs.count((enum_audio_codec) i)) {
			default_audio_codec = (enum_audio_codec) i;
			break;
		}
	}

	// choose default file name, preferring the per-platform "videos" location
#if QT_VERSION < QT_VERSION_CHECK(5, 0, 0)
	QString dir_videos = QDesktopServices::storageLocation(QDesktopServices::MoviesLocation);
	QString dir_documents = QDesktopServices::storageLocation(QDesktopServices::DocumentsLocation);
#else
	QString dir_videos = QStandardPaths::writableLocation(QStandardPaths::MoviesLocation);
	QString dir_documents = QStandardPaths::writableLocation(QStandardPaths::DocumentsLocation);
#endif
	QString dir_home = QDir::homePath();
	QString best_dir = (QDir(dir_videos).exists())? dir_videos : (QDir(dir_documents).exists())? dir_documents : dir_home;
	QString default_file = best_dir + "/simplescreenrecorder." + m_containers[default_container].suffixes[0];

	// load settings
	SetFile(settings->value("output/file", default_file).toString());
	SetSeparateFiles(settings->value("output/separate_files", false).toBool());
	SetAddTimestamp(settings->value("output/add_timestamp", true).toBool());
	SetContainer(StringToEnum(settings->value("output/container", QString()).toString(), default_container));
	SetContainerAV(FindContainerAV(settings->value("output/container_av", QString()).toString()));
	SetVideoCodec(StringToEnum(settings->value("output/video_codec", QString()).toString(), default_video_codec));
	SetVideoCodecAV(FindVideoCodecAV(settings->value("output/video_codec_av", QString()).toString()));
	SetVideoKBitRate(settings->value("output/video_kbit_rate", 5000).toUInt());
	SetH264CRF(settings->value("output/video_h264_crf", 23).toUInt());
	SetH264Preset((enum_h264_preset) settings->value("output/video_h264_preset", H264_PRESET_SUPERFAST).toUInt());
	SetVP8CPUUsed(settings->value("output/video_vp8_cpu_used", 5).toUInt());
	SetVideoOptions(settings->value("output/video_options", "").toString());
	SetVideoAllowFrameSkipping(settings->value("output/video_allow_frame_skipping", true).toBool());
	SetAudioCodec(StringToEnum(settings->value("output/audio_codec", QString()).toString(), default_audio_codec));
	SetAudioCodecAV(FindAudioCodecAV(settings->value("output/audio_codec_av", QString()).toString()));
	SetAudioKBitRate(settings->value("output/audio_kbit_rate", 128).toUInt());
	SetAudioOptions(settings->value("output/audio_options", "").toString());

	// refresh the dependent UI fields
	OnUpdateContainerFields();
	OnUpdateVideoCodecFields();
	OnUpdateAudioCodecFields();
}
// Writes every per-profile output setting under the "output/" key prefix.
// Enums are stored as strings, AV (libav/ffmpeg) selections by their avname.
void PageOutput::SaveProfileSettings(QSettings* settings) {
	settings->setValue("output/file", GetFile());
	settings->setValue("output/separate_files", GetSeparateFiles());
	settings->setValue("output/add_timestamp", GetAddTimestamp());
	settings->setValue("output/container", EnumToString(GetContainer()));
	settings->setValue("output/container_av", m_containers_av[GetContainerAV()].avname);
	settings->setValue("output/video_codec", EnumToString(GetVideoCodec()));
	settings->setValue("output/video_codec_av", m_video_codecs_av[GetVideoCodecAV()].avname);
	settings->setValue("output/video_kbit_rate", GetVideoKBitRate());
	settings->setValue("output/video_h264_crf", GetH264CRF());
	settings->setValue("output/video_h264_preset", GetH264Preset());
	settings->setValue("output/video_vp8_cpu_used", GetVP8CPUUsed());
	settings->setValue("output/video_options", GetVideoOptions());
	settings->setValue("output/video_allow_frame_skipping", GetVideoAllowFrameSkipping());
	settings->setValue("output/audio_codec", EnumToString(GetAudioCodec()));
	settings->setValue("output/audio_codec_av", m_audio_codecs_av[GetAudioCodecAV()].avname);
	settings->setValue("output/audio_kbit_rate", GetAudioKBitRate());
	settings->setValue("output/audio_options", GetAudioOptions());
}
// Called when this wizard page becomes active.
void PageOutput::PageStart() {
	// only show audio settings if audio is enabled
	m_groupbox_audio->setVisible(m_main_window->GetPageInput()->GetAudioEnabled());
}
// Validates the page before the user may continue.
// Returns false (after showing an error dialog) when no output file is set.
// NOTE: the overwrite-confirmation prompt below is deliberately disabled
// (commented out); files are overwritten without asking.
bool PageOutput::Validate() {
	QString file = GetFile();
	if(file.isEmpty()) {
		MessageBox(QMessageBox::Critical, this, MainWindow::WINDOW_CAPTION, tr("You did not select an output file!"), BUTTON_OK, BUTTON_OK);
		return false;
	}
	/*if(GetFileProtocol().isNull() && !GetSeparateFiles() && QFileInfo(file).exists()) {
		if(MessageBox(QMessageBox::Warning, this, MainWindow::WINDOW_CAPTION,
					  tr("The file '%1' already exists. Are you sure that you want to overwrite it?").arg(QFileInfo(file).fileName()),
					  BUTTON_YES | BUTTON_NO, BUTTON_YES) != BUTTON_YES) {
			return false;
		}
	}*/
	return true;
}
// Extracts the protocol/scheme prefix of the output file (e.g. "rtmp" for
// "rtmp://..."). Returns a null QString for plain local paths.
QString PageOutput::GetFileProtocol() {
	QRegExp protocol_regex("^([a-z0-9]+)://", Qt::CaseInsensitive, QRegExp::RegExp);
	if(protocol_regex.indexIn(GetFile()) < 0) {
		return QString();
	}
	return protocol_regex.cap(1);
}
// Returns the libav/ffmpeg format name of the selected container. For the
// "other" choice, the name comes from the user-selected AV container list.
QString PageOutput::GetContainerAVName() {
	enum_container container = GetContainer();
	return (container == CONTAINER_OTHER)? m_containers_av[GetContainerAV()].avname : m_containers[container].avname;
}
// Returns the libav/ffmpeg encoder name of the selected video codec. For the
// "other" choice, the name comes from the user-selected AV codec list.
QString PageOutput::GetVideoCodecAVName() {
	enum_video_codec video_codec = GetVideoCodec();
	return (video_codec == VIDEO_CODEC_OTHER)? m_video_codecs_av[GetVideoCodecAV()].avname : m_video_codecs[video_codec].avname;
}
// Returns the libav/ffmpeg encoder name of the selected audio codec. For the
// "other" choice, the name comes from the user-selected AV codec list.
QString PageOutput::GetAudioCodecAVName() {
	enum_audio_codec audio_codec = GetAudioCodec();
	return (audio_codec == AUDIO_CODEC_OTHER)? m_audio_codecs_av[GetAudioCodecAV()].avname : m_audio_codecs[audio_codec].avname;
}
// Looks up an AV container by its avname; returns its index, or 0 (the first
// entry) when the name is not found.
unsigned int PageOutput::FindContainerAV(const QString& name) {
	unsigned int index = 0;
	for(const auto &container : m_containers_av) {
		if(container.avname == name)
			return index;
		++index;
	}
	return 0;
}
// Looks up an AV video codec by its avname; returns its index, or 0 (the
// first entry) when the name is not found.
unsigned int PageOutput::FindVideoCodecAV(const QString& name) {
	unsigned int index = 0;
	for(const auto &codec : m_video_codecs_av) {
		if(codec.avname == name)
			return index;
		++index;
	}
	return 0;
}
// Looks up an AV audio codec by its avname; returns its index, or 0 (the
// first entry) when the name is not found.
unsigned int PageOutput::FindAudioCodecAV(const QString& name) {
	unsigned int index = 0;
	for(const auto &codec : m_audio_codecs_av) {
		if(codec.avname == name)
			return index;
		++index;
	}
	return 0;
}
// Slot: called when the container selection changes. If the current file name
// carries a suffix belonging to the *previous* container but not to the new
// one, the suffix is swapped for the new container's preferred one. Remote
// URLs (non-null protocol) are left untouched. Finally refreshes the
// container-dependent fields.
void PageOutput::OnUpdateSuffixAndContainerFields() {
	// change file extension
	enum_container new_container = GetContainer();
	unsigned int new_container_av = GetContainerAV();
	if(GetFileProtocol().isNull()) {
		QStringList old_suffixes = (m_old_container == CONTAINER_OTHER)? m_containers_av[m_old_container_av].suffixes : m_containers[m_old_container].suffixes;
		QStringList new_suffixes = (new_container == CONTAINER_OTHER)? m_containers_av[new_container_av].suffixes : m_containers[new_container].suffixes;
		QString file = GetFile();
		if(!file.isEmpty()) {
			QFileInfo fi(file);
			// only replace a suffix the old container owned and the new one rejects
			if(MatchSuffix(fi.suffix(), old_suffixes) && !MatchSuffix(fi.suffix(), new_suffixes)) {
				if(new_suffixes.isEmpty())
					m_lineedit_file->setText(fi.path() + "/" + fi.completeBaseName());
				else
					m_lineedit_file->setText(fi.path() + "/" + fi.completeBaseName() + "." + new_suffixes[0]);
			}
		}
	}
	// update fields
	OnUpdateContainerFields();
}
// Slot: refreshes UI state that depends on the selected container. Shows the
// raw AV-container picker only for the "other" choice, annotates codec entries
// that are uninstalled or unsupported by the container, and remembers the
// current selection so OnUpdateSuffixAndContainerFields can diff against it.
void PageOutput::OnUpdateContainerFields() {
	enum_container container = GetContainer();
	unsigned int container_av = GetContainerAV();
	// show/hide fields
	GroupVisible({m_label_container_av, m_combobox_container_av}, (container == CONTAINER_OTHER));
	// mark uninstalled or unsupported codecs
	for(unsigned int i = 0; i < VIDEO_CODEC_OTHER; ++i) {
		QString name = m_video_codecs[i].name;
		if(!AVCodecIsInstalled(m_video_codecs[i].avname))
			name += " (" + tr("not installed") + ")";
		else if(container != CONTAINER_OTHER && !m_containers[container].supported_video_codecs.count((enum_video_codec) i))
			name += " (" + tr("not supported by container") + ")";
		m_combobox_video_codec->setItemText(i, name);
	}
	for(unsigned int i = 0; i < AUDIO_CODEC_OTHER; ++i) {
		QString name = m_audio_codecs[i].name;
		if(!AVCodecIsInstalled(m_audio_codecs[i].avname))
			name += " (" + tr("not installed") + ")";
		else if(container != CONTAINER_OTHER && !m_containers[container].supported_audio_codecs.count((enum_audio_codec) i))
			name += " (" + tr("not supported by container") + ")";
		m_combobox_audio_codec->setItemText(i, name);
	}
	// remember the selection for the next suffix update
	m_old_container = container;
	m_old_container_av = container_av;
}
// Slot: shows/hides the per-codec video options. The bit-rate field is hidden
// for H.264 (which is driven by CRF + preset instead); CPU-used only applies
// to VP8; the raw codec picker and custom options only to "other".
void PageOutput::OnUpdateVideoCodecFields() {
	enum_video_codec codec = GetVideoCodec();
	MultiGroupVisible({
		{{m_label_video_kbit_rate, m_lineedit_video_kbit_rate}, (codec != VIDEO_CODEC_H264)},
		{{m_label_h264_crf, m_slider_h264_crf, m_label_h264_crf_value, m_label_h264_preset, m_combobox_h264_preset}, (codec == VIDEO_CODEC_H264)},
		{{m_label_vp8_cpu_used, m_combobox_vp8_cpu_used}, (codec == VIDEO_CODEC_VP8)},
		{{m_label_video_codec_av, m_combobox_video_codec_av, m_label_video_options, m_lineedit_video_options}, (codec == VIDEO_CODEC_OTHER)},
	});
}
// Slot: shows/hides the per-codec audio options. The bit-rate field is hidden
// for uncompressed audio; the raw codec picker and custom options are only
// shown for "other".
void PageOutput::OnUpdateAudioCodecFields() {
	enum_audio_codec codec = GetAudioCodec();
	MultiGroupVisible({
		{{m_label_audio_kbit_rate, m_lineedit_audio_kbit_rate}, (codec != AUDIO_CODEC_UNCOMPRESSED)},
		{{m_label_audio_codec_av, m_combobox_audio_codec_av, m_label_audio_options, m_lineedit_audio_options}, (codec == AUDIO_CODEC_OTHER)},
	});
}
// Slot: opens a save-file dialog. Builds the filter list from the known
// containers plus any AV containers that define a filter, preselects the
// filter of the current container, and after the user picks a file either
// appends the container's suffix (when none was typed) or switches the
// container selection to match the typed suffix.
void PageOutput::OnBrowse() {
	// build the combined filter string
	QString filters;
	for(int i = 0; i < CONTAINER_OTHER; ++i) {
		if(i != 0)
			filters += ";;";
		filters += m_containers[i].filter;
	}
	for(unsigned int i = 0; i < m_containers_av.size(); ++i) {
		if(!m_containers_av[i].filter.isEmpty())
			filters += ";;" + m_containers_av[i].filter;
	}
	enum_container container = GetContainer();
	unsigned int container_av = GetContainerAV();
	QString selected_filter = (container == CONTAINER_OTHER)? m_containers_av[container_av].filter : m_containers[container].filter;
	// overwrite confirmation is handled elsewhere, hence DontConfirmOverwrite
	QString selected_file = QFileDialog::getSaveFileName(this, tr("Save recording as"),
														 GetFile(), filters, &selected_filter, QFileDialog::DontConfirmOverwrite);
	if(selected_file.isNull())
		return;
	m_lineedit_file->clear();
	QFileInfo fi(selected_file);
	if(fi.suffix().isEmpty()) {
		// no suffix typed: append the current container's preferred suffix
		QStringList suffixes = (container == CONTAINER_OTHER)? m_containers_av[container_av].suffixes : m_containers[container].suffixes;
		if(!suffixes.isEmpty())
			selected_file += "." + suffixes[0];
	} else {
		// suffix typed: switch the container selection to match it
		bool found = false;
		for(int i = 0; i < CONTAINER_OTHER; ++i) {
			if(m_containers[i].suffixes.contains(fi.suffix(), Qt::CaseInsensitive)) {
				SetContainer((enum_container) i);
				found = true;
				break;
			}
		}
		if(!found) {
			for(unsigned int i = 0; i < m_containers_av.size(); ++i) {
				if(m_containers_av[i].suffixes.contains(fi.suffix(), Qt::CaseInsensitive)) {
					SetContainer(CONTAINER_OTHER);
					SetContainerAV(i);
					break;
				}
			}
		}
	}
	SetFile(selected_file);
	OnUpdateContainerFields();
}
// Slot for the "Continue" button: advances to the record page only when the
// page validates (an output file has been chosen).
void PageOutput::OnContinue() {
	if(!Validate())
		return;
	m_main_window->GoPageRecord();
}
|
#!/bin/bash
#
# Copyright IBM Corp All Rights Reserved
#
# SPDX-License-Identifier: Apache-2.0
#
# Starts the basic-network Fabric sample, then installs, instantiates and
# primes the "mycc" chaincode as the Org1 admin.
# Exit on first error
set -e
# don't rewrite paths for Windows Git Bash users
export MSYS_NO_PATHCONV=1
starttime=$(date +%s)
# chaincode language: "golang" (default) or "node"
LANGUAGE=${1:-"golang"}
CC_SRC_PATH=github.com/lc/go
if [ "$LANGUAGE" = "node" -o "$LANGUAGE" = "NODE" ]; then
	CC_SRC_PATH=/opt/gopath/src/github.com/lc/node
fi

# clean the keystore
rm -rf ./hfc-key-store

# launch network; create channel and join peer to channel
cd ../basic-network
./start.sh

# Now launch the CLI container in order to install, instantiate chaincode
# and prime the ledger with our 10 cars
docker-compose -f ./docker-compose.yml up -d cli

docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/opt/gopath/src/github.com/hyperledger/fabric/peer/crypto/peerOrganizations/org1.example.com/users/Admin@org1.example.com/msp" cli peer chaincode install -n mycc -v 9.5 -p "$CC_SRC_PATH" -l "$LANGUAGE"
docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/opt/gopath/src/github.com/hyperledger/fabric/peer/crypto/peerOrganizations/org1.example.com/users/Admin@org1.example.com/msp" cli peer chaincode instantiate -o orderer.example.com:7050 -C mychannel -n mycc -l "$LANGUAGE" -v 9.5 -c '{"Args":[""]}' -P "OR ('Org1MSP.member','Org2MSP.member')"
# give the instantiation container time to start before invoking
sleep 10
docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/opt/gopath/src/github.com/hyperledger/fabric/peer/crypto/peerOrganizations/org1.example.com/users/Admin@org1.example.com/msp" cli peer chaincode invoke -o orderer.example.com:7050 -C mychannel -n mycc -c '{"function":"initLedger","Args":[""]}'

printf "\nTotal setup execution time : $(($(date +%s) - starttime)) secs ...\n\n\n"
printf "Start by installing required packages run 'npm install'\n"
printf "Then run 'node enrollAdmin.js', then 'node registerUser'\n\n"
printf "The 'node invoke.js' will fail until it has been updated with valid arguments\n"
printf "The 'node query.js' may be run at anytime once the user has been registered\n\n"
|
#!/bin/sh
# Make the freshly-built library in ../../src visible to the test binary on
# Linux (LD_LIBRARY_PATH) and macOS (DYLD_FALLBACK_LIBRARY_PATH).
export LD_LIBRARY_PATH=../../src
export DYLD_FALLBACK_LIBRARY_PATH=../../src
# Regenerate (lupdate) and compile (lrelease) the Qt translation files,
# preferring the tools from $QTDIR when it is set.
if [ -n "$QTDIR" ]; then
	$QTDIR/bin/lupdate qlci18n.pro
	$QTDIR/bin/lrelease qlci18n.pro
else
	lupdate qlci18n.pro
	lrelease qlci18n.pro
fi
# Run the i18n test binary.
./qlci18n_test
|
import debounce from 'debounce-wrapper';
import { HookProps, SetFormProps } from '@common-types';
import { HideLiveErrorAfterTimeout } from './types';
/**
 * Hides the validation error of a single form control.
 *
 * @param setForm - Updater for the global form object
 * @param hooksData - Hook data identifying the control to clear
 *
 * @returns void
 */
const hideLiveErrorHandler = (setForm: SetFormProps, hooksData: HookProps) => {
    setForm(form => {
        // Fix: the original also destructured formIndex/controlIndex from
        // hooksData but never used them (dead locals).
        // NOTE(review): this updater mutates 'form' in place and returns
        // undefined — that only works if setForm passes an immer-style draft;
        // confirm against the SetFormProps implementation.
        const currentControl = form.controls[hooksData.controlName];
        currentControl.hasError = false;
    });
};
/**
 * Schedules the hiding of a control's error after a timeout.
 *
 * @param hooksData - Hook data identifying the control
 * @param setForm - Updater for the global form object
 * @param ms - Delay in milliseconds before the error is hidden
 *
 * @returns The value produced by the debounced call
 *
 * NOTE(review): a fresh debounced wrapper is created on every invocation, so
 * consecutive calls are not debounced against each other — confirm this is
 * intended behavior of 'debounce-wrapper' before relying on it.
 */
export const hideLiveErrorAfterTimeout: HideLiveErrorAfterTimeout = (
    hooksData,
    setForm,
    ms,
) => {
    const callHideError = debounce(hideLiveErrorHandler, ms),
        timeoutId = callHideError(setForm, hooksData);
    return timeoutId;
};
|
#
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Build the demo application in this directory into ./bin/<dirname>.exe,
# linking against VART/XRT and whichever OpenCV (3 or 4) pkg-config finds.
cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1
CXX=${CXX:-g++}
# Derive a target triple-like string from the distro and CPU for the
# default install prefix.
os=`lsb_release -a | grep "Distributor ID" | sed 's/^.*:\s*//'`
os_version=`lsb_release -a | grep "Release" | sed 's/^.*:\s*//'`
arch=`uname -p`
target_info=${os}.${os_version}.${arch}
install_prefix_default=$HOME/.local/${target_info}
# Prefer OpenCV 4 when pkg-config knows about it, else fall back to OpenCV 3.
result=0 && pkg-config --list-all | grep opencv4 && result=1
if [ $result -eq 1 ]; then
	OPENCV_FLAGS=$(pkg-config --cflags --libs-only-L opencv4)
else
	OPENCV_FLAGS=$(pkg-config --cflags --libs-only-L opencv)
fi
mkdir -p bin
# Name the binary after the current directory.
name=$(basename $PWD).exe
$CXX -O2 -w\
 -fno-inline \
 -I. \
 -I=/usr/include/opencv4 \
 -I=/usr/include/xrt \
 -I/opt/xilinx/xrt/include/ \
 -o ./bin/$name \
 -std=c++17 \
 src/main.cc \
 src/common.cpp \
 -L/opt/xilinx/xrt/lib \
 -luuid \
 -lvart-runner \
 -lvart-util \
 -lxrt_coreutil \
 -lxrt_core \
 ${OPENCV_FLAGS} \
 -lopencv_videoio \
 -lopencv_imgcodecs \
 -lopencv_highgui \
 -lopencv_imgproc \
 -lopencv_core \
 -lpthread \
 -lxilinxopencl \
 -lglog \
 -lunilog \
 -lxir
|
"""Resource for IAM Policies"""
from typing import Any, Dict, Type
from botocore.client import BaseClient
from botocore.exceptions import ClientError
from altimeter.aws.resource.resource_spec import ListFromAWSResult
from altimeter.aws.resource.iam import IAMResourceSpec
from altimeter.aws.resource.util import policy_doc_dict_to_sorted_str
from altimeter.core.graph.field.scalar_field import ScalarField
from altimeter.core.graph.schema import Schema
class IAMPolicyResourceSpec(IAMResourceSpec):
    """Resource for user-managed (customer-managed) IAM Policies."""

    type_name = "policy"
    parallel_scan = True
    schema = Schema(
        ScalarField("PolicyName", "name"),
        ScalarField("PolicyId"),
        ScalarField("DefaultVersionId"),
        ScalarField("DefaultVersionPolicyDocumentText"),
    )

    @classmethod
    def list_from_aws(
        cls: Type["IAMPolicyResourceSpec"], client: BaseClient, account_id: str, region: str
    ) -> ListFromAWSResult:
        """Return a dict of dicts of the format:

            {'policy_1_arn': {policy_1_dict},
             'policy_2_arn': {policy_2_dict},
             ...}

        Where the dicts represent results from list_policies, with each policy
        augmented with the text of its default version document.
        """
        policies = {}
        paginator = client.get_paginator("list_policies")
        # Scope="Local" restricts the listing to customer-managed policies.
        for resp in paginator.paginate(Scope="Local"):
            for policy in resp.get("Policies", []):
                resource_arn = policy["Arn"]
                default_policy_version = policy["DefaultVersionId"]
                try:
                    default_policy_version_document_text = cls.get_policy_version_document_text(
                        client=client,
                        policy_arn=resource_arn,
                        policy_version=default_policy_version,
                    )
                    # Store a canonical (sorted) string form of the document.
                    policy["DefaultVersionPolicyDocumentText"] = policy_doc_dict_to_sorted_str(
                        default_policy_version_document_text
                    )
                    policies[resource_arn] = policy
                except ClientError as c_e:
                    # NoSuchEntity is skipped — presumably the policy was
                    # deleted between list and get (TODO confirm); anything
                    # else is re-raised.
                    error_code = getattr(c_e, "response", {}).get("Error", {}).get("Code", {})
                    if error_code != "NoSuchEntity":
                        raise c_e
        return ListFromAWSResult(resources=policies)

    @classmethod
    def get_policy_version_document_text(
        cls: Type["IAMPolicyResourceSpec"],
        client: BaseClient,
        policy_arn: str,
        policy_version: str,
    ) -> Dict[str, Any]:
        # Fetch the policy document for one specific version of a policy.
        policy_version_resp = client.get_policy_version(
            PolicyArn=policy_arn, VersionId=policy_version
        )
        return policy_version_resp["PolicyVersion"]["Document"]
class IAMAWSManagedPolicyResourceSpec(IAMResourceSpec):
    """Resource for AWS-managed IAM Policies."""

    type_name = "policy"
    schema = Schema(ScalarField("PolicyName", "name"), ScalarField("PolicyId"))

    @classmethod
    def list_from_aws(
        cls: Type["IAMAWSManagedPolicyResourceSpec"],
        client: BaseClient,
        account_id: str,
        region: str,
    ) -> ListFromAWSResult:
        """Return a dict of dicts of the format:

            {'policy_1_arn': {policy_1_dict},
             'policy_2_arn': {policy_2_dict},
             ...}

        Where the dicts represent results from list_policies, restricted to
        AWS-managed policies that are attached to at least one entity.
        """
        policies = {}
        paginator = client.get_paginator("list_policies")
        # Scope="AWS" lists AWS-managed policies; OnlyAttached skips the
        # (large) set of unattached ones.
        for resp in paginator.paginate(Scope="AWS", OnlyAttached=True):
            for policy in resp.get("Policies", []):
                resource_arn = policy["Arn"]
                policies[resource_arn] = policy
        return ListFromAWSResult(resources=policies)
|
#!/bin/bash
# This file will run the robot_sensors.py script continuously
# It waits 20 seconds before running the script
# It will wait 20 seconds before running the script again if there is an error
# (the sleep runs at the top of every iteration, so there is also a 20 second
# pause before the very first run and between normal restarts)
while true ; do
	sleep 20
	python3 /home/pi/Desktop/Mercury2018-19/robot/robot_sensors.py
done
'use strict';

const express = require('express');
const cors = require('cors');
const multer = require('multer');

// Multer with no storage config keeps uploads in memory and populates req.file.
const upload = multer();
const app = express();

app.use(cors());
app.use(express.static('public'));
app.set('view engine', 'pug');

app.get('/', (req, res) => res.render('index'));

// File metadata endpoint: returns the uploaded file's name, MIME type and size.
// Fix: guard against a missing 'upfile' field — the original destructured
// req.file unconditionally, which throws a TypeError when no file is sent.
app.post('/api/fileanalyse', upload.single('upfile'), (req, res) => {
  if (!req.file) {
    return res.status(400).json({ error: 'No file uploaded' });
  }
  const { originalname, mimetype, size } = req.file;
  return res.json({ name: originalname, type: mimetype, size: size });
});

const listener = app.listen(process.env.PORT || 3000, () => {
  console.log(`Your app is listening on port ${listener.address().port}`);
});
|
#!/bin/bash
# Container entrypoint: tune PHP upload limits, initialize MySQL on first run,
# then hand control to supervisord.

# Mysql
VOLUME_HOME="/var/lib/mysql"

# Apply the PHP_UPLOAD_MAX_FILESIZE / PHP_POST_MAX_SIZE environment values
# to the Apache PHP configuration.
sed -ri -e "s/^upload_max_filesize.*/upload_max_filesize = ${PHP_UPLOAD_MAX_FILESIZE}/" \
 -e "s/^post_max_size.*/post_max_size = ${PHP_POST_MAX_SIZE}/" /etc/php5/apache2/php.ini

if [[ ! -d $VOLUME_HOME/mysql ]]; then
	echo "=> An empty or uninitialized MySQL volume is detected in $VOLUME_HOME"
	echo "=> Installing MySQL ..."
	mysql_install_db > /dev/null 2>&1
	echo "=> Done!"
	/create_mysql_admin_user.sh
else
	echo "=> Using an existing volume of MySQL"
fi

# Run supervisord in the foreground as PID 1.
exec supervisord -n
|
package fr.unice.polytech.si3.qgl.soyouz.classes.marineland.entities;

/**
 * Marker interface for entities that can take part in collisions.
 * Declares no methods; implementing it conveys the property by type alone.
 */
public interface Collidable
{
}
|
<filename>app/controllers/posts_controller.rb
class PostsController < ApplicationController
  # Loads @post once for all member actions, including :show.
  before_action :find_post, only: [:show, :edit, :update, :destroy]

  def index
    @posts = Post.all.order('created_at DESC')
  end

  def new
    @post = Post.new
  end

  def create
    @post = Post.new(post_params)
    if @post.save
      redirect_to @post
    else
      render 'new'
    end
  end

  def edit
  end

  def update
    if @post.update(post_params)
      redirect_to @post
    else
      render 'edit'
    end
  end

  # Fix: @post is already set by the find_post before_action; the original
  # ran Post.find a second time here, issuing a redundant database query.
  def show
  end

  def destroy
    @post.destroy
    redirect_to root_path
  end

  def about
  end

  private

  # Shared loader for member actions.
  def find_post
    @post = Post.find(params[:id])
  end

  # Strong parameters: only title and body may be mass-assigned.
  def post_params
    params.require(:post).permit(:title, :body)
  end
end
|
#!/bin/sh
# Remove the heartbeat cron entry from /etc/crontab, if present.
# The grep guard avoids running sed (and touching the file) unnecessarily.
if grep -q /opt/heartbeat/scripts/cron /etc/crontab; then
	sed -i -e "/\/opt\/heartbeat\/scripts\/cron/d" /etc/crontab
fi
|
import React from 'react';
import { connect } from 'react-redux';
class App extends React.Component {
constructor(props) {
super(props);
this.state = {
isbn: ''
};
this.handleSubmit = this.handleSubmit.bind(this);
this.handleChange = this.handleChange.bind(this);
}
handleSubmit(e) {
e.preventDefault();
this.props.searchBook(this.state.isbn);
this.setState({isbn: ''});
}
handleChange(e) {
this.setState({isbn: e.target.value});
}
render() {
return (
<div>
<form onSubmit={this.handleSubmit}>
<label>
ISBN:
<input type="text" value={this.state.isbn} onChange={this.handleChange} />
</label>
<input type="submit" value="Submit" />
</form>
<span>Search Results:</span>
{this.props.results.length > 0 && this.props.results.map(result=>
<div>{result}</div>
)}
</div>
);
}
}
// Exposes the store's searchResults slice as the 'results' prop.
const mapStateToProps = state => ({
    results: state.searchResults
});
// Exposes a searchBook(isbn) prop that dispatches a SEARCH_BOOK action.
const mapDispatchToProps = dispatch => ({
    searchBook: isbn => dispatch({ type: 'SEARCH_BOOK', isbn })
});
export default connect(mapStateToProps, mapDispatchToProps)(App); |
<reponame>HelloPb/jss-templatea
import React from 'react';
import renderer from 'react-test-renderer';
import { StyleSheet } from 'react-native';
import { Image } from './Image';
// In react-native, you need to "import" static assets via `require` statements.
// When the packager builds your app, it statically analyzes your code, extracts the assets that are `require`d into an array/map, then assigns them a numerical value.
// Presumably, it then uses that value as an index to retrieve/render the proper static asset.
// Hence, the `staticImages` defined below have numeric values for their `src` prop for mocking.
// Naturally, this doesn't actually test the `require` behavior of the packager, but that should be reserved for integration tests.
// Fixture: static assets are mocked with the numeric ids the packager assigns.
const staticImages = {
  'test-image-1': {
    src: 1,
  },
};

// Fixture: a network image with explicit dimensions and alt text.
const networkImages = {
  'test-image-1': {
    src: 'https://jssapp/-/media/myfile.ashx&w=180&h=360',
    width: '180',
    height: '360',
    alt: 'Logo',
  },
};

// Every case below is run once per data set (static and network images).
const testData = [
  { label: 'static images', data: staticImages },
  { label: 'network images', data: networkImages },
];

describe('<Image />', () => {
  testData.forEach((dataSet) => {
    describe(`with ${dataSet.label}`, () => {
      // The media prop can be the image object itself...
      describe('with direct image object, no value/editable', () => {
        test('should render properly', () => {
          const props = {
            media: dataSet.data['test-image-1'],
            resizeMode: 'cover',
          };
          const rendered = renderer.create(<Image {...props} />);
          expect(rendered).toMatchSnapshot();
        });
      });
      // ...or wrapped in Sitecore's { value, editable } envelope.
      describe('with value/editable image object', () => {
        test('should render properly', () => {
          const props = {
            media: {
              value: dataSet.data['test-image-1'],
              editable: 'bob loblaw',
            },
            resizeMode: 'cover',
          };
          const rendered = renderer.create(<Image {...props} />);
          expect(rendered).toMatchSnapshot();
        });
      });
      describe('with style prop', () => {
        describe('with width or height specified in field data', () => {
          // Field-data dimensions and the style prop should be merged.
          test('should render merged styles', () => {
            const styles = StyleSheet.create({
              logoImage: {
                margin: 20,
              },
            });
            const props = {
              media: {
                value: {
                  ...dataSet.data['test-image-1'],
                  width: '180',
                  height: '360',
                },
                editable: 'bob loblaw',
              },
              style: styles.logoImage,
              resizeMode: 'cover',
            };
            const rendered = renderer.create(<Image {...props} />);
            expect(rendered).toMatchSnapshot();
          });
        });
      });
    });
  });
});
|
<reponame>Moya-M/101_MAP
import React, {Component} from "react";
import PropTypes from "prop-types";
import Switch from "./Switch";
import ThemeSwitch from "../../containers/sidebar/themeSwitch";
import {removeCookie} from "../../helpers/cookies.helper";
// Sidebar with theme toggle, passive-mode switch, GitHub link and disconnect.
// In passive mode the sidebar is hidden and its actions are disabled.
class SideBar extends Component {
    constructor(props) {
        super(props);
        this.setPassiveMode = this.setPassiveMode.bind(this);
        this.disconnect = this.disconnect.bind(this);
    }

    // Logs the user out: clears the auth cookie and resets the app state.
    // Ignored while in passive mode.
    disconnect() {
        if (this.props.mode !== "passive") {
            removeCookie("userToken");
            this.props.disconnectApp();
        }
    }

    // Enters passive mode (persisted in localStorage) and tells the user how
    // to leave it. Ignored when already passive.
    setPassiveMode() {
        if (this.props.mode !== "passive") {
            this.props.setPassiveMode();
            localStorage.setItem("mode", "passive");
            this.props.showToast({
                type: "info",
                timeout: 2000,
                message: "Press escape to quit passive mode"
            });
        }
    }

    render() {
        return (
            <div className={this.props.mode === "passive" ? "sidebar sidebarHided" : "sidebar"}>
                <ThemeSwitch active={this.props.mode !== "passive"}/>
                <Switch icon={"fas fa-film"} clickEvent={this.setPassiveMode} />
                <Switch
                    isLink={true}
                    alt={"Github"}
                    icon={"fab fa-github"}
                    className={"disconnectSwitch"}
                    href={"https://github.com/julien-marquet/101_MAP"}
                />
                <Switch icon={"fas fa-power-off"} clickEvent={this.disconnect} />
            </div>
        );
    }
}
// Fix: 'mode' and 'disconnectApp' are read by the component (render/disconnect)
// but were missing from propTypes.
SideBar.propTypes = {
    mode: PropTypes.string.isRequired,
    setPassiveMode: PropTypes.func.isRequired,
    disconnectApp: PropTypes.func.isRequired,
    showToast: PropTypes.func.isRequired
};
export default SideBar;
|
package org.multibit.hd.core.events;
import com.google.common.base.Preconditions;
import org.bitcoinj.core.Transaction;
/**
 * <p>Event to provide the following to Core event subscribers</p>
 * <ul>
 * <li>Update of the progress of a broadcast</li>
 * </ul>
 *
 * @since 0.0.1
 */
public class BitcoinSendProgressEvent implements CoreEvent {

  private final String transactionId;

  // The progress of the broadcast - a double between 0.0 (no peers have seen
  // it) and 1.0 (broadcast complete). (Original comment said "long".)
  private final double progress;

  /**
   * @param transaction The Bitcoinj transaction providing the information (must not be null)
   * @param progress    The broadcast progress of the transaction, from 0.0 to 1.0
   */
  public BitcoinSendProgressEvent(Transaction transaction, double progress) {
    Preconditions.checkNotNull(transaction);
    transactionId = transaction.getHashAsString();
    this.progress = progress;
  }

  public String getTransactionId() {
    return transactionId;
  }

  public double getProgress() {
    return progress;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    BitcoinSendProgressEvent that = (BitcoinSendProgressEvent) o;

    // Double.compare handles NaN and signed zero consistently with hashCode
    if (Double.compare(that.progress, progress) != 0) return false;
    if (!transactionId.equals(that.transactionId)) return false;

    return true;
  }

  @Override
  public int hashCode() {
    int result;
    long temp;
    result = transactionId.hashCode();
    temp = Double.doubleToLongBits(progress);
    result = 31 * result + (int) (temp ^ (temp >>> 32));
    return result;
  }

  @Override
  public String toString() {
    return "BitcoinSendProgressEvent{" +
      "transactionId='" + transactionId + '\'' +
      ", progress=" + progress +
      '}';
  }
}
|
#!/bin/bash
# Restart the Mailman master daemon (qrunners); -q keeps mailmanctl quiet.
/usr/lib/mailman/bin/mailmanctl -q restart
|
// Require local class dependencies
const Base = require('./base');
/**
 * Controller class — a thin specialisation of Base that adds no behaviour
 * of its own; it exists as an extension point for concrete controllers.
 */
class Controller extends Base {
}

/**
 * Export Controller class
 *
 * @type {Controller}
 */
module.exports = Controller;
|
#! /usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#copied below from hadoop-config.sh
# Resolve $0 through any chain of symlinks to find the real script location.
this="$0"
while [[ -h "$this" ]]; do
  ls=$(ls -ld "$this")
  link=$(expr "$ls" : '.*-> \(.*\)$')
  if expr "$link" : '.*/.*' > /dev/null; then
    this="$link"
  else
    this=$(dirname "$this")/"$link"
  fi
done
bin=$(dirname "$this")
script=$(basename "$this")
bin=$(cd "$bin"; pwd)
this="$bin/$script"
# The script lives four directory levels below ACCUMULO_HOME.
ACCUMULO_HOME=$(dirname "$this")/../../../..
export ACCUMULO_HOME=$(cd "$ACCUMULO_HOME"; pwd)
if [[ -z "$ACCUMULO_CONF_DIR" ]] ; then
  ACCUMULO_CONF_DIR=${ACCUMULO_HOME}/conf
fi
if [[ -f $ACCUMULO_CONF_DIR/accumulo-env.sh ]] ; then
  . "$ACCUMULO_CONF_DIR/accumulo-env.sh"
fi
if [[ -z $HADOOP_PREFIX ]] ; then
  echo "HADOOP_PREFIX is not set. Please make sure it's set globally."
  exit 1
fi
if [[ -z $1 ]] ; then
  echo "Usage: update-cluster.sh <TARFILE>"
  exit 1
fi
echo 'killing accumulo'
pssh -h "$ACCUMULO_CONF_DIR/slaves" "pkill -f org.apache.accumulo.start" < /dev/null
pkill -f org.apache.accumulo.start
pkill -f agitator.pl
echo 'updating accumulo'
cd "$ACCUMULO_HOME/.."
tar xzf "$1"
echo 'cleaning logs directory'
# BUG FIX: the '*' was previously inside the double quotes ("$X/logs/*"),
# so rm looked for a file literally named '*' and deleted nothing.
# The glob must stay outside the quotes to expand.
rm -f "$ACCUMULO_HOME/logs/"*
rm -f "$ACCUMULO_HOME/test/system/randomwalk/logs/"*
rm -f "$ACCUMULO_HOME/test/system/continuous/logs/"*
rm -f ~/rwlogs/*
echo 'removing old code'
pssh -h "$ACCUMULO_CONF_DIR/slaves" "rm -rf $ACCUMULO_HOME" < /dev/null
echo 'pushing new code'
prsync -r -h "$ACCUMULO_CONF_DIR/slaves" "$ACCUMULO_HOME" /opt/dev
echo 'removing /accumulo dir'
"$HADOOP_PREFIX/bin/hadoop" fs -rmr /accumulo
echo 'creating new instance'
printf "test\nY\nsecret\nsecret\n" | "$ACCUMULO_HOME/bin/accumulo" init
echo 'starting accumulo'
"$ACCUMULO_HOME/bin/start-all.sh"
|
def includeme(config):
    """Pyramid inclusion hook: register the REST routing directives.

    Imports are kept local (inside the function) to match the original's
    deferral of the ``.directives`` import until configuration time.
    """
    from .directives import add_api_rest_routes
    from .directives import add_rest_resource

    directives = (
        ('add_rest_resource', add_rest_resource),
        ('add_api_rest_routes', add_api_rest_routes),
    )
    for name, directive in directives:
        config.add_directive(name, directive)
|
<reponame>leshiguang/LZUISDK
//
// LSUDeviceInfo.h
// LSBluetooth-Library
//
// Created by lifesense on 16/1/17.
// Copyright © 2016年 Lifesense. All rights reserved.
//
#import "LSUBaseModel.h"
@interface LSUDeviceInfo : LSUBaseModel
// Bluetooth MAC address of the device.
@property (nonatomic, copy) NSString *mac;
// Device model identifier string.
@property (nonatomic, copy) NSString *model;
// Firmware (software) version string.
@property (nonatomic, copy) NSString *softVer;
// Hardware revision string.
@property (nonatomic, copy) NSString *hardVer;
// Presumably whether heart-rate ("HR") monitoring is enabled — TODO confirm.
@property (nonatomic, assign) BOOL isOpenHR;
// Start/end of a daily time window; presumably the monitoring window
// implied by isOpenHR above — verify against the device protocol docs.
@property (nonatomic, assign) int startHour;
@property (nonatomic, assign) int startMinute;
@property (nonatomic, assign) int endHour;
@property (nonatomic, assign) int endMinute;
// Device time zone identifier.
@property (nonatomic, copy) NSString *timeZone;
// Size of stored data reported by the device (units not shown here).
@property (nonatomic, assign) NSInteger dataSize;
@end
|
package main
import (
"flag"
"fmt"
"github.com/vjeantet/jodaTime"
"os"
"time"
)
// main prints the current time formatted with a Joda-Time-style pattern.
//
// Flags:
//
//	-f  date pattern (default "yyyy-MM-dd")
//	-h  print usage and exit
func main() {
	format := "yyyy-MM-dd"
	var help bool
	flag.StringVar(&format, "f", format, "Please inform date pattern.. More information https://github.com/vjeantet/jodaTime#format")
	flag.BoolVar(&help, "h", false, "Show Usage")
	flag.Parse()
	// Idiomatic Go: test the bool directly rather than comparing to true.
	if help {
		fmt.Println("Usage:")
		flag.PrintDefaults()
		os.Exit(0)
	}
	fmt.Print(jodaTime.Format(format, time.Now()))
}
|
<gh_stars>1-10
package batch

// Error describes a single failed operation within a batch request.
// NOTE: unlike Response below, the fields carry no JSON tags, so they
// marshal under their Go names "Code"/"Message" — confirm that is intended.
type Error struct {
	Code string
	Message string
}

// Response wraps a provider reply; the payload shape varies per call,
// hence the untyped interface{} field.
type Response struct {
	Response interface{} `json:"Response"`
}
|
# Download FGO servant head icons, renumbering around gaps:
# current_id walks the source IDs 1..182, image_id is the output index.
last_id=182
current_id=1
image_id=1
# Source IDs that have no image and are skipped outright.
skip_ids=(149 151 152 168)
until [ $current_id -gt $last_id ]
do
    #curl http://fgosimulator.webcrow.jp/Material/i/icon_servants/$current_id.jpg -o ./servant_images_jp/servant-$(printf "%03d" $current_id).jpg
    # Output slot 152 is left empty: jump straight past it.
    if [ $image_id -eq 152 ]
    then
        image_id=$(($image_id+1))
    fi
    # Output slot 176 is filled from source ID 182 instead of the current ID.
    # NOTE(review): current_id is deliberately NOT advanced in this branch,
    # so the current source ID is processed on the next loop pass — confirm.
    if [ $image_id -eq 176 ]
    then
        #special for 182
        curl http://file.fgowiki.fgowiki.com/fgo/head/$(printf "%03d" 182).jpg -o ./servant_images/servant-$(printf "%03d" 176).jpg
        image_id=$(($image_id+1))
    else
        #normal
        if [[ ! " ${skip_ids[@]} " =~ " ${current_id} " ]]; then
            curl http://file.fgowiki.fgowiki.com/fgo/head/$(printf "%03d" $current_id).jpg -o ./servant_images/servant-$(printf "%03d" $image_id).jpg
            image_id=$(($image_id+1))
        else
            echo skip $current_id
        fi
        current_id=$(($current_id+1))
    fi
done
|
# https://github.com/junegunn/fzf/wiki/Exampleshttps://github.com/junegunn/fzf/wiki/Examples
# get repositories list and cd
# Fuzzy-pick a ghq-managed repository and cd into it.
# NOTE(review): `ghq list >/dev/null | fzf-tmux` only feeds fzf if zsh's
# MULTIOS option is active (it duplicates stdout); under plain bash the
# pipe would receive nothing — confirm this file is zsh-only.
function fgh() {
    declare -r REPO_NAME="$(ghq list >/dev/null | fzf-tmux --reverse +m)"
    [[ -n "${REPO_NAME}" ]] && cd "$(ghq root)/${REPO_NAME}"
}
# Fuzzy-pick a ghq-managed repository, open it in VS Code and cd into it.
# NOTE(review): same zsh-MULTIOS dependency as fgh above for the
# `>/dev/null | fzf-tmux` redirection — confirm shell.
function fcode() {
    declare -r REPO_NAME="$(ghq list >/dev/null | fzf-tmux --reverse +m)"
    [[ -n "${REPO_NAME}" ]] && code "$(ghq root)/${REPO_NAME}"
    cd "$(ghq root)/${REPO_NAME}"
}
# Fuzzy-pick a VS Code workspace file under ~/vs-works and open it.
function fcws() {
    # BUG FIX: the -name pattern must be quoted; unquoted, the shell
    # glob-expanded *.code-workspace against $PWD before find ran.
    # The result is also quoted now so paths with spaces survive.
    code "$(find ~/vs-works -type f -name '*.code-workspace' | fzf-tmux --reverse)"
}
# Like normal cd but opens an interactive navigation window when called with no arguments.
# For ls, use -FG instead of --color=always on osx.s
# Interactive cd: with args behaves like plain cd; with none, repeatedly
# offers ".." plus the subdirectories of $PWD in fzf (with an ls preview)
# until the selection is cancelled.
function fcd() {
    if [[ "$#" != 0 ]]; then
        builtin cd "$@";
        return
    fi
    while true; do
        # Candidate list: parent dir plus child dirs (trailing / stripped).
        local lsd=$(echo ".." && ls -p | grep '/$' | sed 's;/$;;')
        local dir="$(printf '%s\n' "${lsd[@]}" |
            fzf --reverse --preview '
                __cd_nxt="$(echo {})";
                __cd_path="$(echo $(pwd)/${__cd_nxt} | sed "s;//;/;")";
                echo $__cd_path;
                echo;
                ls -FG "${__cd_path}";
        ')"
        # Empty selection (Esc/ctrl-c) ends the loop.
        [[ ${#dir} != 0 ]] || return 0
        builtin cd "$dir" &> /dev/null
    done
}
# fd - cd to selected directory
# fd - cd to selected directory
# NOTE(review): this shadows the `fd` file-search binary if installed.
fd() {
    local dir
    # Search below $1 (default .), pruning hidden directories.
    dir=$(find ${1:-.} -path '*/\.*' -prune -o -type d -print 2> /dev/null | fzf +m) &&
    cd "$dir"
}
# fbr - checkout git branch
# fbr - fuzzy-checkout a local git branch.
fbr() {
    local branches branch
    branches=$(git branch -vv) &&
    branch=$(echo "$branches" | fzf +m) &&
    # First awk field is the branch name; sed strips the '*' current marker.
    git checkout $(echo "$branch" | awk '{print $1}' | sed "s/.* //")
}
# fbrm - checkout git branch (including remote branches)
# fbrm - fuzzy-checkout a branch, including remotes (remote prefix stripped).
fbrm() {
    local branches branch
    branches=$(git branch --all | grep -v HEAD) &&
    # fzf-tmux height is sized to the number of branches plus a margin.
    branch=$(echo "$branches" | fzf-tmux -d $(( 2 + $(wc -l <<< "$branches") )) +m) &&
    git checkout $(echo "$branch" | sed "s/.* //" | sed "s#remotes/[^/]*/##")
}
# fshow - git commit browser
fshow() {
    # Commit browser: Enter (ctrl-m) pipes the highlighted line through grep
    # to extract the short hash and shows the commit in less; ctrl-s toggles
    # sorting. The heredoc feeds fzf's {} placeholder — do not re-indent the
    # FZF-EOF terminator lines.
    git log --graph --color=always \
        --format="%C(auto)%h%d %s %C(black)%C(bold)%cr" "$@" |
    fzf --ansi --no-sort --reverse --tiebreak=index --bind=ctrl-s:toggle-sort \
        --bind "ctrl-m:execute:
                    (grep -o '[a-f0-9]\{7\}' | head -1 |
                    xargs -I % sh -c 'git show --color=always % | less -R') << 'FZF-EOF'
                    {}
                    FZF-EOF"
}
# fstash - easier way to deal with stashes
# type fstash to get a list of your stashes
# enter shows you the contents of the stash
# ctrl-d shows a diff of the stash against your current HEAD
# ctrl-b checks the stash out as a branch, for easier merging
fstash() {
    # Loop so the picker reopens after each action; --print-query and
    # --expect make fzf emit: query line, pressed key, then the selection.
    local out q k sha
    while out=$(
        git stash list --pretty="%C(yellow)%h %>(14)%Cgreen%cr %C(blue)%gs" |
        fzf --ansi --no-sort --query="$q" --print-query --expect=ctrl-d,ctrl-b);
    do
        mapfile -t out <<< "$out"
        q="${out[0]}"
        k="${out[1]}"
        # Last line is the selected stash; keep only its leading hash.
        sha="${out[-1]}"
        sha="${sha%% *}"
        [[ -z "$sha" ]] && continue
        if [[ "$k" == 'ctrl-d' ]]; then
            git diff $sha
        elif [[ "$k" == 'ctrl-b' ]]; then
            git stash branch "stash-$sha" $sha
            break;
        else
            git stash show -p $sha
        fi
    done
}
# Select a docker container to start and attach to
# Fuzzy-pick any container (sed 1d drops the header row), start and attach.
function da() {
    local cid
    cid=$(docker ps -a | sed 1d | fzf -1 -q "$1" | awk '{print $1}')
    [ -n "$cid" ] && docker start "$cid" && docker attach "$cid"
}
# Select a running docker container to stop
# Fuzzy-pick a running container and stop it.
function ds() {
    local cid
    cid=$(docker ps | sed 1d | fzf -q "$1" | awk '{print $1}')
    [ -n "$cid" ] && docker stop "$cid"
}
# Select a docker container to remove
# Fuzzy-pick one container and remove it.
# NOTE: shadowed by the multi-select redefinition of drm just below;
# in a sourced file the later definition wins.
function drm() {
    local cid
    cid=$(docker ps -a | sed 1d | fzf -q "$1" | awk '{print $1}')
    [ -n "$cid" ] && docker rm "$cid"
}
# Same as above, but allows multi selection:
# Multi-select variant (-m); xargs -r skips docker rm when nothing chosen.
function drm() {
    docker ps -a | sed 1d | fzf -q "$1" --no-sort -m --tac | awk '{ print $1 }' | xargs -r docker rm
}
# Select a docker image or images to remove
# Fuzzy-pick one or more images (column 3 is the image ID) and remove them.
function drmi() {
    docker images | sed 1d | fzf -q "$1" --no-sort -m --tac | awk '{ print $3 }' | xargs -r docker rmi
}
|
<filename>src/main/java/ee/ituk/api/application/validation/rule/ApplicationHasName.java<gh_stars>0
package ee.ituk.api.application.validation.rule;
import ee.ituk.api.common.exception.ErrorMessage;
import ee.ituk.api.common.validation.ValidationRule;
import ee.ituk.api.application.domain.Application;
import java.util.List;
import static ee.ituk.api.common.validation.ValidationUtil.NAME_MISSING;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.apache.commons.lang.StringUtils.isNotBlank;
public class ApplicationHasName implements ValidationRule<Application> {

    /**
     * Passes when the application carries at least one non-blank name part
     * (first OR last name); otherwise reports a single NAME_MISSING error.
     */
    @Override
    public List<ErrorMessage> apply(Application application) {
        boolean hasName = isNotBlank(application.getFirstName())
                || isNotBlank(application.getLastName());
        return hasName
                ? emptyList()
                : singletonList(ErrorMessage.builder().code(NAME_MISSING).build());
    }
}
|
#! /usr/bin/env bash
set -e
# Rewrite every place the Cloudify manager's own IP is recorded to the
# host's current primary IP (first non-loopback, non-link-local address),
# then regenerate the internal SSL certificates to match.
function set_manager_ip() {
    ip=$(/usr/sbin/ip a s | /usr/bin/grep -oE 'inet [^/]+' | /usr/bin/cut -d' ' -f2 | /usr/bin/grep -v '^127.' | /usr/bin/grep -v '^169.254.' | /usr/bin/head -n1)
    echo "Setting manager IP to: ${ip}"
    echo "Updating cloudify-mgmtworker.."
    # The "'"'" runs close the double quotes, emit a literal ", and reopen —
    # needed so the replacement text itself is double-quoted in the file.
    /usr/bin/sed -i -e "s/REST_HOST=.*/REST_HOST="'"'"${ip}"'"'"/" /etc/sysconfig/cloudify-mgmtworker
    /usr/bin/sed -i -e "s#MANAGER_FILE_SERVER_URL="'"'"https://.*:53333/resources"'"'"#MANAGER_FILE_SERVER_URL="'"'"https://${ip}:53333/resources"'"'"#" /etc/sysconfig/cloudify-mgmtworker
    echo "Updating cloudify-manager (rest-service).."
    /usr/bin/sed -i -e "s#amqp_host: '.*'#amqp_host: '${ip}'#" /opt/manager/cloudify-rest.conf
    echo "Updating IPs stored in the database..."
    sudo -upostgres psql cloudify_db -c "update config set value=regexp_replace(value, 'https://[^:]+:(.*)', 'https://${ip}:\1', 'g') where name='file_server_url'"
    echo "Updating the ip of the manager, broker and db-node in db..."
    /opt/manager/env/bin/python /opt/cloudify/manager-ip-setter/update-db.py ${ip}
    echo "Updating networks in certificate metadata..."
    /usr/bin/sed -ri "s/"'"'"broker_addresses"'"'"[^]]+]/"'"'"broker_addresses"'"'": \\["'"'"${ip}"'"'"]/" /etc/cloudify/ssl/certificate_metadata
    /usr/bin/sed -ri "s/"'"'"manager_addresses"'"'"[^]]+]/"'"'"manager_addresses"'"'": \\["'"'"${ip}"'"'"]/" /etc/cloudify/ssl/certificate_metadata
    echo "Creating internal SSL certificates.."
    cfy_manager create-internal-certs --manager-hostname $(hostname -s)
    if [[ -d "/opt/status-reporter" ]]; then
        echo "Updating status reporter initial IP..."
        cfy_manager status-reporter configure --managers-ip ${ip} --no-restart
    fi
    echo "Done!"
}
# Marker file makes this a run-once operation (e.g. on first boot of an
# image); subsequent starts leave the configured IP alone.
touched_file_path="/opt/cloudify/manager-ip-setter/touched"
if [ ! -f ${touched_file_path} ]; then
    set_manager_ip
    touch ${touched_file_path}
else
    echo "${touched_file_path} exists - not setting manager ip."
fi
|
<reponame>thelegendoflinas/ImageEditor
package com.createchance.imageeditor.drawers;
import com.createchance.imageeditor.shaders.CrossWarpTransShader;
/**
* Cross warp transition drawer.
*
* @author createchance
* @date 2018/12/31
*/
public class CrossWarpTransDrawer extends AbstractTransDrawer {
    @Override
    protected void getTransitionShader() {
        // Install the cross-warp shader; the base drawer drives the draw cycle.
        mTransitionShader = new CrossWarpTransShader();
    }
}
|
import React, { Component } from "react";
import SimpleMDE from "react-simplemde-editor";
class Editor extends Component {
render() {
const toolbarSpecs = [
"bold",
"italic",
"strikethrough",
"|",
"heading",
"code",
"quote",
"|",
"link",
"image",
"table",
"|",
"preview",
"side-by-side",
"fullscreen",
"|",
"guide",
{
name: "attention",
className: "fa fa-info-circle",
title: "Info: Lists aren't working properly atm."
}
];
return (
<div className="editor column column-75 simpleMDE">
<label style={{ fontSize: "2rem", fontWeight: "normal" }}>
Title:{" "}
<input
onChange={this.props.changeNote}
value={this.props.note.title}
style={{
fontSize: "inherit",
border: "none"
}}
id={this.props.note.id}
name="change title"
className="change-title"
/>
</label>
<SimpleMDE
ref="simpleMDE"
onChange={this.props.changeNote}
value={this.props.note.text}
options={{
autofocus: true,
placeholder: "Happy noting... 🙂",
spellChecker: false,
toolbar: toolbarSpecs
}}
/>
</div>
);
}
}
export default Editor;
|
"use strict";
var page = require('webpage').create(),
system = require('system'),
address, output, size, pageWidth, pageHeight;
if (system.args.length < 3 || system.args.length > 5) {
console.log('Usage: rasterize.js URL filename');
slimer.exit(1);
} else {
address = system.args[1];
output = system.args[2];
page.settings.userAgent = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36';
page.settings.javascriptEnabled = false;
page.viewportSize = { width: 1280, height: 1080 };
page.zoomFactor = 1;
page.open(address, function(status){ // executed after loading
if (status !== 'success') {
console.log('Unable to load the address!');
slimer.exit(1);
} else {
// store a screenshot of the page
page.viewportSize =
{ width:1280, height:1080 };
page.render(output,
{onlyViewport:true});
slimer.exit();
}
});
}
|
#!/bin/bash
# Container entrypoint: when launched with the SQL Server binary, run the
# one-time schema/data setup in the background, then exec the real command.
set -e
if [ "$1" = '/opt/mssql/bin/sqlservr' ]; then
  # If this is the container's first run, initialize the application database
  if [ ! -f /tmp/app-initialized ]; then
    # Initialize the application database asynchronously in a background process. This allows a) the SQL Server process to be the main process in the container, which allows graceful shutdown and other goodies, and b) us to only start the SQL Server process once, as opposed to starting, stopping, then starting it again.
    function initialize_app_database() {
      # Wait a bit for SQL Server to start. SQL Server's process doesn't provide a clever way to check if it's up or not, and it needs to be up before we can import the application database
      sleep 15s
      #run the setup script to create the DB and the schema in the DB
      # NOTE(review): the SA password is hard-coded here — consider passing
      # it via an environment variable instead.
      /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P @FavodeMel_2020 -d master -i /scripts/DDL.setup.sql
      /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P @FavodeMel_2020 -d master -i /scripts/DML.setup.sql
      # Note that the container has been initialized so future starts won't wipe changes to the data
      touch /tmp/app-initialized
    }
    initialize_app_database &
  fi
fi
# Hand control to the requested command so signals reach it directly.
exec "$@"
<reponame>TobiasNienhaus/SMCCP<filename>SMCCP/SMCCP/InputCallbackHandler.cpp<gh_stars>0
#include "InputCallbackHandler.h"
// Stores the id under which this handler (de)registers its callbacks.
// NOTE: callbacks are NOT registered here; call initCallbacks() explicitly.
InputCallbackHandler::InputCallbackHandler(std::string callback_id) :
	callback_id(callback_id)
{
}
// Destructor does not clean callbacks; pair initCallbacks()/cleanCallbacks()
// manually — confirm callers do so, or dangling callbacks remain registered.
InputCallbackHandler::~InputCallbackHandler()
{
}
// Register this handler's four input callbacks (left mouse, window close,
// text entry, focus loss) under callback_id so they can be removed later.
void InputCallbackHandler::initCallbacks()
{
	input::addLeftMouseCallback(lMCb, callback_id);
	input::addCloseCallback(cCb, callback_id);
	input::addTextEnteredCallback(tECb, callback_id);
	input::addLostFocusCallback(lFCb, callback_id);
}
// Deregister everything added by initCallbacks(), keyed by callback_id.
void InputCallbackHandler::cleanCallbacks()
{
	input::deleteCloseCallback(callback_id);
	input::deleteLMouseCallback(callback_id);
	input::deleteTextEnteredCallback(callback_id);
	input::deleteLostFocusCallback(callback_id);
}
<gh_stars>0
package financial;
import java.util.Currency;
import org.junit.Test;
import org.junit.Assert;
public class MixedCurrenciesExceptionTest {

    /** The first constructor argument must come back from getCurrency1(). */
    @Test
    public void testCreationCurrency1() throws Exception {
        Currency danishKrone = Currency.getInstance("DKK");
        Currency euro = Currency.getInstance("EUR");
        MixedCurrenciesException exception =
            new MixedCurrenciesException(danishKrone, euro);
        Assert.assertEquals(danishKrone, exception.getCurrency1());
    }

    /** The second constructor argument must come back from getCurrency2(). */
    @Test
    public void testCreationCurrency2() throws Exception {
        Currency danishKrone = Currency.getInstance("DKK");
        Currency euro = Currency.getInstance("EUR");
        MixedCurrenciesException exception =
            new MixedCurrenciesException(danishKrone, euro);
        Assert.assertEquals(euro, exception.getCurrency2());
    }
}
|
<reponame>tmatinde/route-monitor-for-geoevent
package com.esri.ges.processor.vehicleProcessor;
import com.esri.ges.processor.GeoEventProcessorDefinitionBase;
public class VehicleDefinition extends GeoEventProcessorDefinitionBase
{
public VehicleDefinition()
{
;
}
@Override
public String getName()
{
return "VehicleUpdater";
}
@Override
public String getLabel()
{
return "Vehicle Updater";
}
} |
# src/python/detran/pydetranutils/__init__.py
#
# Copyright (C) 2013 <NAME> <<EMAIL>>
# NOTE: Python 2 code (print statements, implicit relative imports).
# Pulls the plotting helpers into the package namespace; any import
# failure (e.g. missing matplotlib) is reported but not fatal.
try :
    print "importing detran mesh plotting utilities..."
    from mesh_plot import *
    print "importing detran quadrature plotting utilities..."
    from quad_plot import *
except :
    # Bare except: deliberately swallows every error so the package
    # still imports without plotting support.
    print("Error importing Detran plotting utilities")
|
# Prompt segment: "@<server-ip> " when the session arrived over SSH
# (third field of SSH_CONNECTION is the local/server address), else nothing.
function remote_info() {
    if [ -n "$SSH_CONNECTION" ]; then
        echo "@$(echo $SSH_CONNECTION | awk '{ print $3 }') "
    fi
}
# Prompt segment: current user name, with "[S]" appended during a sudo
# session. Suppressed when DISABLE_USER_INFO=true — unless we are on SSH,
# where knowing who you are always matters.
function user_info() {
    if [[ "$DISABLE_USER_INFO" == "true" && -z "$SSH_CONNECTION" ]]; then
        return
    fi
    if [ -n "$SUDO_USER" ]; then
        echo "${SUDO_USER}[S] "
    else
        echo "$USER "
    fi
}
# Prompt segment: success/failure symbol for the previous command.
# NOTE: $? must be inspected before any other command runs inside this
# function, so the if-test has to remain the very first statement.
function last_cmd_info() {
    if [[ $? = 0 ]]; then
        echo ${ZSH_THEME_LAST_CMD_SUCCESS}
    else
        echo ${ZSH_THEME_LAST_CMD_FAILED}
    fi
}
# Prompt segment: indicator when this shell was spawned from inside Vim
# (Vim exports $VIM for :terminal / :shell children).
function in_vim_shell() {
    if [ -n "$VIM" ]; then
        echo ${ZSH_THEME_IN_VIM_SHELL}
    fi
}
# Prompt segment: marker when running inside a Docker container.
# Heuristic: on Linux, PID 1's cgroup path has a non-empty trailing
# component inside a container — NOTE(review): fragile on cgroup v2; confirm.
function docker_info() {
    if [[ $OSTYPE =~ "linux" && -n $(sed 's,.*\/$,,' /proc/1/cgroup) ]]; then
        echo "${ZSH_THEME_IN_DOCKER} "
    fi
}
|
#!/bin/sh
# One-shot workstation bootstrap: clone personal/work repos, zsh plugins
# and site checkouts into their conventional locations.
echo "Cloning repositories..."
SITES=$HOME/Sites
GIT=$HOME/GIT
# Standard plugins can be found in ~/.oh-my-zsh/plugins/*
# Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/
ZSH_CUSTOM=$HOME/.oh-my-zsh/custom
# NOTE(review): ZSH_STANDARD is defined but never used below — confirm.
ZSH_STANDARD=$HOME/.oh-my-zsh/
# Personal
git clone git@github.com:mnegreira/Personal.git $GIT/Personal
# Work
git clone git@github.com:mnegreira/Work.git $GIT/Work
# ZSH Plugins
git clone https://github.com/zsh-users/zsh-syntax-highlighting.git $ZSH_CUSTOM/plugins/zsh-syntax-highlighting
git clone https://github.com/zsh-users/zsh-autosuggestions $ZSH_CUSTOM/plugins/zsh-autosuggestions
# Sites
git clone git@github.com:mnegreira/durnoasociados.com.git $SITES/durnoasociados.com
|
<!-- Simple contact/registration form.
     NOTE(review): no action/method attributes, so it submits a GET to the
     current URL — confirm whether a handler endpoint is intended. -->
<form>
  <div>
    <label for="name">Name:</label>
    <input type="text" name="name" id="name"/>
  </div>
  <div>
    <label for="email">Email:</label>
    <input type="email" name="email" id="email"/>
  </div>
  <div>
    <label for="dob">Date of Birth:</label>
    <input type="date" name="dob" id="dob"/>
  </div>
  <div>
    <input type="submit" value="Submit"/>
  </div>
</form>
// NOTE: this entire React Native sentiment-analysis bridge module is commented out (disabled); delete it or re-enable it deliberately rather than leaving it as dead text.
//package com.auta;
//
//import com.auta.sentiment.SentimentAnalyzer;
//import com.auta.sentiment.SentimentResult;
//import com.facebook.react.bridge.Callback;
//import com.facebook.react.bridge.ReactApplicationContext;
//import com.facebook.react.bridge.ReactContextBaseJavaModule;
//import com.facebook.react.bridge.ReactMethod;
//
//public class SentimentAnalysis extends ReactContextBaseJavaModule {
//
// public SentimentAnalysis(ReactApplicationContext reactContext){
// super(reactContext);
// }
//
// @Override
// public String getName(){
// return "Sentiment";
// }
//
// /*
// * "very negative" = 0 "Negative" = 1 "Neutral" = 2 "Positive" = 3
// * "very positive" = 4
// */
// @ReactMethod
// public void analyze(String sentence, Callback errorCallback, Callback successCallback){
// SentimentAnalyzer sentimentAnalyzer = new SentimentAnalyzer();
// sentimentAnalyzer.initialize();
// SentimentResult sentimentResult = sentimentAnalyzer.getSentimentResult(sentence);
//
// successCallback.invoke((int) sentimentResult.getSentimentScore());
// }
//}
|
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.routing.fpm.protocol;
import com.google.common.base.MoreObjects;
import org.onlab.packet.DeserializationException;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.Ip6Address;
import org.onlab.packet.IpAddress;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
/**
* Destination address route attribute.
*/
public final class RouteAttributeDst extends RouteAttribute {

    private final IpAddress dstAddress;

    /**
     * Class constructor.
     *
     * @param length length
     * @param type type
     * @param dstAddress destination address
     */
    private RouteAttributeDst(int length, int type, IpAddress dstAddress) {
        super(length, type);
        this.dstAddress = dstAddress;
    }

    /**
     * Returns the destination IP address.
     *
     * @return destination IP address
     */
    public IpAddress dstAddress() {
        return dstAddress;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("type", type())
                .add("length", length())
                .add("dstAddress", dstAddress)
                .toString();
    }

    /**
     * Returns a decoder for a destination address route attribute.
     * The IP version is inferred from the payload length; anything that is
     * neither 4 nor 16 bytes is rejected.
     *
     * @return destination address route attribute decoder
     */
    public static RouteAttributeDecoder<RouteAttributeDst> decoder() {
        return (int length, int type, byte[] value) -> {
            final IpAddress.Version version;
            if (value.length == Ip4Address.BYTE_LENGTH) {
                version = IpAddress.Version.INET;
            } else if (value.length == Ip6Address.BYTE_LENGTH) {
                version = IpAddress.Version.INET6;
            } else {
                throw new DeserializationException("Invalid address length");
            }
            return new RouteAttributeDst(length, type,
                    IpAddress.valueOf(version, value));
        };
    }

    /**
     * Encode the RouteAttributeDst contents into the ChannelBuffer.
     * The declared attribute length (header + address bytes) selects how
     * many octets of the address are written.
     *
     * @param cb channelbuffer to be filled in
     */
    @Override
    public void encode(ChannelBuffer cb) {
        super.encode(cb);
        final ChannelBuffer payload =
                ChannelBuffers.copiedBuffer(dstAddress.toOctets());
        final int headerLength = RouteAttribute.ROUTE_ATTRIBUTE_HEADER_LENGTH;
        if (length() == Ip6Address.BYTE_LENGTH + headerLength) {
            cb.writeBytes(payload, Ip6Address.BYTE_LENGTH);
        } else if (length() == Ip4Address.BYTE_LENGTH + headerLength) {
            cb.writeBytes(payload, Ip4Address.BYTE_LENGTH);
        } else {
            throw new IllegalArgumentException("Dst address length incorrect!");
        }
    }

    /**
     * Returns a new RouteAttributeDst builder.
     *
     * @return RouteAttributeDst builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * RouteAttributeDst Builder.
     */
    public static final class Builder extends RouteAttribute.Builder<Builder> {

        private IpAddress dstAddress = null;

        /**
         * Hide class constructor.
         */
        private Builder() {}

        /**
         * Override abstract method.
         */
        @Override
        public Builder getThis() {
            return this;
        }

        /**
         * Sets dstAddress for RouteAttributeDst that will be built.
         *
         * @param dstAddress to use for built RouteAttributeDst
         * @return this builder
         */
        public Builder dstAddress(IpAddress dstAddress) {
            this.dstAddress = dstAddress;
            return this;
        }

        /**
         * Builds the RouteAttributeDst.
         *
         * @return RouteAttributeDst reference
         */
        public RouteAttributeDst build() {
            return new RouteAttributeDst(length, type, dstAddress);
        }
    }
}
|
<reponame>foliveira/homebrew-cask<gh_stars>0
cask 'sourcetree' do
  # Older OS X releases are pinned to the last compatible 1.x build.
  if MacOS.release <= :snow_leopard
    version '1.8.1'
    sha256 '37a42f2d83940cc7e1fbd573a70c3c74a44134c956ac3305f6b153935dc01b80'
  else
    version '2.2.4'
    sha256 '068b594ca1612a08c575545baa363741a1024b67e3d3a86747c84f52a0c4e032'
  end
  # atlassian.com was verified as official when first introduced to the cask
  url "https://downloads.atlassian.com/software/sourcetree/SourceTree_#{version}.dmg"
  appcast 'https://www.sourcetreeapp.com/update/SparkleAppcast.xml',
          checkpoint: '281824c0f503c7db534e0ba7684606ab2e3810787154f91096bcf203d344596b'
  name 'Atlassian SourceTree'
  homepage 'https://www.sourcetreeapp.com/'
  license :gratis
  auto_updates true
  app 'SourceTree.app'
  # Also expose the bundled 'stree' command-line launcher.
  binary 'SourceTree.app/Contents/Resources/stree'
  uninstall launchctl: 'com.atlassian.SourceTreePrivilegedHelper2'
  zap delete: [
               '~/Library/Application Support/SourceTree',
               '~/Library/Caches/com.torusknot.SourceTreeNotMAS',
              ]
  caveats do
    files_in_usr_local
  end
end
|
import { Component } from '@angular/core';
// Page shell: renders a "Join League" heading plus the form component.
@Component({
    template: `
    Join League
    <join-league-form></join-league-form>
    `
})
export class JoinLeagueComponent{
}
|
package lit.litfx.core.components.fire;
/**
* Object dedicated to computing the fire convolution for a row of pixels
* Does not convert pixel to RGB palette value, only provides final convolved
* pixel array via int [] pixelValues.
* @author Birdasaur
*/
public class FireConvolution {
    // Row index this instance is responsible for.
    int y;
    int canvasHeight;
    int canvasWidth;
    public int [] pixelValues; //the results from the convolution
    public FireConvolution(int canvasHeight, int canvasWidth, int y) {
        this.canvasHeight = canvasHeight;
        this.canvasWidth = canvasWidth;
        this.y = y;
        pixelValues = new int[canvasWidth];
    }
    // Convolve row y of the flat (row-major, width*height) fire buffer:
    // each output pixel averages four neighbours from the rows below
    // (rows wrap modulo canvasHeight, columns modulo canvasWidth).
    // Results are written both back into fire[] and into pixelValues.
    public void convolve(int [] fire) {
        int a, b, shiftedValue, row;
        row = y * canvasWidth;
        int fireIndex1, fireIndex2; //column oriented values computed in outer loop
        fireIndex1 = ((y + 2) % canvasHeight) * canvasWidth;
        fireIndex2 = ((y + 3) % canvasHeight * canvasWidth);
        for (int x = 0; x < canvasWidth; x++) {
            a = (y + 1) % canvasHeight * canvasWidth;
            b = x % canvasWidth;
            shiftedValue = (
                (fire[a + ((x - 1 + canvasWidth) % canvasWidth)] //fireIndex0
                + fire[fireIndex1 + b] //fireIndex1
                + fire[a + ((x + 1) % canvasWidth)] //fireIndex2
                + fire[fireIndex2 + b]) //fireIndex3
                    << 7); //multiply by constant 128
            // divide by constant 513
            // (v<<9 - v) >> 18 == v*511/2^18, a shift-only approximation
            // of sum*128/513 that avoids an integer division per pixel.
            fire[row + x] = pixelValues[x] = ((shiftedValue << 9) - shiftedValue) >> 18;
        }
    }
}
<gh_stars>0
/*
* Original PDFKit - outline.js
* Translated to ts by <NAME>
*/
import { PDFDocument } from './document';
import { PDFReference } from './reference';
export class PDFOutline {
  dictionary: PDFReference;
  children: PDFOutline[];

  private readonly _document: PDFDocument;
  private _options: { expanded: boolean };
  private readonly _outlineData: {
    Dest?: any[];
    Parent?: PDFReference;
    Title?: string;
    First?: PDFReference;
    Last?: PDFReference;
    Count?: number;
    Prev?: PDFReference;
    Next?: PDFReference;
  };

  constructor(document, parent, title, dest, options = { expanded: false }) {
    this._document = document;
    this._options = options;
    this._outlineData = {};

    // All dictionary entries are optional — only record what was supplied.
    if (dest !== null) {
      this._outlineData.Dest = [dest._dictionary, 'Fit'];
    }
    if (parent !== null) {
      this._outlineData.Parent = parent;
    }
    if (title !== null) {
      this._outlineData.Title = String(title);
    }

    this.dictionary = this._document.ref(this._outlineData);
    this.children = [];
  }

  /** Append a child outline entry pointing at the document's current page. */
  addItem(title, options = { expanded: false }) {
    const child = new PDFOutline(
      this._document,
      this.dictionary,
      title,
      this._document.page,
      options
    );
    this.children.push(child);
    return child;
  }

  /**
   * Link the children into a First/Last + Prev/Next chain, recurse into
   * each child, then finalize this entry's PDF dictionary.
   */
  endOutline() {
    const kids = this.children;
    if (kids.length > 0) {
      if (this._options.expanded) {
        this._outlineData.Count = kids.length;
      }
      this._outlineData.First = kids[0].dictionary;
      this._outlineData.Last = kids[kids.length - 1].dictionary;

      kids.forEach((child, index) => {
        if (index > 0) {
          child._outlineData.Prev = kids[index - 1].dictionary;
        }
        if (index < kids.length - 1) {
          child._outlineData.Next = kids[index + 1].dictionary;
        }
        child.endOutline();
      });
    }
    return this.dictionary.end();
  }
}
|
package com.company.project.common.exception;
import com.company.project.common.result.ResultCode;
/**
* Created with IntelliJ IDEA.
* Description:
* 权限不足异常
* @author LErry.li
* Date: 2018-06-15
* Time: 14:41
*/
public class PermissionForbiddenException extends BusinessException {
    private static final long serialVersionUID = 3721036867889297081L;
    public PermissionForbiddenException() {
        super();
    }
    // NOTE(review): this overload assigns the inherited field directly and
    // never calls a super constructor with the data — unlike the
    // (ResultCode, Object) overload below. Confirm this asymmetry is intended.
    public PermissionForbiddenException(Object data) {
        super.data = data;
    }
    public PermissionForbiddenException(ResultCode resultCode) {
        super(resultCode);
    }
    public PermissionForbiddenException(ResultCode resultCode, Object data) {
        super(resultCode, data);
    }
    public PermissionForbiddenException(String msg) {
        super(msg);
    }
    // Varargs overload: msg is treated as a format template by the parent.
    public PermissionForbiddenException(String formatMsg, Object... objects) {
        super(formatMsg, objects);
    }
}
|
import torch
def test(model, test_loader, device):
# send model to device
model.eval()
model.to(device)
# Summarize results
lbls = []
pred = []
correct = 0
total = 0
with torch.no_grad():
# Iterate through data
for inputs, labels in test_loader:
inputs = inputs.to(device)
labels = labels.to(device)
outputs = model(inputs)
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += (predicted == labels).sum().item()
# Print results
print('Accuracy of the network on the {} test images: {}'.format(total, (100 * correct / total)))
# Return results
return correct/total, lbls, pred |
#!/usr/bin/env bash
# Installs the `eta/tensorflow/models` submodule.
#
# Copyright 2017-2022, Voxel51, Inc.
# voxel51.com
#
# Flags: -s skips re-cloning the models repository.
SKIP_CLONE=false
while getopts "s" FLAG; do
case "${FLAG}" in
s) SKIP_CLONE=true ;;
esac
done
# git is required to clone the models repository.
# NOTE(review): bare 'exit' returns status 0 even on this failure — confirm intended.
if ! command -v git &> /dev/null; then
echo "You must install 'git' in order to run this script"
exit
fi
# Resolve the directory containing this script; models are cloned next to it.
TENSORFLOW_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
MODELS_DIR="${TENSORFLOW_DIR}/models"
if [ ${SKIP_CLONE} = false ]; then
# A fresh clone replaces any existing checkout.
if [ -d "${MODELS_DIR}" ]; then
echo "Deleting existing directory ${MODELS_DIR}"
rm -rf "${MODELS_DIR}"
fi
echo "Cloning https://github.com/voxel51/models"
git clone https://github.com/voxel51/models "${MODELS_DIR}"
fi
cd "${MODELS_DIR}"
echo "Installing protobuf"
pip install protobuf
# Install the protoc compiler (v3.7.1) if it is not already on PATH.
if command -v protoc &> /dev/null; then
echo "Found protoc"
else
echo "Installing protoc"
if [ $(uname -s) == "Darwin" ]; then
PROTOC_ZIP=protoc-3.7.1-osx-x86_64.zip
else
PROTOC_ZIP=protoc-3.7.1-linux-x86_64.zip
fi
# NOTE(review): unzipping into /usr/local may require elevated privileges.
curl -OL https://github.com/google/protobuf/releases/download/v3.7.1/${PROTOC_ZIP}
unzip -o ${PROTOC_ZIP} -d /usr/local bin/protoc
unzip -o ${PROTOC_ZIP} -d /usr/local include/*
rm -f ${PROTOC_ZIP}
fi
# Generate Python modules for the object-detection protos.
echo "Compiling protocol buffers"
protoc research/object_detection/protos/*.proto \
--proto_path=research \
--python_out=research
echo "Installing tf_slim"
pip install tf_slim
echo "Installation complete"
#!/bin/bash
set +ex
#@--- Function to setup the cluster ---@#
# Provisions the development cluster, but only when CI is running on the
# 'develop' branch (Travis or GitHub Actions); no-op otherwise.
# Requires SPACES_ACCESS_KEY/SPACES_SECRET_KEY, BACKEND_BUCKET_DEV_ENV,
# STATE_FILE_DEV_ENV and the -var values below to be exported by CI.
set_up_cluster_dev_env() {
if [[ $TRAVIS_BRANCH == "develop" ]] || [[ $GITHUB_REF == "refs/heads/develop" ]]; then
#@--- Initialize terraform ---@#
echo " ----- inititalize the backend --------- "
terraform init -backend-config "bucket=$BACKEND_BUCKET_DEV_ENV" \
-backend-config "key=$STATE_FILE_DEV_ENV" \
-backend-config "access_key=$SPACES_ACCESS_KEY" \
-backend-config "secret_key=$SPACES_SECRET_KEY"
#@--- Run terraform command to plan infrastructure ---@#
echo "----- show plan -------------------"
terraform plan -lock=false -var "cluster_name=$CLUSTER_NAME_DEV_ENV" \
-var "cluster_region=$CLUSTER_REGION" \
-var "kubernetes_version=$K8S_VERSION" \
-var "node_type=$NODE_TYPE" \
-var "max_node_number=$MAX_NODE_NUM" \
-var "min_node_number=$MIN_NODE_NUM" \
-var "digital_ocean_token=$SERVICE_ACCESS_TOKEN" \
-var "db_size=$DB_SIZE" \
-var "postgres_version=$PG_VERSION" \
-var "db_name=$DB_NAME_DEV_ENV" \
-var "tags=$PROJECT_NAME"
#@--- Apply the changes ---@#
echo "+++++ Apply infrastructure ++++++++++"
# NOTE(review): the trailing '|| echo' keeps CI green when resources already
# exist, but it also masks genuine apply failures — confirm this is intended.
terraform apply -lock=false -auto-approve -var "cluster_name=$CLUSTER_NAME_DEV_ENV" \
-var "cluster_region=$CLUSTER_REGION" \
-var "kubernetes_version=$K8S_VERSION" \
-var "node_type=$NODE_TYPE" \
-var "max_node_number=$MAX_NODE_NUM" \
-var "min_node_number=$MIN_NODE_NUM" \
-var "digital_ocean_token=$SERVICE_ACCESS_TOKEN" \
-var "db_size=$DB_SIZE" \
-var "postgres_version=$PG_VERSION" \
-var "db_name=$DB_NAME_DEV_ENV" \
-var "tags=$PROJECT_NAME" \
|| echo "Resources exist"
fi
}
#@--- Function to setup staging cluster ---@#
# Provisions the staging cluster, but only on the 'staging' branch.
# Unlike the dev-env variant, plan/apply are targeted at the
# digitalocean_kubernetes_cluster.cluster resource only.
set_up_cluster_staging() {
if [[ $TRAVIS_BRANCH == "staging" ]] || [[ $GITHUB_REF == "refs/heads/staging" ]]; then
#@--- Initialize terraform ---@#
echo " +++++++ Initialize the backend ++++++++++ "
terraform init -backend-config "bucket=$BACKEND_BUCKET_STAGING" \
-backend-config "key=$STATE_FILE_STAGING" \
-backend-config "access_key=$SPACES_ACCESS_KEY" \
-backend-config "secret_key=$SPACES_SECRET_KEY"
#@--- Run terraform command to plan infrastructure ---@#
echo "----- show plan -------------------"
terraform plan -lock=false -target=digitalocean_kubernetes_cluster.cluster \
-var "cluster_name=$CLUSTER_NAME_STAGING" \
-var "cluster_region=$CLUSTER_REGION" \
-var "kubernetes_version=$K8S_VERSION_STAGING" \
-var "node_type=$NODE_TYPE" \
-var "max_node_number=$MAX_NODE_NUM" \
-var "min_node_number=$MIN_NODE_NUM" \
-var "digital_ocean_token=$SERVICE_ACCESS_TOKEN" \
-var "db_size=$DB_SIZE" \
-var "postgres_version=$PG_VERSION" \
-var "db_name=$DB_NAME_STAGING" \
-var "tags=$PROJECT_NAME"
#@--- Apply the changes ---@#
echo "+++++ Apply infrastructure ++++++++++"
# NOTE(review): '|| echo' masks real apply failures as well as
# already-exists conditions — confirm intended.
terraform apply -lock=false -auto-approve -target=digitalocean_kubernetes_cluster.cluster \
-var "cluster_name=$CLUSTER_NAME_STAGING" \
-var "cluster_region=$CLUSTER_REGION" \
-var "kubernetes_version=$K8S_VERSION_STAGING" \
-var "node_type=$NODE_TYPE" \
-var "max_node_number=$MAX_NODE_NUM" \
-var "min_node_number=$MIN_NODE_NUM" \
-var "digital_ocean_token=$SERVICE_ACCESS_TOKEN" \
-var "db_size=$DB_SIZE" \
-var "postgres_version=$PG_VERSION" \
-var "db_name=$DB_NAME_STAGING" \
-var "tags=$PROJECT_NAME" \
|| echo "Resources exist"
fi
}
#@--- Function to setup production cluster ---@#
# Provisions the production cluster when CI runs for a GitHub release
# event or a Travis tag build. Targeted at the cluster resource only.
set_up_cluster_prod() {
if [[ $GITHUB_EVENT_NAME == "release" ]] || [[ ! -z $TRAVIS_TAG ]]; then
#@--- Initialize terraform ---@#
echo " ----- inititalize the backend --------- "
terraform init -lock=false -backend-config "bucket=$BACKEND_BUCKET_PROD" \
-backend-config "key=$STATE_FILE_PROD" \
-backend-config "access_key=$SPACES_ACCESS_KEY" \
-backend-config "secret_key=$SPACES_SECRET_KEY"
#@--- Run terraform command to plan infrastructure ---@#
echo "----- show plan -------------------"
terraform plan -lock=false -target=digitalocean_kubernetes_cluster.cluster \
-var "cluster_name=$CLUSTER_NAME_PROD" \
-var "cluster_region=$CLUSTER_REGION" \
-var "kubernetes_version=$K8S_VERSION_PROD" \
-var "node_type=$NODE_TYPE" \
-var "max_node_number=$MAX_NODE_NUM" \
-var "min_node_number=$MIN_NODE_NUM" \
-var "digital_ocean_token=$SERVICE_ACCESS_TOKEN" \
-var "db_size=$DB_SIZE" \
-var "postgres_version=$PG_VERSION" \
-var "db_name=$DB_NAME_PROD" \
-var "tags=$PROJECT_NAME"
#@--- Apply the changes ---@#
echo "+++++ Apply infrastructure ++++++++++"
# NOTE(review): '|| echo' masks real apply failures as well as
# already-exists conditions — confirm intended.
terraform apply -lock=false -auto-approve -target=digitalocean_kubernetes_cluster.cluster \
-var "cluster_name=$CLUSTER_NAME_PROD" \
-var "cluster_region=$CLUSTER_REGION" \
-var "kubernetes_version=$K8S_VERSION_PROD" \
-var "node_type=$NODE_TYPE" \
-var "max_node_number=$MAX_NODE_NUM" \
-var "min_node_number=$MIN_NODE_NUM" \
-var "digital_ocean_token=$SERVICE_ACCESS_TOKEN" \
-var "db_size=$DB_SIZE" \
-var "postgres_version=$PG_VERSION" \
-var "db_name=$DB_NAME_PROD" \
-var "tags=$PROJECT_NAME" \
|| echo "Resources exist"
fi
}
#@--- Main function ---@#
# Entry point: runs from the infrastructure/ directory and provisions
# whichever environment matches the current branch/tag. Pull-request
# builds never touch infrastructure.
main() {
cd infrastructure
if [[ $TRAVIS_EVENT_TYPE != "pull_request" ]]; then
#@--- Run the setup dev-env cluster function ---@#
set_up_cluster_dev_env
#@--- Run function for staging cluster ---@#
set_up_cluster_staging
#@--- Run the prod setup function ---@#
set_up_cluster_prod
fi
}
#@--- Run the main function ---@#
main
|
<filename>src/pages/QuizPuzzle/components/UserAnswers/index.tsx<gh_stars>1-10
import React from 'react';
import { v4 as uuidv4 } from 'uuid';
import useNextCard from '../../logic/useNextCard';
import {
Score,
QuestionList,
QuestionCard,
Question,
AnswersList,
Answer,
AnswerInfo,
UserAnswer,
CorrectAnswer,
} from './UserAnswers.styles';
// Props for the quiz results screen.
interface IProps {
userAnswers: string[];
score: number;
}
/**
 * Renders the quiz summary: the total score plus, for each question,
 * the correct answer (split into per-word chips) and the user's own
 * answer. The user's answer gets the 'correct'/'uncorrect' class by
 * exact string comparison with correctAnswer, index-aligned with the
 * question list returned by useNextCard().
 */
const UserAnswers = ({ userAnswers, score }: IProps) => {
// NOTE(review): assumes columnsFromBackend.answers.data entries expose
// id, question and correctAnswer — confirm against useNextCard.
const { columnsFromBackend } = useNextCard();
return (
<>
<Score>Ти набрав {score} балів</Score>
<QuestionList>
{columnsFromBackend.answers.data.map(
({ id, question, correctAnswer }, idx) => {
return (
<QuestionCard key={id}>
<div>
<Question>{question}</Question>
<AnswersList>
{correctAnswer.split(' ').map(word => (
<Answer key={uuidv4()}>{word}</Answer>
))}
</AnswersList>
</div>
<AnswerInfo>
<UserAnswer
className={
userAnswers[idx] === correctAnswer
? 'correct'
: 'uncorrect'
}
>
Твоя відповідь: <span>{userAnswers[idx]}</span>
</UserAnswer>
<CorrectAnswer>
Вірна відповідь: <br /> {correctAnswer}
</CorrectAnswer>
</AnswerInfo>
</QuestionCard>
);
},
)}
</QuestionList>
</>
);
};
export default UserAnswers;
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.course.nodes.co;
import static org.olat.core.gui.components.util.SelectionValues.entry;
import java.util.HashSet;
import java.util.Set;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.form.flexible.FormItem;
import org.olat.core.gui.components.form.flexible.FormItemContainer;
import org.olat.core.gui.components.form.flexible.elements.MultipleSelectionElement;
import org.olat.core.gui.components.form.flexible.impl.FormBasicController;
import org.olat.core.gui.components.form.flexible.impl.FormEvent;
import org.olat.core.gui.components.util.SelectionValues;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.WindowControl;
/**
*
* Initial date: 6 Aug 2019<br>
* @author uhensler, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Form controller that lets a course author choose which course member
 * groups (owners, coaches, participants) receive a contact e-mail.
 * Fires FormEvent.CHANGED_EVENT whenever a valid selection changes.
 */
public class COToolRecipientsController extends FormBasicController {
/** Selectable recipient groups with their i18n label keys. */
public enum Recipients {
owners("tool.recipients.owners"),
coaches("tool.recipients.coaches"),
participants("tool.recipients.participants");
private final String i18nKey;
private Recipients(String i18nKey) {
this.i18nKey = i18nKey;
}
public String getI18nKey() {
return i18nKey;
}
}
private MultipleSelectionElement recipientsEl;
private final Config config;
public COToolRecipientsController(UserRequest ureq, WindowControl wControl, Config config) {
super(ureq, wControl);
this.config = config;
initForm(ureq);
}
/** Returns the currently checked recipient groups. */
public Set<Recipients> getSelectedRecipients() {
Set<Recipients> recipients = new HashSet<>();
for (String key : recipientsEl.getSelectedKeys()) {
// Checkbox keys are enum names (see initForm), so valueOf is safe here.
recipients.add(Recipients.valueOf(key));
}
return recipients;
}
@Override
protected void initForm(FormItemContainer formLayout, Controller listener, UserRequest ureq) {
setFormTitle("tool.title");
// One checkbox per recipient group, keyed by enum name, labeled via i18n.
SelectionValues recipientKV = new SelectionValues();
for (Recipients recipient : Recipients.values()) {
recipientKV.add(entry(recipient.name(), translate(recipient.getI18nKey())));
}
recipientsEl = uifactory.addCheckboxesHorizontal("tool.recipients", formLayout, recipientKV.keys(), recipientKV.values());
recipientsEl.setEnabled(config.isRecipientsEnabled());
// Pre-select the groups provided by the configuration.
for (Recipients recipients : config.getInitialRecipients()) {
recipientsEl.select(recipients.name(), true);
}
recipientsEl.addActionListener(FormEvent.ONCHANGE);
}
/** Disables the checkboxes so the selection can only be viewed. */
public void setReadOnly() {
recipientsEl.setEnabled(false);
}
@Override
protected void formInnerEvent(UserRequest ureq, FormItem source, FormEvent event) {
// Only propagate changes that pass validation (at least one selection).
if (source == recipientsEl) {
boolean valid = validateFormLogic(ureq);
if (valid) {
fireEvent(ureq, FormEvent.CHANGED_EVENT);
}
}
super.formInnerEvent(ureq, source, event);
}
@Override
protected boolean validateFormLogic(UserRequest ureq) {
boolean allOk = super.validateFormLogic(ureq);
recipientsEl.clearError();
// At least one recipient group must be selected.
if (!recipientsEl.isAtLeastSelected(1)) {
recipientsEl.setErrorKey("tool.recipients.mandatory", null);
allOk = false;
}
return allOk;
}
@Override
protected void formOK(UserRequest ureq) {
//
}
/** Immutable initial state for the controller: enablement + pre-selection. */
static class Config {
private final boolean recipientsEnabled;
private final Recipients[] initialRecipients;
Config(boolean recipientsEnabled, Recipients[] recipients) {
this.recipientsEnabled = recipientsEnabled;
this.initialRecipients = recipients;
}
private boolean isRecipientsEnabled() {
return recipientsEnabled;
}
private Recipients[] getInitialRecipients() {
return initialRecipients;
}
}
}
|
<reponame>aamadeo27/aol<gh_stars>0
package me.aamadeo.aol.optimization;
import java.io.File;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import javax.persistence.*;
import me.aamadeo.aol.ag.Individual;
import me.aamadeo.aol.on.*;
@Entity
@Table(name="Solution")
/**
 * A candidate solution of the genetic algorithm: assigns one Service
 * (a routed path) to every Request of the Scenario. Fitness is the
 * negated network cost; solutions serving fewer requests always lose.
 */
@Entity
@Table(name="Solution")
public class Solution implements Individual, Comparable<Individual> {
// NOTE(review): hard-coded per-machine path for graph output — consider
// making this configurable.
public static final String BASEDIR="C:/Users/albert/aol/graph";
// Monotonic counter used to give each in-memory solution a unique seq.
private static int solucionSeq = 1;
@Id
@GeneratedValue
private long id;
@Transient
private long seq;
private String nombre;
// Double.MIN_VALUE marks "cost not yet calculated".
private double cost = Double.MIN_VALUE;
@Transient
private Map<Request,Service> serviciosPorRequest = new HashMap<Request,Service>();
@OneToMany(cascade=CascadeType.ALL)
private Set<Service> services = null;
@ManyToOne(cascade=CascadeType.ALL)
private Scenario scenario = null;
// Number of requests left without a routed path after cost calculation.
private int requestNotServed = 0;
public Solution(){
this.seq = Solution.solucionSeq++;
this.services = new TreeSet<Service>();
}
/**
 * Recombines this solution with another by crossing over the service
 * of each request pairwise; the child's cost is computed immediately.
 */
public Individual crossover(Individual i) {
if(! (i instanceof Solution) ) return null;
Solution padreB = (Solution) i;
Solution hijo = new Solution();
hijo.setScenario(this.scenario);
// Reset network state before re-routing the child's services.
scenario.getNetwork().initialize();
for(Request Request: scenario.getRequests()){
Service servA = this.getServicio(Request);
Service servB = padreB.getServicio(Request);
Service servHijo = servA.crossover(servB);
hijo.addServicio(Request,servHijo);
}
hijo.calculateCost();
return hijo;
}
/**
 * Computes total cost: per-km link cost for every distinct hop plus the
 * cost of every node touched; also counts requests with no path.
 */
private void calculateCost(){
double cost = 0.0;
HashSet<Node> usedNodes = new HashSet<Node>();
HashSet<Link> linksUsed = new HashSet<Link>();
requestNotServed = 0;
for(Service service : services){
Node current = service.getRequest().getOrigin();
usedNodes.add(current);
if (service.getPath() == null){
requestNotServed++;
continue;
}
for(Hop s: service.getPath().getHops()){
current = s.getDestination();
if ( ! usedNodes.contains(current) ){
usedNodes.add(current);
}
if ( ! linksUsed.contains(s.getLink()) ){
linksUsed.add(s.getLink());
}
// NOTE(review): link cost is added once per hop traversal, while
// nodes are only charged once — confirm links should not be deduped
// via linksUsed before charging.
cost += s.getLink().getCost() * Network.COST_KM;
}
}
for (Node Node: usedNodes){
cost += Node.getCost();
}
this.cost = cost;
}
/** Mutates each service with probability 0.21, then recomputes the cost. */
public void mutate() {
for(Service s: services){
if ( Math.random() < 0.21) s.mutate();
}
calculateCost();
}
/** Builds a fresh random routing for every request of the scenario. */
public void random() {
scenario.getNetwork().initialize();
serviciosPorRequest.clear();
services.clear();
for(Request Request: scenario.getRequests()){
Service service = new Service(Request);
service.random();
addServicio(Request, service);
}
calculateCost();
}
// Exposes the transient sequence number, not the JPA id.
public long getId() {
return seq;
}
public void setId(long seq) {
this.seq = seq;
}
public Service getServicio(Request Request) {
return serviciosPorRequest.get(Request);
}
public Set<Service> getServices() {
return services;
}
/** Registers a service for a request; silently ignores duplicates. */
public void addServicio(Request Request, Service service){
if(serviciosPorRequest.containsKey(Request)) return;
serviciosPorRequest.put(Request, service);
services.add(service);
}
public void setServices(Set<Service> services) {
this.services = services;
// Rebuild the transient request->service index after loading.
for(Service service : services){
serviciosPorRequest.put(service.getRequest(), service);
}
}
public Scenario getScenario() {
return scenario;
}
public void setScenario(Scenario aScenario) {
this.scenario = aScenario;
this.nombre = aScenario.getName() + "_sol" + this.seq;
}
/** Draws one graph per service plus a usage chart under BASEDIR/<name>. */
public void genGraphs() {
String dir = BASEDIR + "/" + this.nombre;
File dirFile = new File(dir);
dirFile.mkdir();
for(Service s : services){
scenario.getNetwork().drawService(s, dir,null);
}
scenario.getNetwork().usage(dir,"");
}
public void save(String dir) {
for(Service s : services){
s.save(dir);
}
}
@Override
public String toString() {
return nombre+":"+ cost +":"+requestNotServed;
}
public void setNombre(String nombre) {
this.nombre = nombre;
}
public String getNombre() {
return nombre;
}
// Orders same-type solutions by creation sequence, not fitness.
public int compareTo(Solution s){
return (int) (this.seq - s.seq);
}
public int compareTo(Individual i) {
Solution b = (Solution) i;
/*
 * If either SolutionA or SolutionB doesn't serves every Request of the scenario
 * the solution that serves more request it's the fittest
 */
if (b.requestNotServed != 0 || this.requestNotServed != 0){
return this.requestNotServed - b.requestNotServed;
}
// Cheaper solutions compare greater. NOTE(review): confirm this ordering
// direction matches what the selection operator expects.
return (int) (10000*(b.cost - this.cost));
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof Solution)) return false;
return equals((Solution)obj);
}
// NOTE(review): equals is overridden without hashCode — hash-based
// collections will treat equal solutions as distinct.
public boolean equals(Solution b){
return this.seq == b.seq;
}
@Override
public Object clone() throws CloneNotSupportedException {
// Deep-ish copy: new Service objects sharing the original Path instances.
Solution clone = new Solution();
clone.setScenario(scenario);
for(Request Request : scenario.getRequests()){
Service originalService = serviciosPorRequest.get(Request);
Service clonService = new Service(Request);
clonService.setPath(originalService.getPath());
clone.addServicio(Request, clonService);
}
return clone;
}
// Fitness is the negated cost: lower cost means fitter.
public double getFitness() {
return -cost;
}
public void setCost(double cost) {
this.cost = cost;
}
public double getCost(){
return this.cost;
}
}
|
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: <NAME>
*/
package com.tzavellas.coeus.core.error
import com.tzavellas.coeus.mvc.view.View
import com.tzavellas.coeus.core.RequestContext
/**
 * A handler that gets called when an uncaught exception occurs during
 * request execution.
 *
 * <p>Instances of this class must be thread-safe: a single handler may
 * be invoked concurrently for many requests.</p>
 */
trait ExceptionHandler {
/**
 * Handle the error that occurred during the execution of the specified
 * request.
 *
 * <p>Note that the returned <code>View</code> might not get used if
 * a response is already sent to the client.</p>
 *
 * @param context the context of the current request
 * @return a view that will be used to render the response
 */
def handle(context: RequestContext): View
}
/**
 * Contains factory methods for creating {@code ExceptionHandler} instances.
 */
object ExceptionHandler {
/**
 * Returns a handler that can be used to propagate all the uncaught exceptions
 * to the servlet container.
 *
 * <p>If the exception that occurred is an instance of {@code HttpException}
 * then the returned handler also sets the HTTP status code of the response
 * using the {@link HttpException#httpStatus} field of the exception.</p>
 *
 * <p>The returned handler will always return {@link ErrorPageView}.</p>
 */
def defaultHandler(servletName: String): ExceptionHandler = {
new ExceptionHandler {
def handle(context: RequestContext) = {
ErrorUtils.setupErrorPageAttributes(context.request, context.error, servletName)
ErrorUtils.setResposeStatus(context.response, context.error)
ErrorPageView
}
}
}
/**
 * Create a handler that will use the specified partial function to find a view
 * for the uncaught exception.
 *
 * <p>If a view cannot be found then the handler will return {@link ErrorPageView}.</p>
 *
 * <p>If the exception that occurred is an instance of {@code HttpException}
 * then the returned handler also sets the HTTP status code of the response
 * using the {@link com.tzavellas.coeus.HttpException#httpStatus httpStatus} field
 * of the exception.</p>
 */
def forServlet(servletName: String)(errorViewMap: PartialFunction[Throwable, View]): ExceptionHandler = {
// Fall back to the generic error page when the map has no view for the error.
val views: Throwable => View = errorViewMap orElse { case _ => ErrorPageView }
new ExceptionHandler {
def handle(context: RequestContext) = {
ErrorUtils.setupErrorPageAttributes(context.request, context.error, servletName)
ErrorUtils.setResposeStatus(context.response, context.error)
views(context.error)
}
}
}
}
package org.trenkmann.halforms.controller;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyLong;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Arrays;
import java.util.Optional;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.BDDMockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Import;
import org.springframework.hateoas.MediaTypes;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
import org.trenkmann.halforms.config.HypermediaConfiguration;
import org.trenkmann.halforms.data.MP3Repository;
import org.trenkmann.halforms.model.MP3Item;
import org.trenkmann.halforms.model.MP3ItemDTO;
/**
* @author <NAME>
*/
@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
@Import({HypermediaConfiguration.class})
/**
 * Integration tests for the MP3 item controller with HAL-FORMS
 * affordances enabled. Uses MockMvc against the full Spring context
 * while mocking the repository layer.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
@Import({HypermediaConfiguration.class})
public class MP3ItemControllerWithAffordancesTestIT {
@Autowired
private MockMvc mvc;
@MockBean
private MP3Repository repository;
private final ObjectMapper objectMapper = new ObjectMapper();
// Stubs the repository with two sample items (ids 4 and 6).
private void givenMP3Items() {
BDDMockito.given(repository.findAll()).willReturn(Arrays.asList( //
new MP3Item(4L, "title", "artist", "album", "30:20", 1), //
new MP3Item(6L, "title", "artist", "album", "30:20", 2)));
}
// GET /mp3s must return HAL-FORMS with embedded items, templates and self links.
@Test
public void givenMP3Item_whenGetMP3s_thenReturnHALForm() throws Exception {
//given
givenMP3Items();
//when
mvc.perform(get("/mp3s").accept(MediaTypes.HAL_FORMS_JSON_VALUE)).andDo(
print()) //
.andExpect(status().isOk()) //
.andExpect(header().string(HttpHeaders.CONTENT_TYPE, MediaTypes.HAL_FORMS_JSON_VALUE)) //
.andExpect(jsonPath("$._embedded.mP3Items[0].id", is(4))) //
.andExpect(jsonPath("$._embedded.mP3Items[0]._templates.default.method", is("put"))) //
.andExpect(
jsonPath("$._embedded.mP3Items[0]._links.self.href", is("http://localhost/mp3/4")));
}
// An unknown id must yield 404 with the controller's error message.
@Test
public void givenMP3Item_whenGetMP3ById_thenReturnMP3ItemNotFoundException() throws Exception {
//given
//nothing
//when
mvc.perform(get("/mp3/2").accept(MediaTypes.HAL_FORMS_JSON_VALUE)).andDo(
print()) //
.andExpect(status().isNotFound()) //
.andExpect(MockMvcResultMatchers.content().string("Can not found MP3 with id 2"));
}
// POST /mp3s must answer 201 with a HAL body and a Location header.
@Test
public void givenMP3Item_whenPostMP3Item_thenReturnHALJsonAndCreated() throws Exception {
//given
givenMP3Items();
MP3ItemDTO mp3 = new MP3ItemDTO("title", "artist", "album", "3:18", 0);
MP3Item mp3Item = objectMapper.convertValue(mp3, MP3Item.class);
//when
when(repository.save(any())).thenReturn(mp3Item);
this.mvc.perform(post("/mp3s")
.content(objectMapper.writeValueAsString(mp3))
.contentType(MediaType.APPLICATION_JSON))
.andDo(print())
//then
.andExpect(status().is(HttpStatus.CREATED.value()))
.andExpect(content().contentType(MediaTypes.HAL_JSON))
.andExpect(jsonPath("$.title", is(mp3.getTitle())))
.andExpect(header().string("location", startsWith("http://localhost/mp3/")));
}
// PUT /mp3/{id} must answer 200 echoing the updated fields.
@Test
public void givenMP3Item_whenPutMP3ById_thenReturnHALJsonAndOK() throws Exception {
//given
givenMP3Items();
MP3Item mp3 = new MP3Item(1, "title", "artist", "album", "3:18", 0);
mp3.setLength("2:15");
mp3.setTitle("new Title");
//when
when(repository.save(any())).thenReturn(mp3);
this.mvc.perform(put("/mp3/" + mp3.getId())
.content(objectMapper.writeValueAsString(mp3))
.contentType(MediaType.APPLICATION_JSON))
.andDo(print())
//then
.andExpect(status().is(HttpStatus.OK.value()))
.andExpect(content().contentType(MediaTypes.HAL_JSON))
.andExpect(jsonPath("$.title", is(mp3.getTitle())))
.andExpect(jsonPath("$.artist", is(mp3.getArtist())))
.andExpect(jsonPath("$.length", is(mp3.getLength())));
}
// GET /mp3/{id} for an existing item must answer 200 with its fields.
@Test
public void givenMP3Item_whenGetMP3ById_thenReturnHALJsonAndOK() throws Exception {
//given
MP3Item mp3 = new MP3Item(1, "title", "artist", "album", "3:18", 0);
//when
when(repository.findById(anyLong())).thenReturn(Optional.of(mp3));
this.mvc.perform(get("/mp3/" + mp3.getId())
.contentType(MediaType.APPLICATION_JSON))
.andDo(print())
//then
.andExpect(status().is(HttpStatus.OK.value()))
.andExpect(content().contentType(MediaTypes.HAL_JSON))
.andExpect(jsonPath("$.title", is(mp3.getTitle())))
.andExpect(jsonPath("$.artist", is(mp3.getArtist())))
.andExpect(jsonPath("$.length", is(mp3.getLength())));
}
// DELETE /mp3/{id} must answer 204 No Content.
@Test
public void givenMP3Item_whenDeleteMP3ById_thenReturnOK() throws Exception {
//given
givenMP3Items();
MP3Item mp3Item = new MP3Item(1, "title", "artist", "album", "3:18", 0);
//when
this.mvc.perform(delete("/mp3/" + mp3Item.getId())
.contentType(MediaType.APPLICATION_JSON))
.andDo(print())
//then
.andExpect(status().is(HttpStatus.NO_CONTENT.value()));
}
}
#!/bin/sh
#
# $Id$
# Copyright (c) 2007-2009 John Hurst. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# crypto JPEG 2000 stereoscopic tests
# Step 1: write an encrypted stereoscopic (-3) JP2K MXF from the test essence.
${BUILD_DIR}/asdcp-test${EXEEXT} -k ${CRYPT_KEY} \
-3 -c ${TEST_FILES}/write_crypt_test_jp2k.mxf \
${TEST_FILES}/${TEST_FILE_PREFIX} ${TEST_FILES}/${TEST_FILE_PREFIX}
if [ $? -ne 0 ]; then
exit 1
fi
# Step 2: the resulting file must be readable/inspectable.
${BUILD_DIR}/asdcp-test${EXEEXT} -i ${TEST_FILES}/write_crypt_test_jp2k.mxf
if [ $? -ne 0 ]; then
exit 1
fi
# Step 3: extraction with the WRONG key must fail (exit status 1 expected,
# hence the inverted check in the subshell).
(${BUILD_DIR}/asdcp-test${EXEEXT} -k ${CRYPT_KEY_B} \
-3 -x ${TEST_FILES}/plaintext ${TEST_FILES}/write_crypt_test_jp2k.mxf; \
if [ $? -eq 1 ]; then exit 0; fi; exit 1 )
# Step 4: extraction with the correct key must succeed.
${BUILD_DIR}/asdcp-test${EXEEXT} -m -k ${CRYPT_KEY} \
-3 -x ${TEST_FILES}/plaintext/${JP2K_PREFIX} ${TEST_FILES}/write_crypt_test_jp2k.mxf
if [ $? -ne 0 ]; then
exit 1
fi
# Step 5: round-trip check — every extracted frame must match the original.
for file in `ls ${TEST_FILES}/${TEST_FILE_PREFIX}`; do \
echo "$file"; \
cmp ${TEST_FILES}/${TEST_FILE_PREFIX}/$file ${TEST_FILES}/plaintext/$file; \
if [ $? -ne 0 ]; then \
exit 1; \
fi; \
done
|
<reponame>folio-org/ui-inn-reach
// Barrel file: re-export the ListCheckOutItems component as this folder's default.
export { default } from './ListCheckOutItems';
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.ntcip2306v109.messaging;
import org.fhwa.c2cri.infolayer.MessageProvider;
import org.fhwa.c2cri.messagemanager.Message;
/**
* Acts as a MessageProvider to objects that need to access an NTCIP2306Message object as a standard Message object.
* @author TransCore ITS
*/
public class NTCIP2306MessageProvider implements MessageProvider{
// The wrapped NTCIP 2306 message and the operation it belongs to.
NTCIP2306Message ntcip2306Message;
String operationName;
/**
 * Creates a provider that exposes the given NTCIP2306Message as a
 * standard framework Message.
 *
 * @param operation the operation name used when adapting the message
 * @param message the NTCIP 2306 message to wrap
 */
public NTCIP2306MessageProvider(String operation, NTCIP2306Message message){
ntcip2306Message = message;
operationName = operation;
}
/** Adapts the wrapped message to the framework's Message type on each call. */
@Override
public Message getMessage() {
return C2CRIMessageAdapter.toC2CRIMessage(operationName, ntcip2306Message);
}
}
|
-- All customer rows whose associated location has ZIP code 94531.
SELECT customer.*
FROM customer
INNER JOIN location
ON customer.location_id = location.id
WHERE location.zip_code = '94531';
package Calculator;
import java.util.Scanner;
/**
 * Console calculator: asks the user to pick one of four arithmetic
 * operations, reads two integers and prints the result.
 *
 * Fixes over the original: the operand prompt/read code duplicated in
 * four if/else branches is collapsed into a single block, the prompt
 * formatting is now consistent across operations (branches 2-4 used to
 * prepend stray newlines), and the Scanner is closed on exit.
 */
public class Calculator2 {

    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        System.out.println("Yapmak istediğiniz işlemi seçiniz :");
        System.out.print(
                "TOPLAMA İŞLEMİ(1)" + "\nÇIKARMA İŞLEMİ(2)" + "\nÇARPMA İŞLEMİ(3)" + "\nBÖLME İŞLEMİ(4)");
        int secim = scanner.nextInt();
        // Echo the chosen operation (or reject an invalid choice).
        switch (secim) {
            case 1:
                System.out.println("Toplama islemini sectiniz !!");
                break;
            case 2:
                System.out.println("Cikarma islemini sectiniz !!");
                break;
            case 3:
                System.out.println("Carpma islemini sectiniz !!");
                break;
            case 4:
                System.out.println("Bolme islemini sectiniz !!");
                break;
            default:
                System.out.println("Yanlis sayiyi tusladiniz !!!");
        }
        if (secim >= 1 && secim <= 4) {
            // Read both operands once instead of repeating the prompts
            // in every branch as the original did.
            System.out.print("Birinci sayi :");
            int sayi1 = scanner.nextInt();
            System.out.print("Ikinci sayi :");
            int sayi2 = scanner.nextInt();
            switch (secim) {
                case 1:
                    System.out.println("Sonuç :" + (sayi1 + sayi2));
                    break;
                case 2:
                    System.out.println("Sonuç :" + (sayi1 - sayi2));
                    break;
                case 3:
                    System.out.println("Sonuç :" + (sayi1 * sayi2));
                    break;
                case 4:
                    // Cast to float so division keeps the fractional part.
                    System.out.println("Sonuç :" + ((float) sayi1 / (float) sayi2));
                    break;
            }
        }
        scanner.close();
    }
}
|
<reponame>enowmbi/hotel_reservation_system
# Standard Rails scaffold controller for RoomCategory resources:
# CRUD actions rendering HTML and JSON.
class RoomCategoriesController < ApplicationController
before_action :set_room_category, only: [:show, :edit, :update, :destroy]
# GET /room_categories
# GET /room_categories.json
def index
@room_categories = RoomCategory.all
end
# GET /room_categories/1
# GET /room_categories/1.json
def show
end
# GET /room_categories/new
def new
@room_category = RoomCategory.new
end
# GET /room_categories/1/edit
def edit
end
# POST /room_categories
# POST /room_categories.json
def create
@room_category = RoomCategory.new(room_category_params)
respond_to do |format|
if @room_category.save
format.html { redirect_to @room_category, notice: 'Room category was successfully created.' }
format.json { render :show, status: :created, location: @room_category }
else
format.html { render :new }
format.json { render json: @room_category.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /room_categories/1
# PATCH/PUT /room_categories/1.json
def update
respond_to do |format|
if @room_category.update(room_category_params)
format.html { redirect_to @room_category, notice: 'Room category was successfully updated.' }
format.json { render :show, status: :ok, location: @room_category }
else
format.html { render :edit }
format.json { render json: @room_category.errors, status: :unprocessable_entity }
end
end
end
# DELETE /room_categories/1
# DELETE /room_categories/1.json
def destroy
@room_category.destroy
respond_to do |format|
format.html { redirect_to room_categories_url, notice: 'Room category was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_room_category
@room_category = RoomCategory.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def room_category_params
params.require(:room_category).permit(:name, :description, :price)
end
end
|
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
/* THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. */
import * as React from 'react';
// Forward-ref SVG icon component rendering a 32x32 "motorcycle" glyph at
// 1em, colored with currentColor so it inherits the surrounding text color.
// Extra props are spread onto the <svg> root (after the defaults, so callers
// can override width/height/viewBox); the forwarded ref targets the <svg>.
var SvgMotorcycle = React.forwardRef(function (props, svgRef) {
  return React.createElement("svg", _extends({
    width: "1em",
    height: "1em",
    viewBox: "0 0 32 32",
    ref: svgRef
  }, props), React.createElement("path", {
    fill: "currentColor",
    d: "M5.026 23.248c1.47 0 2.75-1.192 2.75-2.662a2.662 2.662 0 0 0-5.324 0c0 1.47 1.104 2.662 2.574 2.662zm.088-7.776a5.114 5.114 0 1 1-.001 10.229 5.114 5.114 0 0 1 .001-10.229zm21.772 0a5.114 5.114 0 1 1-2.57.692l-1.215-2.554-5.037 9.176a.164.164 0 0 1-.142.083h-5.098a.163.163 0 0 1-.155-.113l-1.966-5.899a.165.165 0 0 0-.054-.078l-4.042-3.441a.164.164 0 0 0-.106-.039L.359 13.3a.162.162 0 0 1-.162-.163v-1.562c0-.09.072-.162.162-.162l7.866-.001c.033 0 .062.01.088.026l2.333 1.485a.17.17 0 0 0 .089.027l2.909-.001a.161.161 0 0 0 .122-.055l2.671-2.666a.161.161 0 0 1 .112-.045h4.922L20.5 8.142h-2.91a.163.163 0 0 1-.163-.163v-.816c0-.09.073-.162.163-.162h3.641c.065 0 .12.037.147.092 0 0 2.677 5.604 4.133 8.567a5.119 5.119 0 0 1 1.376-.188zm0 7.776a2.662 2.662 0 1 0 0-5.324 2.662 2.662 0 0 0 0 5.324z"
  }));
});
export default SvgMotorcycle;
def generate_primes(n):
    """Return the prime numbers between 2 and n (inclusive), ascending.

    Args:
        n: Upper bound of the search range; values below 2 yield [].

    Returns:
        List of primes in ascending order.
    """
    primes = []
    for candidate in range(2, n + 1):
        is_prime = True
        # Trial division only needs divisors up to sqrt(candidate); the
        # original checked every value below candidate and never broke out.
        for divisor in range(2, int(candidate ** 0.5) + 1):
            if candidate % divisor == 0:
                is_prime = False
                break  # one divisor is enough to rule out primality
        if is_prime:
            primes.append(candidate)
    return primes
# Demo: print all primes up to 50.
print(generate_primes(50))
<reponame>gaeqs/JAMS
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.jamsimulator.jams.gui.mips.editor.element;
import net.jamsimulator.jams.mips.directive.defaults.DirectiveExtern;
import net.jamsimulator.jams.mips.directive.defaults.DirectiveLab;
import net.jamsimulator.jams.mips.directive.parameter.DirectiveParameterType;
import net.jamsimulator.jams.project.mips.MIPSFilesToAssemble;
import net.jamsimulator.jams.utils.StringUtils;
import java.util.List;
/**
 * Editor element representing a single parameter of a MIPS directive.
 * Depending on the owning directive, the parameter may itself declare a label
 * (the {@code .lab}/{@code .extern} directives, first parameter only) or
 * merely reference one; this affects both label registration and styling.
 */
public class MIPSDirectiveParameter extends MIPSCodeElement {

    // Directive this parameter belongs to.
    protected final MIPSDirective directive;
    // Zero-based position of this parameter within the directive.
    protected final int index;
    // True when the raw text is a string or character literal.
    private final boolean string;

    // registeredLabel: this parameter declared a label.
    // globalLabel: that declared label is global (.extern).
    private boolean registeredLabel, globalLabel;

    /**
     * Creates the parameter element and immediately registers any label it
     * declares (see {@link #registerLabelsIfRequired()}).
     *
     * @param line       editor line containing the element
     * @param directive  directive the parameter belongs to
     * @param index      zero-based parameter index within the directive
     * @param startIndex start offset of the element in the file
     * @param endIndex   end offset of the element in the file
     * @param text       raw parameter text
     */
    public MIPSDirectiveParameter(MIPSLine line, MIPSDirective directive, int index, int startIndex, int endIndex, String text) {
        super(line, startIndex, endIndex, text);
        this.directive = directive;
        this.index = index;
        this.string = StringUtils.isStringOrChar(text);
        this.registeredLabel = false;
        registerLabelsIfRequired();
    }

    @Override
    public String getTranslatedNameNode() {
        return "MIPS_ELEMENT_DIRECTIVE_PARAMETER";
    }

    /** @return the zero-based index of this parameter within its directive. */
    public int getIndex() {
        return index;
    }

    /**
     * Resolves the declared type of this parameter. When the directive is
     * unknown, falls back to any candidate type inferred from the raw text,
     * or {@code ANY} when none matches.
     */
    public DirectiveParameterType getType() {
        if (directive != null && directive.getDirective().isPresent()) {
            return directive.getDirective().get().getParameterTypeFor(index);
        }
        return DirectiveParameterType.getAllCandidates(text).stream().findAny().orElse(DirectiveParameterType.ANY);
    }

    /** @return the directive element this parameter belongs to. */
    public MIPSDirective getDirective() {
        return directive;
    }

    @Override
    public String getSimpleText() {
        return text;
    }

    /** @return whether this parameter declared a label (.lab/.extern, index 0). */
    public boolean isRegisteredLabel() {
        return registeredLabel;
    }

    /** @return whether the declared label is global; only meaningful when registered. */
    public boolean isGlobalLabel() {
        return globalLabel;
    }

    @Override
    public List<String> getStyles() {
        String style;
        // Declared labels get label styling; otherwise style by literal kind.
        if (registeredLabel) {
            style = globalLabel ? "mips-global-label" : "mips-label";
        } else {
            style = string ? "mips-directive-parameter-string" : "mips-directive-parameter";
        }
        return getGeneralStyles(style);
    }

    @Override
    public void refreshMetadata(MIPSFileElements elements) {
        if (directive.getDirective().isEmpty()) return;
        if (registeredLabel) {
            // NOTE(review): filesToAssemble is only used as a presence guard;
            // the global flag is then recomputed from this file's
            // global-label set — confirm that is the intended source.
            MIPSFilesToAssemble filesToAssemble = elements.getFilesToAssemble().orElse(null);
            if (filesToAssemble == null) return;
            globalLabel = elements.getSetAsGlobalLabel().contains(text);
        }
    }

    /**
     * Registers this parameter's text as a label when the directive declares
     * labels (.lab declares a local label, .extern a global one; only the
     * first parameter counts). For any other directive, marks the text as a
     * used label when the parameter type allows labels.
     */
    private void registerLabelsIfRequired() {
        var directive = this.directive.getDirective();
        if (directive.isEmpty()) return;
        if (directive.get() instanceof DirectiveLab || directive.get() instanceof DirectiveExtern) {
            if (index == 0) {
                globalLabel = directive.get() instanceof DirectiveExtern;
                registerLabel(text, globalLabel);
                registeredLabel = true;
            }
        } else {
            var type = directive.get().getParameterTypeFor(index);
            if (type != null && type.mayBeLabel()) {
                markUsedLabel(text);
            }
        }
    }
}
|
<filename>src/python/lib/graph/opt/lifted_multicut/lifted_multicut_andres_kernighan_lin.cxx
#include <pybind11/pybind11.h>
#include "nifty/graph/opt/lifted_multicut/lifted_multicut_andres_kernighan_lin.hxx"
#include "nifty/python/converter.hxx"
#include "nifty/python/graph/undirected_grid_graph.hxx"
#include "nifty/python/graph/undirected_list_graph.hxx"
//#include "nifty/python/graph/edge_contraction_graph.hxx"
#include "nifty/python/graph/opt/lifted_multicut/lifted_multicut_objective.hxx"
#include "nifty/python/graph/opt/lifted_multicut/export_lifted_multicut_solver.hxx"
namespace py = pybind11;
PYBIND11_DECLARE_HOLDER_TYPE(T, std::shared_ptr<T>);
namespace nifty{
namespace graph{
namespace opt{
namespace lifted_multicut{
// Exports the Andres Kernighan-Lin lifted-multicut solver for a single
// objective type, exposing its settings (inner/outer iteration counts and
// epsilon) as read-write attributes on the Python side.
template<class OBJECTIVE>
void exportLiftedMulticutAndresKernighanLinT(py::module & liftedMulticutModule) {
    typedef OBJECTIVE ObjectiveType;
    typedef LiftedMulticutAndresKernighanLin<ObjectiveType> Solver;
    typedef typename Solver::SettingsType SettingsType;
    exportLiftedMulticutSolver<Solver>(liftedMulticutModule,"LiftedMulticutAndresKernighanLin")
        .def(py::init<>())
        .def_readwrite("numberOfInnerIterations", &SettingsType::numberOfInnerIterations)
        .def_readwrite("numberOfOuterIterations", &SettingsType::numberOfOuterIterations)
        .def_readwrite("epsilon", &SettingsType::epsilon)
        //.def_readwrite("numberOfOuterIterations", &SettingsType::numberOfOuterIterations)
        //.def_readwrite("verbose", &SettingsType::verbose)
    ;
}
// Registers the Kernighan-Lin lifted-multicut solver for every supported
// graph type: general undirected graphs plus 2D and 3D undirected grid
// graphs, each paired with a double-weighted lifted-multicut objective.
void exportLiftedMulticutAndresKernighanLin(py::module & liftedMulticutModule) {
    exportLiftedMulticutAndresKernighanLinT<
        LiftedMulticutObjective<PyUndirectedGraph, double>
    >(liftedMulticutModule);

    exportLiftedMulticutAndresKernighanLinT<
        LiftedMulticutObjective<nifty::graph::UndirectedGridGraph<2, true>, double>
    >(liftedMulticutModule);

    exportLiftedMulticutAndresKernighanLinT<
        LiftedMulticutObjective<nifty::graph::UndirectedGridGraph<3, true>, double>
    >(liftedMulticutModule);

    // NOTE(review): the edge-contraction-graph variant remains disabled, as
    // in the original (its include is also commented out at the top of the
    // file):
    //   exportLiftedMulticutAndresKernighanLinT<
    //       MulticutObjective<PyContractionGraph<PyUndirectedGraph>, double>
    //   >(liftedMulticutModule);
}
}
} // namespace nifty::graph::opt
}
}
|
from sqlalchemy import Table
from sqlalchemy import MetaData
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import ForeignKey
from sqlalchemy.orm import mapper
from sqlalchemy.orm import relationship
import domain.models as models
class Mapper(object):
    """Builds SQLAlchemy classical (imperative) mappings for the domain models.

    NOTE(review): this uses the legacy ``sqlalchemy.orm.mapper`` function,
    which was deprecated in SQLAlchemy 1.4 and removed in 2.0 — confirm the
    pinned SQLAlchemy version before upgrading.
    """

    # Shared MetaData instance; created lazily by build_metadata().
    _metadata = None

    def build_metadata(self):
        """Create the MetaData, wire up all mappings, and return it."""
        self._create_metadata()
        self._configure_metadata()
        return self._metadata

    def _create_metadata(self):
        # Start from a fresh MetaData instance on every build.
        self._metadata = MetaData()

    def _configure_metadata(self):
        """Map User, Company and Position tables onto the domain models,
        including the users<->positions many-to-many association and the
        Position->Company relationship."""
        mapper(models.User, Table(
            'users',
            self._metadata,
            Column('id', Integer, primary_key=True),
            Column('email', String),
            Column('status', String),
            Column('first_name', String(50)),
            Column('last_name', String(50)),
            Column('created_at', Integer),
            Column('salt', String(50)),
            Column('password', String(255)),
        ))
        mapper(models.Company, Table(
            'companies',
            self._metadata,
            Column('id', Integer, primary_key=True),
            Column('name', String(50))
        ))
        # Join table backing the Position.candidates many-to-many.
        association_table = Table(
            'users_positions',
            self._metadata,
            Column('user_id', Integer, ForeignKey('users.id')),
            Column('position_id', Integer, ForeignKey('positions.id'))
        )
        mapper(models.Position, Table(
            'positions',
            self._metadata,
            Column('id', Integer, primary_key=True),
            Column('title', String(50)),
            Column('company_id', Integer, ForeignKey("companies.id")),
        ), properties={
            'company': relationship(models.Company),
            'candidates': relationship(
                models.User,
                secondary=association_table
            )
        })
|
class Rectangle:
    """Axis-aligned rectangle with mutable width and height."""

    def __init__(self, width=0, height=0):
        """Create a rectangle.

        Args:
            width: Initial width (defaults to 0 for backward compatibility).
            height: Initial height (defaults to 0).
        """
        self.width = width
        self.height = height

    def set_dimensions(self, new_width, new_height):
        """Replace both dimensions at once."""
        self.width = new_width
        self.height = new_height

    def calculate_area(self):
        """Return the area, width * height."""
        return self.width * self.height

    def calculate_perimeter(self):
        """Return the perimeter, 2 * (width + height)."""
        return 2 * (self.width + self.height)
# Example usage: an 8x6 rectangle.
rect = Rectangle()
rect.set_dimensions(8, 6)
print(rect.calculate_area())       # Output: 48
print(rect.calculate_perimeter())  # Output: 28
<filename>spec/notification_hub/channels/messages/push_notification_spec.rb
require 'spec_helper'

# Unit spec for the push-notification message value object: verifies that the
# constructor stores each keyword attribute and that #to_hash serializes them
# together with the fixed 'push-notification' channel identifier.
RSpec.describe NotificationHub::Channels::Messages::PushNotification do
  let(:title) { 'Test title' }
  let(:link) { 'truckpad://test' }
  let(:device_token) { '912345' }
  let(:body) { 'Test body' }

  subject do
    NotificationHub::Channels::Messages::PushNotification.new(
      device_token: device_token,
      title: title,
      link: link,
      body: body
    )
  end

  describe '#initialize' do
    it { expect(subject.device_token).to eq(device_token) }
    it { expect(subject.title).to eq(title) }
    it { expect(subject.link).to eq(link) }
    it { expect(subject.body).to eq(body) }
  end

  describe '#to_hash' do
    # Expected serialized form, including the channel tag added by the class.
    let(:expected) do
      Hash[
        channel: 'push-notification',
        body: body,
        title: title,
        device_token: device_token,
        link: link
      ]
    end

    it { expect(subject.to_hash).to eq(expected) }
  end
end
|
# Returns the arithmetic mean of the numbers in +arr+ as a Float, or nil when
# the array is empty (the original raised ZeroDivisionError on []).
def find_arithmetic_mean(arr)
  return nil if arr.empty?

  sum = 0
  arr.each do |x|
    sum += x
  end
  # to_f forces float division: with integer input such as [1, 2] the
  # original integer division truncated the mean to 1 instead of 1.5.
  sum.to_f / arr.length
end
# Demo: mean of three floats.
arr = [1.5, 2.5, 3.5]
mean = find_arithmetic_mean(arr)
puts mean # output 2.5
#pragma once
#include "../SDK/include/IAgoraMediaEngine.h"
//#include "VideoPackageQueue.h"
#include "AgVideoBuffer.h"
// One buffered video frame: raw image bytes plus a timestamp.
// VIDEO_BUF_SIZE comes from AgVideoBuffer.h — presumably sized for the
// largest supported frame; confirm before changing capture resolutions.
typedef struct _VIDEO_BUFFER {
    BYTE m_lpImageBuffer[VIDEO_BUF_SIZE]; // raw frame data
    int timestamp;                        // frame timestamp (units not defined here)
}VIDEO_BUFFER, *PVIDEO_BUFFER;
// Observer plugged into the Agora media engine to intercept raw video frames
// on both the local capture path and the remote render path.
class CExtendVideoFrameObserver :
    public agora::media::IVideoFrameObserver
{
public:
    CExtendVideoFrameObserver();
    ~CExtendVideoFrameObserver();

    // Invoked for each locally captured frame. Return-value semantics are
    // defined by the SDK contract in IAgoraMediaEngine.h — confirm there.
    virtual bool onCaptureVideoFrame(VideoFrame& videoFrame);

    // Invoked for each remote frame before rendering; uid identifies the
    // remote sender.
    virtual bool onRenderVideoFrame(unsigned int uid, VideoFrame& videoFrame);

private:
    LPBYTE m_lpImageBuffer; // frame-sized scratch buffer
    // m_lpY/m_lpU/m_lpV: presumably pointers to the Y/U/V planes of a YUV
    // frame (names only — confirm against the .cpp implementation).
    LPBYTE m_lpY;
    LPBYTE m_lpU;
    LPBYTE m_lpV;
};
|
<filename>src/main/java/edu/bu/met/cs665/FederationOriginator.java
/**
* Final Project. FederationOriginator, which holds the attributes of the federation and saves to
* memento
*
* @author <NAME>
* @email <EMAIL>
* @version 1.0
* @since 2020-04-15
*/
package edu.bu.met.cs665;
/**
 * Memento-pattern originator holding a federation's mutable state: the
 * faction name plus water/food/troop resource counts. The state can be
 * captured into a FederationMemento snapshot and later restored from one.
 */
public class FederationOriginator {

  // fields to hold the state of FederationOriginator
  private String factionName; // faction display name
  private int water;          // units of water held
  private int food;           // units of food held
  private int troops;         // units of troops held

  /**
   * Creates an originator with the federation's initial state.
   *
   * @param factionName faction display name
   * @param water initial units of water
   * @param food initial units of food
   * @param troops initial units of troops
   */
  public FederationOriginator(String factionName, int water, int food, int troops) {
    this.factionName = factionName;
    this.water = water;
    this.food = food;
    this.troops = troops;
  }

  public String getFactionName() {
    return factionName;
  }

  public void setFactionName(String factionName) {
    this.factionName = factionName;
  }

  public int getWater() {
    return water;
  }

  public void setWater(int water) {
    this.water = water;
  }

  public int getFood() {
    return food;
  }

  public void setFood(int food) {
    this.food = food;
  }

  public int getTroops() {
    return troops;
  }

  public void setTroops(int troops) {
    this.troops = troops;
  }

  /** Captures the current state into a new memento snapshot. */
  public FederationMemento saveToMemento() {
    FederationMemento federationMemento =
        new FederationMemento(this.factionName, this.water, this.food, this.troops);
    return federationMemento;
  }

  /** Restores every field from a previously saved memento. */
  public void undoFromMemento(FederationMemento memento) {
    this.factionName = memento.getFactionName();
    this.water = memento.getWater();
    this.food = memento.getFood();
    this.troops = memento.getTroops();
  }

  /** Prints the current state to stdout, one attribute per line. */
  public void printInfo() {
    System.out.println("Faction name: " + this.factionName);
    System.out.println("Units of water: " + this.water);
    System.out.println("Units of food: " + this.food);
    System.out.println("Units of troops: " + this.troops);
  }
}
|
import csv
from itertools import combinations
# Greedy nearest-neighbour tour over the cities in cities.csv.
#
# NOTE(review): the original body used raw csv rows (lists) as dict keys,
# which raises TypeError (lists are unhashable), and removed BOTH orientations
# of each pair from the combinations list even though itertools.combinations
# emits only one — the second remove raised ValueError. This rewrite keeps the
# same greedy closest-city idea but works over tuples and an explicit
# unvisited set.


def _distance(a, b):
    """Euclidean distance between two city rows of the form (name, lat, lon)."""
    return ((float(a[1]) - float(b[1])) ** 2 + (float(a[2]) - float(b[2])) ** 2) ** 0.5


def build_route(cities):
    """Build a greedy nearest-neighbour tour.

    Args:
        cities: sequence of (name, lat, lon) tuples; the tour starts at
            cities[0].

    Returns:
        Dict mapping each city to the next city on the tour; the final city
        maps back to the start, closing the loop. Empty dict for no cities.
    """
    if not cities:
        return {}
    route = {}
    start_city = cities[0]
    current_city = start_city
    unvisited = set(cities) - {start_city}
    while unvisited:
        # Always move to the closest not-yet-visited city.
        closest_city = min(unvisited, key=lambda c: _distance(current_city, c))
        route[current_city] = closest_city
        unvisited.remove(closest_city)
        current_city = closest_city
    # Close the tour back to the starting city.
    route[current_city] = start_city
    return route


if __name__ == '__main__':
    # Read in the cities data (each row: name, latitude, longitude).
    with open('cities.csv', 'r') as f:
        cities = [tuple(row) for row in csv.reader(f)]
    route = build_route(cities)
<gh_stars>0
package org.ednovo.gooru.controllers.api;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.ednovo.gooru.controllers.BaseController;
import org.ednovo.gooru.core.api.model.ResponseFieldSet;
import org.ednovo.gooru.core.api.model.User;
import org.ednovo.gooru.core.constant.Constants;
import org.ednovo.gooru.core.constant.GooruOperationConstants;
import org.ednovo.gooru.core.constant.ParameterProperties;
import org.ednovo.gooru.core.security.AuthorizeOperations;
import org.ednovo.gooru.domain.service.responseFieldSet.ResponseFieldSetService;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
/**
 * REST endpoints for managing "response field sets" (CRUD under
 * /responseFieldSet). Each handler resolves the calling user from the request
 * attributes and delegates to ResponseFieldSetService; authorization is
 * enforced per-operation via @AuthorizeOperations.
 */
@Controller
@RequestMapping(value = { "/responseFieldSet" })
public class ResponseFieldSetRestController extends BaseController implements ParameterProperties {

    @Autowired
    private ResponseFieldSetService responseFieldSetService;

    /** POST /responseFieldSet — creates a field set owned by the caller. */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_RESPONSE_FIELD_SET_ADD })
    @RequestMapping(method = RequestMethod.POST)
    public ModelAndView createResponseFieldSet(HttpServletRequest request, @RequestParam(value = FIELD_SET) String fieldSet, @RequestParam(value = SESSIONTOKEN, required = false) String sessionToken, @RequestParam(value = FORMAT, defaultValue = FORMAT_JSON) String format, HttpServletResponse response)
            throws Exception {
        User apiCaller = (User) request.getAttribute(Constants.USER);
        ResponseFieldSet responseFieldSet = responseFieldSetService.addResponseFieldSet(fieldSet, apiCaller);
        // NOTE(review): only this handler wraps the payload in a JSONObject;
        // the others return the serialized string directly — confirm whether
        // the difference is intentional.
        return toModelAndView(new JSONObject(serializeToJson(responseFieldSet)));
    }

    /** PUT /responseFieldSet/{fieldSetId} — updates an existing field set. */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_RESPONSE_FIELD_SET_UPDATE })
    @RequestMapping(method = RequestMethod.PUT, value = "/{fieldSetId}")
    public ModelAndView updateResponseFieldSet(HttpServletRequest request, @PathVariable(FIELD_SET_ID) String fieldSetId, @RequestParam(value = FIELD_SET) String fieldSet, @RequestParam(value = SESSIONTOKEN, required = false) String sessionToken,
            @RequestParam(value = FORMAT, defaultValue = FORMAT_JSON) String format, HttpServletResponse response) throws Exception {
        User apiCaller = (User) request.getAttribute(Constants.USER);
        ResponseFieldSet responseFieldSet = responseFieldSetService.updateResponseFieldSet(fieldSetId, fieldSet, apiCaller);
        return toModelAndView(serializeToJson(responseFieldSet));
    }

    /** DELETE /responseFieldSet/{fieldSetId} — deletes a field set. */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_RESPONSE_FIELD_SET_DELETE })
    @RequestMapping(method = RequestMethod.DELETE, value = "/{fieldSetId}")
    public ModelAndView deleteResponseFieldSet(HttpServletRequest request, @PathVariable(FIELD_SET_ID) String fieldSetId, @RequestParam(value = SESSIONTOKEN, required = false) String sessionToken, @RequestParam(value = FORMAT, defaultValue = FORMAT_JSON) String format, HttpServletResponse response)
            throws Exception {
        User apiCaller = (User) request.getAttribute(Constants.USER);
        String value = responseFieldSetService.deleteResponseFieldSet(fieldSetId, apiCaller);
        return toModelAndView(value);
    }

    /** GET /responseFieldSet/{fieldSetId} — fetches one field set for the caller. */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_RESPONSE_FIELD_SET_LIST })
    @RequestMapping(method = RequestMethod.GET, value = "/{fieldSetId}")
    public ModelAndView getResponseFieldSet(HttpServletRequest request, @PathVariable(FIELD_SET_ID) String fieldSetId, @RequestParam(value = SESSIONTOKEN, required = false) String sessionToken, @RequestParam(value = FORMAT, defaultValue = FORMAT_JSON) String format, HttpServletResponse response) throws Exception {
        User apiCaller = (User) request.getAttribute(Constants.USER);
        ResponseFieldSet responseFieldSet = responseFieldSetService.getResponseFieldSet(fieldSetId, apiCaller.getGooruUId());
        return toModelAndView(serializeToJson(responseFieldSet));
    }

    /** GET /responseFieldSet — lists every field set. */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_RESPONSE_FIELD_SET_LIST })
    @RequestMapping(method = { RequestMethod.GET })
    public ModelAndView getResponseFieldsSet(HttpServletRequest request, @RequestParam(value = SESSIONTOKEN, required = false) String sessionToken, @RequestParam(value = FORMAT, defaultValue = FORMAT_JSON) String format, HttpServletResponse response) throws Exception {
        List<ResponseFieldSet> responseFieldSet = responseFieldSetService.getResponseFieldSet();
        return toModelAndView(serializeToJson(responseFieldSet));
    }
}
|
<filename>src/loadbalancer/wrapper/WrapBoolean.java<gh_stars>0
package loadbalancer.wrapper;
/**
 * Mutable wrapper around a single boolean liveness flag. A freshly
 * constructed instance reports alive; {@link #killStatus()} flips it to dead
 * permanently — there is no method to revive it.
 */
public class WrapBoolean {

    /** Liveness flag; starts out true (alive). */
    private boolean serverAlive = true;

    /** @return whether the wrapped status is still alive. */
    public boolean getStatus() {
        return this.serverAlive;
    }

    /** Marks the status as dead; the change is irreversible. */
    public void killStatus() {
        this.serverAlive = false;
    }
}
|
#!/bin/bash
#
# Publishes the @stripe/firestore-stripe-payments web SDK to npm.
# Interactive release gate: confirms intent, a bumped version, an npm account
# in the stripe org, a modern node, and release notifications, then runs
# tests, builds + packs the artifact, and publishes it.
set -Eeuo pipefail

# Terminal colors/styles for status output.
GREEN=$(tput setaf 2)
RED=$(tput setaf 1)
RED_BG=$(tput setab 1)
YELLOW=$(tput setaf 3)
RESET=$(tput sgr 0)
BOLD=$(tput bold)

FIREBASE_WEB_SDK_DIR="firestore-stripe-web-sdk"
MIN_NODE_VERSION="12"

# verify we are in the correct directory for the script
if [[ "${PWD##*/}" != "${FIREBASE_WEB_SDK_DIR}" ]]; then
  echo "${RED}ERROR:${RESET} Please run this script in the ${FIREBASE_WEB_SDK_DIR} directory"
  exit 1
fi

# verify we meant to run this script
read -r -n 1 -p "${YELLOW}WARNING:${RESET} running this script deploys changes publicly. Are you sure you want to continue? [y/n] "
echo
echo
if [[ ! "${REPLY}" =~ ^[Yy]$ ]]; then exit 1; fi

# verify that we have updated the patch/release version
public_sdk_version=$(npm view @stripe/firestore-stripe-payments version)
# Second line of `npm version` output holds this package's version string.
local_sdk_version=$(npm version | sed -n '2'p | cut -d : -f 2 | cut -d , -f 1 | cut -d \' -f 2)
if [[ "${public_sdk_version}" == "${local_sdk_version}" ]]; then
  echo "${RED}ERROR:${RESET} Your local web-sdk version matches the public web-sdk version. Please bump the version with ${BOLD}npm version patch${RESET} or a similar command"
  exit 1
fi
echo "${GREEN}SUCCESS:${RESET} your local web-sdk version is different from the public web-sdk version"
echo
echo "local web-sdk version is ${YELLOW}${local_sdk_version}${RESET}"
echo "public web-sdk version is ${GREEN}${public_sdk_version}${RESET}"
echo
echo

# verify the user has required npm permissions
read -r -n 1 -p "Do you have a stripe npm account with 2FA? [y/n] "
echo
echo
if [[ ! "${REPLY}" =~ ^[Yy]$ ]]; then
  echo "${RED}ERROR:${RESET} Please create a stripe npm account to continue"
  exit 1
fi

# Major node version: text before the first '.' of `nodenv version`.
version=$(nodenv version | cut -d . -f 1)
if [ ! "${version}" -gt "${MIN_NODE_VERSION}" ]; then
  echo "${RED}ERROR:${RESET} must have node version ${MIN_NODE_VERSION} or greater"
  echo "current version is ${YELLOW}$(nodenv version | cut -d ' ' -f 1)${RESET}"
  echo
  echo "set new node version with ${BOLD}nodenv shell 14.7.0${RESET} or any other installed version ${MIN_NODE_VERSION} or greater to continue"
  exit 1
fi
echo "${GREEN}SUCCESS:${RESET} your current node version is ${MIN_NODE_VERSION} or greater (${GREEN}$(nodenv version | cut -d ' ' -f 1)${RESET})"
echo

# `npm team ls` succeeding doubles as a logged-in + org-membership check.
if ! npm team ls @stripe:developers &> /dev/null; then
  echo "Please login to your stripe npm account"
  npm login
fi
# Keep prompting until the user is logged in AND belongs to the stripe org.
while ! npm team ls @stripe:developers &> /dev/null;
do
  echo
  echo "${RED}ERROR:${RESET} either you haven't logged into your stripe npm account or your account doesn't belong to the stripe org"
  echo
  echo "${RED_BG}verify${RESET} that you are logged in to your stripe npm account by running ${BOLD}npm whoami${RESET}"
  echo "${BOLD}npm whoami${RESET} should return ${GREEN}$USER-stripe${RESET}"
  current_npm_user=$(npm whoami)
  echo "${BOLD}npm whoami${RESET} currently returns ${RED}$current_npm_user${RESET}"
  echo
  echo "${RED_BG}verify${RESET} that you belong to the stripe org by checking your listed organizations at ${BOLD}https://npmjs.com/~$USER-stripe${RESET}"
  echo "if you don't belong to the stripe org, be sure to ping ${BOLD}#payments-web${RESET}"
  echo
  read -r -n 1 -p "Do you want to try again? [y/n] "
  echo
  if [[ ! "${REPLY}" =~ ^[Yy]$ ]]; then exit 1; fi
  echo
  echo "Please login to your stripe npm account"
  npm login
done
echo
echo "${GREEN}SUCCESS:${RESET} you are logged into your stripe npm account"
echo

# build the release artifact
if ! npm run test; then
  echo
  echo "${RED}ERROR:${RESET} some tests have failed, please fix them to continue"
  exit 1
fi
echo
echo "${GREEN}SUCCESS:${RESET} all tests have passed"
echo
# Remove any stale tarballs from a previous pack before building fresh.
if [[ $(ls stripe-firestore-stripe-payments-*.tgz) ]]; then
  if ! rm stripe-firestore-stripe-payments-*.tgz; then
    echo
    echo "${RED}ERROR:${RESET} encountered an error removing old release artifacts"
    exit 1
  fi
fi
if ! npm run build; then
  echo
  echo "${RED}ERROR:${RESET} encountered an error while building the release artifact"
  exit 1
fi
if ! npm pack; then
  echo
  echo "${RED}ERROR:${RESET} encountered an error while building the release artifact"
  exit 1
fi
echo
echo "${GREEN}SUCCESS:${RESET} built the release artifact"
echo

# verify one last time
read -r -n 1 -p "Did you notify #developer-products and #developer-advocacy about this release? [y/n] "
echo
echo
if [[ ! "${REPLY}" =~ ^[Yy]$ ]]; then
  echo "${RED}ERROR:${RESET} Please notify #developer-products and #developer-advocacy before any release"
  exit 1
fi

# publish
if ! npm publish stripe-firestore-stripe-payments-*.tgz --access public; then
  echo
  echo "${RED}ERROR:${RESET} encountered an error while publishing new version"
  exit 1
fi
echo "${GREEN}SUCCESS:${RESET} published the new version!"
|
(function () {
  'use strict';

  // Controller backing the "get reviews" form: holds the movie query and the
  // CKEditor configuration, and submits the lookup through the MLRest
  // 'getReviews' REST extension.
  angular.module('sample.getReviews')
    .controller('GetReviewsCtrl', ['$scope', 'MLRest', '$window', 'User', function ($scope, mlRest, win, user) {
      var model = {
        movieData: {
          movie: '' // movie name entered by the user
        },
        user: user
      };

      angular.extend($scope, {
        model: model,
        // CKEditor widget configuration.
        editorOptions: {
          height: '100px',
          toolbarGroups: [
            { name: 'clipboard', groups: [ 'clipboard', 'undo' ] },
            { name: 'basicstyles', groups: [ 'basicstyles', 'cleanup' ] },
            { name: 'paragraph', groups: [ 'list', 'indent', 'blocks', 'align', 'bidi' ] },
            { name: 'links' }
          ],
          //override default options
          toolbar: '',
          /* jshint camelcase: false */
          toolbar_full: ''
        },
        // Issues the GET to the extension, then redirects to the home page.
        // NOTE(review): the response is only logged; nothing renders the
        // returned reviews here — confirm that is intentional.
        submit: function() {
          mlRest.callExtension('getReviews',
            {
              method: 'GET',
              data: model.movieData.movie,
              user:user,
              params: {
                'movie': model.movieData.movie
              },
              headers: {
                'Content-Type': 'application/text'
              }
            }
          ).then(
            function(response) {
              console.log('Got Movie');
              win.location.href = '/' ;
            }
          );
        }
      });
    }]);
}());
|
# Convert the notebook to markdown (hidecode.tpl presumably hides code cells
# — confirm the template), then render a PDF with numbered sections and
# UCT-Harvard-style citations resolved from library.bib via pandoc.
jupyter nbconvert --to markdown --template=hidecode.tpl Draft\ Submission.ipynb
pandoc -s "Draft Submission.md" --bibliography=./library.bib --csl=harvard-university-of-cape-town.csl --number-sections -o Draft\ Submission.pdf
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
''' Extract video urls from short Youtube playlist, save to videolist.txt at current directory (CANNOT scroll down for more)
Required playlist format: (https://www.)youtube.com/playlist?list=listID
There is currently no way to validate a correctly formatted url via the Google API, so the user should make sure the link points to the right video list.
Required packages: requests, BeautifulSoup
Copyright: The MIT License
Author: knightReigh, May 2018
'''
import requests
from bs4 import BeautifulSoup
import re
import sys
import json
import os
def clean_script_string(unprocessed):
start_strip = re.compile("window\[\"ytInitialData\"\] =")
post_strip = re.compile("window\[\"ytInitialPlayerResponse\"\]")
re1 = start_strip.split(unprocessed)[1]
re2 = post_strip.split(re1)[0]
re2 = re2.strip()[:-1]
return re2
def api_request(uri):
    """GET the playlist page and return it parsed as BeautifulSoup.

    Exits the process on transport errors or a non-200 response, since the
    page would not contain the playlist data this script needs.
    """
    # Browser-like UA so YouTube serves the full desktop page.
    headers = {"user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36"}
    try:
        r = requests.get(uri, headers=headers)
        if r.status_code != 200:
            # str() is required here: concatenating the int status_code to a
            # str raised TypeError in the original.
            print("Url request error: status_code " + str(r.status_code))
            print("Please make sure the url is a valid youtube playlist.")
            # Abort instead of parsing an error page.
            sys.exit(1)
        html = BeautifulSoup(r.text, "html5lib")
    except requests.RequestException as e:
        # Broader than the original ConnectTimeout: any transport failure
        # (DNS error, refused connection, timeout) is fatal here.
        print(e)
        sys.exit(1)
    return html
def format_playlist_url(url):
    """Normalize a playlist url to the https://www.youtube.com/... form.

    Accepts urls with or without scheme/www and returns the fully qualified
    form. Exits the process when the url is not a playlist link — with a
    non-zero status (the original exited with 0, wrongly signalling success
    on the error path).
    """
    if "youtube.com/playlist?list=" not in url:
        print("Not a valid Youtube playlist url: " + url)
        print("Required format: www.youtube.com/playlist?list=listID")
        sys.exit(1)
    if url.startswith("www.youtube.com"):
        url = "https://" + url
    elif url.startswith("youtube.com"):
        url = "https://www." + url
    print("Re-formatted playlist url: " + url)
    return url
def main():
    """Prompt for a playlist url, scrape YouTube's initial-data JSON from the
    page, and write one short video url per line to videolist.txt in the
    current directory (skipping deleted videos)."""
    # make html requests to grab html
    uri = input("Please enter Youtube playlist url: ")
    print()
    url = format_playlist_url(uri)
    html = api_request(url)
    # extract <script> initialData section
    scripts = html.findAll("script")
    found = None
    for _script in scripts:
        if "window[\"ytInitialData\"]" in _script.text:
            found = _script.text
    if not found:
        print("Something is wrong with the playlist. There is no video data in HTML. Please check the url.")
        sys.exit(1)
    # process html to json
    d = json.loads(clean_script_string(found))
    # Path into YouTube's initial-data payload where the playlist entries
    # live. NOTE(review): this nesting is controlled by YouTube and can
    # change without notice.
    videolist = (d["contents"]["twoColumnBrowseResultsRenderer"]["tabs"][0]
                 ["tabRenderer"]["content"]["sectionListRenderer"]["contents"][0]
                 ["itemSectionRenderer"]["contents"][0]
                 ["playlistVideoListRenderer"]["contents"])
    # output list
    f = open("videolist.txt", "w", encoding="UTF-8")
    index = 1
    for video in videolist:
        # Deleted videos carry a "Deleted video" title; skip them.
        if "Deleted video" in str(video["playlistVideoRenderer"]["title"]):
            print("Video deleted")
            continue
        title = video["playlistVideoRenderer"]["title"]["simpleText"]
        full_url = video["playlistVideoRenderer"]["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"]
        # Keep only the first two '&'-separated parts (/watch?v=...&list=...).
        matches = re.split(r"&", full_url)
        short_url = "https://www.youtube.com" + matches[0] + "&" + matches[1]
        f.write(short_url + os.linesep)
        print(str(index) + ", " + title + ": " + short_url)
        index += 1
    print()
    # NOTE(review): the reported count includes any skipped deleted videos.
    print("Extracted %s videos" % len(videolist))
    print("File written to videolist.txt @ " + os.getcwd())
    f.close()

if __name__ == "__main__":
    main()
|
#!/bin/bash
# Conda build script: install ClipAndMerge under $PREFIX/share and expose the
# wrapper on $PATH through a symlink in $PREFIX/bin.
# Fail fast on errors and on use of unset build variables.
set -euo pipefail

outdir="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$outdir"
mkdir -p "$PREFIX/bin"
# Quoting every expansion keeps the script correct for prefixes containing
# spaces (the original's unquoted expansions would word-split).
cp -R * "$outdir/"
cp "$RECIPE_DIR/clipandmerge.py" "$outdir/ClipAndMerge"
ls -l "$outdir"
ln -s "$outdir/ClipAndMerge" "$PREFIX/bin"
chmod 0755 "${PREFIX}/bin/ClipAndMerge"
|
<gh_stars>10-100
package constants
import(
"math"
. "github.com/byteball/go-byteballcore/types"
)
// Protocol-level constants. The commented JavaScript block at the bottom of
// this file is the reference implementation these values mirror.
const(
	COUNT_WITNESSES int = 12                    // size of the witness list
	MAX_WITNESS_LIST_MUTATIONS int = 1
	TOTAL_WHITEBYTES float64 = 1e15             // total currency supply
	COUNT_MC_BALLS_FOR_PAID_WITNESSING int = 100
	Version string = "1.0"
	Alt string = "1"
	HASH_LENGTH int = 44                        // base64 hash length
	PUBKEY_LENGTH int = 44
	SIG_LENGTH int = 88
	MAX_PROFILE_FIELD_LENGTH int = 50
	MAX_PROFILE_VALUE_LENGTH int = 100
)

// NOTE(review): intentionally empty type block, kept as in the original.
type(
)

// Values derived at start-up by init() below.
var(
	MAJORITY_OF_WITNESSES int // strict majority threshold of COUNT_WITNESSES
	GENESIS_UNIT UnitT        // network-dependent genesis unit hash
	BLACKBYTES_ASSET AssetT   // network-dependent blackbytes asset id
)
// init derives MAJORITY_OF_WITNESSES — a strict majority of the witness
// list (both branches evaluate to COUNT_WITNESSES/2+1 under integer
// arithmetic) — and selects the genesis unit and blackbytes asset for the
// configured network. Alt=="2" with Version=="1.0t" picks the alternative
// values (presumably testnet, per the 't' version suffix — confirm against
// the reference JS below).
func init() {
	if COUNT_WITNESSES%2==0 {
		MAJORITY_OF_WITNESSES = (COUNT_WITNESSES/2+1)
	} else {
		MAJORITY_OF_WITNESSES = int(math.Ceil(float64(COUNT_WITNESSES)/2))
	}
	if Alt == "2" && Version == "1.0t" {
		GENESIS_UNIT = "TvqutGPz3T4Cs6oiChxFlclY92M2MvCvfXR5/FETato="
		BLACKBYTES_ASSET = "LUQu5ik4WLfCrr8OwXezqBa+i3IlZLqxj2itQZQm8WY="
	} else {
		GENESIS_UNIT = "oj8yEksX9Ubq7lLc+p6F2uyHUuynugeVq4+ikT67X6E="
		BLACKBYTES_ASSET = "qO2JsiuDMh/j+pqJYZw3u82O71WjCDf0vTNvsnntr8o="
	}
}
/**
"use strict";
exports.COUNT_WITNESSES = 12;
exports.MAX_WITNESS_LIST_MUTATIONS = 1;
exports.TOTAL_WHITEBYTES = 1e15;
exports.MAJORITY_OF_WITNESSES = (exports.COUNT_WITNESSES%2===0) ? (exports.COUNT_WITNESSES/2+1) : Math.ceil(exports.COUNT_WITNESSES/2);
exports.COUNT_MC_BALLS_FOR_PAID_WITNESSING = 100;
exports.version = '1.0';
exports.alt = '1';
exports.GENESIS_UNIT = (exports.alt === '2' && exports.version === '1.0t') ? 'TvqutGPz3T4Cs6oiChxFlclY92M2MvCvfXR5/FETato=' : 'oj8yEksX9Ubq7lLc+p6F2uyHUuynugeVq4+ikT67X6E=';
exports.BLACKBYTES_ASSET = (exports.alt === '2' && exports.version === '1.0t') ? 'LUQu5ik4WLfCrr8OwXezqBa+i3IlZLqxj2itQZQm8WY=' : 'qO2JsiuDMh/j+pqJYZw3u82O71WjCDf0vTNvsnntr8o=';
exports.HASH_LENGTH = 44;
exports.PUBKEY_LENGTH = 44;
exports.SIG_LENGTH = 88;
// anti-spam limits
exports.MAX_AUTHORS_PER_UNIT = 16;
exports.MAX_PARENTS_PER_UNIT = 16;
exports.MAX_MESSAGES_PER_UNIT = 128;
exports.MAX_SPEND_PROOFS_PER_MESSAGE = 128;
exports.MAX_INPUTS_PER_PAYMENT_MESSAGE = 128;
exports.MAX_OUTPUTS_PER_PAYMENT_MESSAGE = 128;
exports.MAX_CHOICES_PER_POLL = 128;
exports.MAX_DENOMINATIONS_PER_ASSET_DEFINITION = 64;
exports.MAX_ATTESTORS_PER_ASSET = 64;
exports.MAX_DATA_FEED_NAME_LENGTH = 64;
exports.MAX_DATA_FEED_VALUE_LENGTH = 64;
exports.MAX_AUTHENTIFIER_LENGTH = 4096;
exports.MAX_CAP = 9e15;
exports.MAX_COMPLEXITY = 100;
exports.MAX_PROFILE_FIELD_LENGTH = 50;
exports.MAX_PROFILE_VALUE_LENGTH = 100;
exports.TEXTCOIN_CLAIM_FEE = 548;
exports.TEXTCOIN_ASSET_CLAIM_FEE = 750;
exports.TEXTCOIN_ASSET_CLAIM_HEADER_FEE = 391;
exports.TEXTCOIN_ASSET_CLAIM_MESSAGE_FEE = 209;
exports.TEXTCOIN_ASSET_CLAIM_BASE_MSG_FEE = 158;
**/
|
module HashAtPath
  module CoreExt
    module Hash
      module AtPath
        # Returns a value at the end of an xpath-like/lite path
        # ('/' returns the receiver itself).
        def at_path(path)
          if path == '/'
            self
          else
            extract(self, path)
          end
        end

        private

        # Walks +path_string+ through +val+ segment by segment.
        # NOTE(review): relies on Path.parse yielding hashes with :predicate
        # and :path keys — confirm against Path's implementation.
        # Returns nil as soon as the walk falls off the structure.
        def extract(val, path_string)
          predicate = nil
          Path.parse(path_string).each do |path_parts|
            return nil if val.nil?
            # A '*' predicate is sticky: once seen, it is kept for all
            # subsequent segments instead of being replaced.
            predicate = path_parts[:predicate] unless predicate == '*'
            val, predicate = get_value_at_path(predicate, path_parts[:path], val)
            # A concrete (non-blank, non-'*') predicate indexes into an Array result.
            val = val[predicate.to_i] unless is_blank?(predicate) || predicate == '*' || !issa(val, 'Array')
          end
          val
        end

        # Applies one path segment to +val+.
        # '*' over an Array maps the remaining path over every element (and
        # consumes the predicate); otherwise a Hash is unwound key by key.
        # Returns the new [value, predicate] pair.
        def get_value_at_path(predicate, path, val)
          if predicate == '*' && issa(val, 'Array') && !is_blank?(path)
            val = val.map {|item| unwind_path(item, path)}
            predicate = nil
          elsif !is_blank?(path) && issa(val, 'Hash')
            val = unwind_path(val, path)
          end
          [val, predicate]
        end

        # Class check by name; avoids is_a? so plain Hash/Array subclasses
        # from other libraries are matched only by exact class name.
        def issa(item, classname)
          item.class.to_s == classname
        end

        # Follows a '/'-separated key path into nested Hashes; any non-Hash
        # encountered short-circuits to nil.
        def unwind_path(val, path)
          path_keys(path).inject(val) { |item, path_key| (issa(item, 'Hash')) ? item.fetch(path_key, nil) : nil}
        end

        # Be nice: remove empty path items (e.g. when we get a leading slash)
        def path_keys(path)
          path.split("/").reject { |c| c.empty? }
        end

        # Stolen from activesupport/lib/active_support/core_ext/object/blank.rb
        def is_blank?(object)
          object.respond_to?(:empty?) ? !!object.empty? : !object
        end
      end
    end
  end
end
import React from 'react';
import { observer } from 'mobx-react';
import debounce from 'debounce';
import { withRouter } from 'next/router';
import { Router } from '../routes';
import HeaderNav from '../frontEndComponents/HeaderNav';
// Top navigation bar container: wires MobX observation and logout handling
// into the presentational HeaderNav component.
@observer
class Header extends React.Component {
  constructor(props) {
    super(props);
    this.state = {
      // Last error returned by the logout call; rendered state only,
      // cleared again on a successful logout.
      errorsFromServer: '',
      // Open/closed state of the mobile burger menu (toggled below but not
      // currently forwarded to HeaderNav).
      appTopBurgerMenuOpen: false,
    };
    // debounce(..., 500, true) fires on the leading edge and then swallows
    // repeat clicks for 500 ms, preventing duplicate logout requests.
    this.onLogout = debounce(this.onLogout, 500, true);
  }
  onClickBurgerMenu = () => {
    this.setState({
      ...this.state,
      appTopBurgerMenuOpen: !this.state.appTopBurgerMenuOpen,
    });
  };
  // Calls the injected logoutUser action. On failure the server error is
  // stored in state; on success any stale error is cleared and the user is
  // routed back home.
  onLogout = async () => {
    const res = await this.props.logoutUser();
    if (res.error) {
      this.setState({
        ...this.state,
        errorsFromServer: res.error,
      });
      return;
    }
    if (this.state.errorsFromServer) {
      this.setState({
        ...this.state,
        errorsFromServer: '',
      });
    }
    Router.pushRoute('home');
  };
  render() {
    return (
      <HeaderNav
        isAdmin={this.props.isAdmin}
        isLoggedIn={this.props.isLoggedIn}
        onLogout={this.onLogout}
        openLoginModal={this.props.openLoginModal}
      />
    );
  }
}
// use HOC to access router object inside of component
// useful for accessing props.router.pathname
export default withRouter(Header);
|
<reponame>shunf4/tcpproxy<filename>proxymodules/hexdump.py
#!/usr/bin/env python3
import os.path as path
import re
import datetime
# Matches characters NOT allowed in log-file names built from hostnames;
# execute_ex() replaces every such character with '_' before use in a filename.
HOSTNAME_INVALID_PATTERN = re.compile("[^a-zA-Z0-9._-]")
class Module:
    """Hexdump tcpproxy module.

    Renders traffic as classic offset/hex/ASCII lines, either to stdout or
    appended to per-connection log files under ``logdir``.
    """

    def __init__(self, incoming=False, verbose=False, options=None):
        # Module name is derived from this file's base name (proxymodules/<name>.py).
        self.name = path.splitext(path.basename(__file__))[0]
        self.description = 'Print a hexdump of the received data'
        self.incoming = incoming  # incoming means module is on -im chain
        self.len = 16             # bytes rendered per hexdump line
        self.wsdirection = False  # prepend a wireshark-style I/O marker
        self.source = ("NO_SOURCE", "")   # fallback (host, port) pairs
        self.destination = ("NO_DEST", "")
        self.logdir = None        # None -> dump to stdout
        self.contexts = {}        # per-connection state, keyed by timestamp
        opts = options if options is not None else {}
        if 'length' in opts:
            self.len = int(opts['length'])
        self.wsdirection = str(opts.get("wsdirection", "0")) == "1"
        configured_dir = str(opts.get("logdir", ""))
        if configured_dir:
            self.logdir = path.abspath(configured_dir)

    def create_context(self, timestamp):
        """Allocate, register and return an empty per-connection context."""
        ctx = dict.fromkeys(
            ("remote_hostname", "timestamp", "timestamp_str", "source", "destination"))
        self.contexts[timestamp] = ctx
        return ctx

    def help(self):
        """Return the option help text shown by tcpproxy."""
        return """
\tlength: bytes per line (int)\n
\twsdirection: set to 1 to enable wireshark direction indication\n
\tlogdir: if not set, output to stdout; else output to log files under logdir\n"""

    def get_destination(self, timestamp):
        """(host, port) of the destination, preferring per-connection context."""
        return self.contexts.get(timestamp, {}).get("destination") or self.destination

    def get_source(self, timestamp):
        """(host, port) of the source, preferring per-connection context."""
        return self.contexts.get(timestamp, {}).get("source") or self.source

    def execute_ex(self, data, timestamp):
        """Hexdump ``data`` (bytes) and pass it through unchanged."""
        lines = []
        if self.wsdirection:
            lines.append("I" if self.incoming else "O")
        ctx = self.contexts[timestamp]
        # Hexdump format adapted from
        # http://code.activestate.com/recipes/142812-hex-dumper/
        digits = 2
        for offset in range(0, len(data), self.len):
            chunk = data[offset:offset + self.len]
            hexa = ' '.join('%0*X' % (digits, byte) for byte in chunk)
            text = ''.join(chr(byte) if 0x20 <= byte < 0x7F else '.' for byte in chunk)
            lines.append("%08X %-*s %s" % (offset, self.len * (digits + 1), hexa, text))
        if self.logdir:
            # Log file name combines the connection start time and the
            # sanitized remote peer address.
            peer = self.get_source(timestamp) if self.incoming else self.get_destination(timestamp)
            host = HOSTNAME_INVALID_PATTERN.sub("_", str(ctx["remote_hostname"] or peer[0]))
            if ctx["timestamp_str"] is None:
                # assumes ctx["timestamp"] was set to a datetime by the proxy core — TODO confirm
                ctx["timestamp_str"] = ctx["timestamp"].strftime("%Y-%m-%d_%H-%M-%S-%f")
            filename = "%s$$%s_%d.log" % (ctx["timestamp_str"], host, peer[1])
            with open(path.join(self.logdir, filename), "a") as logfile:
                logfile.write("\n".join(lines))
                logfile.write("\n")
        else:
            print("\n".join(lines))
        return data
# Refuse standalone execution: this file only provides a tcpproxy module.
if __name__ == '__main__':
    print('This module is not supposed to be executed alone!')
|
/**
*/
package PhotosMetaModel.impl;
import PhotosMetaModel.Column;
import PhotosMetaModel.Constraint;
import PhotosMetaModel.DataType;
import PhotosMetaModel.PhotosMetaModelPackage;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Column</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link PhotosMetaModel.impl.ColumnImpl#getDatatype <em>Datatype</em>}</li>
* <li>{@link PhotosMetaModel.impl.ColumnImpl#getConstraint <em>Constraint</em>}</li>
* </ul>
*
* @generated
*/
public class ColumnImpl extends MinimalEObjectImpl.Container implements Column {
	/**
	 * The cached value of the '{@link #getDatatype() <em>Datatype</em>}' reference.
	 * <!-- begin-user-doc -->
	 * Non-containment reference to this column's data type; may hold an
	 * unresolved proxy until {@link #getDatatype()} is first called.
	 * <!-- end-user-doc -->
	 * @see #getDatatype()
	 * @generated
	 * @ordered
	 */
	protected DataType datatype;
	/**
	 * The cached value of the '{@link #getConstraint() <em>Constraint</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * Created lazily by {@link #getConstraint()}; stays {@code null} until
	 * the list is first requested.
	 * <!-- end-user-doc -->
	 * @see #getConstraint()
	 * @generated
	 * @ordered
	 */
	protected EList<Constraint> constraint;
	/**
	 * <!-- begin-user-doc -->
	 * Protected constructor; instances are normally created via the package factory.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ColumnImpl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * Identifies this object's EClass as {@code Column} for the EMF runtime.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return PhotosMetaModelPackage.Literals.COLUMN;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the data type, resolving a proxy in place if necessary and
	 * firing a RESOLVE notification when the proxy was replaced.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public DataType getDatatype() {
		if (datatype != null && datatype.eIsProxy()) {
			InternalEObject oldDatatype = (InternalEObject)datatype;
			datatype = (DataType)eResolveProxy(oldDatatype);
			if (datatype != oldDatatype) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, PhotosMetaModelPackage.COLUMN__DATATYPE, oldDatatype, datatype));
			}
		}
		return datatype;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the cached data type without proxy resolution.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DataType basicGetDatatype() {
		return datatype;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the data type and notifies registered adapters of the change.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void setDatatype(DataType newDatatype) {
		DataType oldDatatype = datatype;
		datatype = newDatatype;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, PhotosMetaModelPackage.COLUMN__DATATYPE, oldDatatype, datatype));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the containment list of constraints, creating it on first use.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public EList<Constraint> getConstraint() {
		if (constraint == null) {
			constraint = new EObjectContainmentEList<Constraint>(Constraint.class, this, PhotosMetaModelPackage.COLUMN__CONSTRAINT);
		}
		return constraint;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Routes removal of contained constraints through the containment list.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case PhotosMetaModelPackage.COLUMN__CONSTRAINT:
				return ((InternalEList<?>)getConstraint()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective getter for the Datatype and Constraint features.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case PhotosMetaModelPackage.COLUMN__DATATYPE:
				if (resolve) return getDatatype();
				return basicGetDatatype();
			case PhotosMetaModelPackage.COLUMN__CONSTRAINT:
				return getConstraint();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective setter; for Constraint the list contents are replaced wholesale.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case PhotosMetaModelPackage.COLUMN__DATATYPE:
				setDatatype((DataType)newValue);
				return;
			case PhotosMetaModelPackage.COLUMN__CONSTRAINT:
				getConstraint().clear();
				getConstraint().addAll((Collection<? extends Constraint>)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective unsetter; restores each feature to its default state.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case PhotosMetaModelPackage.COLUMN__DATATYPE:
				setDatatype((DataType)null);
				return;
			case PhotosMetaModelPackage.COLUMN__CONSTRAINT:
				getConstraint().clear();
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reports whether a feature currently differs from its default value.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case PhotosMetaModelPackage.COLUMN__DATATYPE:
				return datatype != null;
			case PhotosMetaModelPackage.COLUMN__CONSTRAINT:
				return constraint != null && !constraint.isEmpty();
		}
		return super.eIsSet(featureID);
	}
} //ColumnImpl
|
#!/bin/bash
# End-to-end test driver: runs the mock CSI driver against the csi-sanity
# suite over a Unix domain socket.
TESTARGS=$@
UDS="/tmp/e2e-csi-sanity.sock"
CSI_ENDPOINTS="$CSI_ENDPOINTS ${UDS}"
CSI_MOCK_VERSION="master"
#
# $1 - endpoint for mock.
# $2 - endpoint for csi-sanity in Grpc format.
# See https://github.com/grpc/grpc/blob/master/doc/naming.md
# Start the mock driver on endpoint $1 in the background, run csi-sanity
# against endpoint $2, then tear the mock down. Exits the whole script with
# csi-sanity's status on failure.
runTest()
{
    # Quote the endpoint and pid expansions so paths with spaces survive.
    CSI_ENDPOINT="$1" mock &
    local pid=$!
    # TESTARGS is deliberately left unquoted: it may carry several
    # whitespace-separated extra arguments for csi-sanity.
    csi-sanity $TESTARGS --csi.endpoint="$2"; ret=$?
    # Force-kill the mock and reap it so it does not linger as a zombie.
    kill -9 "$pid"
    wait "$pid" 2>/dev/null || true
    if [ "$ret" -ne 0 ] ; then
        exit "$ret"
    fi
}
# Build the mock driver and the csi-sanity binary, then run the suite over
# the Unix domain socket endpoint and clean up the socket file.
go install ./mock || exit 1
cd cmd/csi-sanity
make clean install || exit 1
cd ../..
runTest "${UDS}" "${UDS}"
rm -f $UDS
exit 0
|
<reponame>lucioerlan/web-chatbot
const unauthorizedMiddleware = (err, req, res, next) => {
if (err.name === 'UnauthorizedError') {
return res.status(401).json({
status: 401,
errors: [{ message: 'Unauthorized' }],
});
}
next();
};
// Exported for registration as an Express error-handling middleware.
module.exports = { unauthorizedMiddleware };
|
const express = require("express");
const expressSession = require("express-session");
const path = require("path");
const util = require("util");
const passport = require("passport");
const cmd = require("node-cmd");
const authorizer = require("./authorizer");
// Register this app's authentication strategies on the passport instance.
authorizer.setupPassport(passport);
// Project root, exposed globally for modules that need absolute paths.
global.appRoot = path.resolve(__dirname);
const app = express();
app.set("views", path.join(__dirname, 'views'));
app.set("view engine", "twig");
// NOTE(review): the session secret is hard-coded in source control; it
// should be loaded from an environment variable or secret store instead.
app.use(expressSession({
    secret: "r!?N&Q6$8%Xg5J3s",
    resave: true,
    saveUninitialized: true
}));
app.use(passport.initialize()); // Initialize passport library
app.use(passport.session()); // And tell it to use sessions
app.use(express.static(path.join(__dirname, "public"))); // Set the static resource location to the public directory
app.use(express.json()); // Enable json serializer
app.use(express.urlencoded({ extended: false }));
// Per-request defaults: error page, render options (messages and persisted
// form values) and an optional ?redir= redirect target.
app.use((req, res, next) => {
    req.errorPage = "error"; // Set a default error page view for every request following
    req.renderOptions = {}
    // Persist the submitted body so forms can be re-filled after an error.
    req.renderOptions.persist = {}
    if (req.body)
        req.renderOptions.persist = req.body;
    req.renderOptions.errorMessages = [];
    req.renderOptions.successMessages = [];
    req.renderOptions.problems = []
    req.queryRedirect = null;
    if (req.query && req.query["redir"])
        req.queryRedirect = decodeURI(req.query["redir"]);
    next();
});
// Paths that require an authenticated user; unauthenticated requests get the
// login view instead, carrying a redirect back to the originally requested URL.
const ensureLoginPages = ["/api/user/friends","/api/user/addFriend","/api/user/acceptFriend","/api/user/removeFriend","/api/user/me","/map"];
// const ensureLoginPages = [];
app.use((req, res, next) => {
    if (!req.user && ensureLoginPages.includes(req.path))
    {
        req.renderOptions.loginRedirect = encodeURI(req.url);
        res.render("login", req.renderOptions);
    }
    else
    {
        next();
    }
});
// Actual pages with views in './views'
const indexPage = require("./routes/index.js");
app.use("/", indexPage);
app.use("/home", indexPage);
app.use("/index", indexPage);
app.get("/register", (req, res, next) => {
    res.render("register", req.renderOptions);
});
app.get("/login", (req, res, next) => {
    res.render("login", req.renderOptions);
});
app.get("/contact", (req, res, next) => {
    res.render("contact", req.renderOptions);
});
app.get("/map", (req, res, next) => {
    res.render("map", req.renderOptions);
});
// Development helper: raises an error on demand so the error page can be
// exercised, e.g. /development/createError?message=boom
app.get("/development/createError", (req, res, next) => {
    next(new Error(req.query.message));
});
// Webhook hit by GitHub on push: pulls the latest code on the server.
// NOTE(review): the webhook is trusted blindly — there is no signature check.
app.post("/development/gitupdate", (req, res, next) => {
    console.log("[GitHub] Received git change. Pulling...");
    cmd.get("git pull", (err, data, stderr) => console.log(data));
    // The original never responded, leaving the webhook request hanging
    // until timeout; acknowledge immediately.
    res.sendStatus(200);
});
app.use("/api/user", require("./routes/user")); // Contains register, login, me, logout
// Contact form handler: currently only logged; no persistence or email is sent.
app.post("/api/contact", (req, res, next) => {
    console.log("Contact form submitted: " + util.inspect(req.body));
    res.render("contact", req.renderOptions);
});
// On next fallthrough, aka errors: final error handler renders the
// request's configured error page with the accumulated messages.
app.use((err, req, res, next) => {
    // Push first so the newly raised error is included in the log line —
    // the original logged before pushing, so the current error never
    // appeared in the console output.
    req.renderOptions.errorMessages.push(err.message || "No description.");
    console.log("[Error] " + req.renderOptions.errorMessages.join("\n"));
    res.render(req.errorPage, req.renderOptions);
});
// NOTE(review): binding port 80 usually needs elevated privileges;
// consider making the port configurable via an environment variable.
app.listen(80);
console.log("App is running on port 80");
|
<gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 16 12:23:07 2021
@author: @hk_nien
"""
import pandas as pd
import matplotlib.pyplot as plt
import tools
import nlcovidstats as nlcs
import scipy.signal
if __name__ == '__main__':
    nlcs.reset_plots()
    # Load (and auto-refresh) the national case data set.
    nlcs.init_data(autoupdate=True)
    # Analysis window: 130 days of Rt estimates, truncated 60 days before
    # the end of the data (lastday is a negative offset into the series).
    ndays = 130
    lastday = -60
    nlcs.plot_Rt(ndays=ndays, lastday=lastday, regions='HR:Noord,HR:Midden+Zuid')
    Rts = [] # Rt for North, Mid+South
    for region in ['HR:Noord', 'HR:Midden+Zuid']:
        df1, _npop = nlcs.get_region_data(region, lastday=lastday, correct_anomalies=True)
        # 7-day rolling case delta is the input signal for the Rt estimate.
        source_col = 'Delta7r'
        # skip the first 10 days because of zeros
        R_df = nlcs.estimate_Rt_df(df1[source_col].iloc[10:],
                                   delay=nlcs.DELAY_INF2REP)
        Rt = R_df['Rt'].iloc[-ndays:]
        Rts.append(Rt)
    # Difference between the two regional Rt curves, smoothed with a
    # 13-point, 2nd-order Savitzky-Golay filter.
    Rtdiff = Rts[0] - Rts[1]
    deltaR_smooth = scipy.signal.savgol_filter(Rtdiff.values, 13, 2)
    deltaR_smooth = pd.Series(deltaR_smooth, index=Rtdiff.index)
    fig, ax = plt.subplots(figsize=(10, 4), tight_layout=True)
    ax.plot(deltaR_smooth, label='Verschil Noord vs. Mid+Zuid', color='r')
    ax.set_ylabel(r'$\Delta R_t$')
    ax.axhline(0, color='k', linestyle='--')
    ax.legend()
    # Annotate lockdown/policy events on the x range of the Rt series.
    nlcs._add_event_labels(ax, Rt.index[0], Rt.index[-1])
    tools.set_xaxis_dateformat(ax)
    fig.show()
|
#! /bin/bash
# Build the dagit JS frontend for bundling into the Python package.
# Fail fast on any error or unset variable, and trace commands — enabled
# before pushd so a failed directory change aborts the script (the original
# enabled -e only after pushd).
set -eux
ROOT=$(git rev-parse --show-toplevel)
# Quote the path so a checkout under a directory with spaces still works.
pushd "$ROOT/js_modules/dagit"
yarn install --offline
yarn build-for-python
popd
<filename>apps/app/src/app/modules/administration/modules/user-management/modules/create/components/user-management-create/user-management-create.component.spec.ts
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { ReactiveFormsModule } from '@angular/forms';
import { MatButtonModule } from '@angular/material/button';
import { MatCardModule } from '@angular/material/card';
import { MatCheckboxModule } from '@angular/material/checkbox';
import { MatFormFieldModule } from '@angular/material/form-field';
import { MatInputModule } from '@angular/material/input';
import { MatSelectModule } from '@angular/material/select';
import { NoopAnimationsModule } from '@angular/platform-browser/animations';
import { RouterTestingModule } from '@angular/router/testing';
import { stubCardViewHeaderServiceProvider } from '../../../../../../../../core/modules/card-view/services/card-view-header.service.stub';
import { stubToasterServiceProvider } from '../../../../../../../../core/services/toaster.service.stub';
import { FormErrorModule } from '../../../../../../../../shared/modules/form-error/form-error.module';
import { stubUserManagementServiceProvider } from '../../../../services/user-management.service.stub';
import { UserManagementCreateComponent } from './user-management-create.component';
// Smoke test: verifies UserManagementCreateComponent can be constructed with
// its service dependencies stubbed out (no HTTP, no global UI state).
describe('UserManagementCreateComponent', () => {
  let component: UserManagementCreateComponent;
  let fixture: ComponentFixture<UserManagementCreateComponent>;
  beforeEach(async () => {
    // Real Material/forms modules, stubbed services (user management,
    // card-view header, toaster); NoopAnimations avoids animation timers.
    await TestBed.configureTestingModule({
      imports: [
        RouterTestingModule,
        NoopAnimationsModule,
        MatCardModule,
        MatFormFieldModule,
        MatInputModule,
        MatButtonModule,
        FormErrorModule,
        MatSelectModule,
        MatCheckboxModule,
        ReactiveFormsModule,
      ],
      declarations: [UserManagementCreateComponent],
      providers: [stubUserManagementServiceProvider, stubCardViewHeaderServiceProvider, stubToasterServiceProvider],
    }).compileComponents();
  });
  beforeEach(() => {
    fixture = TestBed.createComponent(UserManagementCreateComponent);
    component = fixture.componentInstance;
    // Trigger initial data binding / ngOnInit before each spec.
    fixture.detectChanges();
  });
  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
import Image from "next/image";
import Logo from "~/assets/images/linkto.png";
import { NextSeo, NextSeoProps } from "next-seo";
import {
Button,
Heading,
Text,
Facebook,
Twitter,
Google,
} from "@linkto/gamut";
import { useSupabase } from "~/lib/supabase";
import type { FC, ReactNode } from "react";
/** Props accepted by the SigningLayout page shell. */
export interface SigningLayoutProps {
  /** Form content rendered between the heading and the social buttons. */
  children?: ReactNode;
  /** Card heading, e.g. "Sign in". */
  title?: string;
  /** Secondary line rendered under the heading. */
  subTitle?: string;
  /** Optional content for the footer card below the main card. */
  footer?: ReactNode;
  /** Verb prefix for social buttons, e.g. "Sign in" -> "Sign in With Google". */
  socialBtnText?: string;
  /** When present, a NextSeo tag is rendered with these options. */
  seoOptions?: NextSeoProps;
}
/**
 * Shared page shell for the sign-in / sign-up screens: branding logo,
 * heading, the form passed as children, a divider, social sign-in buttons
 * and an optional footer card.
 */
const SigningLayout: FC<SigningLayoutProps> = ({
  children,
  title,
  subTitle,
  footer,
  socialBtnText,
  seoOptions,
}) => {
  // Twitter/Facebook handlers are currently unused (their buttons are
  // commented out below) but kept for when those providers are re-enabled.
  const { signInWithTwitter, signInWithFacebook, signInWithGoogle } =
    useSupabase();
  return (
    <>
      {seoOptions && <NextSeo {...seoOptions} />}
      <main className="min-h-full flex flex-col items-center justify-center sm:py-11 py-7 px-3 sm:px-6 lg:px-8">
        <Image
          src={Logo}
          height={85}
          width={52}
          placeholder="blur"
          alt="Linkto.me branding"
        />
        <div className="bg-white rounded-lg max-w-[410px] w-full sm:px-8 px-4 py-6 mt-6">
          {/* Header */}
          <Heading as="h1" className="font-semibold pb-2">
            {title}
          </Heading>
          <Text as="span" size="sm" variant="gray" className="pb-6">
            {subTitle}
          </Text>
          {/* Signing Form */}
          {children}
          {/* Divider */}
          <div className="border-b border-mauve-400 w-full flex justify-center mt-6 mb-11">
            <span className="transform translate-y-2.5 uppercase bg-white max-w-max px-4 text-sm text-mauve-800 font-normal select-none">
              or
            </span>
          </div>
          {/* Social Signing */}
          <div className="space-y-3.5">
            {/* <Button
              size="md"
              block
              icon={Twitter}
              iconProps={{ fill: "#1DA1F2" }}
              onClick={signInWithTwitter}
            >
              {socialBtnText} With Twitter
            </Button> */}
            {/*
            <Button
              size="md"
              block
              icon={Facebook}
              iconProps={{ fill: "#4267B2" }}
              onClick={signInWithFacebook}
            >
              {socialBtnText} With Facebook
            </Button> */}
            <Button size="md" block icon={Google} onClick={signInWithGoogle}>
              {socialBtnText} With Google
            </Button>
          </div>
        </div>
        {/* Footer */}
        {footer && (
          <div className="bg-white rounded-lg max-w-[410px] w-full sm:px-8 px-4 py-5 mt-5 text-center">
            <Text as="span" variant="light" spacing="wide">
              {footer}
            </Text>
          </div>
        )}
      </main>
    </>
  );
};
// Explicit display name for React devtools / debugging output.
SigningLayout.displayName = "SigningLayoutComponent";
export default SigningLayout;
|
#
# Start http-server using forever
#
# Quote the command substitution and $HOME so paths containing spaces
# survive word splitting.
forever start "$(command -v http-server)" "$HOME/www" -p 80
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.